| repo_name (string, 4–116 chars) | path (string, 4–379 chars) | size (string, 1–7 chars) | content (string, 3–1.05M chars) | license (one of 15 classes) |
|---|---|---|---|---|
PurelyApplied/geode | geode-core/src/integrationTest/java/org/apache/geode/management/internal/cli/GfshParserConverterTest.java | 8758 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doReturn;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.springframework.shell.event.ParseResult;
import org.apache.geode.cache.ExpirationAction;
import org.apache.geode.management.internal.cli.converters.DiskStoreNameConverter;
import org.apache.geode.management.internal.cli.converters.FilePathConverter;
import org.apache.geode.management.internal.cli.converters.FilePathStringConverter;
import org.apache.geode.management.internal.cli.converters.RegionPathConverter;
import org.apache.geode.test.junit.categories.GfshTest;
import org.apache.geode.test.junit.rules.GfshParserRule;
// Integration tests for the Gfsh command-line parser's option converters:
// string arrays, disk directories, disk-store names, file paths, region
// paths, help/hint topics, and the ExpirationAction enum. Each test either
// parses a full command and inspects the resulting GfshParseResult, or asks
// the parser for tab-completion candidates.
@Category({GfshTest.class})
public class GfshParserConverterTest {

    // Completion candidates produced by the test currently executing.
    private GfshParserConverterTest, GfshParserRule.CommandCandidate is intentionally a field; see below.
    private GfshParserRule.CommandCandidate commandCandidate;

    // Building the command manager is expensive, so the rule is shared by
    // the whole class (@ClassRule rather than @Rule).
    @ClassRule
    public static GfshParserRule parser = new GfshParserRule();

    @Test
    public void testStringArrayConverter() {
        String command = "create disk-store --name=foo --dir=bar";
        GfshParseResult result = parser.parse(command);
        assertThat(result).isNotNull();
        assertThat(result.getParamValueAsString("dir")).isEqualTo("bar");
    }

    @Test
    public void testDirConverter() {
        String command = "compact offline-disk-store --name=foo --disk-dirs=bar";
        GfshParseResult result = parser.parse(command);
        assertThat(result).isNotNull();
        assertThat(result.getParamValueAsString("disk-dirs")).isEqualTo("bar");
    }

    // Repeating the --dir option is invalid; the parser returns null.
    @Test
    public void testMultiDirInvalid() throws Exception {
        String command = "create disk-store --name=testCreateDiskStore1 --group=Group1 "
            + "--allow-force-compaction=true --auto-compact=false --compaction-threshold=67 "
            + "--max-oplog-size=355 --queue-size=5321 --time-interval=2023 --write-buffer-size=3110 "
            + "--dir=/testCreateDiskStore1.1#1452637463 " + "--dir=/testCreateDiskStore1.2";
        GfshParseResult result = parser.parse(command);
        assertThat(result).isNull();
    }

    // Multiple directories must instead be given as one comma-separated value.
    @Test
    public void testMultiDirValid() throws Exception {
        String command = "create disk-store --name=testCreateDiskStore1 --group=Group1 "
            + "--allow-force-compaction=true --auto-compact=false --compaction-threshold=67 "
            + "--max-oplog-size=355 --queue-size=5321 --time-interval=2023 --write-buffer-size=3110 "
            + "--dir=/testCreateDiskStore1.1#1452637463,/testCreateDiskStore1.2";
        GfshParseResult result = parser.parse(command);
        assertThat(result).isNotNull();
        assertThat(result.getParamValueAsString("dir"))
            .isEqualTo("/testCreateDiskStore1.1#1452637463,/testCreateDiskStore1.2");
    }

    // An explicitly quoted empty key is accepted and parsed as "".
    @Test
    public void testEmptyKey() throws Exception {
        String command = "remove --key=\"\" --region=/GemfireDataCommandsTestRegion";
        GfshParseResult result = parser.parse(command);
        assertThat(result).isNotNull();
        assertThat(result.getParamValueAsString("key")).isEqualTo("");
    }

    // A JSON-style key value must not confuse the parser.
    @Test
    public void testJsonKey() throws Exception {
        String command = "get --key=('id':'testKey0') --region=regionA";
        GfshParseResult result = parser.parse(command);
        assertThat(result).isNotNull();
    }

    // An omitted string-array option surfaces as a null argument, not an
    // empty array.
    @Test
    public void testUnspecifiedValueToStringArray() {
        String command = "change loglevel --loglevel=finer --groups=group1,group2";
        ParseResult result = parser.parse(command);
        String[] memberIdValue = (String[]) result.getArguments()[0];
        assertThat(memberIdValue).isNull();
    }

    // With no prefix typed, every registered command is a completion candidate.
    @Test
    public void testHelpConverterWithNo() {
        String command = "help --command=";
        commandCandidate = parser.complete(command);
        Set<String> commands = parser.getCommandManager().getHelper().getCommands();
        assertThat(commandCandidate.size()).isEqualTo(commands.size());
    }

    // "conn" completes uniquely to "connect".
    @Test
    public void testHelpConverter() {
        String command = "help --command=conn";
        commandCandidate = parser.complete(command);
        assertThat(commandCandidate.size()).isEqualTo(1);
        assertThat(commandCandidate.getFirstCandidate()).isEqualTo(command + "ect");
    }

    @Test
    public void testHintConverter() {
        String command = "hint --topic=";
        commandCandidate = parser.complete(command);
        Set<String> topics = parser.getCommandManager().getHelper().getTopicNames();
        assertThat(commandCandidate.size()).isEqualTo(topics.size());
        assertThat(commandCandidate.getFirstCandidate()).isEqualTo("hint --topic=Client");
    }

    @Test
    public void testDiskStoreNameConverter() throws Exception {
        // spy the DiskStoreNameConverter so completion does not need a real
        // cluster to enumerate disk stores
        DiskStoreNameConverter spy = parser.spyConverter(DiskStoreNameConverter.class);
        Set<String> diskStores = Arrays.stream("name1,name2".split(",")).collect(Collectors.toSet());
        doReturn(diskStores).when(spy).getCompletionValues();
        String command = "compact disk-store --name=";
        commandCandidate = parser.complete(command);
        assertThat(commandCandidate.size()).isEqualTo(2);
    }

    @Test
    public void testFilePathConverter() throws Exception {
        // Stub the file-system view: two roots and three sibling entries.
        FilePathStringConverter spy = parser.spyConverter(FilePathStringConverter.class);
        List<String> roots = Arrays.stream("/vol,/logs".split(",")).collect(Collectors.toList());
        List<String> siblings =
            Arrays.stream("sibling1,sibling11,test1".split(",")).collect(Collectors.toList());
        doReturn(roots).when(spy).getRoots();
        doReturn(siblings).when(spy).getSiblings(any());
        // Empty prefix: both roots are offered (candidates appear sorted;
        // "/logs" is first).
        String command = "start server --cache-xml-file=";
        commandCandidate = parser.complete(command);
        assertThat(commandCandidate.size()).isEqualTo(2);
        assertThat(commandCandidate.getFirstCandidate()).isEqualTo(command + "/logs");
        // Prefix "sibling" matches sibling1 and sibling11.
        command = "start server --cache-xml-file=sibling";
        commandCandidate = parser.complete(command);
        assertThat(commandCandidate.size()).isEqualTo(2);
        assertThat(commandCandidate.getFirstCandidate()).isEqualTo(command + "1");
        // FilePathConverter delegates to the stubbed string converter.
        FilePathConverter spyFilePathConverter = parser.spyConverter(FilePathConverter.class);
        spyFilePathConverter.setDelegate(spy);
        command = "run --file=test";
        commandCandidate = parser.complete(command);
        assertThat(commandCandidate.size()).isEqualTo(1);
        assertThat(commandCandidate.getFirstCandidate()).isEqualTo(command + "1");
    }

    @Test
    public void testRegionPathConverter() throws Exception {
        RegionPathConverter spy = parser.spyConverter(RegionPathConverter.class);
        Set<String> regions = Arrays.stream("/regionA,/regionB".split(",")).collect(Collectors.toSet());
        doReturn(regions).when(spy).getAllRegionPaths();
        String command = "describe region --name=";
        commandCandidate = parser.complete(command);
        assertThat(commandCandidate.size()).isEqualTo(regions.size());
        assertThat(commandCandidate.getFirstCandidate()).isEqualTo(command + "/regionA");
    }

    @Test
    public void testExpirationAction() {
        String command = "create region --name=A --type=PARTITION --entry-idle-time-expiration-action=";
        // Four enum values are offered; DESTROY sorts first.
        commandCandidate = parser.complete(command);
        assertThat(commandCandidate.size()).isEqualTo(4);
        assertThat(commandCandidate.getFirstCandidate()).isEqualTo(command + "DESTROY");
        GfshParseResult result = parser.parse(command + "DESTROY");
        assertThat(result.getParamValue("entry-idle-time-expiration-action"))
            .isEqualTo(ExpirationAction.DESTROY);
        // The converter is case-insensitive and accepts dashes for underscores.
        result = parser.parse(command + "local-destroy");
        assertThat(result.getParamValue("entry-idle-time-expiration-action"))
            .isEqualTo(ExpirationAction.LOCAL_DESTROY);
        result = parser.parse(command + "LOCAL_INVALIDATE");
        assertThat(result.getParamValue("entry-idle-time-expiration-action"))
            .isEqualTo(ExpirationAction.LOCAL_INVALIDATE);
        // An unknown action fails the whole parse.
        result = parser.parse(command + "invalid_action");
        assertThat(result).isNull();
    }
}
| apache-2.0 |
googleapis/google-cloud-php | SqlAdmin/src/V1beta4/SqlTiersServiceClient.php | 1216 | <?php
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* GENERATED CODE WARNING
* Generated by gapic-generator-php from the file
* https://github.com/googleapis/googleapis/blob/master/google/cloud/sql/v1beta4/cloud_sql_tiers.proto
* Updates to the above are reflected here through a refresh process.
*
* @experimental
*/
namespace Google\Cloud\Sql\V1beta4;
use Google\Cloud\Sql\V1beta4\Gapic\SqlTiersServiceGapicClient;
/**
 * {@inheritdoc}
 *
 * Hand-editable subclass of the generated GAPIC client: put any manual
 * customizations here so they survive client regeneration.
 */
class SqlTiersServiceClient extends SqlTiersServiceGapicClient
{
    // This class is intentionally empty, and is intended to hold manual additions to
    // the generated {@see SqlTiersServiceGapicClient} class.
}
| apache-2.0 |
MarcoLotz/docker-client | src/main/java/com/spotify/docker/client/EventReader.java | 1825 | /*-
* -\-\-
* docker-client
* --
* Copyright (C) 2016 Spotify AB
* --
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* -/-/-
*/
package com.spotify.docker.client;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.spotify.docker.client.messages.Event;
import java.io.Closeable;
import java.io.IOException;
import org.apache.http.client.methods.CloseableHttpResponse;
/**
 * Reads Docker events from a long-lived streaming HTTP response,
 * deserializing one {@link Event} per call to {@link #nextMessage()}.
 * Not thread-safe: callers must serialize access externally.
 */
public class EventReader implements Closeable {
    private final ObjectMapper objectMapper;
    private final CloseableHttpResponse response;
    // Created lazily on the first nextMessage() call so the response body is
    // not consumed until the caller actually starts reading events.
    private JsonParser parser;

    public EventReader(final CloseableHttpResponse response, final ObjectMapper objectMapper) {
        this.response = response;
        this.objectMapper = objectMapper;
    }

    /**
     * Returns the next event from the stream, or {@code null} when the
     * stream has ended (parser closed or no further JSON token).
     *
     * @throws IOException if reading or deserializing the stream fails
     */
    public Event nextMessage() throws IOException {
        if (this.parser == null) {
            this.parser = objectMapper.getFactory().createParser(response.getEntity().getContent());
        }
        // If the parser is closed, there's no new event
        if (this.parser.isClosed()) {
            return null;
        }
        // Read tokens until we get a start object
        if (parser.nextToken() == null) {
            return null;
        }
        return parser.readValueAs(Event.class);
    }

    @Override
    public void close() throws IOException {
        // Closing the response also releases the underlying entity stream
        // that the parser reads from.
        response.close();
    }
}
| apache-2.0 |
yush1ga/pulsar | pulsar-client-cpp/lib/PartitionedProducerImpl.cc | 9564 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
#include "PartitionedProducerImpl.h"
#include "LogUtils.h"
#include <boost/bind.hpp>
#include <sstream>
#include "RoundRobinMessageRouter.h"
#include "SinglePartitionMessageRouter.h"
#include "DestinationName.h"
#include "MessageImpl.h"
DECLARE_LOG_OBJECT()
namespace pulsar {

const std::string PartitionedProducerImpl::PARTITION_NAME_SUFFIX = "-partition-";

// Fans a single logical producer out over all partitions of a topic. One
// ProducerImpl is created per partition and a message-router policy picks
// the target partition for each outgoing message.
PartitionedProducerImpl::PartitionedProducerImpl(ClientImplPtr client,
                                                 const DestinationNamePtr destinationName,
                                                 const unsigned int numPartitions,
                                                 const ProducerConfiguration& config)
    : client_(client),
      destinationName_(destinationName),
      topic_(destinationName_->toString()),
      numPartitions_(numPartitions),
      conf_(config),
      state_(Pending) {
    numProducersCreated_ = 0;
    cleanup_ = false;
    // Select the routing policy: round-robin, single-partition, or a custom
    // router supplied through the configuration.
    if (config.getPartitionsRoutingMode() == ProducerConfiguration::RoundRobinDistribution) {
        routerPolicy_ = boost::make_shared<RoundRobinMessageRouter>(numPartitions);
    } else if (config.getPartitionsRoutingMode() == ProducerConfiguration::UseSinglePartition) {
        routerPolicy_ = boost::make_shared<SinglePartitionMessageRouter>(numPartitions);
    } else {
        routerPolicy_ = config.getMessageRouterPtr();
    }
}

PartitionedProducerImpl::~PartitionedProducerImpl() {
}

// override
const std::string& PartitionedProducerImpl::getTopic() const {
    return topic_;
}

// override
// Creates one sub-producer per partition, wires their creation callbacks,
// then starts them all. The aggregate promise completes only after every
// sub-producer has connected (see handleSinglePartitionProducerCreated).
void PartitionedProducerImpl::start() {
    boost::shared_ptr<ProducerImpl> producer;
    // create producer per partition
    for (unsigned int i = 0; i < numPartitions_; i++) {
        std::string topicPartitionName = destinationName_->getTopicPartitionName(i);
        producer = boost::make_shared<ProducerImpl>(client_, topicPartitionName, conf_);
        producer->getProducerCreatedFuture().addListener(boost::bind(&PartitionedProducerImpl::handleSinglePartitionProducerCreated,
                                                                     shared_from_this(), _1, _2, i));
        producers_.push_back(producer);
        LOG_DEBUG("Creating Producer for single Partition - " << topicPartitionName);
    }
    for (ProducerList::const_iterator prod = producers_.begin(); prod != producers_.end(); prod++) {
        (*prod)->start();
    }
}

// Callback invoked once per sub-producer. On the first failure the whole
// partitioned producer transitions to Failed, is closed, and the aggregate
// promise is failed; once all partitions are connected the promise succeeds.
void PartitionedProducerImpl::handleSinglePartitionProducerCreated(Result result,
                                                                   ProducerImplBaseWeakPtr producerWeakPtr,
                                                                   unsigned int partitionIndex) {
    // to indicate, we are doing cleanup using closeAsync after producer create
    // has failed and the invocation of closeAsync is not from client
    CloseCallback closeCallback = NULL;
    Lock lock(mutex_);
    if (state_ == Failed) {
        // Ignore, we have already informed client that producer creation failed
        return;
    }
    assert(numProducersCreated_ <= numPartitions_);
    if (result != ResultOk) {
        state_ = Failed;
        // Unlock before closeAsync/promise completion to avoid re-entrant
        // locking from callbacks.
        lock.unlock();
        closeAsync(closeCallback);
        partitionedProducerCreatedPromise_.setFailed(result);
        LOG_DEBUG("Unable to create Producer for partition - " << partitionIndex << " Error - " << result);
        return;
    }
    assert(partitionIndex <= numPartitions_);
    numProducersCreated_++;
    if (numProducersCreated_ == numPartitions_) {
        lock.unlock();
        partitionedProducerCreatedPromise_.setValue(shared_from_this());
    }
}

// override
// Routes the message to a partition via the router policy and delegates to
// that partition's producer.
void PartitionedProducerImpl::sendAsync(const Message& msg, SendCallback callback) {
    // get partition for this message from router policy
    short partition = (short)(routerPolicy_->getPartition(msg));
    if (partition >= numPartitions_ || partition >= producers_.size()) {
        LOG_ERROR("Got Invalid Partition for message from Router Policy, Partition - " << partition);
        // change me: abort or notify failure in callback?
        // change to appropriate error if callback
        callback(ResultUnknownError, msg);
        return;
    }
    // find a producer for that partition, index should start from 0
    ProducerImplPtr& producer = producers_[partition];
    // Record the chosen partition on the message id before sending.
    msg.impl_->messageId.partition_ = partition;
    // send message on that partition
    producer->sendAsync(msg, callback);
}

// override
void PartitionedProducerImpl::shutdown() {
    setState(Closed);
}

// Thread-safe state setter.
void PartitionedProducerImpl::setState(const PartitionedProducerState state) {
    Lock lock(mutex_);
    state_ = state;
    lock.unlock();
}

/*
 * if createProducerCallback is set, it means the closeAsync is called from CreateProducer API which failed to create
 * one or many producers for partitions. So, we have to notify with ERROR on createProducerFailure
 */
void PartitionedProducerImpl::closeAsync(CloseCallback closeCallback) {
    int producerIndex = 0;
    unsigned int producerAlreadyClosed = 0;
    // Ask every still-open sub-producer to close; count those already closed.
    for (ProducerList::const_iterator i = producers_.begin(); i != producers_.end(); i++) {
        ProducerImplPtr prod = *i;
        if (!prod->isClosed()) {
            prod->closeAsync(boost::bind(&PartitionedProducerImpl::handleSinglePartitionProducerClose,
                                         shared_from_this(), _1, producerIndex, closeCallback));
        } else {
            producerAlreadyClosed++;
        }
    }
    /*
     * No need to set state since:-
     * a. If closeAsync before creation then state == Closed, since producers_.size() = producerAlreadyClosed = 0
     * b. If closeAsync called after all sub partitioned producer connected - handleSinglePartitionProducerClose handles the closing
     * c. If closeAsync called due to failure in creating just one sub producer then state is set by handleSinglePartitionProducerCreated
     */
    if (producerAlreadyClosed == producers_.size() && closeCallback) {
        setState(Closed);
        closeCallback(ResultOk);
    }
}

// Callback invoked as each sub-producer finishes closing. The first failure
// marks the whole producer Failed and notifies the caller; once every
// sub-producer is closed the state becomes Closed and the caller is notified.
void PartitionedProducerImpl::handleSinglePartitionProducerClose(Result result,
                                                                 const unsigned int partitionIndex,
                                                                 CloseCallback callback) {
    Lock lock(mutex_);
    if (state_ == Failed) {
        // we should have already notified the client by callback
        return;
    }
    if (result != ResultOk) {
        state_ = Failed;
        lock.unlock();
        LOG_ERROR("Closing the producer failed for partition - " << partitionIndex);
        if (callback) {
            callback(result);
        }
        return;
    }
    assert(partitionIndex < numPartitions_);
    if (numProducersCreated_ > 0) {
        numProducersCreated_--;
    }
    // closed all successfully
    if (!numProducersCreated_) {
        state_ = Closed;
        lock.unlock();
        // set the producerCreatedPromise to failure, if client called
        // closeAsync and it's not failure to create producer, the promise
        // is set second time here, first time it was successful. So check
        // if there's any adverse effect of setting it again. It should not
        // be but must check. MUSTCHECK changeme
        partitionedProducerCreatedPromise_.setFailed(ResultUnknownError);
        if (callback) {
            callback(result);
        }
        return;
    }
}

// override
Future<Result, ProducerImplBaseWeakPtr> PartitionedProducerImpl::getProducerCreatedFuture() {
    return partitionedProducerCreatedPromise_.getFuture();
}

// override
bool PartitionedProducerImpl::isClosed() {
    return state_ == Closed;
}

}
| apache-2.0 |
jt70471/aws-sdk-cpp | aws-cpp-sdk-medialive/source/model/M2tsArib.cpp | 1897 | /**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/medialive/model/M2tsArib.h>
#include <aws/core/utils/HashingUtils.h>
#include <aws/core/Globals.h>
#include <aws/core/utils/EnumParseOverflowContainer.h>
using namespace Aws::Utils;
namespace Aws
{
namespace MediaLive
{
namespace Model
{
namespace M2tsAribMapper
{
// Pre-computed hashes of the serialized enum names, used for fast
// string -> enum lookup.
static const int DISABLED_HASH = HashingUtils::HashString("DISABLED");
static const int ENABLED_HASH = HashingUtils::HashString("ENABLED");

// Parses the wire string into the M2tsArib enum. Unknown values are stashed
// in the overflow container (keyed by hash) so they can round-trip, falling
// back to NOT_SET when no container is available.
M2tsArib GetM2tsAribForName(const Aws::String& name)
{
    int hashCode = HashingUtils::HashString(name.c_str());
    if (hashCode == DISABLED_HASH)
    {
        return M2tsArib::DISABLED;
    }
    else if (hashCode == ENABLED_HASH)
    {
        return M2tsArib::ENABLED;
    }
    EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
    if (overflowContainer)
    {
        overflowContainer->StoreOverflow(hashCode, name);
        return static_cast<M2tsArib>(hashCode);
    }
    return M2tsArib::NOT_SET;
}

// Serializes the enum back to its wire string; values previously stored in
// the overflow container are retrieved by their hash.
Aws::String GetNameForM2tsArib(M2tsArib enumValue)
{
    switch (enumValue)
    {
    case M2tsArib::DISABLED:
        return "DISABLED";
    case M2tsArib::ENABLED:
        return "ENABLED";
    default:
        EnumParseOverflowContainer* overflowContainer = Aws::GetEnumOverflowContainer();
        if (overflowContainer)
        {
            return overflowContainer->RetrieveOverflow(static_cast<int>(enumValue));
        }
        return {};
    }
}
} // namespace M2tsAribMapper
} // namespace Model
} // namespace MediaLive
} // namespace Aws
| apache-2.0 |
klmitch/nova | nova/virt/libvirt/volume/fs.py | 4449 | # Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import os
from nova import utils
from nova.virt.libvirt.volume import mount
from nova.virt.libvirt.volume import volume as libvirt_volume
class LibvirtBaseFileSystemVolumeDriver(
        libvirt_volume.LibvirtBaseVolumeDriver, metaclass=abc.ABCMeta):
    """The base class for file system type volume drivers.

    Provides the common path-building logic (mount point, device path)
    shared by concrete file-system volume drivers.
    """

    def __init__(self, host):
        # File-system backed volumes are never block devices.
        super(LibvirtBaseFileSystemVolumeDriver,
              self).__init__(host, is_block_dev=False)

    @abc.abstractmethod
    def _get_mount_point_base(self):
        """Return the mount point path prefix.

        This is used to build the device path.

        :returns: The mount point path prefix.
        """
        raise NotImplementedError('_get_mount_point_base')

    def _normalize_export(self, export):
        """Normalize the export (share) if necessary.

        Subclasses should override this method if they have a non-standard
        export value, e.g. if the export is a URL. By default this method just
        returns the export value passed in unchanged.

        :param export: The export (share) value to normalize.
        :returns: The normalized export value.
        """
        return export

    def _get_mount_path(self, connection_info):
        """Returns the mount path prefix using the mount point base and share.

        :param connection_info: dict of the form

        ::

          connection_info = {
              'data': {
                  'export': the file system share,
                  ...
              }
              ...
          }

        :returns: The mount path prefix.
        """
        # The mount directory name is a hash of the (normalized) share so
        # that distinct shares never collide under the mount point base.
        share = self._normalize_export(connection_info['data']['export'])
        return os.path.join(self._get_mount_point_base(),
                            utils.get_hash_str(share))

    def _get_device_path(self, connection_info):
        """Returns the hashed path to the device.

        :param connection_info: dict of the form

        ::

          connection_info = {
              'data': {
                  'export': the file system share,
                  'name': the name of the device,
                  ...
              }
              ...
          }

        :returns: The full path to the device.
        """
        mount_path = self._get_mount_path(connection_info)
        return os.path.join(mount_path, connection_info['data']['name'])
class LibvirtMountedFileSystemVolumeDriver(LibvirtBaseFileSystemVolumeDriver,
                                           metaclass=abc.ABCMeta):
    """File-system volume driver that mounts/unmounts the share itself.

    Concrete subclasses supply the filesystem type and any extra mount
    options via :meth:`_mount_options`.
    """
    # NOTE(mdbooth): Hopefully we'll get to the point where everything which
    # previously subclassed LibvirtBaseFileSystemVolumeDriver now subclasses
    # LibvirtMountedFileSystemVolumeDriver. If we get there, we should fold
    # this class into the base class.

    def __init__(self, host, fstype):
        super(LibvirtMountedFileSystemVolumeDriver, self).__init__(host)
        # e.g. 'nfs', 'glusterfs'; passed straight to mount(8) via -t.
        self.fstype = fstype

    def connect_volume(self, connection_info, instance):
        """Connect the volume."""
        export = connection_info['data']['export']
        vol_name = connection_info['data']['name']
        mountpoint = self._get_mount_path(connection_info)
        mount.mount(self.fstype, export, vol_name, mountpoint, instance,
                    self._mount_options(connection_info))
        # Record the resolved device path so callers (and libvirt XML
        # generation) can find the backing file under the mount.
        connection_info['data']['device_path'] = \
            self._get_device_path(connection_info)

    def disconnect_volume(self, connection_info, instance):
        """Disconnect the volume."""
        vol_name = connection_info['data']['name']
        mountpoint = self._get_mount_path(connection_info)
        mount.umount(vol_name, mountpoint, instance)

    @abc.abstractmethod
    def _mount_options(self, connection_info):
        """Return a list of additional arguments to pass to the mount command.
        """
        pass
| apache-2.0 |
renmeng8875/projects | Hibernate-source/源代码及重要说明/Hibernate相关资料/hibernate-3.2.0.ga/hibernate-3.2/src/org/hibernate/transaction/JRun4TransactionManagerLookup.java | 356 | package org.hibernate.transaction;
/**
* TransactionManager lookup strategy for JRun4
* @author Joseph Bissen
*/
public class JRun4TransactionManagerLookup extends JNDITransactionManagerLookup {

    /**
     * @return the JNDI name under which JRun4 binds its TransactionManager
     */
    protected String getName() {
        return "java:/TransactionManager";
    }

    /**
     * @return the standard JNDI name of the container-managed UserTransaction
     */
    public String getUserTransactionName() {
        return "java:comp/UserTransaction";
    }
}
| apache-2.0 |
moander/kubernetes | pkg/kubelet/dockertools/docker_test.go | 26813 | /*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package dockertools
import (
"encoding/json"
"fmt"
"hash/adler32"
"math/rand"
"path"
"reflect"
"sort"
"strconv"
"strings"
"testing"
"github.com/docker/docker/pkg/jsonmessage"
dockertypes "github.com/docker/engine-api/types"
dockernat "github.com/docker/go-connections/nat"
cadvisorapi "github.com/google/cadvisor/info/v1"
"github.com/stretchr/testify/assert"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/apis/componentconfig"
"k8s.io/kubernetes/pkg/client/record"
"k8s.io/kubernetes/pkg/credentialprovider"
kubecontainer "k8s.io/kubernetes/pkg/kubelet/container"
containertest "k8s.io/kubernetes/pkg/kubelet/container/testing"
"k8s.io/kubernetes/pkg/kubelet/images"
"k8s.io/kubernetes/pkg/kubelet/network"
nettest "k8s.io/kubernetes/pkg/kubelet/network/testing"
"k8s.io/kubernetes/pkg/types"
hashutil "k8s.io/kubernetes/pkg/util/hash"
)
// verifyCalls asserts that the fake docker client received exactly the
// expected sequence of API calls.
func verifyCalls(t *testing.T, fakeDocker *FakeDockerClient, calls []string) {
	assert.New(t).NoError(fakeDocker.AssertCalls(calls))
}
// verifyStringArrayEquals fails the test when the two string slices differ
// in length or in any element (order-sensitive, element-wise comparison).
func verifyStringArrayEquals(t *testing.T, actual, expected []string) {
	mismatch := len(actual) != len(expected)
	if !mismatch {
		for i := range actual {
			if actual[i] != expected[i] {
				mismatch = true
				break
			}
		}
	}
	if mismatch {
		t.Errorf("Expected: %#v, Actual: %#v", expected, actual)
	}
}
// findPodContainer scans the docker container list for the container that
// belongs to the given pod (full name plus optional UID) and container name.
// It returns the matching container, whether one was found, and the hash
// encoded in the docker container name. An empty uid matches any pod UID.
func findPodContainer(dockerContainers []*dockertypes.Container, podFullName string, uid types.UID, containerName string) (*dockertypes.Container, bool, uint64) {
	for _, candidate := range dockerContainers {
		if len(candidate.Names) == 0 {
			// Nameless containers cannot be matched; skip them.
			continue
		}
		parsed, hash, err := ParseDockerName(candidate.Names[0])
		if err != nil {
			// Not a kubelet-managed container name; skip it.
			continue
		}
		uidMatches := uid == "" || parsed.PodUID == uid
		if parsed.PodFullName == podFullName && uidMatches && parsed.ContainerName == containerName {
			return candidate, true, hash
		}
	}
	return nil, false, 0
}
// TestGetContainerID verifies that GetKubeletDockerContainers lists all
// running kubelet-managed containers with a single "list" call, and that
// findPodContainer locates containers by pod/container name.
func TestGetContainerID(t *testing.T) {
	fakeDocker := NewFakeDockerClient()
	// Two kubelet-named containers in distinct pods.
	fakeDocker.SetFakeRunningContainers([]*FakeContainer{
		{
			ID:   "foobar",
			Name: "/k8s_foo_qux_ns_1234_42",
		},
		{
			ID:   "barbar",
			Name: "/k8s_bar_qux_ns_2565_42",
		},
	})
	dockerContainers, err := GetKubeletDockerContainers(fakeDocker, false)
	if err != nil {
		t.Errorf("Expected no error, Got %#v", err)
	}
	if len(dockerContainers) != 2 {
		t.Errorf("Expected %#v, Got %#v", fakeDocker.RunningContainerList, dockerContainers)
	}
	// Listing must hit the docker API exactly once.
	verifyCalls(t, fakeDocker, []string{"list"})
	dockerContainer, found, _ := findPodContainer(dockerContainers, "qux_ns", "", "foo")
	if dockerContainer == nil || !found {
		t.Errorf("Failed to find container %#v", dockerContainer)
	}
	fakeDocker.ClearCalls()
	// A lookup on an already-fetched list must not trigger new API calls,
	// and a non-existent pod name must not match.
	dockerContainer, found, _ = findPodContainer(dockerContainers, "foobar", "", "foo")
	verifyCalls(t, fakeDocker, []string{})
	if dockerContainer != nil || found {
		t.Errorf("Should not have found container %#v", dockerContainer)
	}
}
// verifyPackUnpack round-trips a (namespace, uid, pod, container) tuple
// through BuildDockerName/ParseDockerName and asserts that every field,
// including the container-spec hash, survives unchanged.
func verifyPackUnpack(t *testing.T, podNamespace, podUID, podName, containerName string) {
	container := &api.Container{Name: containerName}
	// Recompute the hash the same way BuildDockerName does, so the parsed
	// hash can be compared against an independent value.
	hasher := adler32.New()
	hashutil.DeepHashObject(hasher, *container)
	computedHash := uint64(hasher.Sum32())
	podFullName := fmt.Sprintf("%s_%s", podName, podNamespace)
	_, name, _ := BuildDockerName(KubeletContainerName{podFullName, types.UID(podUID), container.Name}, container)
	returned, hash, err := ParseDockerName(name)
	if err != nil {
		t.Errorf("Failed to parse Docker container name %q: %v", name, err)
	}
	if podFullName != returned.PodFullName || podUID != string(returned.PodUID) || containerName != returned.ContainerName || computedHash != hash {
		t.Errorf("For (%s, %s, %s, %d), unpacked (%s, %s, %s, %d)", podFullName, podUID, containerName, computedHash, returned.PodFullName, returned.PodUID, returned.ContainerName, hash)
	}
}
// TestContainerNaming exercises the docker-name pack/unpack round trip for
// several edge cases (dashes, UID-as-name, empty container name) and then
// parses a hand-built name directly.
func TestContainerNaming(t *testing.T) {
	podUID := "12345678"
	verifyPackUnpack(t, "file", podUID, "name", "container")
	verifyPackUnpack(t, "file", podUID, "name-with-dashes", "container")
	// UID is same as pod name
	verifyPackUnpack(t, "file", podUID, podUID, "container")
	// No Container name
	verifyPackUnpack(t, "other", podUID, "name", "")
	container := &api.Container{Name: "container"}
	podName := "foo"
	podNamespace := "test"
	// Manually constructed name with hash suffix "42"; ParseDockerName is
	// expected to report hash 0 here since "42" is not a valid hash field.
	name := fmt.Sprintf("k8s_%s_%s_%s_%s_42", container.Name, podName, podNamespace, podUID)
	podFullName := fmt.Sprintf("%s_%s", podName, podNamespace)
	returned, hash, err := ParseDockerName(name)
	if err != nil {
		t.Errorf("Failed to parse Docker container name %q: %v", name, err)
	}
	if returned.PodFullName != podFullName || string(returned.PodUID) != podUID || returned.ContainerName != container.Name || hash != 0 {
		t.Errorf("unexpected parse: %s %s %s %d", returned.PodFullName, returned.PodUID, returned.ContainerName, hash)
	}
}
// TestMatchImageTagOrSHA is a table-driven test for matchImageTagOrSHA:
// tag matching (with implicit :latest and registry-prefix normalization)
// and digest matching (full sha256 required; prefixes must not match).
func TestMatchImageTagOrSHA(t *testing.T) {
	for _, testCase := range []struct {
		Inspected dockertypes.ImageInspect
		Image     string
		Output    bool
	}{
		{
			// Bare name matches the :latest tag.
			Inspected: dockertypes.ImageInspect{RepoTags: []string{"ubuntu:latest"}},
			Image:     "ubuntu",
			Output:    true,
		},
		{
			// Different explicit tags do not match.
			Inspected: dockertypes.ImageInspect{RepoTags: []string{"ubuntu:14.04"}},
			Image:     "ubuntu:latest",
			Output:    false,
		},
		{
			Inspected: dockertypes.ImageInspect{RepoTags: []string{"colemickens/hyperkube-amd64:217.9beff63"}},
			Image:     "colemickens/hyperkube-amd64:217.9beff63",
			Output:    true,
		},
		{
			// Explicit docker.io/ registry prefix is equivalent.
			Inspected: dockertypes.ImageInspect{RepoTags: []string{"colemickens/hyperkube-amd64:217.9beff63"}},
			Image:     "docker.io/colemickens/hyperkube-amd64:217.9beff63",
			Output:    true,
		},
		{
			// Full digest match.
			Inspected: dockertypes.ImageInspect{
				ID: "sha256:2208f7a29005d226d1ee33a63e33af1f47af6156c740d7d23c7948e8d282d53d",
			},
			Image:  "myimage@sha256:2208f7a29005d226d1ee33a63e33af1f47af6156c740d7d23c7948e8d282d53d",
			Output: true,
		},
		{
			// A digest prefix must NOT match.
			Inspected: dockertypes.ImageInspect{
				ID: "sha256:2208f7a29005d226d1ee33a63e33af1f47af6156c740d7d23c7948e8d282d53d",
			},
			Image:  "myimage@sha256:2208f7a29005",
			Output: false,
		},
		{
			Inspected: dockertypes.ImageInspect{
				ID: "sha256:2208f7a29005d226d1ee33a63e33af1f47af6156c740d7d23c7948e8d282d53d",
			},
			Image:  "myimage@sha256:2208",
			Output: false,
		},
	} {
		match := matchImageTagOrSHA(testCase.Inspected, testCase.Image)
		assert.Equal(t, testCase.Output, match, testCase.Image+" is not a match")
	}
}
// TestApplyDefaultImageTag verifies that applyDefaultImageTag appends
// ":latest" to untagged image references, but leaves explicit tags and
// digest references untouched.
func TestApplyDefaultImageTag(t *testing.T) {
	for _, testCase := range []struct {
		Input  string
		Output string
	}{
		{Input: "root", Output: "root:latest"},
		{Input: "root:tag", Output: "root:tag"},
		// Digest references already pin an exact image; no tag is added.
		{Input: "root@sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", Output: "root@sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"},
	} {
		image, err := applyDefaultImageTag(testCase.Input)
		if err != nil {
			t.Errorf("applyDefaultTag(%s) failed: %v", testCase.Input, err)
		} else if image != testCase.Output {
			t.Errorf("Expected image reference: %q, got %q", testCase.Output, image)
		}
	}
}
// TestPullWithNoSecrets exercises dockerPuller.Pull with an empty secret list
// and checks that exactly one image is pulled, with default-tag normalization
// applied and no credentials attached ("using {}").
func TestPullWithNoSecrets(t *testing.T) {
	cases := []struct {
		imageName     string
		expectedImage string
	}{
		{"ubuntu", "ubuntu:latest using {}"},
		{"ubuntu:2342", "ubuntu:2342 using {}"},
		{"ubuntu:latest", "ubuntu:latest using {}"},
		{"foo/bar:445566", "foo/bar:445566 using {}"},
		{"registry.example.com:5000/foobar", "registry.example.com:5000/foobar:latest using {}"},
		{"registry.example.com:5000/foobar:5342", "registry.example.com:5000/foobar:5342 using {}"},
		{"registry.example.com:5000/foobar:latest", "registry.example.com:5000/foobar:latest using {}"},
	}
	for _, tc := range cases {
		// A fresh fake client/keyring per case keeps the pulled list isolated.
		keyring := &credentialprovider.FakeKeyring{}
		client := NewFakeDockerClient()
		puller := dockerPuller{
			client:  client,
			keyring: keyring,
		}
		if err := puller.Pull(tc.imageName, []api.Secret{}); err != nil {
			t.Errorf("unexpected non-nil err: %s", err)
			continue
		}
		if got := len(client.pulled); got != 1 {
			t.Errorf("%s: expected 1 pulled image, got %d: %v", tc.imageName, got, client.pulled)
			continue
		}
		if got := client.pulled[0]; got != tc.expectedImage {
			t.Errorf("%s: expected pull of %q, but got %q", tc.imageName, tc.expectedImage, got)
		}
	}
}
// TestPullWithJSONError verifies how dockerPuller.Pull surfaces JSON-encoded
// errors from the docker daemon: an ordinary JSONError message is passed
// through, while an HTML 502 ("bad gateway") body is translated into the
// generic images.RegistryUnavailable error.
func TestPullWithJSONError(t *testing.T) {
	tests := map[string]struct {
		imageName     string
		err           error
		expectedError string
	}{
		"Json error": {
			"ubuntu",
			&jsonmessage.JSONError{Code: 50, Message: "Json error"},
			"Json error",
		},
		"Bad gateway": {
			"ubuntu",
			&jsonmessage.JSONError{Code: 502, Message: "<!doctype html>\n<html class=\"no-js\" lang=\"\">\n <head>\n </head>\n <body>\n  <h1>Oops, there was an error!</h1>\n  <p>We have been contacted of this error, feel free to check out <a href=\"http://status.docker.com/\">status.docker.com</a>\n    to see if there is a bigger issue.</p>\n\n </body>\n</html>"},
			images.RegistryUnavailable.Error(),
		},
	}
	for i, test := range tests {
		fakeKeyring := &credentialprovider.FakeKeyring{}
		fakeClient := NewFakeDockerClient()
		// Force the pull to fail with the per-case injected error.
		fakeClient.InjectError("pull", test.err)
		puller := &dockerPuller{
			client:  fakeClient,
			keyring: fakeKeyring,
		}
		err := puller.Pull(test.imageName, []api.Secret{})
		// The returned error must contain the expected message fragment.
		if err == nil || !strings.Contains(err.Error(), test.expectedError) {
			t.Errorf("%s: expect error %s, got : %s", i, test.expectedError, err)
			continue
		}
	}
}
// TestPullWithSecrets verifies credential selection during Pull: secrets
// passed with the request (legacy .dockercfg or the newer .dockerconfigjson
// format) take precedence over the built-in keyring, and built-in keyring
// entries only apply when their registry matches the image being pulled.
func TestPullWithSecrets(t *testing.T) {
	// auth value is equivalent to: "username":"passed-user","password":"passed-password"
	dockerCfg := map[string]map[string]string{"index.docker.io/v1/": {"email": "passed-email", "auth": "cGFzc2VkLXVzZXI6cGFzc2VkLXBhc3N3b3Jk"}}
	dockercfgContent, err := json.Marshal(dockerCfg)
	if err != nil {
		t.Errorf("unexpected error: %v", err)
	}
	// Same credentials wrapped in the newer {"auths": ...} config envelope.
	dockerConfigJson := map[string]map[string]map[string]string{"auths": dockerCfg}
	dockerConfigJsonContent, err := json.Marshal(dockerConfigJson)
	if err != nil {
		t.Errorf("unexpected error: %v", err)
	}
	tests := map[string]struct {
		imageName           string
		passedSecrets       []api.Secret
		builtInDockerConfig credentialprovider.DockerConfig
		expectedPulls       []string
	}{
		// No secrets anywhere -> anonymous pull.
		"no matching secrets": {
			"ubuntu",
			[]api.Secret{},
			credentialprovider.DockerConfig(map[string]credentialprovider.DockerConfigEntry{}),
			[]string{"ubuntu:latest using {}"},
		},
		// Built-in keyring entry for the image's registry is used.
		"default keyring secrets": {
			"ubuntu",
			[]api.Secret{},
			credentialprovider.DockerConfig(map[string]credentialprovider.DockerConfigEntry{
				"index.docker.io/v1/": {Username: "built-in", Password: "password", Email: "email", Provider: nil},
			}),
			[]string{`ubuntu:latest using {"username":"built-in","password":"password","email":"email"}`},
		},
		// Built-in keyring entry for a different registry is ignored.
		"default keyring secrets unused": {
			"ubuntu",
			[]api.Secret{},
			credentialprovider.DockerConfig(map[string]credentialprovider.DockerConfigEntry{
				"extraneous": {Username: "built-in", Password: "password", Email: "email", Provider: nil},
			}),
			[]string{`ubuntu:latest using {}`},
		},
		// Passed dockercfg secret wins over the built-in keyring.
		"builtin keyring secrets, but use passed": {
			"ubuntu",
			[]api.Secret{{Type: api.SecretTypeDockercfg, Data: map[string][]byte{api.DockerConfigKey: dockercfgContent}}},
			credentialprovider.DockerConfig(map[string]credentialprovider.DockerConfigEntry{
				"index.docker.io/v1/": {Username: "built-in", Password: "password", Email: "email", Provider: nil},
			}),
			[]string{`ubuntu:latest using {"username":"passed-user","password":"passed-password","email":"passed-email"}`},
		},
		// Same, but with the newer .dockerconfigjson secret type.
		"builtin keyring secrets, but use passed with new docker config": {
			"ubuntu",
			[]api.Secret{{Type: api.SecretTypeDockerConfigJson, Data: map[string][]byte{api.DockerConfigJsonKey: dockerConfigJsonContent}}},
			credentialprovider.DockerConfig(map[string]credentialprovider.DockerConfigEntry{
				"index.docker.io/v1/": {Username: "built-in", Password: "password", Email: "email", Provider: nil},
			}),
			[]string{`ubuntu:latest using {"username":"passed-user","password":"passed-password","email":"passed-email"}`},
		},
	}
	for i, test := range tests {
		builtInKeyRing := &credentialprovider.BasicDockerKeyring{}
		builtInKeyRing.Add(test.builtInDockerConfig)
		fakeClient := NewFakeDockerClient()
		dp := dockerPuller{
			client:  fakeClient,
			keyring: builtInKeyRing,
		}
		err := dp.Pull(test.imageName, test.passedSecrets)
		if err != nil {
			t.Errorf("%s: unexpected non-nil err: %s", i, err)
			continue
		}
		if e, a := 1, len(fakeClient.pulled); e != a {
			t.Errorf("%s: expected 1 pulled image, got %d: %v", i, a, fakeClient.pulled)
			continue
		}
		if e, a := test.expectedPulls, fakeClient.pulled; !reflect.DeepEqual(e, a) {
			t.Errorf("%s: expected pull of %v, but got %v", i, e, a)
		}
	}
}
// TestDockerKeyringLookupFails checks that a failed pull with no matching
// credentials is reported with a hint that missing credentials may be the
// cause, wrapping the underlying client error.
func TestDockerKeyringLookupFails(t *testing.T) {
	keyring := &credentialprovider.FakeKeyring{}
	client := NewFakeDockerClient()
	client.InjectError("pull", fmt.Errorf("test error"))
	puller := dockerPuller{
		client:  client,
		keyring: keyring,
	}
	err := puller.Pull("host/repository/image:version", []api.Secret{})
	if err == nil {
		t.Errorf("unexpected non-error")
	}
	const want = "image pull failed for host/repository/image:version, this may be because there are no credentials on this request.  details: (test error)"
	if err.Error() != want {
		t.Errorf("expected: %s, saw: %s", want, err.Error())
	}
}
// TestDockerKeyringLookup verifies BasicDockerKeyring.Lookup's matching rules:
// deeper (more specific) registry paths match first, a hostname entry covers
// any sub-path under it, and unrelated hosts do not match at all.
func TestDockerKeyringLookup(t *testing.T) {
	ada := credentialprovider.LazyAuthConfiguration{
		AuthConfig: dockertypes.AuthConfig{
			Username: "ada",
			Password: "smash",
			Email:    "ada@example.com",
		},
	}
	grace := credentialprovider.LazyAuthConfiguration{
		AuthConfig: dockertypes.AuthConfig{
			Username: "grace",
			Password: "squash",
			Email:    "grace@example.com",
		},
	}
	// Keyring with a host-wide entry (ada) and a path-specific entry (grace).
	dk := &credentialprovider.BasicDockerKeyring{}
	dk.Add(credentialprovider.DockerConfig{
		"bar.example.com/pong": credentialprovider.DockerConfigEntry{
			Username: grace.Username,
			Password: grace.Password,
			Email:    grace.Email,
		},
		"bar.example.com": credentialprovider.DockerConfigEntry{
			Username: ada.Username,
			Password: ada.Password,
			Email:    ada.Email,
		},
	})
	tests := []struct {
		image string
		match []credentialprovider.LazyAuthConfiguration
		ok    bool
	}{
		// direct match
		{"bar.example.com", []credentialprovider.LazyAuthConfiguration{ada}, true},
		// direct match deeper than other possible matches
		{"bar.example.com/pong", []credentialprovider.LazyAuthConfiguration{grace, ada}, true},
		// no direct match, deeper path ignored
		{"bar.example.com/ping", []credentialprovider.LazyAuthConfiguration{ada}, true},
		// match first part of path token
		{"bar.example.com/pongz", []credentialprovider.LazyAuthConfiguration{grace, ada}, true},
		// match regardless of sub-path
		{"bar.example.com/pong/pang", []credentialprovider.LazyAuthConfiguration{grace, ada}, true},
		// no host match
		{"example.com", []credentialprovider.LazyAuthConfiguration{}, false},
		{"foo.example.com", []credentialprovider.LazyAuthConfiguration{}, false},
	}
	for i, tt := range tests {
		match, ok := dk.Lookup(tt.image)
		if tt.ok != ok {
			t.Errorf("case %d: expected ok=%t, got %t", i, tt.ok, ok)
		}
		if !reflect.DeepEqual(tt.match, match) {
			t.Errorf("case %d: expected match=%#v, got %#v", i, tt.match, match)
		}
	}
}
// This validates that dockercfg entries with a scheme and url path are properly matched
// by images that only match the hostname.
// NOTE: the above covers the case of a more specific match trumping just hostname.
func TestIssue3797(t *testing.T) {
	rex := credentialprovider.LazyAuthConfiguration{
		AuthConfig: dockertypes.AuthConfig{
			Username: "rex",
			Password: "tiny arms",
			Email:    "rex@example.com",
		},
	}
	// Keyring entry keyed by a full URL ("https://quay.io/v1/"), not a bare host.
	dk := &credentialprovider.BasicDockerKeyring{}
	dk.Add(credentialprovider.DockerConfig{
		"https://quay.io/v1/": credentialprovider.DockerConfigEntry{
			Username: rex.Username,
			Password: rex.Password,
			Email:    rex.Email,
		},
	})
	tests := []struct {
		image string
		match []credentialprovider.LazyAuthConfiguration
		ok    bool
	}{
		// direct match
		{"quay.io", []credentialprovider.LazyAuthConfiguration{rex}, true},
		// partial matches
		{"quay.io/foo", []credentialprovider.LazyAuthConfiguration{rex}, true},
		{"quay.io/foo/bar", []credentialprovider.LazyAuthConfiguration{rex}, true},
	}
	for i, tt := range tests {
		match, ok := dk.Lookup(tt.image)
		if tt.ok != ok {
			t.Errorf("case %d: expected ok=%t, got %t", i, tt.ok, ok)
		}
		if !reflect.DeepEqual(tt.match, match) {
			t.Errorf("case %d: expected match=%#v, got %#v", i, tt.match, match)
		}
	}
}
// imageTrackingDockerClient wraps FakeDockerClient and records the name of
// the last image passed to InspectImage, so tests can assert which image was
// inspected.
type imageTrackingDockerClient struct {
	*FakeDockerClient
	imageName string
}

// InspectImage delegates to the embedded fake client and remembers the
// requested image name.
func (f *imageTrackingDockerClient) InspectImage(name string) (image *dockertypes.ImageInspect, err error) {
	image, err = f.FakeDockerClient.InspectImage(name)
	f.imageName = name
	return
}
// TestIsImagePresent checks that dockerPuller.IsImagePresent inspects exactly
// the image name it was given.
func TestIsImagePresent(t *testing.T) {
	trackingClient := &imageTrackingDockerClient{NewFakeDockerClient(), ""}
	puller := &dockerPuller{
		client: trackingClient,
	}
	_, _ = puller.IsImagePresent("abc:123")
	if got := trackingClient.imageName; got != "abc:123" {
		t.Errorf("expected inspection of image abc:123, instead inspected image %v", got)
	}
}
// podsByID implements sort.Interface to order pods by their pod ID, so test
// results can be compared deterministically.
type podsByID []*kubecontainer.Pod

func (b podsByID) Len() int           { return len(b) }
func (b podsByID) Swap(i, j int)      { b[i], b[j] = b[j], b[i] }
func (b podsByID) Less(i, j int) bool { return b[i].ID < b[j].ID }

// containersByID implements sort.Interface to order containers by their
// container ID.
type containersByID []*kubecontainer.Container

func (b containersByID) Len() int           { return len(b) }
func (b containersByID) Swap(i, j int)      { b[i], b[j] = b[j], b[i] }
func (b containersByID) Less(i, j int) bool { return b[i].ID.ID < b[j].ID.ID }
// TestFindContainersByPod verifies DockerManager.GetPods: docker containers
// named with the k8s_<name>.<hash>_<pod>_<ns>_<uid>_<attempt> convention are
// grouped into kubecontainer.Pods by pod UID. With all=false only running
// containers are returned; with all=true exited containers are included too.
func TestFindContainersByPod(t *testing.T) {
	tests := []struct {
		runningContainerList []dockertypes.Container
		exitedContainerList  []dockertypes.Container
		all                  bool
		expectedPods         []*kubecontainer.Pod
	}{
		// Case 1: all=false — exited containers (barfoo, bazbaz) are excluded.
		{
			[]dockertypes.Container{
				{
					ID:    "foobar",
					Names: []string{"/k8s_foobar.1234_qux_ns_1234_42"},
				},
				{
					ID:    "barbar",
					Names: []string{"/k8s_barbar.1234_qux_ns_2343_42"},
				},
				{
					ID:    "baz",
					Names: []string{"/k8s_baz.1234_qux_ns_1234_42"},
				},
			},
			[]dockertypes.Container{
				{
					ID:    "barfoo",
					Names: []string{"/k8s_barfoo.1234_qux_ns_1234_42"},
				},
				{
					ID:    "bazbaz",
					Names: []string{"/k8s_bazbaz.1234_qux_ns_5678_42"},
				},
			},
			false,
			[]*kubecontainer.Pod{
				{
					ID:        "1234",
					Name:      "qux",
					Namespace: "ns",
					Containers: []*kubecontainer.Container{
						{
							ID:    kubecontainer.DockerID("foobar").ContainerID(),
							Name:  "foobar",
							Hash:  0x1234,
							State: kubecontainer.ContainerStateUnknown,
						},
						{
							ID:    kubecontainer.DockerID("baz").ContainerID(),
							Name:  "baz",
							Hash:  0x1234,
							State: kubecontainer.ContainerStateUnknown,
						},
					},
				},
				{
					ID:        "2343",
					Name:      "qux",
					Namespace: "ns",
					Containers: []*kubecontainer.Container{
						{
							ID:    kubecontainer.DockerID("barbar").ContainerID(),
							Name:  "barbar",
							Hash:  0x1234,
							State: kubecontainer.ContainerStateUnknown,
						},
					},
				},
			},
		},
		// Case 2: all=true — exited containers join their pods (barfoo in pod
		// 1234) or form new ones (bazbaz in pod 5678).
		{
			[]dockertypes.Container{
				{
					ID:    "foobar",
					Names: []string{"/k8s_foobar.1234_qux_ns_1234_42"},
				},
				{
					ID:    "barbar",
					Names: []string{"/k8s_barbar.1234_qux_ns_2343_42"},
				},
				{
					ID:    "baz",
					Names: []string{"/k8s_baz.1234_qux_ns_1234_42"},
				},
			},
			[]dockertypes.Container{
				{
					ID:    "barfoo",
					Names: []string{"/k8s_barfoo.1234_qux_ns_1234_42"},
				},
				{
					ID:    "bazbaz",
					Names: []string{"/k8s_bazbaz.1234_qux_ns_5678_42"},
				},
			},
			true,
			[]*kubecontainer.Pod{
				{
					ID:        "1234",
					Name:      "qux",
					Namespace: "ns",
					Containers: []*kubecontainer.Container{
						{
							ID:    kubecontainer.DockerID("foobar").ContainerID(),
							Name:  "foobar",
							Hash:  0x1234,
							State: kubecontainer.ContainerStateUnknown,
						},
						{
							ID:    kubecontainer.DockerID("barfoo").ContainerID(),
							Name:  "barfoo",
							Hash:  0x1234,
							State: kubecontainer.ContainerStateUnknown,
						},
						{
							ID:    kubecontainer.DockerID("baz").ContainerID(),
							Name:  "baz",
							Hash:  0x1234,
							State: kubecontainer.ContainerStateUnknown,
						},
					},
				},
				{
					ID:        "2343",
					Name:      "qux",
					Namespace: "ns",
					Containers: []*kubecontainer.Container{
						{
							ID:    kubecontainer.DockerID("barbar").ContainerID(),
							Name:  "barbar",
							Hash:  0x1234,
							State: kubecontainer.ContainerStateUnknown,
						},
					},
				},
				{
					ID:        "5678",
					Name:      "qux",
					Namespace: "ns",
					Containers: []*kubecontainer.Container{
						{
							ID:    kubecontainer.DockerID("bazbaz").ContainerID(),
							Name:  "bazbaz",
							Hash:  0x1234,
							State: kubecontainer.ContainerStateUnknown,
						},
					},
				},
			},
		},
		// Case 3: no containers at all -> nil result.
		{
			[]dockertypes.Container{},
			[]dockertypes.Container{},
			true,
			nil,
		},
	}
	fakeClient := NewFakeDockerClient()
	np, _ := network.InitNetworkPlugin([]network.NetworkPlugin{}, "", nettest.NewFakeHost(nil), componentconfig.HairpinNone, "10.0.0.0/8")
	// image back-off is set to nil, this test should not pull images
	containerManager := NewFakeDockerManager(fakeClient, &record.FakeRecorder{}, nil, nil, &cadvisorapi.MachineInfo{}, "", 0, 0, "", &containertest.FakeOS{}, np, nil, nil, nil)
	for i, test := range tests {
		fakeClient.RunningContainerList = test.runningContainerList
		fakeClient.ExitedContainerList = test.exitedContainerList
		result, _ := containerManager.GetPods(test.all)
		// Sort both actual and expected pods/containers so DeepEqual does not
		// depend on map-iteration or listing order.
		for i := range result {
			sort.Sort(containersByID(result[i].Containers))
		}
		for i := range test.expectedPods {
			sort.Sort(containersByID(test.expectedPods[i].Containers))
		}
		sort.Sort(podsByID(result))
		sort.Sort(podsByID(test.expectedPods))
		if !reflect.DeepEqual(test.expectedPods, result) {
			t.Errorf("%d: expected: %#v, saw: %#v", i, test.expectedPods, result)
		}
	}
}
// TestMakePortsAndBindings checks that makePortsAndBindings converts kubelet
// port mappings into docker exposed-port keys and host bindings: an empty
// protocol defaults to tcp, and multiple host ports for the same container
// port/protocol are grouped under one key.
func TestMakePortsAndBindings(t *testing.T) {
	// Helper to build a kubelet-side port mapping.
	portMapping := func(container, host int, protocol api.Protocol, ip string) kubecontainer.PortMapping {
		return kubecontainer.PortMapping{
			ContainerPort: container,
			HostPort:      host,
			Protocol:      protocol,
			HostIP:        ip,
		}
	}
	// Helper to build the expected docker-side host binding.
	portBinding := func(port, ip string) dockernat.PortBinding {
		return dockernat.PortBinding{
			HostPort: port,
			HostIP:   ip,
		}
	}
	ports := []kubecontainer.PortMapping{
		portMapping(80, 8080, "", "127.0.0.1"),
		portMapping(443, 443, "tcp", ""),
		portMapping(444, 444, "udp", ""),
		portMapping(445, 445, "foobar", ""),
		portMapping(443, 446, "tcp", ""),
		portMapping(443, 446, "udp", ""),
	}
	exposedPorts, bindings := makePortsAndBindings(ports)
	// Count the expected exposed ports and bindings
	expectedExposedPorts := map[string]struct{}{}
	for _, binding := range ports {
		dockerKey := strconv.Itoa(binding.ContainerPort) + "/" + string(binding.Protocol)
		expectedExposedPorts[dockerKey] = struct{}{}
	}
	// Should expose right ports in docker
	if len(expectedExposedPorts) != len(exposedPorts) {
		t.Errorf("Unexpected ports and bindings, %#v %#v %#v", ports, exposedPorts, bindings)
	}
	// Construct expected bindings
	expectPortBindings := map[string][]dockernat.PortBinding{
		"80/tcp": {
			portBinding("8080", "127.0.0.1"),
		},
		"443/tcp": {
			portBinding("443", ""),
			portBinding("446", ""),
		},
		"443/udp": {
			portBinding("446", ""),
		},
		"444/udp": {
			portBinding("444", ""),
		},
		"445/tcp": {
			portBinding("445", ""),
		},
	}
	// iterate the bindings by dockerPort, and check its portBindings
	for dockerPort, portBindings := range bindings {
		switch dockerPort {
		case "80/tcp", "443/tcp", "443/udp", "444/udp", "445/tcp":
			if !reflect.DeepEqual(expectPortBindings[string(dockerPort)], portBindings) {
				t.Errorf("Unexpected portbindings for %#v, expected: %#v, but got: %#v",
					dockerPort, expectPortBindings[string(dockerPort)], portBindings)
			}
		default:
			t.Errorf("Unexpected docker port: %#v with portbindings: %#v", dockerPort, portBindings)
		}
	}
}
// TestMilliCPUToQuota checks the CFS quota/period pair derived from a
// milli-CPU request: zero yields zero, tiny requests are clamped to the
// minimum quota, and larger requests scale linearly against a fixed period.
func TestMilliCPUToQuota(t *testing.T) {
	cases := []struct {
		input, quota, period int64
	}{
		{0, 0, 0},
		{5, 1000, 100000},
		{9, 1000, 100000},
		{10, 1000, 100000},
		{200, 20000, 100000},
		{500, 50000, 100000},
		{1000, 100000, 100000},
		{1500, 150000, 100000},
	}
	for _, tc := range cases {
		quota, period := milliCPUToQuota(tc.input)
		if quota != tc.quota || period != tc.period {
			t.Errorf("Input %v, expected quota %v period %v, but got quota %v period %v", tc.input, tc.quota, tc.period, quota, period)
		}
	}
}
const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
func randStringBytes(n int) string {
b := make([]byte, n)
for i := range b {
b[i] = letterBytes[rand.Intn(len(letterBytes))]
}
return string(b)
}
// TestLogSymLink verifies that LogSymlink truncates the combined
// pod/container/docker-id prefix so the resulting file name never exceeds the
// 255-character filesystem limit.
func TestLogSymLink(t *testing.T) {
	as := assert.New(t)
	const containerLogsDir = "/foo/bar"
	podFullName := randStringBytes(128)
	containerName := randStringBytes(70)
	dockerId := randStringBytes(80)
	// The file name cannot exceed 255 characters. Since .log suffix is
	// required, the prefix cannot exceed 251 characters.
	prefix := fmt.Sprintf("%s_%s-%s", podFullName, containerName, dockerId)[:251]
	expectedPath := path.Join(containerLogsDir, prefix+".log")
	as.Equal(expectedPath, LogSymlink(containerLogsDir, podFullName, containerName, dockerId))
}
| apache-2.0 |
simonhorlick/grpc-java | protobuf-lite/src/test/java/io/grpc/protobuf/lite/ProtoLiteUtilsTest.java | 8497 | /*
* Copyright 2015, gRPC Authors All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.grpc.protobuf.lite;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.fail;
import com.google.common.io.ByteStreams;
import com.google.protobuf.ByteString;
import com.google.protobuf.Empty;
import com.google.protobuf.Enum;
import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Type;
import io.grpc.Drainable;
import io.grpc.KnownLength;
import io.grpc.Metadata;
import io.grpc.MethodDescriptor.Marshaller;
import io.grpc.MethodDescriptor.PrototypeMarshaller;
import io.grpc.Status;
import io.grpc.StatusRuntimeException;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Unit tests for {@link ProtoLiteUtils}. */
@RunWith(JUnit4.class)
public class ProtoLiteUtilsTest {

  @Rule public final ExpectedException thrown = ExpectedException.none();

  // Marshaller under test, built around the protobuf well-known Type message.
  private Marshaller<Type> marshaller = ProtoLiteUtils.marshaller(Type.getDefaultInstance());
  // Small sample message reused by most tests.
  private Type proto = Type.newBuilder().setName("name").build();

  /** Parsing the marshaller's own stream should return the identical instance (no copy). */
  @Test
  public void testPassthrough() {
    assertSame(proto, marshaller.parse(marshaller.stream(proto)));
  }

  /** Serializing to raw bytes and parsing back yields an equal message. */
  @Test
  public void testRoundtrip() throws Exception {
    InputStream is = marshaller.stream(proto);
    is = new ByteArrayInputStream(ByteStreams.toByteArray(is));
    assertEquals(proto, marshaller.parse(is));
  }

  /** Once the stream is drained, the cached message is gone and a re-parse sees zero bytes. */
  @Test
  public void testInvalidatedMessage() throws Exception {
    InputStream is = marshaller.stream(proto);
    // Invalidates message, and drains all bytes
    byte[] unused = ByteStreams.toByteArray(is);
    try {
      ((ProtoInputStream) is).message();
      fail("Expected exception");
    } catch (IllegalStateException ex) {
      // expected
    }
    // Zero bytes is the default message
    assertEquals(Type.getDefaultInstance(), marshaller.parse(is));
  }

  /** Malformed input surfaces as an INTERNAL status with the protobuf cause attached. */
  @Test
  public void parseInvalid() throws Exception {
    InputStream is = new ByteArrayInputStream(new byte[] {-127});
    try {
      marshaller.parse(is);
      fail("Expected exception");
    } catch (StatusRuntimeException ex) {
      assertEquals(Status.Code.INTERNAL, ex.getStatus().getCode());
      assertNotNull(((InvalidProtocolBufferException) ex.getCause()).getUnfinishedMessage());
    }
  }

  /** Wire-compatible messages parse into each other when field tags line up. */
  @Test
  public void testMismatch() throws Exception {
    Marshaller<Enum> enumMarshaller = ProtoLiteUtils.marshaller(Enum.getDefaultInstance());
    // Enum's name and Type's name are both strings with tag 1.
    Enum altProto = Enum.newBuilder().setName(proto.getName()).build();
    assertEquals(proto, marshaller.parse(enumMarshaller.stream(altProto)));
  }

  /** The marshaller exposes its prototype and message class via PrototypeMarshaller. */
  @Test
  public void introspection() throws Exception {
    Marshaller<Enum> enumMarshaller = ProtoLiteUtils.marshaller(Enum.getDefaultInstance());
    PrototypeMarshaller<Enum> prototypeMarshaller = (PrototypeMarshaller<Enum>) enumMarshaller;
    assertSame(Enum.getDefaultInstance(), prototypeMarshaller.getMessagePrototype());
    assertSame(Enum.class, prototypeMarshaller.getMessageClass());
  }

  /** Messages larger than protobuf's default 64MB parse limit still round-trip. */
  @Test
  public void marshallerShouldNotLimitProtoSize() throws Exception {
    // The default limit is 64MB. Using a larger proto to verify that the limit is not enforced.
    byte[] bigName = new byte[70 * 1024 * 1024];
    Arrays.fill(bigName, (byte) 32);
    proto = Type.newBuilder().setNameBytes(ByteString.copyFrom(bigName)).build();
    // Just perform a round trip to verify that it works.
    testRoundtrip();
  }

  /** available() reflects the remaining serialized size as bytes are consumed. */
  @Test
  public void testAvailable() throws Exception {
    InputStream is = marshaller.stream(proto);
    assertEquals(proto.getSerializedSize(), is.available());
    is.read();
    assertEquals(proto.getSerializedSize() - 1, is.available());
    while (is.read() != -1) {}
    assertEquals(-1, is.read());
    assertEquals(0, is.available());
  }

  /** An empty message produces a zero-length stream that behaves like EOF immediately. */
  @Test
  public void testEmpty() throws IOException {
    Marshaller<Empty> marshaller = ProtoLiteUtils.marshaller(Empty.getDefaultInstance());
    InputStream is = marshaller.stream(Empty.getDefaultInstance());
    assertEquals(0, is.available());
    byte[] b = new byte[10];
    assertEquals(-1, is.read(b));
    assertArrayEquals(new byte[10], b);
    // Do the same thing again, because the internal state may be different
    assertEquals(-1, is.read(b));
    assertArrayEquals(new byte[10], b);
    assertEquals(-1, is.read());
    assertEquals(0, is.available());
  }

  /** drainTo on a fresh stream writes all serialized bytes and reports the count. */
  @Test
  public void testDrainTo_all() throws Exception {
    byte[] golden = ByteStreams.toByteArray(marshaller.stream(proto));
    InputStream is = marshaller.stream(proto);
    Drainable d = (Drainable) is;
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    int drained = d.drainTo(baos);
    assertEquals(baos.size(), drained);
    assertArrayEquals(golden, baos.toByteArray());
    assertEquals(0, is.available());
  }

  /** drainTo after a partial read writes only the remaining bytes. */
  @Test
  public void testDrainTo_partial() throws Exception {
    final byte[] golden;
    {
      InputStream is = marshaller.stream(proto);
      is.read();
      golden = ByteStreams.toByteArray(is);
    }
    InputStream is = marshaller.stream(proto);
    is.read();
    Drainable d = (Drainable) is;
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    int drained = d.drainTo(baos);
    assertEquals(baos.size(), drained);
    assertArrayEquals(golden, baos.toByteArray());
    assertEquals(0, is.available());
  }

  /** drainTo on an exhausted stream writes nothing. */
  @Test
  public void testDrainTo_none() throws Exception {
    InputStream is = marshaller.stream(proto);
    byte[] unused = ByteStreams.toByteArray(is);
    Drainable d = (Drainable) is;
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    assertEquals(0, d.drainTo(baos));
    assertArrayEquals(new byte[0], baos.toByteArray());
    assertEquals(0, is.available());
  }

  /** The Metadata binary marshaller round-trips a message through byte[]. */
  @Test
  public void metadataMarshaller_roundtrip() {
    Metadata.BinaryMarshaller<Type> metadataMarshaller =
        ProtoLiteUtils.metadataMarshaller(Type.getDefaultInstance());
    assertEquals(proto, metadataMarshaller.parseBytes(metadataMarshaller.toBytes(proto)));
  }

  /** Malformed metadata bytes raise IllegalArgumentException with the protobuf cause. */
  @Test
  public void metadataMarshaller_invalid() {
    Metadata.BinaryMarshaller<Type> metadataMarshaller =
        ProtoLiteUtils.metadataMarshaller(Type.getDefaultInstance());
    try {
      metadataMarshaller.parseBytes(new byte[] {-127});
      fail("Expected exception");
    } catch (IllegalArgumentException ex) {
      assertNotNull(((InvalidProtocolBufferException) ex.getCause()).getUnfinishedMessage());
    }
  }

  /** setExtensionRegistry rejects null with a message naming the parameter. */
  @Test
  public void extensionRegistry_notNull() {
    thrown.expect(NullPointerException.class);
    thrown.expectMessage("newRegistry");
    ProtoLiteUtils.setExtensionRegistry(null);
  }

  /** Parsing works for InputStreams that implement KnownLength (fast-path sizing). */
  @Test
  public void parseFromKnowLengthInputStream() throws Exception {
    Marshaller<Type> marshaller = ProtoLiteUtils.marshaller(Type.getDefaultInstance());
    Type expect = Type.newBuilder().setName("expected name").build();
    Type result = marshaller.parse(new CustomKnownLengthInputStream(expect.toByteArray()));
    assertEquals(expect, result);
  }

  /** Minimal KnownLength stream backed by a byte array, used by the test above. */
  private static class CustomKnownLengthInputStream extends InputStream implements KnownLength {
    private int position = 0;
    private byte[] source;

    private CustomKnownLengthInputStream(byte[] source) {
      this.source = source;
    }

    @Override
    public int available() throws IOException {
      return source.length - position;
    }

    @Override
    public int read() throws IOException {
      if (position == source.length) {
        return -1;
      }
      return source[position++];
    }
  }
}
| apache-2.0 |
jasonchaffee/undertow | core/src/main/java/io/undertow/security/impl/InMemorySingleSignOnManager.java | 3638 | /*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.security.impl;
import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import io.undertow.security.idm.Account;
import io.undertow.server.session.SecureRandomSessionIdGenerator;
import io.undertow.server.session.Session;
import io.undertow.server.session.SessionManager;
import io.undertow.util.CopyOnWriteMap;
/**
* @author Stuart Douglas
* @author Paul Ferraro
*/
public class InMemorySingleSignOnManager implements SingleSignOnManager {

    private static final SecureRandomSessionIdGenerator SECURE_RANDOM_SESSION_ID_GENERATOR = new SecureRandomSessionIdGenerator();

    // SSO id -> entry. ConcurrentHashMap gives thread-safe access; entries are
    // held in this JVM only and are never expired by this manager.
    private final Map<String, SingleSignOn> ssoEntries = new ConcurrentHashMap<>();

    /** Looks up an existing SSO entry by its id, or returns {@code null} if unknown. */
    @Override
    public SingleSignOn findSingleSignOn(String ssoId) {
        return this.ssoEntries.get(ssoId);
    }

    /** Creates a new SSO entry with a securely generated random id and registers it. */
    @Override
    public SingleSignOn createSingleSignOn(Account account, String mechanism) {
        String id = SECURE_RANDOM_SESSION_ID_GENERATOR.createSessionId();
        SingleSignOn entry = new SimpleSingleSignOnEntry(id, account, mechanism);
        this.ssoEntries.put(id, entry);
        return entry;
    }

    /** Removes the given SSO entry from the registry (keyed by its id). */
    @Override
    public void removeSingleSignOn(SingleSignOn sso) {
        this.ssoEntries.remove(sso.getId());
    }

    /**
     * Plain in-memory SSO entry: an immutable (id, account, mechanism) triple
     * plus a thread-safe map of the sessions participating in the sign-on,
     * keyed by their SessionManager.
     */
    private static class SimpleSingleSignOnEntry implements SingleSignOn {
        private final String id;
        private final Account account;
        private final String mechanismName;
        private final Map<SessionManager, Session> sessions = new CopyOnWriteMap<>();

        SimpleSingleSignOnEntry(String id, Account account, String mechanismName) {
            this.id = id;
            this.account = account;
            this.mechanismName = mechanismName;
        }

        @Override
        public String getId() {
            return this.id;
        }

        @Override
        public Account getAccount() {
            return this.account;
        }

        @Override
        public String getMechanismName() {
            return this.mechanismName;
        }

        /** Iterates a read-only snapshot view of the participating sessions. */
        @Override
        public Iterator<Session> iterator() {
            return Collections.unmodifiableCollection(this.sessions.values()).iterator();
        }

        // Membership is determined per session manager: at most one session
        // per manager can participate in the sign-on.
        @Override
        public boolean contains(Session session) {
            return this.sessions.containsKey(session.getSessionManager());
        }

        @Override
        public Session getSession(SessionManager manager) {
            return this.sessions.get(manager);
        }

        @Override
        public void add(Session session) {
            this.sessions.put(session.getSessionManager(), session);
        }

        @Override
        public void remove(Session session) {
            this.sessions.remove(session.getSessionManager());
        }

        @Override
        public void close() {
            // Do nothing
        }
    }
}
| apache-2.0 |
drsquidop/camel | components/camel-consul/src/main/java/org/apache/camel/component/consul/policy/ConsulRoutePolicy.java | 11896 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.consul.policy;
import java.math.BigInteger;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import com.google.common.base.Optional;
import com.orbitz.consul.Consul;
import com.orbitz.consul.KeyValueClient;
import com.orbitz.consul.SessionClient;
import com.orbitz.consul.async.ConsulResponseCallback;
import com.orbitz.consul.model.ConsulResponse;
import com.orbitz.consul.model.kv.Value;
import com.orbitz.consul.model.session.ImmutableSession;
import com.orbitz.consul.option.QueryOptions;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.Route;
import org.apache.camel.api.management.ManagedAttribute;
import org.apache.camel.api.management.ManagedResource;
import org.apache.camel.component.consul.ConsulConfiguration;
import org.apache.camel.component.consul.ConsulConstants;
import org.apache.camel.support.RoutePolicySupport;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @deprecated use {@link org.apache.camel.component.consul.ha.ConsulClusterService} and {@link org.apache.camel.impl.ha.ClusteredRoutePolicy} instead.
*/
@Deprecated
@ManagedResource(description = "Route policy using Consul as clustered lock")
public final class ConsulRoutePolicy extends RoutePolicySupport implements CamelContextAware {
private static final Logger LOGGER = LoggerFactory.getLogger(ConsulRoutePolicy.class);
private final Object lock = new Object();
private final AtomicBoolean leader = new AtomicBoolean(false);
private final Set<Route> suspendedRoutes = new HashSet<>();
private final AtomicReference<BigInteger> index = new AtomicReference<>(BigInteger.valueOf(0));
private Route route;
private CamelContext camelContext;
private String serviceName;
private String servicePath;
private ExecutorService executorService;
private int ttl = 60;
private int lockDelay = 10;
private boolean shouldStopConsumer = true;
private String consulUrl = ConsulConstants.CONSUL_DEFAULT_URL;
private Consul consul;
private SessionClient sessionClient;
private KeyValueClient keyValueClient;
private String sessionId;
    public ConsulRoutePolicy() {
    }

    /** Creates a policy pointing at the given Consul agent URL. */
    public ConsulRoutePolicy(String consulUrl) {
        this.consulUrl = consulUrl;
    }

    /** Creates a policy configured (URL and pre-built client) from a ConsulConfiguration. */
    public ConsulRoutePolicy(ConsulConfiguration configuration) throws Exception {
        this.consulUrl = configuration.getUrl();
        // NOTE(review): camelContext is still null at construction time (it is
        // injected later via setCamelContext), so createConsulClient receives
        // null here — confirm the configuration tolerates a null context.
        this.consul = configuration.createConsulClient(camelContext);
    }
    @Override
    public CamelContext getCamelContext() {
        return camelContext;
    }

    // Injected by Camel after construction (CamelContextAware contract).
    @Override
    public void setCamelContext(CamelContext camelContext) {
        this.camelContext = camelContext;
    }

    /** URL of the Consul agent; defaults to {@code ConsulConstants.CONSUL_DEFAULT_URL}. */
    public String getConsulUrl() {
        return consulUrl;
    }

    public void setConsulUrl(String consulUrl) {
        this.consulUrl = consulUrl;
    }
@Override
public void onInit(Route route) {
super.onInit(route);
this.route = route;
}
@Override
public void onStart(Route route) {
if (!leader.get() && shouldStopConsumer) {
stopConsumer(route);
}
}
@Override
public void onStop(Route route) {
synchronized (lock) {
suspendedRoutes.remove(route);
}
}
@Override
public synchronized void onSuspend(Route route) {
synchronized (lock) {
suspendedRoutes.remove(route);
}
}
@Override
protected void doStart() throws Exception {
ObjectHelper.notNull(camelContext, "camelContext");
ObjectHelper.notNull(serviceName, "serviceName");
ObjectHelper.notNull(servicePath, "servicePath");
if (consul == null) {
Consul.Builder builder = Consul.builder();
if (consulUrl != null) {
builder.withUrl(consulUrl);
}
consul = builder.build();
}
if (sessionClient == null) {
sessionClient = consul.sessionClient();
}
if (keyValueClient == null) {
keyValueClient = consul.keyValueClient();
}
if (sessionId == null) {
sessionId = sessionClient.createSession(
ImmutableSession.builder()
.name(serviceName)
.ttl(ttl + "s")
.lockDelay(lockDelay + "s")
.build()
).getId();
LOGGER.debug("SessionID = {}", sessionId);
if (executorService == null) {
executorService = getCamelContext().getExecutorServiceManager().newSingleThreadExecutor(this, "ConsulRoutePolicy");
}
setLeader(keyValueClient.acquireLock(servicePath, sessionId));
executorService.submit(new Watcher());
}
super.doStart();
}
@Override
protected void doStop() throws Exception {
super.doStop();
if (sessionId != null) {
sessionClient.destroySession(sessionId);
sessionId = null;
}
if (executorService != null) {
getCamelContext().getExecutorServiceManager().shutdownGraceful(executorService);
}
}
// *************************************************************************
//
// *************************************************************************
protected void setLeader(boolean isLeader) {
if (isLeader && leader.compareAndSet(false, isLeader)) {
LOGGER.debug("Leadership taken ({}, {})", serviceName, sessionId);
startAllStoppedConsumers();
} else {
if (!leader.getAndSet(isLeader) && isLeader) {
LOGGER.debug("Leadership lost ({}, {})", serviceName, sessionId);
}
}
}
private void startConsumer(Route route) {
synchronized (lock) {
try {
if (suspendedRoutes.contains(route)) {
startConsumer(route.getConsumer());
suspendedRoutes.remove(route);
}
} catch (Exception e) {
handleException(e);
}
}
}
private void stopConsumer(Route route) {
synchronized (lock) {
try {
if (!suspendedRoutes.contains(route)) {
LOGGER.debug("Stopping consumer for {} ({})", route.getId(), route.getConsumer());
stopConsumer(route.getConsumer());
suspendedRoutes.add(route);
}
} catch (Exception e) {
handleException(e);
}
}
}
private void startAllStoppedConsumers() {
synchronized (lock) {
try {
for (Route route : suspendedRoutes) {
LOGGER.debug("Starting consumer for {} ({})", route.getId(), route.getConsumer());
startConsumer(route.getConsumer());
}
suspendedRoutes.clear();
} catch (Exception e) {
handleException(e);
}
}
}
// *************************************************************************
// Getter/Setters
// *************************************************************************
@ManagedAttribute(description = "The route id")
public String getRouteId() {
if (route != null) {
return route.getId();
}
return null;
}
@ManagedAttribute(description = "The consumer endpoint", mask = true)
public String getEndpointUrl() {
if (route != null && route.getConsumer() != null && route.getConsumer().getEndpoint() != null) {
return route.getConsumer().getEndpoint().toString();
}
return null;
}
public Consul getConsul() {
return consul;
}
@ManagedAttribute(description = "The consul service name")
public String getServiceName() {
return serviceName;
}
public void setServiceName(String serviceName) {
this.serviceName = serviceName;
this.servicePath = String.format("/service/%s/leader", serviceName);
}
@ManagedAttribute(description = "The time to live (seconds)")
public int getTtl() {
return ttl;
}
public void setTtl(int ttl) {
this.ttl = ttl > 10 ? ttl : 10;
}
@ManagedAttribute(description = "The lock delay (seconds)")
public int getLockDelay() {
return lockDelay;
}
public void setLockDelay(int lockDelay) {
this.lockDelay = lockDelay > 10 ? lockDelay : 10;
}
@ManagedAttribute(description = "Whether to stop consumer when starting up and failed to become master")
public boolean isShouldStopConsumer() {
return shouldStopConsumer;
}
public void setShouldStopConsumer(boolean shouldStopConsumer) {
this.shouldStopConsumer = shouldStopConsumer;
}
@ManagedAttribute(description = "Is this route the master or a slave")
public boolean isLeader() {
return leader.get();
}
// *************************************************************************
// Watch
// *************************************************************************
private class Watcher implements Runnable, ConsulResponseCallback<Optional<Value>> {
@Override
public void onComplete(ConsulResponse<Optional<Value>> consulResponse) {
if (isRunAllowed()) {
Optional<Value> value = consulResponse.getResponse();
if (value.isPresent()) {
Optional<String> sid = value.get().getSession();
if (sid.isPresent() && ObjectHelper.isNotEmpty(sid.get())) {
// If the key is not held by any session, try acquire a
// lock (become leader)
LOGGER.debug("Try to take leadership ...");
setLeader(keyValueClient.acquireLock(servicePath, sessionId));
} else if (!sessionId.equals(sid) && leader.get()) {
// Looks like I've lost leadership
setLeader(false);
}
}
index.set(consulResponse.getIndex());
run();
}
}
@Override
public void onFailure(Throwable throwable) {
handleException(throwable);
}
@Override
public void run() {
if (isRunAllowed()) {
// Refresh session
sessionClient.renewSession(sessionId);
keyValueClient.getValue(
servicePath,
QueryOptions.blockSeconds(ttl / 3, index.get()).build(),
this
);
}
}
}
}
| apache-2.0 |
ispras/binnavi | src/main/java/com/google/security/zynamics/reil/translators/ppc/CntlzwGenerator.java | 7590 | /*
Copyright 2011-2016 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.google.security.zynamics.reil.translators.ppc;
import com.google.security.zynamics.reil.OperandSize;
import com.google.security.zynamics.reil.ReilHelpers;
import com.google.security.zynamics.reil.ReilInstruction;
import com.google.security.zynamics.reil.translators.ITranslationEnvironment;
import com.google.security.zynamics.reil.translators.InternalTranslationException;
import com.google.security.zynamics.reil.translators.TranslationHelpers;
import com.google.security.zynamics.zylib.disassembly.IInstruction;
import com.google.security.zynamics.zylib.disassembly.IOperandTreeNode;
import java.util.List;
/**
 * Translates the PPC {@code cntlzw} (count leading zeros word) instruction to REIL code.
 *
 * <p>The translation has no loop support, so it computes clz in straight-line code:
 * first the highest set bit is "smeared" right until all bits below it are 1
 * (shift-or cascade by 1, 2, 4, 8, 16), then the number of set bits of the smeared
 * value is computed with a SWAR population count, and finally
 * {@code clz(x) = 32 - popcount(smear(x))}.
 *
 * <p>Note: REIL {@code bsh} with a negative shift amount is a right shift.
 */
public class CntlzwGenerator {
  /**
   * Emits the REIL translation of cntlzw into {@code instructions}.
   *
   * @param baseOffset REIL offset at which the first emitted instruction is placed;
   *     incremented once per emitted instruction
   * @param environment provides fresh temporary REIL variable names
   * @param instruction the native instruction being translated; its first operand is the
   *     target register
   * @param instructions output list the generated REIL instructions are appended to
   * @param mnemonic the native mnemonic (used only for argument validation)
   * @param secondOperand name of the register/value whose leading zeros are counted
   * @param setCr if true, also update the CR0 condition-register bits (the "." record form)
   * @throws InternalTranslationException if the translation arguments are invalid
   */
  public static void generate(long baseOffset,
      final ITranslationEnvironment environment,
      final IInstruction instruction,
      final List<ReilInstruction> instructions,
      final String mnemonic,
      final String secondOperand,
      final boolean setCr) throws InternalTranslationException {
    TranslationHelpers.checkTranslationArguments(environment, instruction, instructions, mnemonic);
    final IOperandTreeNode registerOperand1 =
        instruction.getOperands().get(0).getRootNode().getChildren().get(0);
    final String targetRegister = registerOperand1.getValue();
    // Single-letter temporaries mirror the step-by-step comments below.
    final String a = secondOperand;
    final String b = environment.getNextVariableString();
    final String c = environment.getNextVariableString();
    final String d = environment.getNextVariableString();
    final String e = environment.getNextVariableString();
    final String f = environment.getNextVariableString();
    final String g = environment.getNextVariableString();
    final String h = environment.getNextVariableString();
    final String i = environment.getNextVariableString();
    final String j = environment.getNextVariableString();
    final String k = environment.getNextVariableString();
    final String l = environment.getNextVariableString();
    final String m = environment.getNextVariableString();
    final String n = environment.getNextVariableString();
    final String o = environment.getNextVariableString();
    final String p = environment.getNextVariableString();
    final String q = environment.getNextVariableString();
    final String r = environment.getNextVariableString();
    final String s = environment.getNextVariableString();
    final String t = environment.getNextVariableString();
    final String u = environment.getNextVariableString();
    final String v = environment.getNextVariableString();
    final String w = environment.getNextVariableString();
    final String x = environment.getNextVariableString();
    final String y = environment.getNextVariableString();
    final String z = environment.getNextVariableString();
    final String crTemp = environment.getNextVariableString();
    final OperandSize dw = OperandSize.DWORD;
    final OperandSize bt = OperandSize.BYTE;
    final OperandSize wo = OperandSize.WORD;
    final OperandSize qw = OperandSize.QWORD;
    // Phase 1: smear the highest set bit of a into every lower bit position
    // (after this, z has exactly (32 - clz(a)) one-bits).
    // y = a >> 1;
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, a, bt, String.valueOf(-1L), dw, y));
    // p = y | a;
    instructions.add(ReilHelpers.createOr(baseOffset++, dw, y, dw, a, dw, p));
    // q = p >> 2;
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, p, bt, String.valueOf(-2L), dw, q));
    // r = q | p;
    instructions.add(ReilHelpers.createOr(baseOffset++, dw, q, dw, p, dw, r));
    // s = r >> 4;
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, r, bt, String.valueOf(-4L), dw, s));
    // t = s | r;
    instructions.add(ReilHelpers.createOr(baseOffset++, dw, s, dw, r, dw, t));
    // u = t >> 8;
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, t, bt, String.valueOf(-8L), dw, u));
    // v = u | t;
    instructions.add(ReilHelpers.createOr(baseOffset++, dw, u, dw, t, dw, v));
    // w = v >> 16;
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, v, wo, String.valueOf(-16L), dw, w));
    // z = w | v;
    instructions.add(ReilHelpers.createOr(baseOffset++, dw, w, dw, v, dw, z));
    // Phase 2: SWAR population count of z (classic pairwise/nibble/byte accumulation).
    // x = z >> 1;
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, z, bt, String.valueOf(-1L), dw, x));
    // b = x & 0x55555555;
    instructions.add(
        ReilHelpers.createAnd(baseOffset++, dw, x, dw, String.valueOf(0x55555555L), dw, b));
    // c = z - b;  (per-2-bit counts; result sized QWORD because sub may borrow)
    instructions.add(ReilHelpers.createSub(baseOffset++, dw, z, dw, b, qw, c));
    // d = c & 0x33333333;
    instructions.add(
        ReilHelpers.createAnd(baseOffset++, qw, c, dw, String.valueOf(0x33333333L), dw, d));
    // e = c >> 2;
    instructions.add(ReilHelpers.createBsh(baseOffset++, qw, c, bt, String.valueOf(-2L), dw, e));
    // f = e & 0x33333333;  (unsuffixed literal: same positive value as 0x33333333L)
    instructions.add(
        ReilHelpers.createAnd(baseOffset++, dw, e, dw, String.valueOf(0x33333333), dw, f));
    // g = f + d;  (per-4-bit counts)
    instructions.add(ReilHelpers.createAdd(baseOffset++, dw, f, dw, d, dw, g));
    // h = g >> 4;
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, g, bt, String.valueOf(-4L), dw, h));
    // i = h + g;
    instructions.add(ReilHelpers.createAdd(baseOffset++, dw, h, dw, g, dw, i));
    // j = i & 0x0f0f0f0f;  (per-byte counts)
    instructions.add(
        ReilHelpers.createAnd(baseOffset++, dw, i, dw, String.valueOf(0x0F0F0F0F), dw, j));
    // k = j >> 8;
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, j, wo, String.valueOf(-8L), dw, k));
    // l = k + j;
    instructions.add(ReilHelpers.createAdd(baseOffset++, dw, k, dw, j, dw, l));
    // m = l >> 16;
    instructions.add(ReilHelpers.createBsh(baseOffset++, dw, l, wo, String.valueOf(-16L), dw, m));
    // n = m + l;
    instructions.add(ReilHelpers.createAdd(baseOffset++, dw, m, dw, l, dw, n));
    // o = n & 0x0000003f;  (final popcount, 0..32, in the low 6 bits)
    instructions.add(
        ReilHelpers.createAnd(baseOffset++, dw, n, dw, String.valueOf(0x0000003F), dw, o));
    // Phase 3: result = 32 - popcount = number of leading zeros.
    instructions.add(
        ReilHelpers.createSub(baseOffset++, wo, String.valueOf(32L), dw, o, dw, targetRegister));
    if (setCr) {
      // Record form (cntlzw.): set CR0 from the result as a signed comparison with zero.
      // EQ CR0
      instructions.add(ReilHelpers.createBisz(baseOffset++, OperandSize.DWORD, targetRegister,
          OperandSize.BYTE, Helpers.CR0_EQUAL));
      // LT CR0 (sign bit of the result; always 0 here since clz is in 0..32)
      instructions.add(ReilHelpers.createBsh(baseOffset++,
          OperandSize.DWORD,
          targetRegister,
          OperandSize.WORD,
          "-31",
          OperandSize.BYTE,
          Helpers.CR0_LESS_THEN));
      // GT CR0 = !(EQ | LT)
      instructions.add(ReilHelpers.createOr(baseOffset++,
          OperandSize.BYTE,
          Helpers.CR0_EQUAL,
          OperandSize.BYTE,
          Helpers.CR0_LESS_THEN,
          OperandSize.BYTE,
          crTemp));
      instructions.add(ReilHelpers.createBisz(baseOffset++, OperandSize.BYTE, crTemp,
          OperandSize.BYTE, Helpers.CR0_GREATER_THEN));
      // SO CR0: copied from the XER summary-overflow bit.
      instructions.add(ReilHelpers.createStr(baseOffset, OperandSize.BYTE,
          Helpers.XER_SUMMARY_OVERFLOW, OperandSize.BYTE, Helpers.CRO_SUMMARY_OVERFLOW));
    }
  }
}
| apache-2.0 |
Vivid-Wang/Clear-Evipreserve | Server/src/main/java/sample/Module/Share/Massage/UploadSuccessMsg.java | 632 | package sample.Module.Share.Massage;
import sample.Module.Share.Proof;
/**
* Created by WangMingming on 2017/3/28.
*/
/**
 * Message sent to report a successful upload. Carries the {@link Proof} produced for the
 * uploaded evidence and a flag telling whether the lightweight service handled the upload.
 */
public class UploadSuccessMsg extends Message {

    /** Proof generated for the successfully uploaded evidence. */
    private Proof proof;

    /** True when the upload was handled by the lightweight service (defaults to false). */
    private boolean lightService;

    /** Creates a message pre-tagged with the {@link MsgType#UPLOADSUC} type. */
    public UploadSuccessMsg() {
        setType(MsgType.UPLOADSUC);
    }

    public Proof getProof() {
        return proof;
    }

    public void setProof(Proof proof) {
        this.proof = proof;
    }

    public boolean isLightService() {
        return lightService;
    }

    public void setLightService(boolean lightService) {
        this.lightService = lightService;
    }
}
| apache-2.0 |
yimingpeng/rl-library | projects/environments/experimental/KeepAway/src/kaMessages/KAHistoricStateResponse.java | 2236 | package kaMessages;
import java.util.StringTokenizer;
import java.util.Vector;
import rlVizLib.messaging.AbstractResponse;
import rlVizLib.messaging.GenericMessage;
import rlVizLib.messaging.MessageUser;
import rlVizLib.messaging.MessageValueType;
import rlVizLib.messaging.NotAnRLVizMessageException;
import rlVizLib.messaging.environment.EnvMessageType;
/*
* This is the first trick, we'll find something better later
*/
/**
 * Response message that bundles a list of KeepAway state-response payloads so that a whole
 * history of states can be shipped in a single RLViz message.
 *
 * <p>Wire format of the payload section: {@code <count>:<payload1>@<payload2>@...@}.
 */
public class KAHistoricStateResponse extends AbstractResponse {

    /** The individual KA state-response payload strings carried by this response. */
    Vector<String> KAStateResponsePayloads = null;

    /**
     * Creates a response wrapping an already-built list of payloads.
     *
     * @param KAStateResponsePayloads payload strings to carry; stored by reference
     */
    public KAHistoricStateResponse(Vector<String> KAStateResponsePayloads) {
        this.KAStateResponsePayloads = KAStateResponsePayloads;
    }

    /**
     * Parses a response produced by {@link #makeStringResponse()}.
     *
     * @param responseMessage the raw message string
     * @throws NotAnRLVizMessageException if the string is not a valid RLViz message
     */
    public KAHistoricStateResponse(String responseMessage) throws NotAnRLVizMessageException {
        KAStateResponsePayloads = new Vector<String>();
        GenericMessage theGenericResponse = new GenericMessage(responseMessage);
        String thePayLoadString = theGenericResponse.getPayLoad();
        StringTokenizer firstTokenizer = new StringTokenizer(thePayLoadString, ":");
        int numMessages = Integer.parseInt(firstTokenizer.nextToken());
        if (numMessages == 0) {
            return;
        }
        for (int i = 0; i < numMessages; i++) {
            // Switch delimiter to '@' for the remaining payload section.
            // NOTE(review): after switching delimiters, the first token keeps the ':' that
            // terminated the count (StringTokenizer only skips '@' now). The round trip with
            // makeStringResponse() appears to tolerate this -- confirm before changing.
            KAStateResponsePayloads.add(firstTokenizer.nextToken("@"));
        }
    }

    /**
     * Serializes this response into the RLViz wire format, with the payload section
     * {@code <count>:<payload1>@<payload2>@...@}.
     *
     * @return the complete response message string
     */
    public String makeStringResponse() {
        // StringBuilder instead of StringBuffer: local, single-threaded use needs no locking.
        StringBuilder theResponseBuffer = new StringBuilder();
        theResponseBuffer.append("TO=");
        theResponseBuffer.append(MessageUser.kBenchmark.id());
        theResponseBuffer.append(" FROM=");
        theResponseBuffer.append(MessageUser.kEnv.id());
        theResponseBuffer.append(" CMD=");
        theResponseBuffer.append(EnvMessageType.kEnvResponse.id());
        theResponseBuffer.append(" VALTYPE=");
        theResponseBuffer.append(MessageValueType.kStringList.id());
        theResponseBuffer.append(" VALS=");
        theResponseBuffer.append(KAStateResponsePayloads.size());
        theResponseBuffer.append(":");
        for (int i = 0; i < KAStateResponsePayloads.size(); i++) {
            theResponseBuffer.append(KAStateResponsePayloads.get(i));
            theResponseBuffer.append("@");
        }
        return theResponseBuffer.toString();
    }

    /** Returns the payload list carried by this response (live reference, not a copy). */
    public Vector<String> getKAStateResponsePayloads() {
        return KAStateResponsePayloads;
    }
}
| apache-2.0 |
trustin/armeria | core/src/main/java/com/linecorp/armeria/client/DnsResolverGroupBuilder.java | 16223 | /*
* Copyright 2019 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.client;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkArgument;
import static java.util.Objects.requireNonNull;
import java.time.Duration;
import java.util.List;
import java.util.function.Consumer;
import com.github.benmanes.caffeine.cache.CaffeineSpec;
import com.google.common.collect.ImmutableList;
import com.linecorp.armeria.client.retry.Backoff;
import com.linecorp.armeria.common.Flags;
import com.linecorp.armeria.common.annotation.Nullable;
import com.linecorp.armeria.common.metric.MeterIdPrefix;
import com.linecorp.armeria.common.util.TransportType;
import io.micrometer.core.instrument.MeterRegistry;
import io.netty.channel.EventLoopGroup;
import io.netty.resolver.AddressResolver;
import io.netty.resolver.AddressResolverGroup;
import io.netty.resolver.HostsFileEntriesResolver;
import io.netty.resolver.ResolvedAddressTypes;
import io.netty.resolver.dns.BiDnsQueryLifecycleObserverFactory;
import io.netty.resolver.dns.DnsNameResolver;
import io.netty.resolver.dns.DnsNameResolverBuilder;
import io.netty.resolver.dns.DnsQueryLifecycleObserverFactory;
import io.netty.resolver.dns.DnsServerAddressStreamProvider;
import io.netty.resolver.dns.NoopAuthoritativeDnsServerCache;
import io.netty.resolver.dns.NoopDnsCache;
import io.netty.resolver.dns.NoopDnsCnameCache;
/**
* Builds an {@link AddressResolverGroup} which builds {@link AddressResolver}s that update DNS caches
* automatically. Standard {@link DnsNameResolver} will only expire a cache entry after TTL,
* meaning DNS queries after TTL will always take time to resolve. A refreshing {@link AddressResolver}
* on the other hand updates the DNS cache automatically when TTL elapses,
* meaning DNS queries after TTL will retrieve a refreshed result right away. If refreshing fails,
* the {@link AddressResolver} will retry with {@link #refreshBackoff(Backoff)}.
*
* <p>The refreshing {@link AddressResolver} will only start auto refresh for a given hostname
* on the second access before TTL to avoid auto-refreshing for queries that only happen once
* (e.g., requests during server startup).
*/
public final class DnsResolverGroupBuilder {

    /** Backoff applied between retries when a background cache refresh fails. */
    private Backoff refreshBackoff = Backoff.ofDefault();

    // TTL clamping defaults: effectively "respect the TTL from the DNS server".
    private int minTtl = 1;
    private int maxTtl = Integer.MAX_VALUE;
    private int negativeTtl;

    // DnsNameResolverBuilder properties:
    private boolean traceEnabled = true;
    private long queryTimeoutMillis = 5000; // 5 seconds.

    @Nullable
    private ResolvedAddressTypes resolvedAddressTypes;
    @Nullable
    private Boolean recursionDesired;
    @Nullable
    private Integer maxQueriesPerResolve;
    @Nullable
    private Integer maxPayloadSize;
    @Nullable
    private Boolean optResourceEnabled;
    @Nullable
    private HostsFileEntriesResolver hostsFileEntriesResolver;
    @Nullable
    private DnsServerAddressStreamProvider dnsServerAddressStreamProvider;
    @Nullable
    private DnsQueryLifecycleObserverFactory dnsQueryLifecycleObserverFactory;
    private boolean dnsQueryMetricsDisabled;
    @Nullable
    private List<String> searchDomains;
    @Nullable
    private Integer ndots;
    @Nullable
    private Boolean decodeIdn;
    @Nullable
    private String cacheSpec;
    @Nullable
    private MeterRegistry meterRegistry;

    DnsResolverGroupBuilder() {}

    /**
     * Sets {@link Backoff} which is used when the {@link DnsNameResolver} fails to update the cache.
     */
    public DnsResolverGroupBuilder refreshBackoff(Backoff refreshBackoff) {
        this.refreshBackoff = requireNonNull(refreshBackoff, "refreshBackoff");
        return this;
    }

    /**
     * Sets the minimum and maximum TTL of the cached DNS resource records in seconds. If the TTL of the DNS
     * resource record returned by the DNS server is less than the minimum TTL or greater than the maximum TTL,
     * this resolver will ignore the TTL from the DNS server and use the minimum TTL or the maximum TTL instead
     * respectively.
     * The default value is {@code 1} and {@link Integer#MAX_VALUE}, which practically tells this resolver to
     * respect the TTL from the DNS server.
     */
    public DnsResolverGroupBuilder ttl(int minTtl, int maxTtl) {
        checkArgument(minTtl > 0 && minTtl <= maxTtl,
                      "minTtl: %s, maxTtl: %s (expected: 1 <= minTtl <= maxTtl)", minTtl, maxTtl);
        this.minTtl = minTtl;
        this.maxTtl = maxTtl;
        return this;
    }

    /**
     * Sets the TTL of the cache for the failed DNS queries in seconds. The default value is {@code 0} which
     * means that the {@link AddressResolver} does not cache when DNS queries are failed.
     */
    public DnsResolverGroupBuilder negativeTtl(int negativeTtl) {
        checkArgument(negativeTtl >= 0, "negativeTtl: %s, (expected: >= 0)", negativeTtl);
        this.negativeTtl = negativeTtl;
        return this;
    }

    /**
     * Sets if this resolver should generate detailed trace information in exception messages so that
     * it is easier to understand the cause of resolution failure. This flag is enabled by default.
     */
    public DnsResolverGroupBuilder traceEnabled(boolean traceEnabled) {
        this.traceEnabled = traceEnabled;
        return this;
    }

    /**
     * Sets the timeout of the DNS query performed by this resolver. {@code 0} disables the timeout.
     *
     * @see DnsNameResolverBuilder#queryTimeoutMillis(long)
     */
    public DnsResolverGroupBuilder queryTimeout(Duration queryTimeout) {
        requireNonNull(queryTimeout, "queryTimeout");
        checkArgument(!queryTimeout.isNegative(), "queryTimeout: %s (expected: >= 0)", queryTimeout);
        return queryTimeoutMillis(queryTimeout.toMillis());
    }

    /**
     * Sets the timeout of the DNS query performed by this resolver in milliseconds.
     * {@code 0} disables the timeout.
     *
     * @see DnsNameResolverBuilder#queryTimeoutMillis(long)
     */
    public DnsResolverGroupBuilder queryTimeoutMillis(long queryTimeoutMillis) {
        checkArgument(queryTimeoutMillis >= 0, "queryTimeoutMillis: %s (expected: >= 0)", queryTimeoutMillis);
        this.queryTimeoutMillis = queryTimeoutMillis;
        return this;
    }

    /**
     * Sets {@link ResolvedAddressTypes} which is the list of the protocol families of the address resolved.
     *
     * @see DnsNameResolverBuilder#resolvedAddressTypes(ResolvedAddressTypes)
     */
    public DnsResolverGroupBuilder resolvedAddressTypes(
            ResolvedAddressTypes resolvedAddressTypes) {
        this.resolvedAddressTypes = requireNonNull(resolvedAddressTypes, "resolvedAddressTypes");
        return this;
    }

    /**
     * Sets if this resolver has to send a DNS query with the RD (recursion desired) flag set.
     *
     * @see DnsNameResolverBuilder#recursionDesired(boolean)
     */
    public DnsResolverGroupBuilder recursionDesired(boolean recursionDesired) {
        this.recursionDesired = recursionDesired;
        return this;
    }

    /**
     * Returns the maximum allowed number of DNS queries to send when resolving a host name.
     *
     * @see DnsNameResolverBuilder#maxQueriesPerResolve(int)
     */
    public DnsResolverGroupBuilder maxQueriesPerResolve(int maxQueriesPerResolve) {
        checkArgument(maxQueriesPerResolve > 0,
                      "maxQueriesPerResolve: %s (expected: > 0)", maxQueriesPerResolve);
        this.maxQueriesPerResolve = maxQueriesPerResolve;
        return this;
    }

    /**
     * Sets the capacity of the datagram packet buffer in bytes.
     *
     * @see DnsNameResolverBuilder#maxPayloadSize(int)
     */
    public DnsResolverGroupBuilder maxPayloadSize(int maxPayloadSize) {
        // Consistency fix: validate like the other int setters instead of deferring a
        // confusing failure to Netty at build time.
        checkArgument(maxPayloadSize > 0, "maxPayloadSize: %s (expected: > 0)", maxPayloadSize);
        this.maxPayloadSize = maxPayloadSize;
        return this;
    }

    /**
     * Enables the automatic inclusion of a optional records that tries to give the remote DNS server a hint
     * about how much data the resolver can read per response. Some DNSServer may not support this and so
     * fail to answer queries.
     *
     * @see DnsNameResolverBuilder#optResourceEnabled(boolean)
     */
    public DnsResolverGroupBuilder optResourceEnabled(boolean optResourceEnabled) {
        this.optResourceEnabled = optResourceEnabled;
        return this;
    }

    /**
     * Sets {@link HostsFileEntriesResolver} which is used to first check if the hostname is locally aliased.
     *
     * @see DnsNameResolverBuilder#hostsFileEntriesResolver(HostsFileEntriesResolver)
     */
    public DnsResolverGroupBuilder hostsFileEntriesResolver(
            HostsFileEntriesResolver hostsFileEntriesResolver) {
        this.hostsFileEntriesResolver = requireNonNull(hostsFileEntriesResolver, "hostsFileEntriesResolver");
        return this;
    }

    /**
     * Sets {@link DnsServerAddressStreamProvider} which is used to determine which DNS server is used to
     * resolve each hostname.
     *
     * @see DnsNameResolverBuilder#nameServerProvider(DnsServerAddressStreamProvider)
     */
    public DnsResolverGroupBuilder dnsServerAddressStreamProvider(
            DnsServerAddressStreamProvider dnsServerAddressStreamProvider) {
        this.dnsServerAddressStreamProvider =
                requireNonNull(dnsServerAddressStreamProvider, "dnsServerAddressStreamProvider");
        return this;
    }

    /**
     * Sets {@link DnsQueryLifecycleObserverFactory} that is used to generate objects which can observe
     * individual DNS queries.
     *
     * @see DnsNameResolverBuilder#dnsQueryLifecycleObserverFactory(DnsQueryLifecycleObserverFactory)
     */
    public DnsResolverGroupBuilder dnsQueryLifecycleObserverFactory(
            DnsQueryLifecycleObserverFactory dnsQueryLifecycleObserverFactory) {
        this.dnsQueryLifecycleObserverFactory =
                requireNonNull(dnsQueryLifecycleObserverFactory, "dnsQueryLifecycleObserverFactory");
        return this;
    }

    /**
     * Disables the default {@link DnsQueryLifecycleObserverFactory} that collects DNS query metrics through
     * {@link MeterRegistry}.
     */
    public DnsResolverGroupBuilder disableDnsQueryMetrics() {
        dnsQueryMetricsDisabled = true;
        return this;
    }

    /**
     * Sets the list of search domains of the resolver.
     *
     * @see DnsNameResolverBuilder#searchDomains(Iterable)
     */
    public DnsResolverGroupBuilder searchDomains(Iterable<String> searchDomains) {
        this.searchDomains = ImmutableList.copyOf(requireNonNull(searchDomains, "searchDomains"));
        return this;
    }

    /**
     * Sets the search domains of the resolver.
     *
     * @see DnsNameResolverBuilder#searchDomains(Iterable)
     */
    public DnsResolverGroupBuilder searchDomains(String... searchDomains) {
        return searchDomains(ImmutableList.copyOf(requireNonNull(searchDomains, "searchDomains")));
    }

    /**
     * Sets the number of dots which must appear in a name before an initial absolute query is made.
     *
     * @see DnsNameResolverBuilder#ndots(int)
     */
    public DnsResolverGroupBuilder ndots(int ndots) {
        checkArgument(ndots >= 0, "ndots: %s (expected: >= 0)", ndots);
        this.ndots = ndots;
        return this;
    }

    /**
     * Sets if the domain and host names should be decoded to unicode when received.
     * See <a href="https://datatracker.ietf.org/doc/rfc3492/">rfc3492</a>. This flag is enabled by default.
     *
     * @see DnsNameResolverBuilder#decodeIdn(boolean)
     */
    public DnsResolverGroupBuilder decodeIdn(boolean decodeIdn) {
        this.decodeIdn = decodeIdn;
        return this;
    }

    /**
     * Sets {@link MeterRegistry} to collect the DNS query metrics.
     */
    DnsResolverGroupBuilder meterRegistry(MeterRegistry meterRegistry) {
        this.meterRegistry = meterRegistry;
        return this;
    }

    /**
     * Sets the {@linkplain CaffeineSpec Caffeine specification string} of the cache that stores the domain
     * names and their resolved addresses. If not set, {@link Flags#dnsCacheSpec()} is used by default.
     */
    public DnsResolverGroupBuilder cacheSpec(String cacheSpec) {
        this.cacheSpec = requireNonNull(cacheSpec, "cacheSpec");
        return this;
    }

    /**
     * Builds the refreshing resolver group. Netty-level caches are disabled (Noop*) because
     * caching and refresh are handled by the returned {@link RefreshingAddressResolverGroup}.
     */
    RefreshingAddressResolverGroup build(EventLoopGroup eventLoopGroup) {
        final Consumer<DnsNameResolverBuilder> resolverConfigurator = builder -> {
            builder.channelType(TransportType.datagramChannelType(eventLoopGroup))
                   .socketChannelType(TransportType.socketChannelType(eventLoopGroup))
                   .resolveCache(NoopDnsCache.INSTANCE)
                   .authoritativeDnsServerCache(NoopAuthoritativeDnsServerCache.INSTANCE)
                   .cnameCache(NoopDnsCnameCache.INSTANCE)
                   .traceEnabled(traceEnabled)
                   .completeOncePreferredResolved(true);
            if (queryTimeoutMillis == 0) {
                // "No timeout" is represented as an effectively infinite timeout in Netty.
                builder.queryTimeoutMillis(Long.MAX_VALUE);
            } else {
                builder.queryTimeoutMillis(queryTimeoutMillis);
            }
            if (resolvedAddressTypes != null) {
                builder.resolvedAddressTypes(resolvedAddressTypes);
            }
            if (recursionDesired != null) {
                builder.recursionDesired(recursionDesired);
            }
            if (maxQueriesPerResolve != null) {
                builder.maxQueriesPerResolve(maxQueriesPerResolve);
            }
            if (maxPayloadSize != null) {
                builder.maxPayloadSize(maxPayloadSize);
            }
            if (optResourceEnabled != null) {
                builder.optResourceEnabled(optResourceEnabled);
            }
            if (hostsFileEntriesResolver != null) {
                builder.hostsFileEntriesResolver(hostsFileEntriesResolver);
            }
            if (dnsServerAddressStreamProvider != null) {
                builder.nameServerProvider(dnsServerAddressStreamProvider);
            }
            assert meterRegistry != null;
            // Unless disabled, wrap any user-supplied observer factory so metrics are
            // always collected in addition to the user's observer.
            DnsQueryLifecycleObserverFactory observerFactory = dnsQueryLifecycleObserverFactory;
            if (!dnsQueryMetricsDisabled) {
                final DefaultDnsQueryLifecycleObserverFactory defaultObserverFactory =
                        new DefaultDnsQueryLifecycleObserverFactory(
                                meterRegistry, new MeterIdPrefix("armeria.client.dns.queries"));
                if (observerFactory == null) {
                    observerFactory = defaultObserverFactory;
                } else {
                    observerFactory = new BiDnsQueryLifecycleObserverFactory(
                            observerFactory, defaultObserverFactory);
                }
            }
            if (observerFactory != null) {
                builder.dnsQueryLifecycleObserverFactory(observerFactory);
            }
            if (searchDomains != null) {
                builder.searchDomains(searchDomains);
            }
            if (ndots != null) {
                builder.ndots(ndots);
            }
            if (decodeIdn != null) {
                builder.decodeIdn(decodeIdn);
            }
        };
        final String cacheSpec = firstNonNull(this.cacheSpec, Flags.dnsCacheSpec());
        return new RefreshingAddressResolverGroup(resolverConfigurator, minTtl, maxTtl, negativeTtl,
                                                  queryTimeoutMillis, refreshBackoff, resolvedAddressTypes,
                                                  cacheSpec);
    }
}
| apache-2.0 |
AndroidX/androidx | vectordrawable/vectordrawable-animated/src/main/java/androidx/vectordrawable/graphics/drawable/AnimationUtilsCompat.java | 6062 | /*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.vectordrawable.graphics.drawable;
import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP_PREFIX;
import android.content.Context;
import android.content.res.Resources.NotFoundException;
import android.content.res.XmlResourceParser;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Xml;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.AccelerateInterpolator;
import android.view.animation.AnimationUtils;
import android.view.animation.AnticipateInterpolator;
import android.view.animation.AnticipateOvershootInterpolator;
import android.view.animation.BounceInterpolator;
import android.view.animation.CycleInterpolator;
import android.view.animation.DecelerateInterpolator;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;
import android.view.animation.OvershootInterpolator;
import androidx.annotation.RestrictTo;
import androidx.interpolator.view.animation.FastOutLinearInInterpolator;
import androidx.interpolator.view.animation.FastOutSlowInInterpolator;
import androidx.interpolator.view.animation.LinearOutSlowInInterpolator;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
/**
 * Defines common utilities for working with animations.
 * @hide
 */
@RestrictTo(LIBRARY_GROUP_PREFIX)
public class AnimationUtilsCompat {
    /**
     * Loads an {@link Interpolator} object from a resource.
     *
     * @param context Application context used to access resources
     * @param id The resource id of the animation to load
     * @return The interpolator referenced by the specified id
     * @throws NotFoundException if the resource does not exist or cannot be parsed
     */
    public static Interpolator loadInterpolator(Context context, int id)
            throws NotFoundException {
        // Path interpolators are supported natively from API 21, so defer to
        // the framework there.
        if (Build.VERSION.SDK_INT >= 21) {
            return AnimationUtils.loadInterpolator(context, id);
        }
        XmlResourceParser parser = null;
        try {
            // Special treatment for the interpolators introduced at API 21:
            // return the compat implementations without parsing any XML.
            if (id == AndroidResources.FAST_OUT_LINEAR_IN) {
                return new FastOutLinearInInterpolator();
            } else if (id == AndroidResources.FAST_OUT_SLOW_IN) {
                return new FastOutSlowInInterpolator();
            } else if (id == AndroidResources.LINEAR_OUT_SLOW_IN) {
                return new LinearOutSlowInInterpolator();
            }
            parser = context.getResources().getAnimation(id);
            return createInterpolatorFromXml(context, parser);
        } catch (XmlPullParserException | IOException ex) {
            // Wrap both parse and I/O failures in the conventional resource
            // exception, preserving the original cause.
            NotFoundException rnf = new NotFoundException("Can't load animation resource ID #0x"
                    + Integer.toHexString(id));
            rnf.initCause(ex);
            throw rnf;
        } finally {
            if (parser != null) parser.close();
        }
    }

    /**
     * Walks the given XML document and instantiates the interpolator it
     * describes. Only the tags listed below are recognized.
     *
     * @param context used to resolve attribute values on the interpolators
     * @param parser positioned before the interpolator element
     * @return the parsed interpolator, or null if no start tag was found
     */
    private static Interpolator createInterpolatorFromXml(Context context,
            XmlPullParser parser)
            throws XmlPullParserException, IOException {
        Interpolator interpolator = null;
        // Make sure we are on a start tag.
        int type;
        int depth = parser.getDepth();
        while (((type = parser.next()) != XmlPullParser.END_TAG || parser.getDepth() > depth)
                && type != XmlPullParser.END_DOCUMENT) {
            if (type != XmlPullParser.START_TAG) {
                continue;
            }
            AttributeSet attrs = Xml.asAttributeSet(parser);
            String name = parser.getName();
            switch (name) {
                case "linearInterpolator":
                    interpolator = new LinearInterpolator();
                    break;
                case "accelerateInterpolator":
                    interpolator = new AccelerateInterpolator(context, attrs);
                    break;
                case "decelerateInterpolator":
                    interpolator = new DecelerateInterpolator(context, attrs);
                    break;
                case "accelerateDecelerateInterpolator":
                    interpolator = new AccelerateDecelerateInterpolator();
                    break;
                case "cycleInterpolator":
                    interpolator = new CycleInterpolator(context, attrs);
                    break;
                case "anticipateInterpolator":
                    interpolator = new AnticipateInterpolator(context, attrs);
                    break;
                case "overshootInterpolator":
                    interpolator = new OvershootInterpolator(context, attrs);
                    break;
                case "anticipateOvershootInterpolator":
                    interpolator = new AnticipateOvershootInterpolator(context, attrs);
                    break;
                case "bounceInterpolator":
                    interpolator = new BounceInterpolator();
                    break;
                case "pathInterpolator":
                    interpolator = new PathInterpolatorCompat(context, attrs, parser);
                    break;
                default:
                    throw new RuntimeException("Unknown interpolator name: " + parser.getName());
            }
        }
        return interpolator;
    }

    /** Non-instantiable utility class. */
    private AnimationUtilsCompat() {
    }
}
| apache-2.0 |
jacksonic/vjlofvhjfgm | src/foam/net/node/Router.js | 1431 | /**
* @license
* Copyright 2017 The FOAM Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Declarative FOAM interface (Node-only): a Router is itself a Handler and
// additionally lets callers bind routes to handlers via addRoute().
foam.INTERFACE({
  package: 'foam.net.node',
  name: 'Router',
  implements: [ 'foam.net.node.Handler' ],
  flags: ['node'],
  methods: [
    {
      name: 'addRoute',
      documentation: `Bind a route to a handler in the context of this Router
          object. The handler that was actually added is returned; some Routers
          produce a modified and/or decorated Handler`,
      args: [
        {
          name: 'route',
          documentation: `The route specification to add.`,
          type: 'foam.net.node.Route'
        },
        {
          name: 'handler',
          documentation: `The handler responsible for the route.`,
          type: 'foam.net.node.Handler'
        },
      ],
      type: 'foam.net.node.Handler',
      // Interface stub: implementations supply the real body.
      code: function(route, handler) {}
    }
  ]
});
| apache-2.0 |
zhouyx/amphtml | extensions/amp-selector/0.1/amp-selector.js | 21231 | /**
* Copyright 2016 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {ActionTrust} from '../../../src/action-constants';
import {AmpEvents} from '../../../src/amp-events';
import {CSS} from '../../../build/amp-selector-0.1.css';
import {Keys} from '../../../src/utils/key-codes';
import {Services} from '../../../src/services';
import {areEqualOrdered} from '../../../src/utils/array';
import {
closestAncestorElementBySelector,
isRTL,
tryFocus,
} from '../../../src/dom';
import {createCustomEvent} from '../../../src/event-helper';
import {dev, user, userAssert} from '../../../src/log';
import {dict} from '../../../src/utils/object';
import {mod} from '../../../src/utils/math';
import {toArray} from '../../../src/types';
const TAG = 'amp-selector';
/**
* Set of namespaces that can be set for lifecycle reporters.
*
* @enum {string}
*/
const KEYBOARD_SELECT_MODES = {
NONE: 'none',
FOCUS: 'focus',
SELECT: 'select',
};
export class AmpSelector extends AMP.BaseElement {
/** @param {!AmpElement} element */
constructor(element) {
super(element);
/** @private {boolean} */
this.isMultiple_ = false;
/** @private {!Array<!Element>} */
this.selectedElements_ = [];
/** @private {!Array<!Element>} */
this.elements_ = [];
/** @private {!Array<!Element>} */
this.inputs_ = [];
/** @private {?../../../src/service/action-impl.ActionService} */
this.action_ = null;
/**
* The index of the option that should receive tab focus. Only one
* option should ever receive tab focus, with the other options reachable
* by arrow keys when the option is in focus.
* @private {number}
*/
this.focusedIndex_ = 0;
/** @private {!KEYBOARD_SELECT_MODES} */
this.kbSelectMode_ = KEYBOARD_SELECT_MODES.NONE;
}
/** @override */
isLayoutSupported() {
return true;
}
/** @override */
prerenderAllowed() {
return true;
}
/** @override */
buildCallback() {
this.action_ = Services.actionServiceForDoc(this.element);
this.isMultiple_ = this.element.hasAttribute('multiple');
if (!this.element.hasAttribute('role')) {
this.element.setAttribute('role', 'listbox');
}
if (this.isMultiple_) {
this.element.setAttribute('aria-multiselectable', 'true');
}
if (this.element.hasAttribute('disabled')) {
this.element.setAttribute('aria-disabled', 'true');
}
let kbSelectMode = this.element.getAttribute('keyboard-select-mode');
if (kbSelectMode) {
kbSelectMode = kbSelectMode.toLowerCase();
user().assertEnumValue(KEYBOARD_SELECT_MODES, kbSelectMode);
userAssert(
!(this.isMultiple_ && kbSelectMode == KEYBOARD_SELECT_MODES.SELECT),
'[keyboard-select-mode=select] not supported for multiple ' +
'selection amp-selector'
);
} else {
kbSelectMode = KEYBOARD_SELECT_MODES.NONE;
}
this.kbSelectMode_ = /** @type {!KEYBOARD_SELECT_MODES} */ (kbSelectMode);
this.registerAction('clear', this.clearAllSelections_.bind(this));
this.init_();
this.element.addEventListener('click', this.clickHandler_.bind(this));
this.element.addEventListener('keydown', this.keyDownHandler_.bind(this));
this.registerAction(
'selectUp',
(invocation) => {
const {args, trust} = invocation;
const delta = args && args['delta'] !== undefined ? -args['delta'] : -1;
this.select_(delta, trust);
},
ActionTrust.LOW
);
this.registerAction(
'selectDown',
(invocation) => {
const {args, trust} = invocation;
const delta = args && args['delta'] !== undefined ? args['delta'] : 1;
this.select_(delta, trust);
},
ActionTrust.LOW
);
this.registerAction(
'toggle',
(invocation) => {
const {args, trust} = invocation;
userAssert(args['index'] >= 0, "'index' must be greater than 0");
userAssert(
args['index'] < this.elements_.length,
"'index' must " +
'be less than the length of options in the <amp-selector>'
);
if (args && args['index'] !== undefined) {
return this.toggle_(args['index'], args['value'], trust);
} else {
return Promise.reject("'index' must be specified");
}
},
ActionTrust.LOW
);
/** If the element is in an `email` document, allow its `clear`,
* `selectDown`, `selectUp`, and `toggle` actions. */
this.action_.addToAllowlist(
TAG,
['clear', 'selectDown', 'selectUp', 'toggle'],
['email']
);
// Triggers on DOM children updates
this.element.addEventListener(
AmpEvents.DOM_UPDATE,
this.maybeRefreshOnUpdate_.bind(this)
);
}
/** @override */
mutatedAttributesCallback(mutations) {
const selected = mutations['selected'];
if (selected !== undefined) {
this.selectedAttributeMutated_(selected);
}
const disabled = mutations['disabled'];
if (disabled !== undefined) {
if (disabled) {
this.element.setAttribute('aria-disabled', 'true');
} else {
this.element.removeAttribute('aria-disabled');
}
}
}
/**
* Handles mutation of the `selected` attribute.
* @param {null|boolean|string|number|Array|Object} newValue
* @private
*/
selectedAttributeMutated_(newValue) {
let selected = Array.isArray(newValue) ? newValue : [newValue];
if (newValue === null || selected.length == 0) {
this.clearAllSelections_();
return;
}
// Only use first value if multiple selection is disabled.
if (!this.isMultiple_) {
selected = selected.slice(0, 1);
}
// If selection hasn't changed, early-out.
const current = this.selectedOptions_();
if (areEqualOrdered(current.sort(), selected.sort())) {
return;
}
// Convert array values to strings and create map for fast lookup.
const isSelected = selected.reduce((map, value) => {
map[value] = true;
return map;
}, Object.create(null));
// Iterate through elements and toggle selection as necessary.
for (let i = 0; i < this.elements_.length; i++) {
const element = this.elements_[i];
const option = element.getAttribute('option');
if (isSelected[option]) {
this.setSelection_(element);
} else {
this.clearSelection_(element);
}
}
this.updateFocus_();
this.setInputs_();
}
/**
* Update focus such that only one option in the selector can receive focus.
* When keyboard-select-mode is not none, this function handles focus as if
* the selector options are set of radio buttons. Otherwise, this function
* is a no-op.
*
* If no element is provided, this function will determine which option should
* receive focus.
*
* In multi-select selectors, focus should go to the first option.
* In single-select selectors, focus should go to the initially selected
* option, or to the first option if none are initially selected.
* @param {Element=} opt_focusEl Element to put focus on
* @private
*/
updateFocus_(opt_focusEl) {
if (this.kbSelectMode_ == KEYBOARD_SELECT_MODES.NONE) {
// Don't manage focus.
return;
}
this.elements_.forEach((option) => {
option.tabIndex = -1;
});
let focusElement = opt_focusEl;
if (!focusElement) {
if (this.isMultiple_) {
focusElement = this.elements_[0];
} else {
focusElement = this.selectedElements_[0] || this.elements_[0];
}
}
if (focusElement) {
this.focusedIndex_ = this.elements_.indexOf(focusElement);
focusElement.tabIndex = 0;
}
}
/**
* Calls init_ again if options element has changed
* @param {Event} unusedEvent
* @private
*/
maybeRefreshOnUpdate_(unusedEvent) {
const newElements = toArray(this.element.querySelectorAll('[option]'));
if (areEqualOrdered(this.elements_, newElements)) {
return;
}
this.init_(newElements);
}
/**
* @param {!Array<!Element>=} opt_elements
* @private
*/
init_(opt_elements) {
this.selectedElements_.length = 0;
const elements = opt_elements
? opt_elements
: toArray(this.element.querySelectorAll('[option]'));
elements.forEach((el) => {
if (!el.hasAttribute('role')) {
el.setAttribute('role', 'option');
}
if (el.hasAttribute('disabled')) {
el.setAttribute('aria-disabled', 'true');
}
if (el.hasAttribute('selected')) {
this.setSelection_(el);
} else {
this.clearSelection_(el);
}
el.tabIndex = 0;
});
this.elements_ = elements;
this.updateFocus_();
this.setInputs_();
}
/**
* Creates inputs for the currently selected elements and returns a string
* array of their option values.
* Note: Ignores elements that have `disabled` attribute set.
* @return {!Array<string>}
* @private
*/
setInputs_() {
const selectedValues = [];
const elementName = this.element.getAttribute('name');
if (!elementName || this.element.hasAttribute('disabled')) {
return selectedValues;
}
const formId = this.element.getAttribute('form');
this.inputs_.forEach((input) => {
this.element.removeChild(input);
});
this.inputs_ = [];
const doc = this.win.document;
const fragment = doc.createDocumentFragment();
this.selectedElements_.forEach((option) => {
if (!option.hasAttribute('disabled')) {
const hidden = doc.createElement('input');
const value = option.getAttribute('option');
hidden.setAttribute('type', 'hidden');
hidden.setAttribute('name', elementName);
hidden.setAttribute('value', value);
if (formId) {
hidden.setAttribute('form', formId);
}
this.inputs_.push(hidden);
fragment.appendChild(hidden);
selectedValues.push(value);
}
});
this.element.appendChild(fragment);
return selectedValues;
}
/**
* Handles user selection on an option.
* @param {!Element} el The element selected.
* @private
*/
onOptionPicked_(el) {
if (el.hasAttribute('disabled')) {
return;
}
this.mutateElement(() => {
if (el.hasAttribute('selected')) {
if (this.isMultiple_) {
this.clearSelection_(el);
this.setInputs_();
}
} else {
this.setSelection_(el);
this.setInputs_();
}
// Newly picked option should always have focus.
this.updateFocus_(el);
// User gesture trigger is "high" trust.
this.fireSelectEvent_(el, ActionTrust.HIGH);
});
}
/**
* @return {!Array<string>}
* @private
*/
selectedOptions_() {
return this.selectedElements_.map((el) => el.getAttribute('option'));
}
/**
* Handles click events for the selectables.
* @param {!Event} event
* @private
*/
clickHandler_(event) {
if (this.element.hasAttribute('disabled')) {
return;
}
let el = dev().assertElement(event.target);
if (!el) {
return;
}
if (!el.hasAttribute('option')) {
el = closestAncestorElementBySelector(el, '[option]');
}
if (el) {
this.onOptionPicked_(el);
}
}
  /**
   * Handles the "toggle" action: sets or clears the `selected` state of the
   * option at `index`. If `value` is given it forces that state; otherwise
   * the current state is flipped. In single-select mode, selecting a new
   * option deselects the previously selected one. No-op (resolved promise)
   * when the requested state equals the current state.
   * @param {number} index
   * @param {boolean|undefined} value
   * @param {!ActionTrust} trust
   * @return {!Promise}
   * @private
   */
  toggle_(index, value, trust) {
    const el = this.elements_[index];
    const indexCurrentStatus = el.hasAttribute('selected');
    // Forced value wins; otherwise flip the current state.
    const indexFinalStatus = value !== undefined ? value : !indexCurrentStatus;
    const selectedIndex = this.elements_.indexOf(this.selectedElements_[0]);
    if (indexFinalStatus === indexCurrentStatus) {
      return Promise.resolve();
    }
    // There is a change of the `selected` attribute for the element
    return this.mutateElement(() => {
      if (selectedIndex !== index) {
        this.setSelection_(el);
        // Clear the previous selection, if any (single-select behavior).
        const selectedEl = this.elements_[selectedIndex];
        if (selectedEl) {
          this.clearSelection_(selectedEl);
        }
      } else {
        this.clearSelection_(el);
      }
      // Propagate the trust of the originating action.
      this.fireSelectEvent_(el, trust);
    });
  }
/**
* Triggers a 'select' event with two data params:
* 'targetOption' - option value of the selected or deselected element.
* 'selectedOptions' - array of option values of selected elements.
 * @param {!Element} el The element that was selected or deselected.
* @param {!ActionTrust} trust
* @private
*/
fireSelectEvent_(el, trust) {
const name = 'select';
const selectEvent = createCustomEvent(
this.win,
`amp-selector.${name}`,
dict({
'targetOption': el.getAttribute('option'),
'selectedOptions': this.selectedOptions_(),
})
);
// TODO(wg-components): Remove this in Q1 2020.
if (trust < ActionTrust.DEFAULT) {
user().warn(
TAG,
'"select" event now has the same trust as the originating action. ' +
'See https://github.com/ampproject/amphtml/issues/24443 for details.'
);
}
this.action_.trigger(this.element, name, selectEvent, trust);
}
/**
* Handles selectUp events.
* @param {number} delta
* @param {!ActionTrust} trust
* @private
*/
select_(delta, trust) {
// Change the selection to the next element in the specified direction.
// The selection should loop around if the user attempts to go one
// past the beginning or end.
const previousIndex = this.elements_.indexOf(this.selectedElements_[0]);
// If previousIndex === -1 is true, then a negative delta will be offset
// one more than is wanted when looping back around in the options.
// This occurs when no options are selected and "selectUp" is called.
const selectUpWhenNoneSelected = previousIndex === -1 && delta < 0;
const index = selectUpWhenNoneSelected ? delta : previousIndex + delta;
const normalizedIndex = mod(index, this.elements_.length);
const el = this.elements_[normalizedIndex];
this.setSelection_(el);
const previousEl = this.elements_[previousIndex];
if (previousEl) {
this.clearSelection_(previousEl);
}
this.setInputs_();
// Propagate the trust of the source action.
this.fireSelectEvent_(el, trust);
}
/**
* Handles keyboard events.
* @param {!Event} event
* @return {!Promise}
* @private
*/
keyDownHandler_(event) {
if (this.element.hasAttribute('disabled')) {
return Promise.resolve();
}
const {key} = event;
switch (key) {
case Keys.LEFT_ARROW: /* fallthrough */
case Keys.UP_ARROW: /* fallthrough */
case Keys.RIGHT_ARROW: /* fallthrough */
case Keys.DOWN_ARROW: /* fallthrough */
case Keys.HOME: /* fallthrough */
case Keys.END:
if (this.kbSelectMode_ != KEYBOARD_SELECT_MODES.NONE) {
return this.navigationKeyDownHandler_(event);
}
return Promise.resolve();
case Keys.ENTER: /* fallthrough */
case Keys.SPACE:
this.selectionKeyDownHandler_(event);
return Promise.resolve();
}
return Promise.resolve();
}
/**
* Handles keyboard navigation events. Should not be called if
* keyboard selection is disabled.
* @param {!Event} event
* @return {!Promise}
* @private
*/
navigationKeyDownHandler_(event) {
const doc = this.win.document;
let dir = 0;
switch (event.key) {
case Keys.LEFT_ARROW:
// Left is considered 'previous' in LTR and 'next' in RTL.
dir = isRTL(doc) ? 1 : -1;
break;
case Keys.UP_ARROW:
// Up is considered 'previous' in both LTR and RTL.
dir = -1;
break;
case Keys.RIGHT_ARROW:
// Right is considered 'next' in LTR and 'previous' in RTL.
dir = isRTL(doc) ? -1 : 1;
break;
case Keys.DOWN_ARROW:
// Down is considered 'next' in both LTR and RTL.
dir = 1;
break;
case Keys.HOME:
// Home looks for first nonhidden element, in 'next' direction.
dir = 1;
break;
case Keys.END:
// End looks for last nonhidden element, in 'previous' direction.
dir = -1;
break;
default:
return Promise.resolve();
}
event.preventDefault();
// Make currently selected option unfocusable
this.elements_[this.focusedIndex_].tabIndex = -1;
return this.getElementsSizes_().then((sizes) => {
const originalIndex = this.focusedIndex_;
// For Home/End keys, start at end/beginning respectively and wrap around
switch (event.key) {
case Keys.HOME:
this.focusedIndex_ = this.elements_.length - 1;
break;
case Keys.END:
this.focusedIndex_ = 0;
break;
}
do {
// Change the focus to the next element in the specified direction.
// The selection should loop around if the user attempts to go one
// past the beginning or end.
this.focusedIndex_ = (this.focusedIndex_ + dir) % this.elements_.length;
if (this.focusedIndex_ < 0) {
this.focusedIndex_ = this.focusedIndex_ + this.elements_.length;
}
} while (
isElementHidden(
this.elements_[this.focusedIndex_],
sizes[this.focusedIndex_]
) &&
this.focusedIndex_ != originalIndex
);
// Focus newly selected option
const newSelectedOption = this.elements_[this.focusedIndex_];
newSelectedOption.tabIndex = 0;
tryFocus(newSelectedOption);
const focusedOption = this.elements_[this.focusedIndex_];
if (this.kbSelectMode_ == KEYBOARD_SELECT_MODES.SELECT) {
this.onOptionPicked_(focusedOption);
}
});
}
/**
* Handles keyboard selection events.
* @param {!Event} event
* @private
*/
selectionKeyDownHandler_(event) {
const {key} = event;
if (key == Keys.SPACE || key == Keys.ENTER) {
if (this.elements_.includes(dev().assertElement(event.target))) {
event.preventDefault();
const el = dev().assertElement(event.target);
this.onOptionPicked_(el);
}
}
}
/**
* Clears a given element from the list of selected options.
* @param {!Element} element
* @private
*/
clearSelection_(element) {
element.removeAttribute('selected');
element.setAttribute('aria-selected', 'false');
const selIndex = this.selectedElements_.indexOf(element);
if (selIndex !== -1) {
this.selectedElements_.splice(selIndex, 1);
}
}
/**
* Clears all selected options.
* @private
*/
clearAllSelections_() {
while (this.selectedElements_.length > 0) {
// Clear selected options for single select.
const el = this.selectedElements_.pop();
this.clearSelection_(el);
}
this.setInputs_();
}
/**
* Marks a given element as selected and clears the others if required.
* @param {!Element} element
* @private
*/
setSelection_(element) {
// Exit if `element` is already selected.
if (this.selectedElements_.includes(element)) {
return;
}
if (!this.isMultiple_) {
this.clearAllSelections_();
}
element.setAttribute('selected', '');
element.setAttribute('aria-selected', 'true');
this.selectedElements_.push(element);
}
/**
* @return {!Array<!Element>}
* @visibleForTesting
*/
getElementsForTesting() {
return this.elements_;
}
/**
* @return {!Array<!Element>}
* @visibleForTesting
*/
getSelectedElementsForTesting() {
return this.selectedElements_;
}
/**
* Cache the rects of each of the elements.
* @return {!Promise<!Array<!ClientRect>>}
* @private
*/
getElementsSizes_() {
return this.measureElement(() => {
return this.elements_.map((element) =>
element./*OK*/ getBoundingClientRect()
);
});
}
}
/**
 * Detect if an element is hidden: either explicitly via its `hidden`
 * property, or implicitly by having a zero-area layout rect.
 * @param {!Element} element
 * @param {!ClientRect} rect
 * @return {boolean}
 */
function isElementHidden(element, rect) {
  if (element.hidden) {
    return true;
  }
  return rect.width == 0 || rect.height == 0;
}
// Register the amp-selector element (version 0.1) with its stylesheet.
AMP.extension(TAG, '0.1', (AMP) => {
  AMP.registerElement(TAG, AmpSelector, CSS);
});
| apache-2.0 |
drautureau/Grapes | server/src/main/java/org/axway/grapes/server/db/datamodel/DbCollections.java | 906 | package org.axway.grapes.server.db.datamodel;
/**
 * DB Collections
 *
 * <p>This interface contains all the collection names that could be found in Grapes database.</p>
 *
 * author: jdcoffre
 */
public interface DbCollections {

    /** Version of the persisted data model. */
    String datamodelVersion = "2.2.0";

    // Collection names, derived from the corresponding entity class names so
    // they stay in sync with the model classes. Interface fields are
    // implicitly public static final; the redundant (and previously
    // inconsistent) modifiers are omitted.
    String DB_ORGANIZATION = DbOrganization.class.getSimpleName();
    String DB_PRODUCT = DbProduct.class.getSimpleName();
    String DB_MODULES = DbModule.class.getSimpleName();
    String DB_ARTIFACTS = DbArtifact.class.getSimpleName();
    String DB_LICENSES = DbLicense.class.getSimpleName();
    String DB_CREDENTIALS = DbCredential.class.getSimpleName();
    String DB_GRAPES_INFO = DbGrapesInfo.class.getSimpleName();

    /** Default MongoDB document identifier field. */
    String DEFAULT_ID = "_id";
}
| apache-2.0 |
gaocegege/treadmill | tests/rest/api/server_test.py | 1939 | """Server REST api tests.
"""
import http.client
import json
import unittest
import flask
import flask_restplus as restplus
import mock
from treadmill import webutils
from treadmill.rest.api import server
class ServerTest(unittest.TestCase):
    """Test the logic corresponding to the /server namespace."""

    def setUp(self):
        """Initialize the app with the corresponding logic."""
        self.app = flask.Flask(__name__)
        self.app.testing = True

        api = restplus.Api(self.app)
        cors = webutils.cors(origin='*',
                             content_type='application/json',
                             credentials=False)
        self.impl = mock.Mock()

        server.init(api, cors, self.impl)
        self.client = self.app.test_client()

    def test_get_server_list(self):
        """Test getting a list of servers."""
        server_list = [
            {'cell': 'foo', 'traits': [], '_id': 'server1', 'data': []},
            {'cell': 'bar', 'traits': [], '_id': 'server2', 'data': []}
        ]
        self.impl.list.return_value = server_list

        resp = self.client.get('/server/')
        # Use Flask's get_data() instead of manually joining the raw
        # response chunks and decoding them.
        self.assertEqual(json.loads(resp.get_data(as_text=True)), server_list)
        self.assertEqual(resp.status_code, http.client.OK)
        self.impl.list.assert_called_with(None, None)

        # Each query-string combination must be forwarded to impl.list as
        # (cell, partition).
        resp = self.client.get('/server/?cell=foo')
        self.assertEqual(resp.status_code, http.client.OK)
        self.impl.list.assert_called_with('foo', None)

        resp = self.client.get('/server/?partition=baz')
        self.assertEqual(resp.status_code, http.client.OK)
        self.impl.list.assert_called_with(None, 'baz')

        resp = self.client.get('/server/?cell=foo&partition=baz')
        self.assertEqual(resp.status_code, http.client.OK)
        self.impl.list.assert_called_with('foo', 'baz')
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
mwerle/EDDiscovery | EliteDangerous/JournalEvents/JournalMissionRedirected.cs | 2986 | /*
* Copyright © 2016 EDDiscovery development team
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
* ANY KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*
* EDDiscovery is not affiliated with Frontier Developments plc.
*/
using Newtonsoft.Json.Linq;
using System.Linq;
namespace EliteDangerousCore.JournalEvents
{
//When written: when a mission is updated with a new destination
//Parameters
// MissionID
// MissionName
// NewDestinationStation
// OldDestinationStation
// NewDestinationSystem
//{ "timestamp": "2017-08-01T09:04:07Z", "event": "MissionRedirected", "MissionID": 65367315, "NewDestinationStation": "Metcalf Orbital", "OldDestinationStation": "Cuffey Orbital", "NewDestinationSystem": "Cemiess", "OldDestinationSystem": "Vequess" }
[JournalEntryType(JournalTypeEnum.MissionRedirected)]
public class JournalMissionRedirected : JournalEntry, IMissions
{
public JournalMissionRedirected(JObject evt ) : base(evt, JournalTypeEnum.MissionRedirected)
{
Name = JournalFieldNaming.GetBetterMissionName(evt["MissionName"].Str());
MissionId = evt["MissionID"].Int();
NewDestinationStation = evt["NewDestinationStation"].Str();
OldDestinationStation = evt["OldDestinationStation"].Str();
NewDestinationSystem = evt["NewDestinationSystem"].Str();
OldDestinationSystem = evt["OldDestinationSystem"].Str();
}
public string NewDestinationStation { get; set; }
public string OldDestinationStation { get; set; }
public string NewDestinationSystem { get; set; }
public string OldDestinationSystem { get; set; }
public int MissionId { get; set; }
public string Name { get; set; }
public override void FillInformation(out string summary, out string info, out string detailed) //V
{
summary = EventTypeStr.SplitCapsWord();
info = info = BaseUtils.FieldBuilder.Build("Mission name:", Name,
"From:", OldDestinationSystem,
"", OldDestinationStation,
"To:", NewDestinationSystem,
"", NewDestinationStation);
detailed = "";
}
public void UpdateMissions(MissionListAccumulator mlist, EliteDangerousCore.ISystem sys, string body, DB.SQLiteConnectionUser conn)
{
mlist.Redirected(this);
}
}
}
| apache-2.0 |
darciopacifico/omr | tags/PreSCMSetup/jsf-app/src/main/java/br/com/dlp/jazzqa/pessoa/PessoaPK.java | 301 | package br.com.dlp.jazzqa.pessoa;
import br.com.dlp.jazzqa.base.SimpleJazzQAPK;
/**
*
* @author dpacifico
*
*/
public class PessoaPK extends SimpleJazzQAPK {
public PessoaPK(Long id) {
super(id);
}
private static final long serialVersionUID = -4521417989286138551L;
}
| apache-2.0 |
dmvolod/camel | camel-core/src/test/java/org/apache/camel/processor/interceptor/CustomInterceptorRouteWithChildOutputTest.java | 3456 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor.interceptor;
import java.util.ArrayList;
import java.util.List;
import org.apache.camel.CamelContext;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.model.LogDefinition;
import org.apache.camel.model.ProcessorDefinition;
import org.apache.camel.model.SplitDefinition;
import org.apache.camel.model.ToDefinition;
import org.apache.camel.spi.InterceptStrategy;
/**
*
*/
public class CustomInterceptorRouteWithChildOutputTest extends ContextTestSupport {

    // Records every processor definition the interceptor is asked to wrap.
    private MyInterceptor myInterceptor = new MyInterceptor();

    // Verifies that a custom InterceptStrategy is invoked for each output of
    // the route, including the children of the splitter.
    public void testCustomInterceptor() throws Exception {
        getMockEndpoint("mock:child").expectedMessageCount(3);
        getMockEndpoint("mock:result").expectedMessageCount(1);

        template.sendBody("direct:start", "A,B,C");

        assertMockEndpointsSatisfied();

        // Expected wrap order: log, to(mock:child) inside the split,
        // the split itself, then to(mock:result).
        assertEquals(4, myInterceptor.getDefs().size());
        assertIsInstanceOf(LogDefinition.class, myInterceptor.getDefs().get(0));
        assertIsInstanceOf(ToDefinition.class, myInterceptor.getDefs().get(1));
        assertEquals("mock:child", myInterceptor.getDefs().get(1).getLabel());
        assertIsInstanceOf(SplitDefinition.class, myInterceptor.getDefs().get(2));
        assertIsInstanceOf(ToDefinition.class, myInterceptor.getDefs().get(3));
        assertEquals("mock:result", myInterceptor.getDefs().get(3).getLabel());
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // add our custom interceptor
                context.addInterceptStrategy(myInterceptor);

                from("direct:start")
                    .split(body().tokenize(","))
                        .log("Spltted ${body}")
                        .to("mock:child")
                    .end()
                    .to("mock:result");
            }
        };
    }

    // InterceptStrategy that does not decorate anything; it only records the
    // definitions it was offered so the test can assert on them.
    @SuppressWarnings("rawtypes")
    private static class MyInterceptor implements InterceptStrategy {

        private final List<ProcessorDefinition> defs = new ArrayList<>();

        @Override
        public Processor wrapProcessorInInterceptors(CamelContext context, ProcessorDefinition<?> definition,
                                                     Processor target, Processor nextTarget) throws Exception {
            defs.add(definition);
            // Return the target unchanged: recording only, no wrapping.
            return target;
        }

        public List<ProcessorDefinition> getDefs() {
            return defs;
        }
    }
}
| apache-2.0 |
MFAnderson/gocd | common/test/unit/com/thoughtworks/go/domain/materials/svn/SvnCommandRemoteTest.java | 15594 | /*
* Copyright 2017 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.domain.materials.svn;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.domain.materials.ValidationBean;
import com.thoughtworks.go.helper.SvnRemoteRepository;
import com.thoughtworks.go.util.FileUtil;
import com.thoughtworks.go.util.TestFileUtil;
import com.thoughtworks.go.util.command.InMemoryStreamConsumer;
import com.thoughtworks.go.util.command.ProcessOutputStreamConsumer;
import org.apache.commons.io.FileUtils;
import org.hamcrest.core.Is;
import org.jdom2.input.SAXBuilder;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.util.List;
import static com.thoughtworks.go.util.command.ProcessOutputStreamConsumer.inMemoryConsumer;
import static org.hamcrest.Matchers.is;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
public class SvnCommandRemoteTest {
public SvnRemoteRepository repository;
private static final String HARRY = "harry";
private static final String HARRYS_PASSWORD = "harryssecret";
public SvnCommand command;
public File workingDir;
private InMemoryStreamConsumer outputStreamConsumer;
    @Before
    public void startRepo() throws Exception {
        // Spin up a fresh remote SVN repository with a single known user, and a
        // command wired with that user's credentials (encrypted flag = true).
        repository = new SvnRemoteRepository();
        repository.addUser(HARRY, HARRYS_PASSWORD);
        repository.start();
        command = new SvnCommand(null, repository.getUrl(), HARRY, HARRYS_PASSWORD, true);
        // Unique scratch directory per run so checkouts never collide.
        workingDir = TestFileUtil.createTempFolder("workingDir" + System.currentTimeMillis());
        outputStreamConsumer = inMemoryConsumer();
    }
@After
public void stopRepo() throws Exception {
if (repository!=null) repository.stop();
FileUtil.deleteFolder(workingDir);
}
    // Verifies remote `svn info` returns the repository URL we started.
    @Test public void shouldSupportSvnInfo() throws Exception {
        SvnCommand.SvnInfo info = command.remoteInfo(new SAXBuilder());
        assertThat(info.getUrl(), is(repository.getUrl()));
    }
    // Verifies `svn log` surfaces the most recent commit message from the fixture repo.
    @Test public void shouldSupportSvnLog() throws Exception {
        List<Modification> info = command.latestModification();
        assertThat(info.get(0).getComment(), is("Added simple build shell to dump the environment to console."));
    }
    // Modifications after revision 2 should be revisions 4 and 3, newest first.
    @Test public void shouldSupportModificationsSince() throws Exception {
        List<Modification> info = command.modificationsSince(new SubversionRevision(2));
        assertThat(info.size(), is(2));
        assertThat(info.get(0).getRevision(), is("4"));
        assertThat(info.get(1).getRevision(), is("3"));
    }
    // A local working-copy `svn info` must work without credentials, since auth
    // is cached in the checkout.
    @Test public void shouldSupportLocalSvnInfoWithoutPassword() throws Exception {
        command.checkoutTo(ProcessOutputStreamConsumer.inMemoryConsumer(), workingDir,
                new SubversionRevision(4));
        SvnCommand commandWithoutPassword = new SvnCommand(null, repository.getUrl(), null, null, true);
        SvnCommand.SvnInfo info = commandWithoutPassword.workingDirInfo(workingDir);
        assertThat(info.getUrl(), is(repository.getUrl()));
    }
@Test
public void shouldMaskPassword_CheckConnection() {
ValidationBean goodResponse = command.checkConnection();
assertThat(goodResponse.isValid(), Is.is(true));
assertThat("Plain text password detected!", goodResponse.getError().contains(HARRYS_PASSWORD), Is.is(false));
ValidationBean badResponse = badUserNameCommand().checkConnection();
assertThat(badResponse.isValid(), Is.is(false));
assertThat("Plain text password detected!", badResponse.getError().contains(HARRYS_PASSWORD), Is.is(false));
badResponse = badPasswordCommand().checkConnection();
assertThat(badResponse.isValid(), Is.is(false));
assertThat("Plain text password detected!", badResponse.getError().contains("some_bad_password"), Is.is(false));
badResponse = badUrlCommand().checkConnection();
assertThat(badResponse.isValid(), Is.is(false));
assertThat("Plain text password detected!", badResponse.getError().contains(HARRYS_PASSWORD), Is.is(false));
}
@Test
public void shouldMaskPassword_UpdateTo() {
command.checkoutTo(outputStreamConsumer, workingDir, new SubversionRevision(2));
command.updateTo(outputStreamConsumer, workingDir, new SubversionRevision(2));
assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false));
try {
badUserNameCommand().updateTo(outputStreamConsumer, workingDir, new SubversionRevision(2));
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false));
}
try {
badPasswordCommand().updateTo(outputStreamConsumer, workingDir, new SubversionRevision(2));
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false));
assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains("some_bad_password"), Is.is(false));
}
try {
badUrlCommand().updateTo(outputStreamConsumer, workingDir, new SubversionRevision(2));
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false));
}
}
@Test
public void shouldMaskPassword_CheckoutTo() {
command.checkoutTo(outputStreamConsumer, workingDir, new SubversionRevision(2));
assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false));
try {
FileUtil.deleteFolder(workingDir);
badUserNameCommand().checkoutTo(outputStreamConsumer, workingDir, new SubversionRevision(2));
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false));
}
try {
FileUtil.deleteFolder(workingDir);
badPasswordCommand().checkoutTo(outputStreamConsumer, workingDir, new SubversionRevision(2));
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false));
assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains("some_bad_password"), Is.is(false));
}
try {
FileUtil.deleteFolder(workingDir);
badUrlCommand().checkoutTo(outputStreamConsumer, workingDir, new SubversionRevision(2));
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false));
}
}
@Test
public void shouldMaskPassword_getAllExternalURLs() {
try {
badUserNameCommand().getAllExternalURLs();
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
try {
badPasswordCommand().getAllExternalURLs();
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false));
}
try {
badUrlCommand().getAllExternalURLs();
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
}
@Test
public void shouldMaskPassword_latestModification() {
try {
badUserNameCommand().latestModification();
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
try {
badPasswordCommand().latestModification();
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false));
}
try {
badUrlCommand().latestModification();
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
}
@Test
public void shouldMaskPassword_modificationsSince() {
try {
badUserNameCommand().latestModification();
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
try {
badPasswordCommand().latestModification();
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false));
}
try {
badUrlCommand().latestModification();
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
}
@Test
public void shouldMaskPassword_remoteInfo() {
try {
badUserNameCommand().remoteInfo(new SAXBuilder());
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
try {
badPasswordCommand().remoteInfo(new SAXBuilder());
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false));
}
try {
badUrlCommand().remoteInfo(new SAXBuilder());
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
}
@Test
public void shouldMaskPassword_workingDirInfo() {
try {
badUserNameCommand().workingDirInfo(workingDir);
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
try {
badPasswordCommand().workingDirInfo(workingDir);
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false));
}
try {
badUrlCommand().workingDirInfo(workingDir);
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
}
@Test
public void shouldMaskPassword_commit() throws IOException {
command.checkoutTo(outputStreamConsumer, workingDir, new SubversionRevision(2));
File newFile = new File(workingDir.getAbsolutePath() + "/foo");
FileUtils.writeStringToFile(newFile, "content");
command.add(outputStreamConsumer, newFile);
try {
badUserNameCommand().commit(outputStreamConsumer, workingDir, "message");
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false));
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
try {
badPasswordCommand().commit(outputStreamConsumer, workingDir, "message");
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains("some_bad_password"), Is.is(false));
assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false));
}
try {
badUrlCommand().commit(outputStreamConsumer, workingDir, "message");
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", outputStreamConsumer.getAllOutput().contains(HARRYS_PASSWORD), Is.is(false));
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
}
@Test
public void shouldMaskPassword_propset() throws IOException {
try {
badUserNameCommand().propset(workingDir, "svn:ignore", "*.foo");
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
try {
badPasswordCommand().propset(workingDir, "svn:ignore", "*.foo");
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains("some_bad_password"), Is.is(false));
}
try {
badUrlCommand().propset(workingDir, "svn:ignore", "*.foo");
fail("should have failed");
} catch (Exception e) {
assertThat("Plain text password detected!", e.getMessage().contains(HARRYS_PASSWORD), Is.is(false));
}
}
    // Command pointed at an unreachable URL but with valid credentials; failures
    // from it must still mask HARRYS_PASSWORD.
    private SvnCommand badUrlCommand() {
        return new SvnCommand(null, "https://invalid", "blrstdcrspair", HARRYS_PASSWORD, false);
    }
    // Valid repo URL, unknown user name, real password -- auth failure path.
    private SvnCommand badUserNameCommand() {
        return new SvnCommand(null, repository.getUrl(), "some_bad_user", HARRYS_PASSWORD, false);
    }
    // Valid repo URL and user, wrong password -- failures must mask "some_bad_password".
    private SvnCommand badPasswordCommand() {
        return new SvnCommand(null, repository.getUrl(), HARRY, "some_bad_password", false);
    }
}
| apache-2.0 |
aws/aws-sdk-cpp | aws-cpp-sdk-customer-profiles/source/model/DeleteProfileRequest.cpp | 700 | /**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/customer-profiles/model/DeleteProfileRequest.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::CustomerProfiles::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
// Both members start unset; the serializer only emits fields whose
// *HasBeenSet flag has been flipped by the corresponding setter.
DeleteProfileRequest::DeleteProfileRequest() : 
    m_profileIdHasBeenSet(false),
    m_domainNameHasBeenSet(false)
{
}
// Builds the JSON request body. Only ProfileId goes into the payload;
// DomainName is tracked by m_domainNameHasBeenSet but never serialized here --
// presumably it is bound into the request URI elsewhere (TODO confirm against
// the generated URI-building code).
Aws::String DeleteProfileRequest::SerializePayload() const
{
  JsonValue payload;

  if(m_profileIdHasBeenSet)
  {
   payload.WithString("ProfileId", m_profileId);

  }

  return payload.View().WriteReadable();
}
| apache-2.0 |
atopuzov/nitro-python | nssrc/com/citrix/netscaler/nitro/resource/config/ns/nslimitsessions.py | 8654 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class nslimitsessions(base_resource) :
""" Configuration for limit sessions resource. """
def __init__(self) :
self._limitidentifier = ""
self._detail = False
self._timeout = 0
self._hits = 0
self._drop = 0
self._number = []
self._name = ""
self._unit = 0
self._flags = 0
self._referencecount = 0
self._maxbandwidth = 0
self._selectoripv61 = ""
self._selectoripv62 = ""
self._flag = 0
self.___count = 0
@property
def limitidentifier(self) :
ur"""Name of the rate limit identifier for which to display the sessions.<br/>Minimum length = 1.
"""
try :
return self._limitidentifier
except Exception as e:
raise e
@limitidentifier.setter
def limitidentifier(self, limitidentifier) :
ur"""Name of the rate limit identifier for which to display the sessions.<br/>Minimum length = 1
"""
try :
self._limitidentifier = limitidentifier
except Exception as e:
raise e
@property
def detail(self) :
ur"""Show the individual hash values.
"""
try :
return self._detail
except Exception as e:
raise e
@detail.setter
def detail(self, detail) :
ur"""Show the individual hash values.
"""
try :
self._detail = detail
except Exception as e:
raise e
@property
def timeout(self) :
ur"""The time remaining on the session before a flush can be attempted. If active transactions are present the session will not be flushed.
"""
try :
return self._timeout
except Exception as e:
raise e
@property
def hits(self) :
ur"""The number of times this entry was hit.
"""
try :
return self._hits
except Exception as e:
raise e
@property
def drop(self) :
ur"""The number of times action was taken.
"""
try :
return self._drop
except Exception as e:
raise e
@property
def number(self) :
ur"""The hash of the matched selectlets.
"""
try :
return self._number
except Exception as e:
raise e
@property
def name(self) :
ur"""The string formed by gathering selectlet values.
"""
try :
return self._name
except Exception as e:
raise e
@property
def unit(self) :
ur"""Total computed hash of the matched selectlets.
"""
try :
return self._unit
except Exception as e:
raise e
@property
def flags(self) :
ur"""Used internally to identify ip addresses.
"""
try :
return self._flags
except Exception as e:
raise e
@property
def referencecount(self) :
ur"""Total number of transactions pointing to this entry. Its the sum total of the connection and bandwidth references.
"""
try :
return self._referencecount
except Exception as e:
raise e
@property
def maxbandwidth(self) :
ur"""The current bandwidth.
"""
try :
return self._maxbandwidth
except Exception as e:
raise e
@property
def selectoripv61(self) :
ur"""First IPV6 address gathered.
"""
try :
return self._selectoripv61
except Exception as e:
raise e
@property
def selectoripv62(self) :
ur"""Second IPV6 address gathered.
"""
try :
return self._selectoripv62
except Exception as e:
raise e
@property
def flag(self) :
ur"""Used internally to identify ipv6 addresses.
"""
try :
return self._flag
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(nslimitsessions_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.nslimitsessions
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
return 0
except Exception as e :
raise e
@classmethod
def clear(cls, client, resource) :
ur""" Use this API to clear nslimitsessions.
"""
try :
if type(resource) is not list :
clearresource = nslimitsessions()
clearresource.limitidentifier = resource.limitidentifier
return clearresource.perform_operation(client,"clear")
else :
if (resource and len(resource) > 0) :
clearresources = [ nslimitsessions() for _ in range(len(resource))]
for i in range(len(resource)) :
clearresources[i].limitidentifier = resource[i].limitidentifier
result = cls.perform_operation_bulk_request(client, clearresources,"clear")
return result
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
ur""" Use this API to fetch all the nslimitsessions resources that are configured on netscaler.
"""
try :
if type(name) == cls :
if type(name) is not list :
option_ = options()
option_.args = nitro_util.object_to_string_withoutquotes(name)
response = name.get_resource(client, option_)
else :
if name and len(name) > 0 :
response = [nslimitsessions() for _ in range(len(name))]
for i in range(len(name)) :
option_ = options()
option_.args = nitro_util.object_to_string_withoutquotes(name[i])
response[i] = name[i].get_resource(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_args(cls, client, args) :
ur""" Use this API to fetch all the nslimitsessions resources that are configured on netscaler.
# This uses nslimitsessions_args which is a way to provide additional arguments while fetching the resources.
"""
try :
obj = nslimitsessions()
option_ = options()
option_.args = nitro_util.object_to_string_withoutquotes(args)
response = obj.get_resources(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_, obj) :
ur""" Use this API to fetch filtered set of nslimitsessions resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
option_ = options()
option_.filter = filter_
option_.args = nitro_util.object_to_string_withoutquotes(obj)
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client, obj) :
ur""" Use this API to count the nslimitsessions resources configured on NetScaler.
"""
try :
option_ = options()
option_.count = True
option_.args = nitro_util.object_to_string_withoutquotes(obj)
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_, obj) :
ur""" Use this API to count filtered the set of nslimitsessions resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
option_ = options()
option_.count = True
option_.filter = filter_
option_.args = nitro_util.object_to_string_withoutquotes(obj)
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
class nslimitsessions_response(base_response) :
	""" Response envelope for nslimitsessions API calls: NITRO status fields
	plus a pre-sized list of payload objects the deserializer fills in.
	"""
	def __init__(self, length=1) :
		self.errorcode = 0
		self.message = ""
		self.severity = ""
		self.sessionid = ""
		# Placeholders overwritten during deserialization. (The original
		# assigned self.nslimitsessions twice; the first, empty-list
		# assignment was dead and has been removed.)
		self.nslimitsessions = [nslimitsessions() for _ in range(length)]
| apache-2.0 |
blindpirate/gradle | subprojects/platform-native/src/main/java/org/gradle/nativeplatform/internal/resolve/DefaultLibraryResolver.java | 5732 | /*
* Copyright 2013 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.nativeplatform.internal.resolve;
import org.gradle.api.DomainObjectSet;
import org.gradle.api.InvalidUserDataException;
import org.gradle.language.base.internal.resolve.LibraryResolveException;
import org.gradle.nativeplatform.BuildType;
import org.gradle.nativeplatform.Flavor;
import org.gradle.nativeplatform.NativeBinarySpec;
import org.gradle.nativeplatform.NativeDependencySet;
import org.gradle.nativeplatform.NativeLibraryBinary;
import org.gradle.nativeplatform.NativeLibraryRequirement;
import org.gradle.nativeplatform.SharedLibraryBinary;
import org.gradle.nativeplatform.StaticLibraryBinary;
import org.gradle.nativeplatform.platform.NativePlatform;
import org.gradle.util.internal.GUtil;
import java.util.Set;
/**
 * Resolves a {@link NativeLibraryRequirement} to a concrete {@link NativeLibraryBinary}
 * matching the flavor, target platform and build type of the requesting binary.
 */
class DefaultLibraryResolver {
    private final NativeLibraryRequirement requirement;
    private final NativeBinarySpec context;
    private final LibraryBinaryLocator libraryBinaryLocator;

    public DefaultLibraryResolver(LibraryBinaryLocator libraryBinaryLocator, NativeLibraryRequirement requirement, NativeBinarySpec context) {
        this.requirement = requirement;
        this.context = context;
        this.libraryBinaryLocator = libraryBinaryLocator;
    }

    /**
     * Locates the library named by the requirement and picks the binary variant that
     * matches the context binary's flavor, platform and build type.
     *
     * @throws LibraryResolveException if the library itself, or a matching binary, cannot be found.
     */
    public NativeLibraryBinary resolveLibraryBinary() {
        DomainObjectSet<NativeLibraryBinary> binaries = libraryBinaryLocator.getBinaries(new LibraryIdentifier(requirement.getProjectPath(), requirement.getLibraryName()));
        if (binaries == null) {
            throw new LibraryResolveException(getFailureMessage(requirement));
        }
        return new LibraryResolution()
            .withFlavor(context.getFlavor())
            .withPlatform(context.getTargetPlatform())
            .withBuildType(context.getBuildType())
            .resolveLibrary(binaries);
    }

    // Message for a missing library; mentions the project only when the requirement
    // points outside the requesting binary's own project.
    private String getFailureMessage(NativeLibraryRequirement requirement) {
        return requirement.getProjectPath() == null || requirement.getProjectPath().equals(context.getProjectPath())
            ? String.format("Could not locate library '%s' required by %s.", requirement.getLibraryName(), getContextMessage())
            : String.format("Could not locate library '%s' in project '%s' required by %s.", requirement.getLibraryName(), requirement.getProjectPath(), getContextMessage());
    }

    private String getContextMessage() {
        return String.format("'%s' in project '%s'", context.getComponent().getName(), context.getProjectPath());
    }

    /** Builder-style holder for the variant criteria used to select a binary. */
    private class LibraryResolution {
        private Flavor flavor;
        private NativePlatform platform;
        private BuildType buildType;

        public LibraryResolution withFlavor(Flavor flavor) {
            this.flavor = flavor;
            return this;
        }

        public LibraryResolution withPlatform(NativePlatform platform) {
            this.platform = platform;
            return this;
        }

        public LibraryResolution withBuildType(BuildType buildType) {
            this.buildType = buildType;
            return this;
        }

        public NativeDependencySet resolve(DomainObjectSet<NativeLibraryBinary> allBinaries) {
            NativeLibraryBinary resolve = resolveLibrary(allBinaries);
            return new DefaultNativeDependencySet(resolve);
        }

        // Narrows the candidates to the linkage type requested (static/shared),
        // then matches on the remaining criteria.
        public NativeLibraryBinary resolveLibrary(DomainObjectSet<NativeLibraryBinary> allBinaries) {
            Class<? extends NativeLibraryBinary> type = getTypeForLinkage(requirement.getLinkage());
            DomainObjectSet<? extends NativeLibraryBinary> candidateBinaries = allBinaries.withType(type);
            return resolve(candidateBinaries);
        }

        private Class<? extends NativeLibraryBinary> getTypeForLinkage(String linkage) {
            if ("static".equals(linkage)) {
                return StaticLibraryBinary.class;
            }
            if ("shared".equals(linkage) || linkage == null) {
                // Shared is the default linkage when none is specified.
                return SharedLibraryBinary.class;
            }
            throw new InvalidUserDataException("Not a valid linkage: " + linkage);
        }

        // First candidate matching every non-null criterion wins; a null criterion
        // matches anything.
        private NativeLibraryBinary resolve(Set<? extends NativeLibraryBinary> candidates) {
            for (NativeLibraryBinary candidate : candidates) {
                if (flavor != null && !flavor.getName().equals(candidate.getFlavor().getName())) {
                    continue;
                }
                if (platform != null && !platform.getName().equals(candidate.getTargetPlatform().getName())) {
                    continue;
                }
                if (buildType != null && !buildType.getName().equals(candidate.getBuildType().getName())) {
                    continue;
                }
                return candidate;
            }
            String typeName = GUtil.elvis(requirement.getLinkage(), "shared");
            // Fix: build the failure message null-safely. Any of the criteria may be
            // unset (they are only matched when non-null above), and an NPE here would
            // mask the real resolution failure with a meaningless stack trace.
            throw new LibraryResolveException(String.format("No %s library binary available for library '%s' with [flavor: '%s', platform: '%s', buildType: '%s']",
                typeName, requirement.getLibraryName(),
                flavor == null ? "any" : flavor.getName(),
                platform == null ? "any" : platform.getName(),
                buildType == null ? "any" : buildType.getName()));
        }
    }
}
| apache-2.0 |
emaeliena/PerfKitBenchmarker | perfkitbenchmarker/benchmarks/iperf_benchmark.py | 6057 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs plain Iperf.
Docs:
http://iperf.fr/
Runs Iperf to collect network throughput.
"""
import logging
import re
from perfkitbenchmarker import flags
from perfkitbenchmarker import sample
from perfkitbenchmarker import vm_util
flags.DEFINE_integer('iperf_sending_thread_count', 1,
'Number of connections to make to the'
' server for sending traffic.')
flags.DEFINE_integer('iperf_runtime_in_seconds', 60,
'Number of seconds to run iperf.')
FLAGS = flags.FLAGS
BENCHMARK_INFO = {'name': 'iperf',
'description': 'Run iperf',
'scratch_disk': False,
'num_machines': 2}
IPERF_PORT = 20000
IPERF_RETRIES = 5
def GetInfo():
  """Returns the static benchmark metadata dict (name, description, VM count)."""
  return BENCHMARK_INFO
def Prepare(benchmark_spec):
  """Install iperf and start the server on all machines.

  Args:
    benchmark_spec: The benchmark specification. Contains all data that is
        required to run the benchmark.
  """
  fw = benchmark_spec.firewall
  vms = benchmark_spec.vms
  for vm in vms:
    vm.Install('iperf')
    # Open the iperf port and launch a detached server on every VM, since
    # Run() sends traffic in both directions.
    fw.AllowPort(vm, IPERF_PORT)
    vm.RemoteCommand('nohup iperf --server --port %s &> /dev/null &' %
                     IPERF_PORT)
@vm_util.Retry(max_retries=IPERF_RETRIES)
def _RunIperf(sending_vm, receiving_vm, receiving_ip_address, ip_type):
  """Run iperf using sending 'vm' to connect to 'ip_address'.

  Args:
    sending_vm: The VM sending traffic.
    receiving_vm: The VM receiving traffic.
    receiving_ip_address: The IP address of the iperf server (ie the receiver).
    ip_type: The IP type of 'ip_address' (e.g. 'internal', 'external')

  Returns:
    A Sample.

  Raises:
    ValueError: If a per-thread throughput value is missing from the iperf
        output (triggers a retry via the decorator).
  """
  iperf_cmd = ('iperf --client %s --port %s --format m --time %s -P %s' %
               (receiving_ip_address, IPERF_PORT,
                FLAGS.iperf_runtime_in_seconds,
                FLAGS.iperf_sending_thread_count))
  stdout, _ = sending_vm.RemoteCommand(iperf_cmd, should_log=True)

  # Example output from iperf that needs to be parsed
  # STDOUT: ------------------------------------------------------------
  # Client connecting to 10.237.229.201, TCP port 5001
  # TCP window size: 0.04 MByte (default)
  # ------------------------------------------------------------
  # [  6] local 10.76.234.115 port 53527 connected with 10.237.229.201 port 5001
  # [  3] local 10.76.234.115 port 53524 connected with 10.237.229.201 port 5001
  # [  4] local 10.76.234.115 port 53525 connected with 10.237.229.201 port 5001
  # [  5] local 10.76.234.115 port 53526 connected with 10.237.229.201 port 5001
  # [ ID] Interval       Transfer     Bandwidth
  # [  4]  0.0-60.0 sec  3730 MBytes  521.1 Mbits/sec
  # [  5]  0.0-60.0 sec  3499 MBytes   489 Mbits/sec
  # [  6]  0.0-60.0 sec  3044 MBytes   425 Mbits/sec
  # [  3]  0.0-60.0 sec  3738 MBytes   522 Mbits/sec
  # [SUM]  0.0-60.0 sec 14010 MBytes  1957 Mbits/sec

  # Prefer the [SUM] line, which iperf emits when all threads overlap fully.
  thread_values = re.findall(r'\[SUM].*\s+(\d+\.?\d*).Mbits/sec', stdout)
  if not thread_values:
    # No [SUM] line: threads started at different times, so estimate the
    # total from the individual per-thread lines. This tends to overestimate
    # slightly. (Fix: this pattern was a non-raw string; '\[' and '\d' are
    # invalid escape sequences in a plain string literal under Python 3.)
    thread_values = re.findall(r'\[.*\d+\].*\s+(\d+\.?\d*).Mbits/sec', stdout)

    if len(thread_values) != FLAGS.iperf_sending_thread_count:
      raise ValueError('Only %s out of %s iperf threads reported a'
                       ' throughput value.' %
                       (len(thread_values), FLAGS.iperf_sending_thread_count))

  total_throughput = sum(float(value) for value in thread_values)

  metadata = {
      # The meta data defining the environment
      'receiving_machine_type': receiving_vm.machine_type,
      'receiving_zone': receiving_vm.zone,
      'sending_machine_type': sending_vm.machine_type,
      'sending_thread_count': FLAGS.iperf_sending_thread_count,
      'sending_zone': sending_vm.zone,
      'runtime_in_seconds': FLAGS.iperf_runtime_in_seconds,
      'ip_type': ip_type
  }
  return sample.Sample('Throughput', total_throughput, 'Mbits/sec', metadata)
def Run(benchmark_spec):
  """Run iperf on the target vm.

  Args:
    benchmark_spec: The benchmark specification. Contains all data that is
        required to run the benchmark.

  Returns:
    A list of sample.Sample objects.
  """
  vms = benchmark_spec.vms
  results = []
  logging.info('Iperf Results:')
  # Send traffic in both directions: originator 0 -> 1, then 1 -> 0
  # (the XOR picks the opposite VM as receiver).
  for originator in [0, 1]:
    sending_vm = vms[originator]
    receiving_vm = vms[originator ^ 1]
    # Send using external IP addresses
    if vm_util.ShouldRunOnExternalIpAddress():
      results.append(_RunIperf(sending_vm,
                               receiving_vm,
                               receiving_vm.ip_address,
                               'external'))
    # Send using internal IP addresses
    if vm_util.ShouldRunOnInternalIpAddress(sending_vm,
                                            receiving_vm):
      results.append(_RunIperf(sending_vm,
                               receiving_vm,
                               receiving_vm.internal_ip,
                               'internal'))
  return results
def Cleanup(benchmark_spec):
  """Cleanup iperf on the target vms by killing the server processes.

  Note: this does not uninstall iperf; it only terminates the servers that
  Prepare() started.

  Args:
    benchmark_spec: The benchmark specification. Contains all data that is
        required to run the benchmark.
  """
  vms = benchmark_spec.vms
  # Prepare() started an iperf server on every VM, so kill it on every VM.
  # (The original only cleaned up vms[1], leaving a server running on vms[0].)
  for vm in vms:
    vm.RemoteCommand('pkill -9 iperf')
| apache-2.0 |
pedroreys/EntityFramework.Filters | src/EntityFramework.Filters.Example/ExampleContext.cs | 5017 | namespace EntityFramework.Filters.Example
{
using System.Data.Entity;
using System.Data.Entity.Migrations;
using System.Data.Entity.SqlServer;
using System.Linq;
public class MigrationsConfiguration : DbMigrationsConfiguration<ExampleContext>
{
    public MigrationsConfiguration()
    {
        // Example project: let EF create/alter the schema automatically.
        AutomaticMigrationsEnabled = true;
        AutomaticMigrationDataLossAllowed = true;
    }

    /// <summary>
    /// Seeds two tenants, each with its own category, author and blog entry,
    /// so the per-tenant filter configured in ExampleContext can be exercised.
    /// ExampleContext.SaveChanges stamps every added ITenantEntity with
    /// context.CurrentTenant, so the order of assignments below matters.
    /// </summary>
    protected override void Seed(ExampleContext context)
    {
        var tenant1 = new Tenant
        {
            Name = "Tenant 1"
        };
        context.Tenants.Add(tenant1);
        context.SaveChanges();

        // Everything saved from here on belongs to tenant 1.
        context.CurrentTenant = tenant1;

        var category = new Category
        {
            Name = "Good posts"
        };
        context.Categories.Add(category);
        context.SaveChanges();

        var author = new Author
        {
            FirstName = "John",
            LastName = "Doe"
        };
        context.Authors.Add(author);
        context.SaveChanges();

        var blog = new BlogEntry
        {
            Title = "My entry",
            Body = "Blog entry here",
            Author = author
        };
        blog.Comments.Add(new Comment
        {
            Text = "First comment"
        });
        blog.Comments.Add(new Comment
        {
            Text = "Second comment"
        });
        blog.AddCategory(category);
        context.BlogEntries.Add(blog);
        context.SaveChanges();

        var tenant2 = new Tenant
        {
            Name = "Tenant 2"
        };
        context.Tenants.Add(tenant2);
        context.SaveChanges();

        // Everything saved from here on belongs to tenant 2.
        context.CurrentTenant = tenant2;

        var category2 = new Category
        {
            Name = "Bad posts"
        };
        context.Categories.Add(category2);
        context.SaveChanges();

        var author2 = new Author
        {
            FirstName = "Jane",
            LastName = "Doe"
        };
        context.Authors.Add(author2);
        context.SaveChanges();

        var blog2 = new BlogEntry
        {
            Title = "My other entry",
            Body = "Blog entry here",
            // Fix: tenant 2's entry previously referenced tenant 1's author
            // ("author"), creating a cross-tenant link and leaving author2
            // unused. Use the author seeded for tenant 2 instead.
            Author = author2
        };
        blog2.Comments.Add(new Comment
        {
            Text = "First comment"
        });
        blog2.Comments.Add(new Comment
        {
            Text = "Second comment"
        });
        blog2.AddCategory(category2);
        context.BlogEntries.Add(blog2);
        context.SaveChanges();
    }
}
/// <summary>
/// Code-based EF configuration that registers the filter interceptor so
/// the filters declared in ExampleContext.OnModelCreating are applied to
/// generated SQL.
/// </summary>
public class ExampleConfiguration : DbConfiguration
{
    public ExampleConfiguration()
    {
        AddInterceptor(new FilterInterceptor());
    }
}
/// <summary>
/// Example context demonstrating EntityFramework.Filters: a named filter
/// ("BadCategory") plus a convention-based tenant filter applied to every
/// ITenantEntity.
/// </summary>
public class ExampleContext : DbContext
{
    public ExampleContext() : base("name=EntityFramework.Filters.Example.ExampleContext")
    {
    }

    public DbSet<BlogEntry> BlogEntries { get; set; }
    public DbSet<Comment> Comments { get; set; }
    public DbSet<Category> Categories { get; set; }
    public DbSet<Tenant> Tenants { get; set; }
    public DbSet<Author> Authors { get; set; }

    // Tenant stamped onto every newly added ITenantEntity by SaveChanges.
    public Tenant CurrentTenant { get; set; }

    protected override void OnModelCreating(DbModelBuilder modelBuilder)
    {
        // Named filter restricting blog entries to those in the "Bad posts" category.
        modelBuilder.Entity<BlogEntry>()
            .Filter("BadCategory", fc => fc.Condition(be => be.Categories.Select(c => c.Category.Name).Contains("Bad posts")))
            .HasMany(m => m.Categories).WithRequired(m => m.BlogEntry).WillCascadeOnDelete(true);
        // Tenant relationships: required, but no cascade delete so removing a
        // tenant cannot wipe the dependent rows.
        modelBuilder.Entity<BlogEntry>()
            .HasRequired(m => m.Tenant).WithMany().WillCascadeOnDelete(false);
        modelBuilder.Entity<Comment>()
            .HasRequired(m => m.Tenant).WithMany().WillCascadeOnDelete(false);
        modelBuilder.Entity<Category>()
            .HasRequired(m => m.Tenant).WithMany().WillCascadeOnDelete(false);
        modelBuilder.Entity<BlogEntryCategory>()
            .HasRequired(m => m.Tenant).WithMany().WillCascadeOnDelete(false);
        modelBuilder.Entity<Author>()
            .HasRequired(m => m.Tenant).WithMany().WillCascadeOnDelete(false);
        // Convention filter: every ITenantEntity query is scoped to one tenant id.
        modelBuilder.Conventions.Add(FilterConvention.Create<ITenantEntity, int>("Tenant", (e, tenantId) => e.TenantId == tenantId));
    }

    public override int SaveChanges()
    {
        // Stamp all newly added tenant-owned entities with the current tenant
        // before persisting.
        var tenantEntities = ChangeTracker.Entries<ITenantEntity>().ToArray();
        foreach (var item in tenantEntities.Where(t => t.State == EntityState.Added))
        {
            item.Entity.Tenant = CurrentTenant;
        }
        return base.SaveChanges();
    }
}
} | apache-2.0 |
Transtech/omim | routing/routing_tests/osrm_router_test.cpp | 6157 | #include "testing/testing.hpp"
#include "routing/car_router.hpp"
#include "indexer/features_offsets_table.hpp"
#include "geometry/mercator.hpp"
#include "platform/country_file.hpp"
#include "platform/local_country_file.hpp"
#include "platform/local_country_file_utils.hpp"
#include "platform/platform.hpp"
#include "platform/platform_tests_support/scoped_mwm.hpp"
#include "coding/file_writer.hpp"
#include "defines.hpp"
#include "base/scope_guard.hpp"
#include "std/bind.hpp"
#include "std/unique_ptr.hpp"
#include "std/vector.hpp"
using namespace routing;
using platform::CountryIndexes;
namespace
{
typedef vector<OsrmFtSegMappingBuilder::FtSegVectorT> InputDataT;
typedef vector< vector<TOsrmNodeId> > NodeIdDataT;
typedef vector< pair<size_t, size_t> > RangeDataT;
typedef OsrmMappingTypes::FtSeg SegT;
// Checks that every feature-segment index listed for a node maps back to
// that node id via OsrmFtSegMapping::GetNodeId.
void TestNodeId(OsrmFtSegMapping const & mapping, NodeIdDataT const & test)
{
  for (TOsrmNodeId nodeId = 0; nodeId < test.size(); ++nodeId)
  {
    for (auto idx : test[nodeId])
      TEST_EQUAL(nodeId, mapping.GetNodeId(idx), ());
  }
}
// Checks GetSegmentsRange for each node against the expected ranges.
void TestSegmentRange(OsrmFtSegMapping const & mapping, RangeDataT const & test)
{
  for (TOsrmNodeId nodeId = 0; nodeId < test.size(); ++nodeId)
  {
    // Input test range is { start, count } but we should pass [start, end).
    auto const & r = test[nodeId];
    TEST_EQUAL(mapping.GetSegmentsRange(nodeId), make_pair(r.first, r.first + r.second), ());
  }
}
// End-to-end check of OsrmFtSegMapping: builds a mapping from |data|,
// verifies it in memory, serializes it to disk, reloads it, and verifies
// that node ids, segment ranges and per-node segments all survive the
// round trip. Temporary files are cleaned up via scope guards.
void TestMapping(InputDataT const & data,
                 NodeIdDataT const & nodeIds,
                 RangeDataT const & ranges)
{
  platform::CountryFile country("TestCountry");
  platform::LocalCountryFile localFile(GetPlatform().WritableDir(), country, 0 /* version */);
  localFile.SyncWithDisk();
  platform::tests_support::ScopedMwm mapMwm(
      platform::GetFileName(localFile.GetCountryFile().GetName(), MapOptions::Map,
                            version::FOR_TESTING_TWO_COMPONENT_MWM1));
  static string const ftSegsPath = GetPlatform().WritablePathForFile("test1.tmp");
  platform::CountryIndexes::PreparePlaceOnDisk(localFile);
  string const & featuresOffsetsTablePath =
      CountryIndexes::GetPath(localFile, CountryIndexes::Index::Offsets);
  // Guarantee removal of the temporary files/indexes even if a TEST fails.
  MY_SCOPE_GUARD(ftSegsFileDeleter, bind(FileWriter::DeleteFileX, ftSegsPath));
  MY_SCOPE_GUARD(featuresOffsetsTableFileDeleter,
                 bind(FileWriter::DeleteFileX, featuresOffsetsTablePath));
  MY_SCOPE_GUARD(indexesDeleter, bind(&CountryIndexes::DeleteFromDisk, localFile));
  {
    // Prepare fake features offsets table for input data, because
    // OsrmFtSegMapping::Load() loads routing index and creates
    // additional helper indexes and some of them require
    // FeatureOffsetsTable existence.
    //
    // As instantiation of FeatureOffsetsTable requires complete MWM
    // file with features or at least already searialized
    // FeatureOffsetsTable, the purpose of this code is to prepare a
    // file with serialized FeatureOffsetsTable and feed it to
    // OsrmFtSegMapping.
    feature::FeaturesOffsetsTable::Builder tableBuilder;
    for (auto const & segVector : data)
    {
      for (auto const & seg : segVector)
        tableBuilder.PushOffset(seg.m_fid);
    }
    unique_ptr<feature::FeaturesOffsetsTable> table =
        feature::FeaturesOffsetsTable::Build(tableBuilder);
    table->Save(featuresOffsetsTablePath);
  }
  OsrmFtSegMappingBuilder builder;
  for (TOsrmNodeId nodeId = 0; nodeId < data.size(); ++nodeId)
    builder.Append(nodeId, data[nodeId]);
  // First verify the in-memory builder itself.
  TestNodeId(builder, nodeIds);
  TestSegmentRange(builder, ranges);
  {
    FilesContainerW w(ftSegsPath);
    builder.Save(w);
  }
  {
    // Then verify the mapping reloaded from the serialized container.
    FilesMappingContainer cont(ftSegsPath);
    OsrmFtSegMapping mapping;
    mapping.Load(cont, localFile);
    mapping.Map(cont);
    TestNodeId(mapping, nodeIds);
    TestSegmentRange(mapping, ranges);
    for (size_t i = 0; i < mapping.GetSegmentsCount(); ++i)
    {
      TOsrmNodeId const node = mapping.GetNodeId(i);
      size_t count = 0;
      mapping.ForEachFtSeg(node, [&] (OsrmMappingTypes::FtSeg const & s)
      {
        TEST_EQUAL(s, data[node][count++], ());
      });
      TEST_EQUAL(count, data[node].size(), ());
    }
  }
}
// Round-trip check: serializing a segment via Store() and reconstructing
// it must yield an equal segment.
bool TestFtSeg(SegT const & s)
{
  return (SegT(s.Store()) == s);
}
}
// Smoke test for FtSeg serialization round-trip on a couple of samples.
UNIT_TEST(FtSeg_Smoke)
{
  SegT arr[] = {
    { 5, 1, 2 },
    { 666, 0, 17 },
  };
  for (size_t i = 0; i < ARRAY_SIZE(arr); ++i)
    TEST(TestFtSeg(arr[i]), (arr[i].Store()));
}
// Exercises TestMapping on three data sets: mixed segment counts per node,
// mostly single-segment nodes, and a set containing an empty node.
UNIT_TEST(OsrmFtSegMappingBuilder_Smoke)
{
  {
    // Nodes with 1..4 segments each; segment indexes are contiguous.
    InputDataT data =
    {
      { {0, 0, 1} },
      { {1, 0, 1} },
      { {2, 0, 1}, {3, 0, 1} },
      { {4, 0, 1} },
      { {5, 0, 1}, {6, 0, 1}, {7, 0, 1} },
      { {8, 0, 1}, {9, 0, 1}, {10, 0, 1}, {11, 0, 1} },
      { {12, 0, 1} }
    };
    NodeIdDataT nodeIds =
    {
      { 0 },
      { 1 },
      { 2, 3 },
      { 4 },
      { 5, 6, 7 },
      { 8, 9, 10, 11 },
      { 12 }
    };
    RangeDataT ranges =
    {
      { 0, 1 },
      { 1, 1 },
      { 2, 2 },
      { 4, 1 },
      { 5, 3 },
      { 8, 4 },
      { 12, 1 },
    };
    TestMapping(data, nodeIds, ranges);
  }
  {
    // Mostly single-segment nodes with a few multi-segment tails.
    InputDataT data =
    {
      { {0, 0, 1} },
      { {1, 0, 1} },
      { {2, 0, 1} },
      { {3, 0, 1} },
      { {4, 0, 1} },
      { {5, 0, 1}, {6, 0, 1} },
      { {7, 0, 1} },
      { {8, 0, 1}, {9, 0, 1}, {10, 0, 1} },
      { {11, 0, 1}, {12, 0, 1}, {13, 0, 1} }
    };
    NodeIdDataT nodeIds =
    {
      { 0 },
      { 1 },
      { 2 },
      { 3 },
      { 4 },
      { 5, 6 },
      { 7 },
      { 8, 9, 10 },
      { 11, 12, 13 }
    };
    RangeDataT ranges =
    {
      { 0, 1 },
      { 1, 1 },
      { 2, 1 },
      { 3, 1 },
      { 4, 1 },
      { 5, 2 },
      { 7, 1 },
      { 8, 3 },
      { 11, 3 },
    };
    TestMapping(data, nodeIds, ranges);
  }
  {
    // Includes a node with no segments (second entry is empty).
    InputDataT data =
    {
      { {0, 0, 1}, {1, 2, 3} },
      { },
      { {3, 6, 7} },
      { {4, 8, 9}, {5, 10, 11} },
    };
    NodeIdDataT nodeIds =
    {
      { 0, 1 },
      { },
      { 3 },
      { 4, 5 },
    };
    RangeDataT ranges =
    {
      { 0, 2 },
      { 2, 1 },
      { 3, 1 },
      { 4, 2 },
    };
    TestMapping(data, nodeIds, ranges);
  }
}
| apache-2.0 |
ruixie/onboard | frontend/kernel/src/main/resources/static/js/ng-modules/data/comment-websocket-service.js | 509 | /**
* Created by Dongdong Du on 12/24/2014.
*/
angular.module('data')
    .service('commentWebSocketService', ['commentService', function (commentService) {
        // Every comment websocket event (add/update/delete) is handled the
        // same way: push the DTO into the shared comment cache.
        var propagate = function (commentDTO) {
            commentService.updateAllComments(commentDTO);
        };
        this.add = propagate;
        this.update = propagate;
        this.delete = propagate;
    }]);
| apache-2.0 |
prigaux/cas | support/cas-server-support-actions/src/main/java/org/apereo/cas/web/flow/ServiceAuthorizationCheck.java | 3032 | package org.apereo.cas.web.flow;
import lombok.AllArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apereo.cas.authentication.AuthenticationServiceSelectionPlan;
import org.apereo.cas.authentication.principal.Service;
import org.apereo.cas.authentication.principal.WebApplicationService;
import org.apereo.cas.services.RegisteredService;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.services.UnauthorizedServiceException;
import org.apereo.cas.web.support.WebUtils;
import org.springframework.webflow.action.AbstractAction;
import org.springframework.webflow.execution.Event;
import org.springframework.webflow.execution.RequestContext;
/**
* Performs a basic check if an authentication request for a provided service is authorized to proceed
* based on the registered services registry configuration (or lack thereof).
*
* @author Dmitriy Kopylenko
* @since 3.5.1
**/
@Slf4j
@AllArgsConstructor
public class ServiceAuthorizationCheck extends AbstractAction {

    /** Registry of registered service definitions. */
    private final ServicesManager servicesManager;

    /** Strategies used to resolve the effective service from the request. */
    private final AuthenticationServiceSelectionPlan authenticationRequestServiceSelectionStrategies;

    /**
     * Authorizes the service found in the webflow request context.
     *
     * @return {@code success()} when no service is present or the service is
     *     registered and access is allowed
     * @throws UnauthorizedServiceException when the registry is empty, the
     *     service is unknown, or its access strategy denies access
     */
    @Override
    protected Event doExecute(final RequestContext context) {
        final WebApplicationService serviceInContext = WebUtils.getService(context);
        final Service service = authenticationRequestServiceSelectionStrategies.resolveService(serviceInContext);
        // No service in the request: nothing to authorize.
        if (service == null) {
            return success();
        }
        // An empty registry means no service may authenticate.
        if (this.servicesManager.getAllServices().isEmpty()) {
            final String msg = String.format("No service definitions are found in the service manager. "
                + "Service [%s] will not be automatically authorized to request authentication.", service.getId());
            LOGGER.warn(msg);
            throw new UnauthorizedServiceException(UnauthorizedServiceException.CODE_EMPTY_SVC_MGMR, msg);
        }
        final RegisteredService registeredService = this.servicesManager.findServiceBy(service);
        if (registeredService == null) {
            final String msg = String.format("Service Management: missing service. "
                + "Service [%s] is not found in service registry.", service.getId());
            LOGGER.warn(msg);
            throw new UnauthorizedServiceException(UnauthorizedServiceException.CODE_UNAUTHZ_SERVICE, msg);
        }
        if (!registeredService.getAccessStrategy().isServiceAccessAllowed()) {
            final String msg = String.format("Service Management: Unauthorized Service Access. "
                + "Service [%s] is not allowed access via the service registry.", service.getId());
            LOGGER.warn(msg);
            // Expose the configured redirect URL to the flow before failing.
            WebUtils.putUnauthorizedRedirectUrlIntoFlowScope(context,
                registeredService.getAccessStrategy().getUnauthorizedRedirectUrl());
            throw new UnauthorizedServiceException(UnauthorizedServiceException.CODE_UNAUTHZ_SERVICE, msg);
        }
        return success();
    }
}
| apache-2.0 |
nmldiegues/stibt | infinispan/server/memcached/src/test/scala/org/infinispan/server/memcached/MemcachedMainTest.scala | 1820 | /*
* JBoss, Home of Professional Open Source
* Copyright 2010 Red Hat Inc. and/or its affiliates and other
* contributors as indicated by the @author tags. All rights reserved.
* See the copyright.txt in the distribution for a full listing of
* individual contributors.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.infinispan.server.memcached
import org.testng.annotations.Test
import org.infinispan.server.core.Main
import test.MemcachedTestingUtil._
import org.testng.Assert._
@Test(groups = Array("functional"), testName = "server.memcached.MemcachedMainTest")
class MemcachedMainTest {

   // Boots a memcached server via Main with no configuration file and
   // verifies that statistics (e.g. cmd_set) are exposed to clients.
   // The server and cache manager are always stopped in the finally block.
   def testMainNoConfigExposesStatistics() {
      Main.boot(Array("-r", "memcached", "-p", "23345"))
      try {
         val memcachedClient = createMemcachedClient(60000, 23345)
         val allStats = memcachedClient.getStats
         assertEquals(allStats.size(), 1)
         val stats = allStats.values.iterator.next
         assertEquals(stats.get("cmd_set"), "0")
      } finally {
         Main.getServer.stop
         Main.getCacheManager.stop()
      }
   }
}
getyourguide/fbthrift | thrift/lib/hack/src/protocol/simplejson/TSimpleJSONProtocol.php | 17172 | <?hh
/**
* Copyright (c) 2006- Facebook
* Distributed under the Thrift Software License
*
* See accompanying file LICENSE or visit the Thrift site at:
* http://developers.facebook.com/thrift/
*
* @package thrift.protocol.simplejson
*/
/**
* Protocol for encoding/decoding simple json
*/
class TSimpleJSONProtocol extends TProtocol {
  // Protocol version magic written into message envelopes.
  const VERSION_1 = 0x80010000;

  // Buffered view of the transport; reads rely on peek() for look-ahead.
  private IThriftBufferedTransport $bufTrans;

  // Stack of serialization contexts (top-level / list / map). The innermost
  // context decides which separator to emit/consume and whether numbers
  // must be quoted (map keys).
  private Vector<TSimpleJSONProtocolContext> $contexts;

  public function __construct(TTransport $trans) {
    $this->contexts = Vector {};
    // Wrap non-buffered transports so peek() is available.
    if (!($trans instanceof IThriftBufferedTransport)) {
      $trans = new TBufferedTransport($trans);
    }
    $this->bufTrans = $trans;
    parent::__construct($trans);
    // Base context for the top level of the document.
    $this->contexts
      ->add(new TSimpleJSONProtocolContext($this->trans_, $this->bufTrans));
  }

  // --- Context stack helpers. push*Write returns bytes written. ---

  private function pushListWriteContext(): int {
    return $this->pushWriteContext(
      new TSimpleJSONProtocolListContext($this->trans_, $this->bufTrans),
    );
  }

  private function pushMapWriteContext(): int {
    return $this->pushWriteContext(
      new TSimpleJSONProtocolMapContext($this->trans_, $this->bufTrans),
    );
  }

  private function pushListReadContext(): void {
    $this->pushReadContext(
      new TSimpleJSONProtocolListContext($this->trans_, $this->bufTrans),
    );
  }

  private function pushMapReadContext(): void {
    $this->pushReadContext(
      new TSimpleJSONProtocolMapContext($this->trans_, $this->bufTrans),
    );
  }

  private function pushWriteContext(TSimpleJSONProtocolContext $ctx): int {
    $this->contexts->add($ctx);
    return $ctx->writeStart();
  }

  private function popWriteContext(): int {
    $ctx = $this->contexts->pop();
    return $ctx->writeEnd();
  }

  private function pushReadContext(TSimpleJSONProtocolContext $ctx): void {
    $this->contexts->add($ctx);
    $ctx->readStart();
  }

  private function popReadContext(): void {
    $ctx = $this->contexts->pop();
    $ctx->readEnd();
  }

  // Returns the innermost (current) context without popping it.
  private function getContext(): TSimpleJSONProtocolContext {
    return $this->contexts->at($this->contexts->count() - 1);
  }

  // --- Write path. Each write* returns the number of bytes written. ---

  // Messages are encoded as the list [version, name, type, seqid, ...].
  public function writeMessageBegin($name, $type, $seqid) {
    return
      $this->getContext()->writeSeparator() +
      $this->pushListWriteContext() +
      $this->writeI32(self::VERSION_1) +
      $this->writeString($name) +
      $this->writeI32($type) +
      $this->writeI32($seqid);
  }

  public function writeMessageEnd() {
    return $this->popWriteContext();
  }

  // Structs are JSON objects keyed by field name.
  public function writeStructBegin($name) {
    return
      $this->getContext()->writeSeparator() + $this->pushMapWriteContext();
  }

  public function writeStructEnd() {
    return $this->popWriteContext();
  }

  // Field ids/types are not encoded in simple JSON — only the name.
  public function writeFieldBegin($fieldName, $fieldType, $fieldId) {
    return $this->writeString($fieldName);
  }

  public function writeFieldEnd() {
    return 0;
  }

  public function writeFieldStop() {
    return 0;
  }

  public function writeMapBegin($keyType, $valType, $size) {
    return
      $this->getContext()->writeSeparator() + $this->pushMapWriteContext();
  }

  public function writeMapEnd() {
    return $this->popWriteContext();
  }

  public function writeListBegin($elemType, $size) {
    return
      $this->getContext()->writeSeparator() + $this->pushListWriteContext();
  }

  public function writeListEnd() {
    return $this->popWriteContext();
  }

  // Sets are encoded identically to lists.
  public function writeSetBegin($elemType, $size) {
    return
      $this->getContext()->writeSeparator() + $this->pushListWriteContext();
  }

  public function writeSetEnd() {
    return $this->popWriteContext();
  }

  public function writeBool($value) {
    $x = $this->getContext()->writeSeparator();
    if ($value) {
      $this->trans_->write('true');
      $x += 4;
    } else {
      $this->trans_->write('false');
      $x += 5;
    }
    return $x;
  }

  public function writeByte($value) {
    return $this->writeNum((int) $value);
  }

  public function writeI16($value) {
    return $this->writeNum((int) $value);
  }

  public function writeI32($value) {
    return $this->writeNum((int) $value);
  }

  public function writeI64($value) {
    return $this->writeNum((int) $value);
  }

  public function writeDouble($value) {
    return $this->writeNum((float) $value);
  }

  public function writeFloat($value) {
    return $this->writeNum((float) $value);
  }

  // Writes a number; quoted as a string when the context requires it
  // (JSON map keys must be strings).
  private function writeNum($value) {
    $ctx = $this->getContext();
    $ret = $ctx->writeSeparator();
    if ($ctx->escapeNum()) {
      $value = (string) $value;
    }
    $enc = json_encode($value);
    $this->trans_->write($enc);
    return $ret + strlen($enc);
  }

  // Writes a JSON string literal, escaping control characters, quote and
  // backslash; bytes outside printable ASCII become \u00XX escapes.
  public function writeString($value) {
    $ctx = $this->getContext();
    $ret = $ctx->writeSeparator();
    $value = (string) $value;
    $sb = new StringBuffer();
    $sb->append('"');
    $len = strlen($value);
    for ($i = 0; $i < $len; $i++) {
      $c = $value[$i];
      $ord = ord($c);
      switch ($ord) {
        case 8:
          $sb->append('\b');
          break;
        case 9:
          $sb->append('\t');
          break;
        case 10:
          $sb->append('\n');
          break;
        case 12:
          $sb->append('\f');
          break;
        case 13:
          $sb->append('\r');
          break;
        case 34:
          // "
        case 92:
          // \
          $sb->append('\\');
          $sb->append($c);
          break;
        default:
          if ($ord < 32 || $ord > 126) {
            $sb->append('\\u00');
            $sb->append(bin2hex($c));
          } else {
            $sb->append($c);
          }
          break;
      }
    }
    $sb->append('"');
    $enc = $sb->detach();
    $this->trans_->write($enc);
    return $ret + strlen($enc);
  }

  // --- Read path. Message reads are unsupported; use struct reads. ---

  public function readMessageBegin(&$name, &$type, &$seqid) {
    throw new TProtocolException(
      'Reading with TSimpleJSONProtocol is not supported. '.
      'Use readFromJSON() on your struct',
    );
  }

  public function readMessageEnd() {
    throw new TProtocolException(
      'Reading with TSimpleJSONProtocol is not supported. '.
      'Use readFromJSON() on your struct',
    );
  }

  public function readStructBegin(&$name) {
    $this->getContext()->readSeparator();
    $this->skipWhitespace();
    $this->pushMapReadContext();
  }

  public function readStructEnd() {
    $this->popReadContext();
  }

  // Reads the next field name and guesses its wire type by peeking at the
  // first byte of the value (simple JSON carries no type information).
  // Fields with null values are silently skipped. $fieldId is always null.
  public function readFieldBegin(&$name, &$fieldType, &$fieldId) {
    $fieldId = null;
    $ctx = $this->getContext();
    $name = null;
    while ($name === null) {
      if ($ctx->readContextOver()) {
        $fieldType = TType::STOP;
        break;
      } else {
        $ctx->readSeparator();
        $this->skipWhitespace();
        $name = $this->readJSONString()[0];
        // We need to guess the type of the value, in case the name is bogus or we are in a skip method up the stack
        $offset = $this->skipWhitespace(true);
        $this->expectChar(':', true, $offset);
        $offset += 1 + $this->skipWhitespace(true, $offset + 1);
        $c = $this->bufTrans->peek(1, $offset);
        if ($c === 'n' && $this->bufTrans->peek(4, $offset) === 'null') {
          // We actually want to skip this field, but there isn't an appropriate
          // TType to send back. So instead, we will silently skip
          $ctx->readSeparator();
          $this->skipWhitespace();
          $this->trans_->readAll(4);
          $name = null;
          continue;
        } else {
          $fieldType = $this->guessFieldTypeBasedOnByte($c);
        }
      }
    }
  }

  public function readFieldEnd() {
    // Do nothing
  }

  // $size is unknown up front (null); callers should iterate with
  // readMapHasNext().
  public function readMapBegin(&$keyType, &$valType, &$size) {
    $size = null;
    $this->getContext()->readSeparator();
    $this->skipWhitespace();
    $this->pushMapReadContext();
    if ($this->readMapHasNext()) {
      // We need to guess the type of the keys/values, in case we are in a skip method up the stack
      $keyType = TType::STRING;
      $this->skipWhitespace(); // This is not a peek, since we can do this safely again
      $offset = $this->readJSONString(true)[1];
      $offset += $this->skipWhitespace(true, $offset);
      $this->expectChar(':', true, $offset);
      $offset += 1 + $this->skipWhitespace(true, $offset + 1);
      $c = $this->bufTrans->peek(1, $offset);
      $valType = $this->guessFieldTypeBasedOnByte($c);
    }
  }

  public function readMapHasNext(): bool {
    return !$this->getContext()->readContextOver();
  }

  public function readMapEnd() {
    $this->popReadContext();
  }

  public function readListBegin(&$elemType, &$size) {
    $size = null;
    $this->getContext()->readSeparator();
    $this->skipWhitespace();
    $this->pushListReadContext();
    if ($this->readListHasNext()) {
      // We need to guess the type of the values, in case we are in a skip method up the stack
      $this->skipWhitespace(); // This is not a peek, since we can do this safely again
      $c = $this->bufTrans->peek(1);
      $elemType = $this->guessFieldTypeBasedOnByte($c);
    }
  }

  public function readListHasNext(): bool {
    return !$this->getContext()->readContextOver();
  }

  public function readListEnd() {
    $this->popReadContext();
  }

  // Sets are read exactly like lists.
  public function readSetBegin(&$elemType, &$size) {
    $size = null;
    $this->getContext()->readSeparator();
    $this->skipWhitespace();
    $this->pushListReadContext();
    if ($this->readSetHasNext()) {
      // We need to guess the type of the values, in case we are in a skip method up the stack
      $this->skipWhitespace(); // This is not a peek, since we can do this safely again
      $c = $this->bufTrans->peek(1);
      $elemType = $this->guessFieldTypeBasedOnByte($c);
    }
  }

  public function readSetHasNext(): bool {
    return !$this->getContext()->readContextOver();
  }

  public function readSetEnd() {
    $this->popReadContext();
  }

  public function readBool(&$value) {
    $this->getContext()->readSeparator();
    $this->skipWhitespace();
    $c = $this->trans_->readAll(1);
    $target = null;
    switch ($c) {
      case 't':
        $value = true;
        $target = 'rue';
        break;
      case 'f':
        $value = false;
        $target = 'alse';
        break;
      default:
        throw new TProtocolException(
          'TSimpleJSONProtocol: Expected t or f, encountered 0x'.
          bin2hex($c),
        );
    }
    // Consume the rest of the literal ("true"/"false") byte by byte.
    for ($i = 0; $i < strlen($target); $i++) {
      $this->expectChar($target[$i]);
    }
  }

  // Reads an integer and validates it against [$min, $max] when both
  // bounds are provided (null bounds mean unchecked, e.g. i64).
  private function readInteger(?int $min, ?int $max): int {
    $val = intval($this->readNumStr());
    if (($min !== null && $max !== null) && ($val < $min || $val > $max)) {
      throw new TProtocolException(
        'TProtocolException: value '.
        $val.
        ' is outside the expected bounds',
      );
    }
    return $val;
  }

  // Scans the raw characters of a JSON number (optionally quoted when the
  // context escapes numbers) and validates the lexeme with a regex.
  private function readNumStr(): string {
    $ctx = $this->getContext();
    if ($ctx->escapeNum()) {
      $this->expectChar('"');
    }
    $count = 0;
    $reading = true;
    while ($reading) {
      $c = $this->bufTrans->peek(1, $count);
      switch ($c) {
        case '+':
        case '-':
        case '.':
        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
        case '8':
        case '9':
        case 'E':
        case 'e':
          $count++;
          break;
        default:
          $reading = false;
          break;
      }
    }
    $str = $this->trans_->readAll($count);
    if (!preg_match(
          '/^[+-]?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$/',
          $str,
        )) {
      throw new TProtocolException(
        'TSimpleJSONProtocol: Invalid json number '.$str,
      );
    }
    if ($ctx->escapeNum()) {
      $this->expectChar('"');
    }
    return $str;
  }

  public function readByte(&$value) {
    $this->getContext()->readSeparator();
    $this->skipWhitespace();
    $value = $this->readInteger(-0x80, 0x7f);
  }

  public function readI16(&$value) {
    $this->getContext()->readSeparator();
    $this->skipWhitespace();
    $value = $this->readInteger(-0x8000, 0x7fff);
  }

  public function readI32(&$value) {
    $this->getContext()->readSeparator();
    $this->skipWhitespace();
    $value = $this->readInteger(-0x80000000, 0x7fffffff);
  }

  public function readI64(&$value) {
    $this->getContext()->readSeparator();
    $this->skipWhitespace();
    $value = $this->readInteger(null, null);
  }

  public function readDouble(&$value) {
    $this->getContext()->readSeparator();
    $this->skipWhitespace();
    $value = doubleval($this->readNumStr());
  }

  public function readFloat(&$value) {
    $this->getContext()->readSeparator();
    $this->skipWhitespace();
    $value = floatval($this->readNumStr());
  }

  public function readString(&$value) {
    $this->getContext()->readSeparator();
    $this->skipWhitespace();
    $value = $this->readJSONString()[0];
  }

  // Reads (or, with $peek, looks ahead at) a JSON string literal starting
  // at buffer offset $start. Returns Pair{decoded string, consumed length
  // excluding the opening quote}. Only \u00XX escapes are supported.
  private function readJSONString(
    bool $peek = false,
    int $start = 0,
  ): Pair<string, int> {
    if (!$peek) {
      $start = 0;
    }
    $this->expectChar('"', $peek, $start);
    $count = $peek ? 1 : 0;
    $sb = new StringBuffer();
    $reading = true;
    while ($reading) {
      $c = $this->bufTrans->peek(1, $start + $count);
      switch ($c) {
        case '"':
          $reading = false;
          break;
        case '\\':
          $count++;
          $c = $this->bufTrans->peek(1, $start + $count);
          switch ($c) {
            case '\\':
              $count++;
              $sb->append('\\');
              break;
            case '"':
              $count++;
              $sb->append('"');
              break;
            case 'b':
              $count++;
              $sb->append(chr(0x08));
              break;
            case '/':
              $count++;
              $sb->append('/');
              break;
            case 'f':
              $count++;
              $sb->append("\f");
              break;
            case 'n':
              $count++;
              $sb->append("\n");
              break;
            case 'r':
              $count++;
              $sb->append("\r");
              break;
            case 't':
              $count++;
              $sb->append("\t");
              break;
            case 'u':
              // Only \u00XX (single-byte) escapes are accepted.
              $count++;
              $this->expectChar('0', true, $start + $count);
              $this->expectChar('0', true, $start + $count + 1);
              $count += 2;
              $sb->append(
                hex2bin($this->bufTrans->peek(2, $start + $count)),
              );
              $count += 2;
              break;
            default:
              throw new TProtocolException(
                'TSimpleJSONProtocol: Expected Control Character, found 0x'.
                bin2hex($c),
              );
          }
          break;
        case '':
          // end of buffer, this string is unclosed
          $reading = false;
          break;
        default:
          $count++;
          $sb->append($c);
          break;
      }
    }
    if (!$peek) {
      $this->trans_->readAll($count);
    }
    $this->expectChar('"', $peek, $start + $count);
    return Pair {$sb->detach(), $count + 1};
  }

  // Skips JSON whitespace starting at offset $start; returns the number of
  // whitespace bytes. With $peek the transport position is not advanced.
  private function skipWhitespace(bool $peek = false, int $start = 0): int {
    if (!$peek) {
      $start = 0;
    }
    $count = 0;
    $reading = true;
    while ($reading) {
      $byte = $this->bufTrans->peek(1, $count + $start);
      switch ($byte) {
        case ' ':
        case "\t":
        case "\n":
        case "\r":
          $count++;
          break;
        default:
          $reading = false;
          break;
      }
    }
    if (!$peek) {
      $this->trans_->readAll($count);
    }
    return $count;
  }

  // Asserts that the next byte (or the byte at $start when peeking) equals
  // $char; throws TProtocolException otherwise.
  private function expectChar(
    string $char,
    bool $peek = false,
    int $start = 0,
  ): void {
    if (!$peek) {
      $start = 0;
    }
    $c = null;
    if ($peek) {
      $c = $this->bufTrans->peek(1, $start);
    } else {
      $c = $this->trans_->readAll(1);
    }
    if ($c !== $char) {
      throw new TProtocolException(
        'TSimpleJSONProtocol: Expected '.
        $char.
        ', but encountered 0x'.
        bin2hex($c),
      );
    }
  }

  // Maps the first byte of a JSON value to a best-effort TType, since
  // simple JSON does not encode Thrift types.
  private function guessFieldTypeBasedOnByte(string $byte): ?int {
    switch ($byte) {
      case '{':
        return TType::STRUCT;
      case '[':
        return TType::LST;
      case 't':
      case 'f':
        return TType::BOOL;
      case '-':
      case '0':
      case '1':
      case '2':
      case '3':
      case '4':
      case '5':
      case '6':
      case '7':
      case '8':
      case '9':
      // These technically aren't allowed to start JSON floats, but are here for backwards compatibility
      case '+':
      case '.':
        return TType::DOUBLE;
      case '"':
        return TType::STRING;
      case ']':
      case '}':
        // We can get here with empty lists/maps, returning a dummy value
        return TType::STOP;
    }
    throw new TProtocolException(
      'TSimpleJSONProtocol: Unable to guess TType for character 0x'.
      bin2hex($byte),
    );
  }
}
| apache-2.0 |
batfish/batfish | projects/batfish/src/test/java/org/batfish/vendor/check_point_management/parsing/parboiled/UhDportAstNodeTest.java | 1204 | package org.batfish.vendor.check_point_management.parsing.parboiled;
import static org.hamcrest.Matchers.equalTo;
import static org.junit.Assert.assertThat;
import com.google.common.testing.EqualsTester;
import org.apache.commons.lang3.SerializationUtils;
import org.junit.Test;
/** Test of {@link UhDportAstNode}. */
public final class UhDportAstNodeTest {

  /** A round trip through Java serialization must preserve equality. */
  @Test
  public void testJavaSerialization() {
    UhDportAstNode obj = new UhDportAstNode("foo", EqualsAstNode.instance(), Uint16AstNode.of(1));
    assertThat(SerializationUtils.clone(obj), equalTo(obj));
  }

  /**
   * Equality must depend on all three components: the string, the comparator
   * node, and the port value. Each equality group below differs in exactly
   * one component.
   */
  @Test
  public void testEquals() {
    UhDportAstNode obj = new UhDportAstNode("foo", EqualsAstNode.instance(), Uint16AstNode.of(1));
    new EqualsTester()
        .addEqualityGroup(
            obj, new UhDportAstNode("foo", EqualsAstNode.instance(), Uint16AstNode.of(1)))
        .addEqualityGroup(new UhDportAstNode("bar", EqualsAstNode.instance(), Uint16AstNode.of(1)))
        .addEqualityGroup(
            new UhDportAstNode("foo", LessThanOrEqualsAstNode.instance(), Uint16AstNode.of(1)))
        .addEqualityGroup(new UhDportAstNode("foo", EqualsAstNode.instance(), Uint16AstNode.of(2)))
        .testEquals();
  }
}
| apache-2.0 |
olegarx/restler | restler-core/src/main/java/org/restler/http/security/SecuritySession.java | 1872 | package org.restler.http.security;
import org.restler.http.security.authentication.AuthenticationContext;
import org.restler.http.security.authentication.AuthenticationStrategy;
import org.restler.http.security.authorization.AuthorizationStrategy;
/**
* Mutable (but thread-safe) class that obtains authenticationToken from authorization strategy, stores it and passes to authentication strategy.
*/
public class SecuritySession implements AuthenticationContext {

    private final AuthorizationStrategy authorizationStrategy;
    private final AuthenticationStrategy authenticationStrategy;
    private final boolean autoAuthorize;

    /** Token obtained from the authorization strategy; guarded by {@code this}. */
    private Object authenticationToken;
    /** Whether {@link #authorize()} has completed; guarded by {@code this}. */
    private boolean authorized;

    public SecuritySession(AuthorizationStrategy authorizationStrategy, AuthenticationStrategy authenticationStrategy, boolean autoAuthorize) {
        this.authenticationStrategy = authenticationStrategy;
        this.authorizationStrategy = authorizationStrategy;
        this.autoAuthorize = autoAuthorize;
    }

    /**
     * Returns the stored authentication token, authorizing lazily when
     * auto-authorization is enabled.
     *
     * @throws IllegalStateException if the session is unauthorized and
     *     auto-authorization is disabled
     */
    @Override
    public synchronized Object getAuthenticationToken() {
        if (!authorized) {
            if (!autoAuthorize) {
                throw new IllegalStateException("Unauthorized session can't provide authentication token.");
            }
            authorize();
        }
        return authenticationToken;
    }

    /**
     * Obtains a fresh token from the authorization strategy and marks the
     * session as authorized.
     *
     * @throws IllegalStateException if no authorization strategy is configured
     */
    public synchronized void authorize() {
        if (authorizationStrategy == null) {
            throw new IllegalStateException("Can't authorize session as authorization strategy is not set.");
        }
        authenticationToken = authorizationStrategy.authorize();
        authorized = true;
    }

    public AuthenticationStrategy getAuthenticationStrategy() {
        return authenticationStrategy;
    }
}
| apache-2.0 |
RyanTech/CuiTrip | TripApp/src/main/java/com/cuitrip/model/RecommendOutData.java | 1357 | package com.cuitrip.model;
import java.util.List;
/**
 * Response payload for a paginated list of recommended trips.
 *
 * Sample JSON (annotations translated from the original Chinese comments):
 * <pre>
 * "total": "12",   // total number of matching results
 * "num": "2",      // number of results returned by this request
 * "start": "10",   // offset of the first returned result
 * "lists": [{
 *   "sid": "231",                                  // service id
 *   "serviceName": "阿亮带你看妈祖绕境",              // trip name
 *   "serviceAddress": "台湾彰化县",                  // trip location
 *   "headPic": "http://alicdn.aliyun.com/pic1.jpg", // discoverer's avatar URL
 *   "userNick": "阿亮",                             // discoverer's nickname
 *   "servicePicUrl": "http://******",               // service picture URL (original comment duplicated "nickname" — presumably the trip's picture; confirm)
 * }]
 * </pre>
 */
public class RecommendOutData {

    // Total number of matching results across all pages.
    private int total;
    // Number of results returned in this response.
    private int num;
    // Offset of the first returned result.
    private int start;
    // The page of recommendation items.
    private List<RecommendItem> lists;

    public int getTotal() {
        return total;
    }

    public void setTotal(int total) {
        this.total = total;
    }

    public int getNum() {
        return num;
    }

    public void setNum(int num) {
        this.num = num;
    }

    public int getStart() {
        return start;
    }

    public void setStart(int start) {
        this.start = start;
    }

    public List<RecommendItem> getLists() {
        return lists;
    }

    public void setLists(List<RecommendItem> lists) {
        this.lists = lists;
    }
}
| apache-2.0 |
xzturn/tensorflow | tensorflow/python/grappler/tf_optimizer_wrapper.cc | 4550 | /* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include <memory>
#include <stdexcept>
#include <string>
#include <unordered_map>
#include "include/pybind11/pybind11.h"
#include "tensorflow/core/common_runtime/device.h"
#include "tensorflow/core/common_runtime/device_factory.h"
#include "tensorflow/core/framework/device_attributes.pb.h"
#include "tensorflow/core/framework/device_base.h"
#include "tensorflow/core/framework/graph.pb.h"
#include "tensorflow/core/framework/graph_def_util.h"
#include "tensorflow/core/grappler/clusters/cluster.h"
#include "tensorflow/core/grappler/clusters/utils.h"
#include "tensorflow/core/grappler/grappler_item.h"
#include "tensorflow/core/grappler/grappler_item_builder.h"
#include "tensorflow/core/grappler/optimizers/meta_optimizer.h"
#include "tensorflow/core/protobuf/config.pb.h"
#include "tensorflow/core/protobuf/device_properties.pb.h"
#include "tensorflow/core/protobuf/meta_graph.pb.h"
#include "tensorflow/core/public/session_options.h"
#include "tensorflow/python/lib/core/pybind11_status.h"
namespace py = pybind11;
// Enumerates the locally available devices and records their properties into
// device_map, keyed by device name. On enumeration failure the map is left
// untouched.
void DetectDevices(
    std::unordered_map<std::string, tensorflow::DeviceProperties>* device_map) {
  tensorflow::SessionOptions options;
  std::vector<std::unique_ptr<tensorflow::Device>> devices;
  if (!tensorflow::DeviceFactory::AddDevices(options, "", &devices).ok()) {
    return;
  }
  for (const auto& device : devices) {
    const tensorflow::DeviceAttributes& attributes = device->attributes();
    tensorflow::DeviceProperties& properties = (*device_map)[device->name()];
    properties = tensorflow::grappler::GetDeviceInfo(device->parsed_name());
    // Report the configured limit rather than raw capacity, since users may
    // have requested only a fraction of the available device memory.
    properties.set_memory_size(attributes.memory_limit());
  }
}
// Python extension module exposing the Grappler meta-optimizer.
PYBIND11_MODULE(_pywrap_tf_optimizer, m) {
  m.def(
      "TF_OptimizeGraph",
      [](tensorflow::grappler::Cluster* cluster,
         const py::bytes& serialized_config_proto,
         const py::bytes& serialized_metagraph, bool verbose,
         const std::string& graph_id,
         bool strip_default_attributes) -> py::bytes {
        // Both proto arguments arrive from Python as serialized bytes and
        // must parse cleanly before any optimization work begins.
        tensorflow::ConfigProto config_proto;
        if (!config_proto.ParseFromString(serialized_config_proto)) {
          throw std::invalid_argument(
              "The ConfigProto could not be parsed as a valid protocol buffer");
        }
        tensorflow::MetaGraphDef metagraph;
        if (!metagraph.ParseFromString(serialized_metagraph)) {
          throw std::invalid_argument(
              "The MetaGraphDef could not be parsed as a valid protocol "
              "buffer");
        }
        tensorflow::grappler::ItemConfig item_config;
        // This disables graph optimizations in the older graph optimizer, which
        // tend to overlap / be redundant with those in Grappler.
        item_config.apply_optimizations = false;
        item_config.ignore_user_placement = false;
        std::unique_ptr<tensorflow::grappler::GrapplerItem> grappler_item =
            tensorflow::grappler::GrapplerItemFromMetaGraphDef(
                graph_id, metagraph, item_config);
        if (!grappler_item) {
          throw std::invalid_argument(
              "Failed to import metagraph, check error log for more info.");
        }
        // cpu_device is intentionally null here; presumably MetaOptimizer
        // supplies its own fallback -- TODO confirm against MetaOptimizer docs.
        tensorflow::DeviceBase* cpu_device = nullptr;
        tensorflow::GraphDef out_graph;
        tensorflow::grappler::MetaOptimizer optimizer(cpu_device, config_proto);
        // Converts a non-OK status into the registered Python exception type.
        MaybeRaiseRegisteredFromStatus(
            optimizer.Optimize(cluster, *grappler_item, &out_graph));
        if (strip_default_attributes) {
          tensorflow::StripDefaultAttributes(*tensorflow::OpRegistry::Global(),
                                             out_graph.mutable_node());
        }
        if (verbose) {
          optimizer.PrintResult();
        }
        // Hand the optimized GraphDef back to Python as serialized bytes.
        return out_graph.SerializeAsString();
      });
}
| apache-2.0 |
fholm/IronJS | Src/Tests/ietestcenter/chapter15/15.2/15.2.3/15.2.3.5/15.2.3.5-4-182.js | 2544 | /// Copyright (c) 2009 Microsoft Corporation
///
/// Redistribution and use in source and binary forms, with or without modification, are permitted provided
/// that the following conditions are met:
/// * Redistributions of source code must retain the above copyright notice, this list of conditions and
/// the following disclaimer.
/// * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and
/// the following disclaimer in the documentation and/or other materials provided with the distribution.
/// * Neither the name of Microsoft nor the names of its contributors may be used to
/// endorse or promote products derived from this software without specific prior written permission.
///
/// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
/// IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
/// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
/// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
/// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
/// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
/// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
/// ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Conformance test: Object.create must honor a 'writable' attribute that is
// an own data property of the descriptor object, overriding the value the
// descriptor inherits from its prototype (ES5.1 8.10.5 step 6.a).
ES5Harness.registerTest({
  id: "15.2.3.5-4-182",
  path: "TestCases/chapter15/15.2/15.2.3/15.2.3.5/15.2.3.5-4-182.js",
  description: "Object.create - 'writable' property of one property in 'Properties' is own data property that overrides an inherited data property (8.10.5 step 6.a)",
  test: function testcase() {
    // The descriptor inherits writable:false but owns writable:true;
    // the own property must win.
    var proto = {
      writable: false
    };
    var ConstructFun = function () { };
    ConstructFun.prototype = proto;
    var descObj = new ConstructFun();
    descObj.writable = true;
    var newObj = Object.create({}, {
      prop: descObj
    });
    // The descriptor has no 'value', so the property exists but is undefined.
    var beforeWrite = (newObj.hasOwnProperty("prop") && typeof (newObj.prop) === "undefined");
    newObj.prop = "isWritable";
    // The write only sticks if the own writable:true was honored.
    var afterWrite = (newObj.prop === "isWritable");
    return beforeWrite === true && afterWrite === true;
  },
  precondition: function prereq() {
    // Skip on engines that do not implement Object.create.
    return fnExists(Object.create);
  }
});
| apache-2.0 |
dump247/aws-sdk-java | aws-java-sdk-cloudsearch/src/main/java/com/amazonaws/services/cloudsearch/model/StemmingOptionsStatus.java | 7064 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.cloudsearch.model;
import java.io.Serializable;
import java.util.Objects;
/**
 * <p>
 * The stemming options configured for this search domain and the current status of those options.
 * </p>
 */
@Deprecated
public class StemmingOptionsStatus implements Serializable {

    /**
     * Maps terms to their stems, serialized as a JSON document. The document
     * has a single object with one property "stems" whose value is an object
     * mapping terms to their stems. The maximum size of a stemming document
     * is 500 KB. Example: <code>{ "stems": {"people": "person", "walking":
     * "walk"} }</code>
     */
    private String options;

    /**
     * The status of an option, including when it was last updated and
     * whether it is actively in use for searches.
     */
    private OptionStatus status;

    /**
     * @return the stemming options JSON document (see
     *         {@link #setOptions(String)} for the expected format)
     */
    public String getOptions() {
        return options;
    }

    /**
     * Sets the stemming options, serialized as a JSON document. The document
     * has a single object with one property "stems" whose value is an object
     * mapping terms to their stems. The maximum size of a stemming document
     * is 500 KB. Example:
     * <code>{ "stems": {"people": "person", "walking": "walk"} }</code>
     *
     * @param options the stemming options JSON document
     */
    public void setOptions(String options) {
        this.options = options;
    }

    /**
     * Fluent variant of {@link #setOptions(String)}.
     *
     * @param options the stemming options JSON document
     * @return this object, so that method calls can be chained together
     */
    public StemmingOptionsStatus withOptions(String options) {
        this.options = options;
        return this;
    }

    /**
     * @return the status of the stemming options, including when they were
     *         last updated and whether they are actively in use for searches
     */
    public OptionStatus getStatus() {
        return status;
    }

    /**
     * @param status the status of the stemming options, including when they
     *               were last updated and whether they are actively in use
     */
    public void setStatus(OptionStatus status) {
        this.status = status;
    }

    /**
     * Fluent variant of {@link #setStatus(OptionStatus)}.
     *
     * @param status the status of the stemming options
     * @return this object, so that method calls can be chained together
     */
    public StemmingOptionsStatus withStatus(OptionStatus status) {
        this.status = status;
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. The output format is unchanged from the generated SDK code.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getOptions() != null) sb.append("Options: " + getOptions() + ",");
        if (getStatus() != null) sb.append("Status: " + getStatus());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public int hashCode() {
        // Objects.hashCode(x) == (x == null ? 0 : x.hashCode()), so the
        // produced hash values are identical to the previous hand-rolled
        // null checks.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + Objects.hashCode(getOptions());
        hashCode = prime * hashCode + Objects.hashCode(getStatus());
        return hashCode;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        // instanceof is false for null, which covers the former explicit
        // null check; the "== false" anti-idiom is gone.
        if (!(obj instanceof StemmingOptionsStatus)) return false;
        StemmingOptionsStatus other = (StemmingOptionsStatus) obj;
        return Objects.equals(getOptions(), other.getOptions())
                && Objects.equals(getStatus(), other.getStatus());
    }
}
| apache-2.0 |
faguirre1/go-swagger | spec/schema.go | 14232 | // Copyright 2015 go-swagger maintainers
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package spec
import (
"encoding/json"
"fmt"
"net/url"
"strings"
"github.com/go-swagger/go-swagger/jsonpointer"
"github.com/go-swagger/go-swagger/swag"
)
// BooleanProperty creates a boolean property.
func BooleanProperty() *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"boolean"}}}
}

// BoolProperty creates a boolean property (alias of BooleanProperty).
func BoolProperty() *Schema { return BooleanProperty() }

// StringProperty creates a string property.
func StringProperty() *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"string"}}}
}

// CharProperty creates a string property.
func CharProperty() *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"string"}}}
}

// Float64Property creates a float64/double property (type "number", format "double").
func Float64Property() *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"number"}, Format: "double"}}
}

// Float32Property creates a float32/float property (type "number", format "float").
func Float32Property() *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"number"}, Format: "float"}}
}

// Int8Property creates an int8 property (type "integer", format "int8").
func Int8Property() *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"integer"}, Format: "int8"}}
}

// Int16Property creates an int16 property (type "integer", format "int16").
func Int16Property() *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"integer"}, Format: "int16"}}
}

// Int32Property creates an int32 property (type "integer", format "int32").
func Int32Property() *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"integer"}, Format: "int32"}}
}

// Int64Property creates an int64 property (type "integer", format "int64").
func Int64Property() *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"integer"}, Format: "int64"}}
}

// StrFmtProperty creates a string property with the named string format.
func StrFmtProperty(format string) *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"string"}, Format: format}}
}

// DateProperty creates a date property (string with format "date").
func DateProperty() *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"string"}, Format: "date"}}
}

// DateTimeProperty creates a date-time property (string with format "date-time").
func DateTimeProperty() *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"string"}, Format: "date-time"}}
}

// MapProperty creates an object property whose additionalProperties all
// follow the given schema.
func MapProperty(property *Schema) *Schema {
	return &Schema{schemaProps: schemaProps{Type: []string{"object"}, AdditionalProperties: &SchemaOrBool{Allows: true, Schema: property}}}
}

// RefProperty creates a property that is a JSON reference to the given name.
// MustCreateRef presumably panics on an invalid reference -- TODO confirm.
func RefProperty(name string) *Schema {
	return &Schema{schemaProps: schemaProps{Ref: MustCreateRef(name)}}
}
// ArrayProperty creates an array property using the given item schema.
// A nil items argument yields a plain array schema without item constraints.
func ArrayProperty(items *Schema) *Schema {
	props := schemaProps{Type: []string{"array"}}
	if items != nil {
		props.Items = &SchemaOrArray{Schema: items}
	}
	return &Schema{schemaProps: props}
}
// SchemaURL represents the $schema URL of a JSON schema document.
type SchemaURL string

// MarshalJSON produces a {"$schema": ...} wrapper object, or {} when unset.
func (r SchemaURL) MarshalJSON() ([]byte, error) {
	if len(r) == 0 {
		return []byte("{}"), nil
	}
	return json.Marshal(map[string]interface{}{"$schema": string(r)})
}

// UnmarshalJSON reads the $schema key of a JSON object into r. A missing or
// non-string $schema is ignored; malformed JSON or an unparseable URL is an
// error.
func (r *SchemaURL) UnmarshalJSON(data []byte) error {
	var v map[string]interface{}
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	vv, ok := v["$schema"]
	if !ok {
		return nil
	}
	str, ok := vv.(string)
	if !ok {
		return nil
	}
	u, err := url.Parse(str)
	if err != nil {
		return err
	}
	*r = SchemaURL(u.String())
	return nil
}
// type extraSchemaProps map[string]interface{}
// // JSONSchema represents a structure that is a json schema draft 04
// type JSONSchema struct {
// schemaProps
// extraSchemaProps
// }
// // MarshalJSON marshal this to JSON
// func (s JSONSchema) MarshalJSON() ([]byte, error) {
// b1, err := json.Marshal(s.schemaProps)
// if err != nil {
// return nil, err
// }
// b2, err := s.Ref.MarshalJSON()
// if err != nil {
// return nil, err
// }
// b3, err := s.Schema.MarshalJSON()
// if err != nil {
// return nil, err
// }
// b4, err := json.Marshal(s.extraSchemaProps)
// if err != nil {
// return nil, err
// }
// return util.ConcatJSON(b1, b2, b3, b4), nil
// }
// // UnmarshalJSON marshal this from JSON
// func (s *JSONSchema) UnmarshalJSON(data []byte) error {
// var sch JSONSchema
// if err := json.Unmarshal(data, &sch.schemaProps); err != nil {
// return err
// }
// if err := json.Unmarshal(data, &sch.Ref); err != nil {
// return err
// }
// if err := json.Unmarshal(data, &sch.Schema); err != nil {
// return err
// }
// if err := json.Unmarshal(data, &sch.extraSchemaProps); err != nil {
// return err
// }
// *s = sch
// return nil
// }
// schemaProps holds the JSON Schema draft-4 subset of properties. Ref and
// Schema carry a "-" tag because they are marshaled/unmarshaled by hand in
// Schema.MarshalJSON / Schema.UnmarshalJSON.
type schemaProps struct {
	ID                   string            `json:"id,omitempty"`
	Ref                  Ref               `json:"-,omitempty"`
	Schema               SchemaURL         `json:"-,omitempty"`
	Description          string            `json:"description,omitempty"`
	Type                 StringOrArray     `json:"type,omitempty"`
	Format               string            `json:"format,omitempty"`
	Title                string            `json:"title,omitempty"`
	Default              interface{}       `json:"default,omitempty"`
	Maximum              *float64          `json:"maximum,omitempty"`
	ExclusiveMaximum     bool              `json:"exclusiveMaximum,omitempty"`
	Minimum              *float64          `json:"minimum,omitempty"`
	ExclusiveMinimum     bool              `json:"exclusiveMinimum,omitempty"`
	MaxLength            *int64            `json:"maxLength,omitempty"`
	MinLength            *int64            `json:"minLength,omitempty"`
	Pattern              string            `json:"pattern,omitempty"`
	MaxItems             *int64            `json:"maxItems,omitempty"`
	MinItems             *int64            `json:"minItems,omitempty"`
	UniqueItems          bool              `json:"uniqueItems,omitempty"`
	MultipleOf           *float64          `json:"multipleOf,omitempty"`
	Enum                 []interface{}     `json:"enum,omitempty"`
	MaxProperties        *int64            `json:"maxProperties,omitempty"`
	MinProperties        *int64            `json:"minProperties,omitempty"`
	Required             []string          `json:"required,omitempty"`
	Items                *SchemaOrArray    `json:"items,omitempty"`
	AllOf                []Schema          `json:"allOf,omitempty"`
	OneOf                []Schema          `json:"oneOf,omitempty"`
	AnyOf                []Schema          `json:"anyOf,omitempty"`
	Not                  *Schema           `json:"not,omitempty"`
	Properties           map[string]Schema `json:"properties,omitempty"`
	AdditionalProperties *SchemaOrBool     `json:"additionalProperties,omitempty"`
	PatternProperties    map[string]Schema `json:"patternProperties,omitempty"`
	Dependencies         Dependencies      `json:"dependencies,omitempty"`
	AdditionalItems      *SchemaOrBool     `json:"additionalItems,omitempty"`
	Definitions          Definitions       `json:"definitions,omitempty"`
}

// swaggerSchemaProps holds the Swagger-specific extensions that the spec
// layers on top of the JSON Schema subset.
type swaggerSchemaProps struct {
	Discriminator string                 `json:"discriminator,omitempty"`
	ReadOnly      bool                   `json:"readOnly,omitempty"`
	XML           *XMLObject             `json:"xml,omitempty"`
	ExternalDocs  *ExternalDocumentation `json:"externalDocs,omitempty"`
	Example       interface{}            `json:"example,omitempty"`
}

// Schema the schema object allows the definition of input and output data types.
// These types can be objects, but also primitives and arrays.
// This object is based on the [JSON Schema Specification Draft 4](http://json-schema.org/)
// and uses a predefined subset of it.
// On top of this subset, there are extensions provided by this specification to allow for more complete documentation.
//
// For more information: http://goo.gl/8us55a#schemaObject
type Schema struct {
	vendorExtensible
	schemaProps
	swaggerSchemaProps
	// ExtraProps collects unknown, non-extension JSON keys encountered during
	// UnmarshalJSON; excluded from reflective marshaling and re-emitted by hand.
	ExtraProps map[string]interface{} `json:"-"`
}
// JSONLookup implements an interface to customize json pointer lookup.
// Lookup order matters: vendor extensions first, then unknown extra
// properties, then the schema and swagger property bags via reflection.
func (s Schema) JSONLookup(token string) (interface{}, error) {
	if ex, ok := s.Extensions[token]; ok {
		return &ex, nil
	}
	if ex, ok := s.ExtraProps[token]; ok {
		return &ex, nil
	}
	r, _, err := jsonpointer.GetForToken(s.schemaProps, token)
	if r != nil || err != nil {
		return r, err
	}
	r, _, err = jsonpointer.GetForToken(s.swaggerSchemaProps, token)
	return r, err
}
// WithProperties replaces the full set of named properties for this schema.
func (s *Schema) WithProperties(schemas map[string]Schema) *Schema {
	s.Properties = schemas
	return s
}

// SetProperty sets a single named property on this schema, allocating the
// property map on first use.
func (s *Schema) SetProperty(name string, schema Schema) *Schema {
	if s.Properties == nil {
		s.Properties = make(map[string]Schema)
	}
	s.Properties[name] = schema
	return s
}

// WithAllOf sets the allOf property.
func (s *Schema) WithAllOf(schemas ...Schema) *Schema {
	s.AllOf = schemas
	return s
}

// WithMaxProperties sets the max number of properties an object can have.
func (s *Schema) WithMaxProperties(max int64) *Schema {
	s.MaxProperties = &max
	return s
}

// WithMinProperties sets the min number of properties an object must have.
func (s *Schema) WithMinProperties(min int64) *Schema {
	s.MinProperties = &min
	return s
}

// Typed sets the type of this schema for a single value item, replacing any
// previously recorded types.
func (s *Schema) Typed(tpe, format string) *Schema {
	s.Type = []string{tpe}
	s.Format = format
	return s
}

// AddType adds a type with potential format to the types for this schema.
// An empty format leaves the current format untouched.
func (s *Schema) AddType(tpe, format string) *Schema {
	s.Type = append(s.Type, tpe)
	if format != "" {
		s.Format = format
	}
	return s
}

// CollectionOf is a fluent builder method for an array parameter whose items
// follow the given schema.
func (s *Schema) CollectionOf(items Schema) *Schema {
	s.Type = []string{"array"}
	s.Items = &SchemaOrArray{Schema: &items}
	return s
}

// WithDefault sets the default value on this parameter.
func (s *Schema) WithDefault(defaultValue interface{}) *Schema {
	s.Default = defaultValue
	return s
}

// WithRequired sets the list of required property names (replaces any
// previous list).
func (s *Schema) WithRequired(items ...string) *Schema {
	s.Required = items
	return s
}

// WithMaxLength sets a max length value.
func (s *Schema) WithMaxLength(max int64) *Schema {
	s.MaxLength = &max
	return s
}

// WithMinLength sets a min length value.
func (s *Schema) WithMinLength(min int64) *Schema {
	s.MinLength = &min
	return s
}

// WithPattern sets a pattern value.
func (s *Schema) WithPattern(pattern string) *Schema {
	s.Pattern = pattern
	return s
}

// WithMultipleOf sets a multiple-of value.
func (s *Schema) WithMultipleOf(number float64) *Schema {
	s.MultipleOf = &number
	return s
}

// WithMaximum sets a maximum number value and whether the bound is exclusive.
func (s *Schema) WithMaximum(max float64, exclusive bool) *Schema {
	s.Maximum = &max
	s.ExclusiveMaximum = exclusive
	return s
}

// WithMinimum sets a minimum number value and whether the bound is exclusive.
func (s *Schema) WithMinimum(min float64, exclusive bool) *Schema {
	s.Minimum = &min
	s.ExclusiveMinimum = exclusive
	return s
}

// WithEnum sets the enum values, replacing any previous values with a copy
// of the arguments.
func (s *Schema) WithEnum(values ...interface{}) *Schema {
	s.Enum = append([]interface{}{}, values...)
	return s
}

// WithMaxItems sets the max items.
func (s *Schema) WithMaxItems(size int64) *Schema {
	s.MaxItems = &size
	return s
}

// WithMinItems sets the min items.
func (s *Schema) WithMinItems(size int64) *Schema {
	s.MinItems = &size
	return s
}

// UniqueValues dictates that this array can only have unique items.
func (s *Schema) UniqueValues() *Schema {
	s.UniqueItems = true
	return s
}

// AllowDuplicates dictates that this array can have duplicate items.
func (s *Schema) AllowDuplicates() *Schema {
	s.UniqueItems = false
	return s
}
// MarshalJSON marshals this schema to JSON. Each property bag is serialized
// separately (reflective bags plus the hand-marshaled Ref/Schema fields and
// the untyped extra properties) and the object fragments are merged by
// swag.ConcatJSON.
func (s Schema) MarshalJSON() ([]byte, error) {
	b1, err := json.Marshal(s.schemaProps)
	if err != nil {
		return nil, fmt.Errorf("schema props %v", err)
	}
	b2, err := json.Marshal(s.vendorExtensible)
	if err != nil {
		return nil, fmt.Errorf("vendor props %v", err)
	}
	// Ref and Schema are tagged "-" in schemaProps, so they are emitted here
	// via their own MarshalJSON implementations.
	b3, err := s.Ref.MarshalJSON()
	if err != nil {
		return nil, fmt.Errorf("ref prop %v", err)
	}
	b4, err := s.Schema.MarshalJSON()
	if err != nil {
		return nil, fmt.Errorf("schema prop %v", err)
	}
	b5, err := json.Marshal(s.swaggerSchemaProps)
	if err != nil {
		return nil, fmt.Errorf("common validations %v", err)
	}
	// Unknown keys captured during unmarshaling are round-tripped back out.
	var b6 []byte
	if s.ExtraProps != nil {
		jj, err := json.Marshal(s.ExtraProps)
		if err != nil {
			return nil, fmt.Errorf("extra props %v", err)
		}
		b6 = jj
	}
	return swag.ConcatJSON(b1, b2, b3, b4, b5, b6), nil
}
// UnmarshalJSON unmarshals this schema from JSON. The typed property bags
// are decoded reflectively; the remaining unknown keys are routed either to
// Extensions (keys starting with "x-", case-insensitive) or to ExtraProps.
func (s *Schema) UnmarshalJSON(data []byte) error {
	var sch Schema
	if err := json.Unmarshal(data, &sch.schemaProps); err != nil {
		return err
	}
	if err := json.Unmarshal(data, &sch.Ref); err != nil {
		return err
	}
	if err := json.Unmarshal(data, &sch.Schema); err != nil {
		return err
	}
	if err := json.Unmarshal(data, &sch.swaggerSchemaProps); err != nil {
		return err
	}
	// Decode once more into a generic map to find keys not covered above.
	var d map[string]interface{}
	if err := json.Unmarshal(data, &d); err != nil {
		return err
	}
	// Drop keys already handled by the dedicated Ref/Schema decoders and by
	// the reflective property bags.
	delete(d, "$ref")
	delete(d, "$schema")
	for _, pn := range swag.DefaultJSONNameProvider.GetJSONNames(s) {
		delete(d, pn)
	}
	for k, vv := range d {
		lk := strings.ToLower(k)
		if strings.HasPrefix(lk, "x-") {
			if sch.Extensions == nil {
				sch.Extensions = map[string]interface{}{}
			}
			sch.Extensions[k] = vv
			continue
		}
		if sch.ExtraProps == nil {
			sch.ExtraProps = map[string]interface{}{}
		}
		sch.ExtraProps[k] = vv
	}
	*s = sch
	return nil
}
| apache-2.0 |
aslakknutsen/fabric8 | sandbox/fab/fab-osgi/src/main/java/io/fabric8/fab/osgi/internal/Configuration.java | 907 | /**
* Copyright 2005-2014 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.fab.osgi.internal;
import io.fabric8.fab.MavenResolver;
/**
 * Configuration values consumed by the FAB resolver machinery.
 */
public interface Configuration {
    /**
     * @return paths of resources treated as shared — exact semantics are
     *         defined by implementations (TODO confirm)
     */
    String[] getSharedResourcePaths();
    /**
     * @return whether certificate checking is enabled; presumably applies to
     *         remote repository access (TODO confirm)
     */
    boolean getCertificateCheck();
    /**
     * @return whether missing dependencies should be installed automatically
     */
    boolean isInstallMissingDependencies();
    /**
     * @return the Maven resolver used to resolve artifacts
     */
    MavenResolver getResolver();
}
| apache-2.0 |
WadeBarnes/schoolbus | Server/src/SchoolBusAPI/Authorization/PermissionRequirement.cs | 398 | using Microsoft.AspNetCore.Authorization;
using System.Collections.Generic;
namespace SchoolBusAPI.Authorization
{
/// <summary>
/// Authorization requirement that carries the set of permission codes a
/// caller must hold for a policy to succeed.
/// </summary>
public class PermissionRequirement : IAuthorizationRequirement
{
    /// <summary>
    /// Creates a requirement for the given permission codes.
    /// </summary>
    /// <param name="permissions">The required permission codes.</param>
    public PermissionRequirement(params string[] permissions)
    {
        RequiredPermissions = permissions;
    }

    /// <summary>
    /// The permissions required to satisfy this requirement.
    /// </summary>
    public IEnumerable<string> RequiredPermissions { get; }
}
}
| apache-2.0 |
saikrishna321/java-client | src/main/java/io/appium/java_client/events/api/general/JavaScriptEventListener.java | 1458 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.appium.java_client.events.api.general;
import io.appium.java_client.events.api.Listener;
import org.openqa.selenium.WebDriver;
/**
 * Listener notified around JavaScript execution performed through the
 * WebDriver {@code JavascriptExecutor}.
 */
public interface JavaScriptEventListener extends Listener {
    /**
     * Called before
     * {@link org.openqa.selenium.JavascriptExecutor#executeScript(String, Object[]) }.
     *
     * @param driver WebDriver
     * @param script the script to be executed
     */
    void beforeScript(String script, WebDriver driver);
    /**
     * Called after
     * {@link org.openqa.selenium.remote.RemoteWebDriver#executeScript(String, Object[]) }.
     * Not called if an exception is thrown while executing the script.
     *
     * @param driver WebDriver
     * @param script the script that was executed
     */
    void afterScript(String script, WebDriver driver);
}
| apache-2.0 |
codelabs-ch/arcanist | src/hardpoint/ArcanistVectorHardpoint.php | 319 | <?php
/**
 * Hardpoint whose value is a list: merged loads append their items to the
 * existing value instead of replacing it.
 */
final class ArcanistVectorHardpoint
  extends ArcanistHardpoint {

  public function isVectorHardpoint() {
    return true;
  }

  /**
   * Merge two hardpoint values by pushing every value from "$new" onto
   * "$old". Keys of "$new" are intentionally discarded; the result keeps
   * "$old"'s keys plus appended numeric keys.
   */
  public function mergeHardpointValues(
    ArcanistHardpointObject $object,
    $old,
    $new) {
    foreach ($new as $item) {
      $old[] = $item;
    }
    return $old;
  }
}
| apache-2.0 |
prateekbh/amphtml | extensions/amp-pinterest/0.1/follow-button.js | 2991 | /**
* Copyright 2015 The AMP HTML Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS-IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {Util} from './util';
import {assertHttpsUrl} from '../../../src/url';
import {openWindowDialog} from '../../../src/dom';
import {tryResolve} from '../../../src/utils/promise';
import {userAssert} from '../../../src/log';
// window.open() feature string for the follow popup: a fixed 1040x640
// window with scrollbars but no browser chrome.
const POP_FOLLOW = `status=no,resizable=yes,scrollbars=yes,
personalbar=no,directories=no,location=no,toolbar=no,
menubar=no,width=1040,height=640,left=0,top=0`;
/**
* Pinterest Follow Button
* data-href: the url of the user's profile to follow
* data-label: the text to display (user's full name)
*/
/**
 * Pinterest "Follow" button.
 *
 * Required attributes on the root element:
 *   data-href:  URL of the user's profile to follow (must be https)
 *   data-label: the text to display (user's full name)
 */
export class FollowButton {
  /** @param {!Element} rootElement */
  constructor(rootElement) {
    const href = rootElement.getAttribute('data-href');
    userAssert(
      href,
      'The data-href attribute is required for follow buttons'
    );
    const label = rootElement.getAttribute('data-label');
    userAssert(
      label,
      'The data-label attribute is required for follow buttons'
    );
    this.element = rootElement;
    this.label = label;
    // Follow targets must be https; assertHttpsUrl throws otherwise.
    this.href = assertHttpsUrl(href, rootElement);
  }

  /**
   * Overrides the anchor's default navigation: opens the follow dialog in a
   * popup window and logs the interaction.
   * @param {Event} event
   */
  handleClick(event) {
    event.preventDefault();
    const windowName = 'pin' + Date.now();
    openWindowDialog(window, this.href, windowName, POP_FOLLOW);
    Util.log(`&type=button_follow&href=${this.href}`);
  }

  /**
   * Builds the button DOM: an anchor with the label and a trailing <i> icon.
   * @return {Element}
   */
  renderTemplate() {
    const doc = this.element.ownerDocument;
    const button = Util.make(doc, {
      'a': {
        class: '-amp-pinterest-follow-button',
        href: this.href,
        textContent: this.label,
      },
    });
    button.appendChild(Util.make(doc, {'i': {}}));
    button.onclick = this.handleClick.bind(this);
    return button;
  }

  /**
   * Normalizes the profile URL into the follow endpoint, then renders.
   * @return {!Promise}
   */
  render() {
    if (!this.href.endsWith('/')) {
      this.href += '/';
    }
    this.href += `pins/follow/?guid=${Util.guid}`;
    return tryResolve(() => this.renderTemplate());
  }

  /**
   * This button never needs a resize after first layout.
   * @return {!Promise<number|null>}
   */
  height() {
    return Promise.resolve(null);
  }
}
| apache-2.0 |
androidx/constraintlayout | desktop/ConstraintLayoutInspector/app/src/androidx/constraintLayout/desktop/constraintRendering/drawing/decorator/TextWidgetConstants.java | 1015 | /*
* Copyright (C) 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.idea.uibuilder.handlers.constraint.drawing.decorator;
/**
 * Integer constants describing text-alignment modes for text widgets.
 * Values presumably mirror Android's {@code android.view.View#TEXT_ALIGNMENT_*}
 * constants — TODO confirm against the platform source.
 *
 * <p>This is a pure constant holder and is not meant to be instantiated.
 */
public class TextWidgetConstants {
  public static final int TEXT_ALIGNMENT_TEXT_START = 2;
  public static final int TEXT_ALIGNMENT_TEXT_END = 3;
  public static final int TEXT_ALIGNMENT_VIEW_START = 5;
  public static final int TEXT_ALIGNMENT_VIEW_END = 6;
  public static final int TEXT_ALIGNMENT_CENTER = 4;

  /** Utility holder — prevent instantiation. */
  private TextWidgetConstants() {
  }
}
| apache-2.0 |
noironetworks/neutron | neutron/objects/plugins/ml2/base.py | 1271 | # Copyright (c) 2016 Intel Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from neutron.objects import base
class EndpointBase(base.NeutronDbObject):
    """Base object for ML2 tunnel endpoints keyed by IP address.

    Translates the ``ip_address`` field between its database string form
    and a ``netaddr.IPAddress`` instance on the object side.
    """

    primary_keys = ['ip_address']

    @classmethod
    def modify_fields_from_db(cls, db_obj):
        # The DB stores the address as a string; expose it as IPAddress.
        fields = super(EndpointBase, cls).modify_fields_from_db(db_obj)
        if 'ip_address' in fields:
            fields['ip_address'] = netaddr.IPAddress(fields['ip_address'])
        return fields

    @classmethod
    def modify_fields_to_db(cls, fields):
        # Mirror of modify_fields_from_db: serialize the address back to str.
        db_fields = super(EndpointBase, cls).modify_fields_to_db(fields)
        if 'ip_address' in fields:
            db_fields['ip_address'] = cls.filter_to_str(
                db_fields['ip_address'])
        return db_fields
ilinum/intellij-scala | test/org/jetbrains/plugins/scala/failed/typeInference/CaseClassTypeInferenceTest.scala | 1474 | package org.jetbrains.plugins.scala.failed.typeInference
import org.jetbrains.plugins.scala.PerfCycleTests
import org.jetbrains.plugins.scala.lang.typeInference.TypeInferenceTestBase
import org.junit.experimental.categories.Category
/**
* Created by Anton Yalyshev on 17/05/16.
*/
@Category(Array(classOf[PerfCycleTests]))
// NOTE: lives in the `failed` package — these are known-failing type-inference
// cases kept as regression fixtures. The trailing `//...` comment line inside
// each fixture states the expected inferred type for the marked range.
class CaseClassTypeInferenceTest extends TypeInferenceTestBase {
  // SCL-10292: lambda parameter type when searching a companion's reflective
  // method list; expected type of the predicate is (Nothing) => Boolean.
  def testSCL10292(): Unit = {
    doTest(
      s"""
         |case class Foo(a: Int)
         |Foo.getClass.getMethods.find(${START}x => x.getName == "apply"$END)
         |//(Nothing) => Boolean
      """.stripMargin)
  }

  // SCL-11159 (explicit-parameter form): lambda passed to
  // AtomicReference.getAndUpdate should infer as UnaryOperator[Test].
  def testSCL11159a(): Unit = {
    doTest(
      s"""import java.util.concurrent.atomic.AtomicReference
         |
         |object UnaryOps {
         |
         |  case class Test(value: Int = 0)
         |  val atomic: AtomicReference[Test] = new AtomicReference(Test())
         |  atomic.getAndUpdate(${START}(t: Test) => t.copy(value = 2)$END)
         |}
         |//UnaryOperator[UnaryOps.Test]
      """.stripMargin)
  }

  // SCL-11159 (placeholder form): same expectation using `_.copy(...)`.
  def testSCL11159b(): Unit = {
    doTest(
      s"""import java.util.concurrent.atomic.AtomicReference
         |
         |object UnaryOps {
         |
         |  case class Test(value: Int = 0)
         |  val atomic: AtomicReference[Test] = new AtomicReference(Test())
         |  atomic.getAndUpdate(${START}_.copy(value = 1)$END)
         |}
         |//UnaryOperator[UnaryOps.Test]
      """.stripMargin)
  }
}
| apache-2.0 |
crazycode/weixin-java-tools | weixin-java-miniapp/src/main/java/cn/binarywang/wx/miniapp/util/json/WxMaTemplateMessageGsonAdapter.java | 1812 | package cn.binarywang.wx.miniapp.util.json;
import cn.binarywang.wx.miniapp.bean.WxMaTemplateMessage;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonSerializationContext;
import com.google.gson.JsonSerializer;
import java.lang.reflect.Type;
/**
* @author <a href="https://github.com/binarywang">Binary Wang</a>
*/
/**
 * Gson serializer that renders a {@code WxMaTemplateMessage} into the JSON
 * payload shape used by the WeChat Mini-App template-message API.
 *
 * <p>Fix: the original emitted the optional {@code page} attribute twice
 * (two identical null-checked blocks); the duplicate has been removed.
 *
 * @author <a href="https://github.com/binarywang">Binary Wang</a>
 */
public class WxMaTemplateMessageGsonAdapter implements JsonSerializer<WxMaTemplateMessage> {
  @Override
  public JsonElement serialize(WxMaTemplateMessage message, Type typeOfSrc, JsonSerializationContext context) {
    JsonObject messageJson = new JsonObject();

    // Mandatory attributes.
    messageJson.addProperty("touser", message.getToUser());
    messageJson.addProperty("template_id", message.getTemplateId());

    // Optional attributes: only emitted when present on the message.
    if (message.getPage() != null) {
      messageJson.addProperty("page", message.getPage());
    }
    if (message.getFormId() != null) {
      messageJson.addProperty("form_id", message.getFormId());
    }
    if (message.getColor() != null) {
      messageJson.addProperty("color", message.getColor());
    }
    if (message.getEmphasisKeyword() != null) {
      messageJson.addProperty("emphasis_keyword", message.getEmphasisKeyword());
    }

    // "data" is always present, even when the message carries no entries.
    JsonObject data = new JsonObject();
    messageJson.add("data", data);
    if (message.getData() == null) {
      return messageJson;
    }

    // Each datum becomes {"value": ..., "color": ...} keyed by its name.
    for (WxMaTemplateMessage.Data datum : message.getData()) {
      JsonObject dataJson = new JsonObject();
      dataJson.addProperty("value", datum.getValue());
      if (datum.getColor() != null) {
        dataJson.addProperty("color", datum.getColor());
      }
      data.add(datum.getName(), dataJson);
    }
    return messageJson;
  }
}
| apache-2.0 |
tweakmy/dnp3-old | src/opendnp3/xml/binding/APLXML_Base.cpp | 12525 | /* ******************************
GENERATED CONTENT DO NOT ALTER!
*********************************
*/
#include "APLXML_Base.h"
using namespace std;
namespace APLXML_Base {
// [generated code — comments only, logic unaltered]
// Parses an XML attribute string into a BaudRateEnum. On unrecognized input:
// throws when IXMLDataBound::msExceptOnFailure is set, otherwise logs to
// stdout and falls back to BAUDRATE_1200. apParent is only used for the
// row/column position in the diagnostic message.
BaudRateEnum FromString_BaudRateEnum(TiXmlNode* apParent, const char* aValue) {
  if(aValue != NULL && strcmp(aValue,"1200") == 0) return BAUDRATE_1200;
  if(aValue != NULL && strcmp(aValue,"1800") == 0) return BAUDRATE_1800;
  if(aValue != NULL && strcmp(aValue,"2400") == 0) return BAUDRATE_2400;
  if(aValue != NULL && strcmp(aValue,"4800") == 0) return BAUDRATE_4800;
  if(aValue != NULL && strcmp(aValue,"9600") == 0) return BAUDRATE_9600;
  if(aValue != NULL && strcmp(aValue,"19200") == 0) return BAUDRATE_19200;
  if(aValue != NULL && strcmp(aValue,"38400") == 0) return BAUDRATE_38400;
  if(aValue != NULL && strcmp(aValue,"57600") == 0) return BAUDRATE_57600;
  if(aValue != NULL && strcmp(aValue,"115200") == 0) return BAUDRATE_115200;
  if(aValue != NULL && strcmp(aValue,"230400") == 0) return BAUDRATE_230400;
  std::ostringstream oss;
  oss << "String " << (aValue==NULL?"null":aValue) << " isnt valid for BaudRateEnum at " << apParent->Row() << ":" << apParent->Column();
  if(IXMLDataBound::msExceptOnFailure){throw apl::Exception(LOCATION, oss.str());}
  else{std::cout << oss.str() << std::endl; return BAUDRATE_1200;}
};
// Inverse mapping; throws on an out-of-range enum value.
string ToString_BaudRateEnum(BaudRateEnum aValue) {
  switch(aValue){
  case BAUDRATE_1200: return "1200";
  case BAUDRATE_1800: return "1800";
  case BAUDRATE_2400: return "2400";
  case BAUDRATE_4800: return "4800";
  case BAUDRATE_9600: return "9600";
  case BAUDRATE_19200: return "19200";
  case BAUDRATE_38400: return "38400";
  case BAUDRATE_57600: return "57600";
  case BAUDRATE_115200: return "115200";
  case BAUDRATE_230400: return "230400";
  }
  std::ostringstream oss;
  oss << "Bad Enum value " << aValue << " for type BaudRateEnum";
  throw apl::Exception(LOCATION, oss.str());
};
// [generated code — comments only] String <-> DBitsEnum; unrecognized input
// throws or falls back to DATABITS_7 (see BaudRateEnum converter for pattern).
DBitsEnum FromString_DBitsEnum(TiXmlNode* apParent, const char* aValue) {
  if(aValue != NULL && strcmp(aValue,"DATABITS_7") == 0) return DATABITS_7;
  if(aValue != NULL && strcmp(aValue,"DATABITS_8") == 0) return DATABITS_8;
  std::ostringstream oss;
  oss << "String " << (aValue==NULL?"null":aValue) << " isnt valid for DBitsEnum at " << apParent->Row() << ":" << apParent->Column();
  if(IXMLDataBound::msExceptOnFailure){throw apl::Exception(LOCATION, oss.str());}
  else{std::cout << oss.str() << std::endl; return DATABITS_7;}
};
// Inverse mapping; throws on an out-of-range enum value.
string ToString_DBitsEnum(DBitsEnum aValue) {
  switch(aValue){
  case DATABITS_7: return "DATABITS_7";
  case DATABITS_8: return "DATABITS_8";
  }
  std::ostringstream oss;
  oss << "Bad Enum value " << aValue << " for type DBitsEnum";
  throw apl::Exception(LOCATION, oss.str());
};
// [generated code — comments only] String <-> FlowControlEnum; fallback is
// FLOW_NONE when exceptions are disabled.
FlowControlEnum FromString_FlowControlEnum(TiXmlNode* apParent, const char* aValue) {
  if(aValue != NULL && strcmp(aValue,"FLOW_NONE") == 0) return FLOW_NONE;
  if(aValue != NULL && strcmp(aValue,"FLOW_HARDWARE") == 0) return FLOW_HARDWARE;
  if(aValue != NULL && strcmp(aValue,"FLOW_XONXOFF") == 0) return FLOW_XONXOFF;
  std::ostringstream oss;
  oss << "String " << (aValue==NULL?"null":aValue) << " isnt valid for FlowControlEnum at " << apParent->Row() << ":" << apParent->Column();
  if(IXMLDataBound::msExceptOnFailure){throw apl::Exception(LOCATION, oss.str());}
  else{std::cout << oss.str() << std::endl; return FLOW_NONE;}
};
// Inverse mapping; throws on an out-of-range enum value.
string ToString_FlowControlEnum(FlowControlEnum aValue) {
  switch(aValue){
  case FLOW_NONE: return "FLOW_NONE";
  case FLOW_HARDWARE: return "FLOW_HARDWARE";
  case FLOW_XONXOFF: return "FLOW_XONXOFF";
  }
  std::ostringstream oss;
  oss << "Bad Enum value " << aValue << " for type FlowControlEnum";
  throw apl::Exception(LOCATION, oss.str());
};
// [generated code — comments only] String <-> LogLevelEnum; fallback is
// LOG_DEBUG when exceptions are disabled.
LogLevelEnum FromString_LogLevelEnum(TiXmlNode* apParent, const char* aValue) {
  if(aValue != NULL && strcmp(aValue,"LOG_DEBUG") == 0) return LOG_DEBUG;
  if(aValue != NULL && strcmp(aValue,"LOG_COMM") == 0) return LOG_COMM;
  if(aValue != NULL && strcmp(aValue,"LOG_INTERPRET") == 0) return LOG_INTERPRET;
  if(aValue != NULL && strcmp(aValue,"LOG_INFO") == 0) return LOG_INFO;
  if(aValue != NULL && strcmp(aValue,"LOG_WARNING") == 0) return LOG_WARNING;
  if(aValue != NULL && strcmp(aValue,"LOG_ERROR") == 0) return LOG_ERROR;
  if(aValue != NULL && strcmp(aValue,"LOG_EVENT") == 0) return LOG_EVENT;
  std::ostringstream oss;
  oss << "String " << (aValue==NULL?"null":aValue) << " isnt valid for LogLevelEnum at " << apParent->Row() << ":" << apParent->Column();
  if(IXMLDataBound::msExceptOnFailure){throw apl::Exception(LOCATION, oss.str());}
  else{std::cout << oss.str() << std::endl; return LOG_DEBUG;}
};
// Inverse mapping; throws on an out-of-range enum value.
string ToString_LogLevelEnum(LogLevelEnum aValue) {
  switch(aValue){
  case LOG_DEBUG: return "LOG_DEBUG";
  case LOG_COMM: return "LOG_COMM";
  case LOG_INTERPRET: return "LOG_INTERPRET";
  case LOG_INFO: return "LOG_INFO";
  case LOG_WARNING: return "LOG_WARNING";
  case LOG_ERROR: return "LOG_ERROR";
  case LOG_EVENT: return "LOG_EVENT";
  }
  std::ostringstream oss;
  oss << "Bad Enum value " << aValue << " for type LogLevelEnum";
  throw apl::Exception(LOCATION, oss.str());
};
// [generated code — comments only] String <-> ParityEnum; fallback is
// PARITY_NONE when exceptions are disabled.
ParityEnum FromString_ParityEnum(TiXmlNode* apParent, const char* aValue) {
  if(aValue != NULL && strcmp(aValue,"PARITY_NONE") == 0) return PARITY_NONE;
  if(aValue != NULL && strcmp(aValue,"PARITY_EVEN") == 0) return PARITY_EVEN;
  if(aValue != NULL && strcmp(aValue,"PARITY_ODD") == 0) return PARITY_ODD;
  std::ostringstream oss;
  oss << "String " << (aValue==NULL?"null":aValue) << " isnt valid for ParityEnum at " << apParent->Row() << ":" << apParent->Column();
  if(IXMLDataBound::msExceptOnFailure){throw apl::Exception(LOCATION, oss.str());}
  else{std::cout << oss.str() << std::endl; return PARITY_NONE;}
};
// Inverse mapping; throws on an out-of-range enum value.
string ToString_ParityEnum(ParityEnum aValue) {
  switch(aValue){
  case PARITY_NONE: return "PARITY_NONE";
  case PARITY_EVEN: return "PARITY_EVEN";
  case PARITY_ODD: return "PARITY_ODD";
  }
  std::ostringstream oss;
  oss << "Bad Enum value " << aValue << " for type ParityEnum";
  throw apl::Exception(LOCATION, oss.str());
};
// [generated code — comments only] Reads Name/OpenRetryMS attributes from a
// <PhysicalLayerDescriptor> element; marks the object valid on success.
void PhysicalLayerDescriptor_t :: fromXml(TiXmlNode* pNode){
  if(pNode == NULL)return;
  XML_CHECK("PhysicalLayerDescriptor",pNode->Type() == TiXmlNode::ELEMENT);
  TiXmlElement* pEm = pNode->ToElement();
  XML_CHECK("PhysicalLayerDescriptor",pEm != 0);
  Name = FromString_string(pEm, pEm->Attribute("Name"));
  OpenRetryMS = FromString_int(pEm, pEm->Attribute("OpenRetryMS"));
  valid=true;
};
// Writes attributes back out. aCreateNode: create a child element under
// pParent vs. writing onto pParent itself (used by subclasses that embed
// this descriptor's attributes). Invalid objects are skipped unless forced.
void PhysicalLayerDescriptor_t :: toXml(TiXmlNode* pParent, bool aCreateNode, bool aIgnoreValid){
  if(!aIgnoreValid && !valid) return;
  TiXmlElement * pEm;
  if(aCreateNode){
    pEm = new TiXmlElement("PhysicalLayerDescriptor");
    pParent->LinkEndChild(pEm);
  }else{
    pEm = pParent->ToElement();
  }
  pEm->SetAttribute("Name", ToString_string(Name));
  pEm->SetAttribute("OpenRetryMS", ToString_int(OpenRetryMS));
};
// [generated code — comments only] String <-> StopBitsEnum; fallback is
// STOPBITS_0 when exceptions are disabled.
StopBitsEnum FromString_StopBitsEnum(TiXmlNode* apParent, const char* aValue) {
  if(aValue != NULL && strcmp(aValue,"STOPBITS_0") == 0) return STOPBITS_0;
  if(aValue != NULL && strcmp(aValue,"STOPBITS_1") == 0) return STOPBITS_1;
  if(aValue != NULL && strcmp(aValue,"STOPBITS_2") == 0) return STOPBITS_2;
  std::ostringstream oss;
  oss << "String " << (aValue==NULL?"null":aValue) << " isnt valid for StopBitsEnum at " << apParent->Row() << ":" << apParent->Column();
  if(IXMLDataBound::msExceptOnFailure){throw apl::Exception(LOCATION, oss.str());}
  else{std::cout << oss.str() << std::endl; return STOPBITS_0;}
};
// Inverse mapping; throws on an out-of-range enum value.
string ToString_StopBitsEnum(StopBitsEnum aValue) {
  switch(aValue){
  case STOPBITS_0: return "STOPBITS_0";
  case STOPBITS_1: return "STOPBITS_1";
  case STOPBITS_2: return "STOPBITS_2";
  }
  std::ostringstream oss;
  oss << "Bad Enum value " << aValue << " for type StopBitsEnum";
  throw apl::Exception(LOCATION, oss.str());
};
// [generated code — comments only] <TCPClient>: base descriptor attributes
// plus Address/Port. The base fromXml is invoked on the same element.
void TCPClient_t :: fromXml(TiXmlNode* pNode){
  if(pNode == NULL)return;
  XML_CHECK("TCPClient",pNode->Type() == TiXmlNode::ELEMENT);
  TiXmlElement* pEm = pNode->ToElement();
  XML_CHECK("TCPClient",pEm != 0);
  this->APLXML_Base::PhysicalLayerDescriptor_t::fromXml(pNode);
  Address = FromString_string(pEm, pEm->Attribute("Address"));
  Port = FromString_int(pEm, pEm->Attribute("Port"));
  valid=true;
};
// Base attributes are written onto this same element (aCreateNode=false).
void TCPClient_t :: toXml(TiXmlNode* pParent, bool aCreateNode, bool aIgnoreValid){
  if(!aIgnoreValid && !valid) return;
  TiXmlElement * pEm;
  if(aCreateNode){
    pEm = new TiXmlElement("TCPClient");
    pParent->LinkEndChild(pEm);
  }else{
    pEm = pParent->ToElement();
  }
  this->APLXML_Base::PhysicalLayerDescriptor_t::toXml(pEm, false, aIgnoreValid);
  pEm->SetAttribute("Address", ToString_string(Address));
  pEm->SetAttribute("Port", ToString_int(Port));
};
// [generated code — comments only] <TCPServer>: base descriptor attributes
// plus Endpoint/Port. Mirrors TCPClient_t.
void TCPServer_t :: fromXml(TiXmlNode* pNode){
  if(pNode == NULL)return;
  XML_CHECK("TCPServer",pNode->Type() == TiXmlNode::ELEMENT);
  TiXmlElement* pEm = pNode->ToElement();
  XML_CHECK("TCPServer",pEm != 0);
  this->APLXML_Base::PhysicalLayerDescriptor_t::fromXml(pNode);
  Endpoint = FromString_string(pEm, pEm->Attribute("Endpoint"));
  Port = FromString_int(pEm, pEm->Attribute("Port"));
  valid=true;
};
// Base attributes are written onto this same element (aCreateNode=false).
void TCPServer_t :: toXml(TiXmlNode* pParent, bool aCreateNode, bool aIgnoreValid){
  if(!aIgnoreValid && !valid) return;
  TiXmlElement * pEm;
  if(aCreateNode){
    pEm = new TiXmlElement("TCPServer");
    pParent->LinkEndChild(pEm);
  }else{
    pEm = pParent->ToElement();
  }
  this->APLXML_Base::PhysicalLayerDescriptor_t::toXml(pEm, false, aIgnoreValid);
  pEm->SetAttribute("Endpoint", ToString_string(Endpoint));
  pEm->SetAttribute("Port", ToString_int(Port));
};
// [generated code — comments only] <Log>: single Filter attribute holding
// the log level threshold.
void Log_t :: fromXml(TiXmlNode* pNode){
  if(pNode == NULL)return;
  XML_CHECK("Log",pNode->Type() == TiXmlNode::ELEMENT);
  TiXmlElement* pEm = pNode->ToElement();
  XML_CHECK("Log",pEm != 0);
  Filter = FromString_LogLevelEnum(pEm, pEm->Attribute("Filter"));
  valid=true;
};
void Log_t :: toXml(TiXmlNode* pParent, bool aCreateNode, bool aIgnoreValid){
  if(!aIgnoreValid && !valid) return;
  TiXmlElement * pEm;
  if(aCreateNode){
    pEm = new TiXmlElement("Log");
    pParent->LinkEndChild(pEm);
  }else{
    pEm = pParent->ToElement();
  }
  pEm->SetAttribute("Filter", ToString_LogLevelEnum(Filter));
};
// [generated code — comments only] <Serial>: base descriptor attributes plus
// the full set of serial-port settings (device, baud, parity, framing, flow).
void Serial_t :: fromXml(TiXmlNode* pNode){
  if(pNode == NULL)return;
  XML_CHECK("Serial",pNode->Type() == TiXmlNode::ELEMENT);
  TiXmlElement* pEm = pNode->ToElement();
  XML_CHECK("Serial",pEm != 0);
  this->APLXML_Base::PhysicalLayerDescriptor_t::fromXml(pNode);
  Device = FromString_string(pEm, pEm->Attribute("Device"));
  BaudRate = FromString_BaudRateEnum(pEm, pEm->Attribute("BaudRate"));
  Parity = FromString_ParityEnum(pEm, pEm->Attribute("Parity"));
  DBits = FromString_DBitsEnum(pEm, pEm->Attribute("DBits"));
  StopBits = FromString_StopBitsEnum(pEm, pEm->Attribute("StopBits"));
  FlowControl = FromString_FlowControlEnum(pEm, pEm->Attribute("FlowControl"));
  valid=true;
};
// Base attributes are written onto this same element (aCreateNode=false).
void Serial_t :: toXml(TiXmlNode* pParent, bool aCreateNode, bool aIgnoreValid){
  if(!aIgnoreValid && !valid) return;
  TiXmlElement * pEm;
  if(aCreateNode){
    pEm = new TiXmlElement("Serial");
    pParent->LinkEndChild(pEm);
  }else{
    pEm = pParent->ToElement();
  }
  this->APLXML_Base::PhysicalLayerDescriptor_t::toXml(pEm, false, aIgnoreValid);
  pEm->SetAttribute("Device", ToString_string(Device));
  pEm->SetAttribute("BaudRate", ToString_BaudRateEnum(BaudRate));
  pEm->SetAttribute("Parity", ToString_ParityEnum(Parity));
  pEm->SetAttribute("DBits", ToString_DBitsEnum(DBits));
  pEm->SetAttribute("StopBits", ToString_StopBitsEnum(StopBits));
  pEm->SetAttribute("FlowControl", ToString_FlowControlEnum(FlowControl));
};
// [generated code — comments only] Container for the three physical-layer
// collections. The *Vector members are aliases onto each named collection's
// backing storage, bound in the constructor.
PhysicalLayerList_t::PhysicalLayerList_t():
  TCPServer("TCPServer"), TCPServerVector(TCPServer.collection),
  TCPClient("TCPClient"), TCPClientVector(TCPClient.collection),
  Serial("Serial"), SerialVector(Serial.collection){};
// Each collection scans pNode for its own child-element name.
void PhysicalLayerList_t :: fromXml(TiXmlNode* pNode){
  if(pNode == NULL)return;
  XML_CHECK("PhysicalLayerList",pNode->Type() == TiXmlNode::ELEMENT);
  TiXmlElement* pEm = pNode->ToElement();
  XML_CHECK("PhysicalLayerList",pEm != 0);
  TCPServer.fromXml(pNode);
  TCPClient.fromXml(pNode);
  Serial.fromXml(pNode);
  valid=true;
};
// Emits nothing at all when every collection is empty.
void PhysicalLayerList_t :: toXml(TiXmlNode* pParent, bool aCreateNode, bool aIgnoreValid){
  if(TCPServer.size() == 0 && TCPClient.size() == 0 && Serial.size() == 0)return;
  if(!aIgnoreValid && !valid) return;
  TiXmlElement * pEm;
  if(aCreateNode){
    pEm = new TiXmlElement("PhysicalLayerList");
    pParent->LinkEndChild(pEm);
  }else{
    pEm = pParent->ToElement();
  }
  TCPServer.toXml(pEm, true, aIgnoreValid);
  TCPClient.toXml(pEm, true, aIgnoreValid);
  Serial.toXml(pEm, true, aIgnoreValid);
};
}
| apache-2.0 |
msebire/intellij-community | plugins/ui-designer/src/com/intellij/uiDesigner/designSurface/InsertComponentProcessor.java | 22956 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.uiDesigner.designSurface;
import com.intellij.CommonBundle;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.ide.palette.impl.PaletteToolWindowManager;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.JavaPsiFacade;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.uiDesigner.*;
import com.intellij.uiDesigner.compiler.Utils;
import com.intellij.uiDesigner.core.Util;
import com.intellij.uiDesigner.make.PsiNestedFormLoader;
import com.intellij.uiDesigner.palette.ComponentItem;
import com.intellij.uiDesigner.palette.ComponentItemDialog;
import com.intellij.uiDesigner.palette.Palette;
import com.intellij.uiDesigner.quickFixes.CreateFieldFix;
import com.intellij.uiDesigner.radComponents.*;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseEvent;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* @author Anton Katilin
* @author Vladimir Kondratyev
*/
public final class InsertComponentProcessor extends EventProcessor {
private static final Logger LOG = Logger.getInstance("#com.intellij.uiDesigner.designSurface.InsertComponentProcessor");
private final GuiEditor myEditor;
private boolean mySticky;
private RadComponent myInsertedComponent;
private final GridInsertProcessor myGridInsertProcessor;
private ComponentItem myComponentToInsert;
private ComponentDropLocation myLastLocation;
  // Maps fully-qualified component class names to the factories that create
  // the matching Rad* designer wrappers for them.
  private static final Map<String, RadComponentFactory> myComponentClassMap = new HashMap<>();

  static {
    // Register the built-in Swing containers/controls the designer wraps
    // with specialized Rad components (everything else gets a generic one).
    myComponentClassMap.put(JScrollPane.class.getName(), new RadScrollPane.Factory());
    myComponentClassMap.put(JPanel.class.getName(), new RadContainer.Factory());
    myComponentClassMap.put(VSpacer.class.getName(), new RadVSpacer.Factory());
    myComponentClassMap.put(HSpacer.class.getName(), new RadHSpacer.Factory());
    myComponentClassMap.put(JTabbedPane.class.getName(), new RadTabbedPane.Factory());
    myComponentClassMap.put(JSplitPane.class.getName(), new RadSplitPane.Factory());
    myComponentClassMap.put(JToolBar.class.getName(), new RadToolBar.Factory());
    myComponentClassMap.put(JTable.class.getName(), new RadTable.Factory());
  }
  public InsertComponentProcessor(@NotNull final GuiEditor editor) {
    myEditor = editor;
    myGridInsertProcessor = new GridInsertProcessor(editor);
  }

  /**
   * When sticky, the active palette item is kept selected after an insert so
   * the user can drop several copies in a row.
   */
  public void setSticky(final boolean sticky) {
    mySticky = sticky;
  }

  /** Explicitly sets the item to insert, overriding the palette selection. */
  public void setComponentToInsert(final ComponentItem componentToInsert) {
    myComponentToInsert = componentToInsert;
  }
  /**
   * Remembers the drop location for a subsequent keyboard-driven insert.
   * If the requested location cannot accept the pending component, tries the
   * adjacent locations to the right and below before giving up and keeping
   * the original (non-droppable) location. Shows drop feedback when possible.
   */
  public void setLastLocation(final ComponentDropLocation location) {
    final ComponentItem componentToInsert = getComponentToInsert();
    assert componentToInsert != null;
    ComponentItemDragObject dragObject = new ComponentItemDragObject(componentToInsert);
    if (location.canDrop(dragObject)) {
      myLastLocation = location;
    }
    else {
      // Fallback: prefer the cell to the right, then the cell below.
      ComponentDropLocation locationToRight = location.getAdjacentLocation(ComponentDropLocation.Direction.RIGHT);
      ComponentDropLocation locationToBottom = location.getAdjacentLocation(ComponentDropLocation.Direction.DOWN);
      if (locationToRight != null && locationToRight.canDrop(dragObject)) {
        myLastLocation = locationToRight;
      }
      else if (locationToBottom != null && locationToBottom.canDrop(dragObject)) {
        myLastLocation = locationToBottom;
      }
      else {
        myLastLocation = location;
      }
    }
    if (myLastLocation.canDrop(dragObject)) {
      myLastLocation.placeFeedback(myEditor.getActiveDecorationLayer(), dragObject);
    }
  }
  /**
   * Keyboard handling while insert mode is active: Enter commits the insert
   * at the remembered location; any other key moves the drop location
   * (arrow-key navigation delegated to {@code moveDropLocation}).
   */
  @Override
  protected void processKeyEvent(final KeyEvent e) {
    if (e.getID() == KeyEvent.KEY_PRESSED) {
      if (e.getKeyCode() == KeyEvent.VK_ENTER) {
        if (myLastLocation != null) {
          // Stop this processor first so the insert runs outside of it.
          myEditor.getMainProcessor().stopCurrentProcessor();
          processComponentInsert(getComponentToInsert(), myLastLocation);
        }
      }
      else {
        ComponentItem componentToInsert = getComponentToInsert();
        if (componentToInsert == null) {
          // Nothing selected in the palette any more — abort insert mode.
          cancelOperation();
        }
        else {
          myLastLocation = moveDropLocation(myEditor, myLastLocation, new ComponentItemDragObject(componentToInsert), e);
        }
      }
    }
  }
  /**
   * Suggests a unique field-binding name for a component of the given class,
   * e.g. {@code javax.swing.JButton} -> {@code button1} (uniqueness within
   * the form is ensured by {@link #getUniqueBinding}).
   */
  @NotNull
  public static String suggestBinding(final RadRootContainer rootContainer, @NotNull final String componentClassName) {
    String shortClassName = getShortClassName(componentClassName);
    LOG.assertTrue(shortClassName.length() > 0);
    return getUniqueBinding(rootContainer, shortClassName);
  }
public static String getShortClassName(@NonNls final String componentClassName) {
final int lastDotIndex = componentClassName.lastIndexOf('.');
String shortClassName = componentClassName.substring(lastDotIndex + 1);
// Here is euristic. Chop first 'J' letter for standard Swing classes.
// Without 'J' bindings look better.
if (
shortClassName.length() > 1 && Character.isUpperCase(shortClassName.charAt(1)) &&
componentClassName.startsWith("javax.swing.") &&
StringUtil.startsWithChar(shortClassName, 'J')
) {
shortClassName = shortClassName.substring(1);
}
shortClassName = StringUtil.decapitalize(shortClassName);
return shortClassName;
}
  /**
   * Returns the first binding name of the form {@code baseName1, baseName2,
   * ...} (run through the project's field-naming code style) that is not yet
   * used by any component on the form. The loop is unbounded but terminates
   * because the form has finitely many bound components.
   */
  public static String getUniqueBinding(RadRootContainer root, final String baseName) {
    // Generate member name based on current code style
    for (int i = 0; true; i++) {
      final String nameCandidate = baseName + (i + 1);
      final String binding = JavaCodeStyleManager.getInstance(root.getProject()).propertyNameToVariableName(
        nameCandidate,
        VariableKind.FIELD
      );
      if (FormEditingUtil.findComponentWithBinding(root, binding) == null) {
        return binding;
      }
    }
  }
  /**
   * Creates a field binding for a freshly dropped component when the palette
   * item requests auto-binding, the component requires custom creation, or
   * the caller forces it.
   *
   * @param editor            the active GUI designer editor
   * @param insertedComponent the component that was just dropped
   * @param forceBinding      create the binding regardless of item settings
   */
  public static void createBindingWhenDrop(final GuiEditor editor, final RadComponent insertedComponent, final boolean forceBinding) {
    final ComponentItem item = Palette.getInstance(editor.getProject()).getItem(insertedComponent.getComponentClassName());
    if ((item != null && item.isAutoCreateBinding()) || insertedComponent.isCustomCreateRequired() || forceBinding) {
      doCreateBindingWhenDrop(editor, insertedComponent);
    }
  }

  /** Assigns a generated binding name and creates the backing field. */
  private static void doCreateBindingWhenDrop(final GuiEditor editor, final RadComponent insertedComponent) {
    // Now if the inserted component is a input control, we need to automatically create binding
    final String binding = suggestBinding(editor.getRootContainer(), insertedComponent.getComponentClassName());
    insertedComponent.setBinding(binding);
    insertedComponent.setDefaultBinding(true);
    createBindingField(editor, insertedComponent);
  }
  /**
   * Creates the bound field in the form's bound class, if that class exists,
   * the field is not already declared (including inherited), and the file is
   * writable. Errors during field creation are silently skipped.
   */
  public static void createBindingField(final GuiEditor editor, final RadComponent insertedComponent) {
    // Try to create field in the corresponding bound class
    final String classToBind = editor.getRootContainer().getClassToBind();
    if (classToBind != null) {
      final PsiClass aClass = FormEditingUtil.findClassToBind(editor.getModule(), classToBind);
      if (aClass != null && aClass.findFieldByName(insertedComponent.getBinding(), true) == null) {
        if (!FileModificationService.getInstance().preparePsiElementForWrite(aClass)) {
          return;
        }
        ApplicationManager.getApplication().runWriteAction(
          () -> CreateFieldFix.runImpl(editor.getProject(),
                                       editor.getRootContainer(),
                                       aClass,
                                       insertedComponent.getComponentClassName(),
                                       insertedComponent.getBinding(),
                                       false, // silently skip all errors (if any)
                                       null)
        );
      }
    }
  }
  /**
   * Mouse handling while insert mode is active: a press commits the insert at
   * the pointer location; movement updates the drop-location feedback and the
   * cursor (copy-drop vs. no-drop).
   */
  @Override
  protected void processMouseEvent(final MouseEvent e) {
    if (e.getID() == MouseEvent.MOUSE_PRESSED) {
      final ComponentItem componentItem = getComponentToInsert();
      if (componentItem != null) {
        processComponentInsert(e.getPoint(), componentItem);
      }
    }
    else if (e.getID() == MouseEvent.MOUSE_MOVED) {
      final ComponentItem componentToInsert = getComponentToInsert();
      if (componentToInsert != null) {
        ComponentItemDragObject dragObject = new ComponentItemDragObject(componentToInsert);
        myLastLocation = myGridInsertProcessor.processDragEvent(e.getPoint(), dragObject);
        if (myLastLocation.canDrop(dragObject)) {
          setCursor(FormEditingUtil.getCopyDropCursor());
        }
        else {
          setCursor(FormEditingUtil.getMoveNoDropCursor());
        }
      }
    }
  }
  /**
   * The item to insert: an explicitly set one takes precedence over the
   * currently active palette selection; null when neither is available.
   */
  @Nullable
  private ComponentItem getComponentToInsert() {
    return (myComponentToInsert != null)
           ? myComponentToInsert
           : PaletteToolWindowManager.getInstance(myEditor).getActiveItem(ComponentItem.class);
  }

  /** Inserts {@code item} at the drop location under the given editor point. */
  public void processComponentInsert(@NotNull final Point point, final ComponentItem item) {
    final ComponentDropLocation location = GridInsertProcessor.getDropLocation(myEditor.getRootContainer(), point);
    processComponentInsert(item, location);
  }
  /**
   * Performs the actual component insert: validates the item (nested-form
   * loops, module/library dependencies, editor writability), creates the Rad
   * component and, inside a single undoable command, drops it at the given
   * location, creates bindings, adjusts sizing and selection, and saves.
   */
  public void processComponentInsert(ComponentItem item, final ComponentDropLocation location) {
    myEditor.getActiveDecorationLayer().removeFeedback();
    myEditor.setDesignTimeInsets(2);
    // "Any component" palette entry: ask the user for the concrete class.
    item = replaceAnyComponentItem(myEditor, item, UIDesignerBundle.message("palette.non.palette.component.title"));
    if (item == null) {
      return;
    }
    if (!validateNestedFormInsert(item)) {
      return;
    }
    if (!checkAddDependencyOnInsert(item)) {
      return;
    }
    if (!myEditor.ensureEditable()) {
      return;
    }
    final boolean forceBinding = item.isAutoCreateBinding();
    myInsertedComponent = createInsertedComponent(myEditor, item);
    setCursor(Cursor.getDefaultCursor());
    if (myInsertedComponent == null) {
      // Creation failed/cancelled; drop the palette selection unless sticky.
      if (!mySticky) {
        PaletteToolWindowManager.getInstance(myEditor).clearActiveItem();
      }
      return;
    }

    final ComponentItemDragObject dragObject = new ComponentItemDragObject(item);
    if (location.canDrop(dragObject)) {
      CommandProcessor.getInstance().executeCommand(
        myEditor.getProject(),
        () -> {
          createBindingWhenDrop(myEditor, myInsertedComponent, forceBinding);
          final RadComponent[] components = new RadComponent[]{myInsertedComponent};
          location.processDrop(myEditor, components, null, dragObject);
          FormEditingUtil.selectSingleComponent(myEditor, myInsertedComponent);
          // In absolute (XY) layout, size the component to its preferred size.
          if (location.getContainer() != null && location.getContainer().isXY()) {
            Dimension newSize = myInsertedComponent.getPreferredSize();
            Util.adjustSize(myInsertedComponent.getDelegee(), myInsertedComponent.getConstraints(), newSize);
            myInsertedComponent.setSize(newSize);
          }
          // A bare atomic component dropped on the root gets wrapped in a grid.
          if (myInsertedComponent.getParent() instanceof RadRootContainer &&
              myInsertedComponent instanceof RadAtomicComponent) {
            GridBuildUtil.convertToGrid(myEditor);
            FormEditingUtil.selectSingleComponent(myEditor, myInsertedComponent);
          }
          checkBindTopLevelPanel();
          if (!mySticky) {
            PaletteToolWindowManager.getInstance(myEditor).clearActiveItem();
          }
          myEditor.refreshAndSave(false);
        }, UIDesignerBundle.message("command.insert.component"), null);
    }
    myComponentToInsert = null;
  }
  /**
   * If the component class is visible in the project but not from the form's
   * module, offers to add the missing module or library dependency.
   *
   * @return false if the user cancelled (insert should abort), true otherwise
   */
  private boolean checkAddDependencyOnInsert(final ComponentItem item) {
    if (item.getClassName().equals(HSpacer.class.getName()) || item.getClassName().equals(VSpacer.class.getName())) {
      // this is mostly required for IDEA developers, so that developers don't receive prompt to offer ui-designer-impl dependency
      return true;
    }
    PsiManager manager = PsiManager.getInstance(myEditor.getProject());
    final GlobalSearchScope projectScope = GlobalSearchScope.allScope(myEditor.getProject());
    final GlobalSearchScope moduleScope = GlobalSearchScope.moduleWithDependenciesAndLibrariesScope(myEditor.getModule());
    final PsiClass componentClass = JavaPsiFacade.getInstance(manager.getProject()).findClass(item.getClassName(), projectScope);
    // Class exists somewhere in the project but is not reachable from the
    // form's module — figure out where it lives and offer a dependency.
    if (componentClass != null && JavaPsiFacade.getInstance(manager.getProject()).findClass(item.getClassName(), moduleScope) == null) {
      final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(myEditor.getProject()).getFileIndex();
      List<OrderEntry> entries = fileIndex.getOrderEntriesForFile(componentClass.getContainingFile().getVirtualFile());
      if (entries.size() > 0) {
        if (entries.get(0) instanceof ModuleSourceOrderEntry) {
          if (!checkAddModuleDependency(item, (ModuleSourceOrderEntry)entries.get(0))) return false;
        }
        else if (entries.get(0) instanceof LibraryOrderEntry) {
          if (!checkAddLibraryDependency(item, (LibraryOrderEntry)entries.get(0))) return false;
        }
      }
    }
    return true;
  }
  /**
   * Prompts to add a dependency on the module that declares the component
   * class. Returns false only on Cancel; "No" proceeds without the dependency.
   */
  private boolean checkAddModuleDependency(final ComponentItem item, final ModuleSourceOrderEntry moduleSourceOrderEntry) {
    final Module ownerModule = moduleSourceOrderEntry.getOwnerModule();
    int rc = Messages.showYesNoCancelDialog(
      myEditor,
      UIDesignerBundle.message("add.module.dependency.prompt", item.getClassName(), ownerModule.getName(), myEditor.getModule().getName()),
      UIDesignerBundle.message("add.module.dependency.title"),
      Messages.getQuestionIcon());
    if (rc == Messages.CANCEL) return false;
    if (rc == Messages.YES) {
      ModuleRootModificationUtil.addDependency(myEditor.getModule(), ownerModule);
    }
    return true;
  }

  /**
   * Prompts to add a dependency on the library that contains the component
   * class. Module-level libraries are copied into the form's module; shared
   * libraries are referenced directly. Returns false only on Cancel.
   * NOTE(review): libraryOrderEntry.getLibrary() may be null for an invalid
   * entry — confirm callers never pass one.
   */
  private boolean checkAddLibraryDependency(final ComponentItem item, final LibraryOrderEntry libraryOrderEntry) {
    int rc = Messages.showYesNoCancelDialog(
      myEditor,
      UIDesignerBundle.message("add.library.dependency.prompt", item.getClassName(), libraryOrderEntry.getPresentableName(),
                               myEditor.getModule().getName()),
      UIDesignerBundle.message("add.library.dependency.title"),
      Messages.getQuestionIcon());
    if (rc == Messages.CANCEL) return false;
    if (rc == Messages.YES) {
      ApplicationManager.getApplication().runWriteAction(() -> {
        final ModifiableRootModel model = ModuleRootManager.getInstance(myEditor.getModule()).getModifiableModel();
        if (libraryOrderEntry.isModuleLevel()) {
          copyModuleLevelLibrary(libraryOrderEntry.getLibrary(), model);
        }
        else {
          model.addLibraryEntry(libraryOrderEntry.getLibrary());
        }
        model.commit();
      });
    }
    return true;
  }
/**
 * Clones a module-level library into {@code toModel}: creates a fresh unnamed
 * module library and copies every root URL for every root type, committing the
 * library model before the table model.
 */
private static void copyModuleLevelLibrary(final Library fromLibrary, final ModifiableRootModel toModel) {
  final LibraryTable.ModifiableModel tableModel = toModel.getModuleLibraryTable().getModifiableModel();
  final Library copy = tableModel.createLibrary(null);
  final Library.ModifiableModel copyModel = copy.getModifiableModel();
  for (OrderRootType rootType : OrderRootType.getAllTypes()) {
    for (String rootUrl : fromLibrary.getUrls(rootType)) {
      copyModel.addRoot(rootUrl, rootType);
    }
  }
  copyModel.commit();
  tableModel.commit();
}
/**
 * Verifies that inserting the item's bound form into the currently edited form
 * would not create a nested-form cycle. Items without a bound form always pass.
 *
 * @return {@code true} when insertion is safe; {@code false} after showing an
 *         error dialog when validation fails
 */
private boolean validateNestedFormInsert(final ComponentItem item) {
  final PsiFile boundForm = item.getBoundForm();
  if (boundForm == null) {
    return true;
  }
  try {
    final String sourceForm = FormEditingUtil.buildResourceName(boundForm);
    final String targetForm = FormEditingUtil.buildResourceName(myEditor.getPsiFile());
    Utils.validateNestedFormLoop(sourceForm, new PsiNestedFormLoader(myEditor.getModule()), targetForm);
    return true;
  }
  catch (Exception ex) {
    Messages.showErrorDialog(myEditor, ex.getMessage(), CommonBundle.getErrorTitle());
    return false;
  }
}
/**
 * Creates a design-time panel component from the palette's standard panel item.
 * Asserts (rather than returns null) when creation fails, since the built-in
 * panel item is expected to always instantiate.
 */
public static RadContainer createPanelComponent(GuiEditor editor) {
  final ComponentItem panelItem = Palette.getInstance(editor.getProject()).getPanelItem();
  final RadComponent component = createInsertedComponent(editor, panelItem);
  LOG.assertTrue(component != null);
  return (RadContainer)component;
}
/**
 * If {@code item} is the placeholder "any component" item, prompts the user to
 * pick a concrete component class; otherwise returns the item unchanged.
 *
 * @return the concrete item, the original item, or {@code null} when the user
 *         dismisses the dialog
 */
@Nullable
public static ComponentItem replaceAnyComponentItem(GuiEditor editor, ComponentItem item, final String title) {
  if (!item.isAnyComponent()) {
    return item;
  }
  // Work on a clone so the shared palette item is never mutated.
  final ComponentItem concreteItem = item.clone();
  final ComponentItemDialog dialog = new ComponentItemDialog(editor.getProject(), editor, concreteItem, true);
  dialog.setTitle(title);
  return dialog.showAndGet() ? concreteItem : null;
}
/**
 * Instantiates the design-time (Rad) component for a palette item. Resolution
 * order: a registered {@link RadComponentFactory} for the class, then a nested
 * form when the item is bound to one, then reflective instantiation of the
 * component class itself. Most failures produce a {@code RadErrorComponent}
 * placeholder instead of throwing, so the form still renders; {@code null} is
 * returned only when a registered factory itself fails.
 */
@Nullable
public static RadComponent createInsertedComponent(GuiEditor editor, ComponentItem item) {
  RadComponent result;
  final String id = FormEditingUtil.generateId(editor.getRootContainer());
  final ClassLoader loader = LoaderFactory.getInstance(editor.getProject()).getLoader(editor.getFile());
  RadComponentFactory factory = getRadComponentFactory(item.getClassName(), loader);
  if (factory != null) {
    try {
      result = factory.newInstance(editor, item.getClassName(), id);
    }
    catch (Exception e) {
      // Factory failure is the one case with no error-component fallback.
      LOG.error(e);
      return null;
    }
  }
  else {
    PsiFile boundForm = item.getBoundForm();
    if (boundForm != null) {
      // The palette item references another .form file: embed it as a nested form.
      final String formFileName = FormEditingUtil.buildResourceName(boundForm);
      try {
        result = new RadNestedForm(editor, formFileName, id);
      }
      catch (Exception ex) {
        String errorMessage = UIDesignerBundle.message("error.instantiating.nested.form", formFileName,
                                                       (ex.getMessage() != null ? ex.getMessage() : ex.toString()));
        result = RadErrorComponent.create(
          editor,
          id,
          item.getClassName(),
          null,
          errorMessage
        );
      }
    }
    else {
      try {
        // Load and initialize the component class with the form's class loader.
        final Class aClass = Class.forName(item.getClassName(), true, loader);
        if (item.isContainer()) {
          LOG.debug("Creating custom container instance");
          result = new RadContainer(editor, aClass, id);
        }
        else {
          result = new RadAtomicComponent(editor, aClass, id);
        }
      }
      catch (final UnsupportedClassVersionError ucve) {
        // Class was compiled for a newer JRE than the designer is running on.
        result = RadErrorComponent.create(editor, id, item.getClassName(), null,
                                          UIDesignerBundle.message("unsupported.component.class.version")
        );
      }
      catch (final Exception exc) {
        // Prefer a validation-specific diagnostic; fall back to a generic
        // "cannot be instantiated" message augmented with the exception text.
        String errorDescription = Utils.validateJComponentClass(loader, item.getClassName(), true);
        if (errorDescription == null) {
          errorDescription = UIDesignerBundle.message("error.class.cannot.be.instantiated", item.getClassName());
          final String message = FormEditingUtil.getExceptionMessage(exc);
          if (message != null) {
            errorDescription += ": " + message;
          }
        }
        result = RadErrorComponent.create(
          editor,
          id,
          item.getClassName(),
          null,
          errorDescription
        );
      }
    }
  }
  // All successful paths funnel through init() before the component is returned.
  result.init(editor, item);
  return result;
}
/**
 * Looks up the Rad component factory for {@code className}, resolving the class
 * through the project-wide class loader (obtained inside a read action).
 *
 * @return the factory, or {@code null} when none is registered
 */
@Nullable
public static RadComponentFactory getRadComponentFactory(final Project project, final String className) {
  final ClassLoader projectLoader =
    ReadAction.compute(() -> LoaderFactory.getInstance(project).getProjectClassLoader());
  return getRadComponentFactory(className, projectLoader);
}
/**
 * Resolves {@code className} with the given loader (without initializing it)
 * and delegates to the class-based lookup. When the class is not on the
 * classpath, falls back to a direct lookup by the raw class name.
 */
@Nullable
private static RadComponentFactory getRadComponentFactory(final String className, final ClassLoader loader) {
  final Class loadedClass;
  try {
    loadedClass = Class.forName(className, false, loader);
  }
  catch (ClassNotFoundException e) {
    // Class isn't loadable; a factory may still be registered under its name.
    return myComponentClassMap.get(className);
  }
  return getRadComponentFactory(loadedClass);
}
/**
 * Walks the class hierarchy upwards looking for a registered factory. The walk
 * stops before consulting {@code JPanel} as an ancestor: a JPanel subclass
 * should only become a RadContainer when it is explicitly marked "Is Container".
 */
@Nullable
public static RadComponentFactory getRadComponentFactory(Class componentClass) {
  for (Class current = componentClass; current != null; current = current.getSuperclass()) {
    final RadComponentFactory factory = myComponentClassMap.get(current.getName());
    if (factory != null) {
      return factory;
    }
    // if a component item is a JPanel subclass, a RadContainer should be created for it only
    // if it's marked as "Is Container"
    if (JPanel.class.equals(current.getSuperclass())) {
      return null;
    }
  }
  return null;
}
/**
 * After an insert, offers to create a field binding for the sole top-level
 * component when it is either the component just inserted or a one-child
 * container wrapping it, and it has no binding yet.
 */
private void checkBindTopLevelPanel() {
  if (myEditor.getRootContainer().getComponentCount() != 1) {
    return;
  }
  final RadComponent soleComponent = myEditor.getRootContainer().getComponent(0);
  if (soleComponent.getBinding() != null) {
    return;
  }
  if (soleComponent == myInsertedComponent ||
      (soleComponent instanceof RadContainer && ((RadContainer)soleComponent).getComponentCount() == 1 &&
       soleComponent == myInsertedComponent.getParent())) {
    doCreateBindingWhenDrop(myEditor, soleComponent);
  }
}
/**
 * Cancels the in-progress insert: restores the default design-time insets and
 * removes any insertion feedback painted on the decoration layer.
 *
 * @return always {@code true}
 */
@Override
protected boolean cancelOperation() {
  myEditor.setDesignTimeInsets(2);
  myEditor.getActiveDecorationLayer().removeFeedback();
  return true;
}
/**
 * Computes the drag cursor for the current mouse position: delegates to the
 * grid-insert processor when a palette component is active, otherwise shows
 * the "no drop" cursor.
 */
public Cursor processMouseMoveEvent(final MouseEvent e) {
  final ComponentItem activeItem = PaletteToolWindowManager.getInstance(myEditor).getActiveItem(ComponentItem.class);
  if (activeItem == null) {
    return FormEditingUtil.getMoveNoDropCursor();
  }
  return myGridInsertProcessor.processMouseMoveEvent(e.getPoint(), false, new ComponentItemDragObject(activeItem));
}
/**
 * Always {@code true}: this processor wants to receive the initial
 * mouse-pressed event.
 */
@Override
public boolean needMousePressed() {
  return true;
}
}
| apache-2.0 |
aws/aws-sdk-cpp | aws-cpp-sdk-lex-models/source/model/Message.cpp | 1792 | /**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/lex-models/model/Message.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace LexModelBuildingService
{
namespace Model
{
// Default constructor: the enum starts at NOT_SET and every "HasBeenSet"
// flag is false, so Jsonize() emits nothing until fields are assigned.
Message::Message() :
    m_contentType(ContentType::NOT_SET),
    m_contentTypeHasBeenSet(false),
    m_contentHasBeenSet(false),
    m_groupNumber(0),
    m_groupNumberHasBeenSet(false)
{
}
// JSON constructor: initializes all fields to their unset defaults, then
// populates from the JSON view via operator=.
Message::Message(JsonView jsonValue) :
    m_contentType(ContentType::NOT_SET),
    m_contentTypeHasBeenSet(false),
    m_contentHasBeenSet(false),
    m_groupNumber(0),
    m_groupNumberHasBeenSet(false)
{
  *this = jsonValue;
}
// Populates this Message from a JSON object. The corresponding "HasBeenSet"
// flag is raised only for keys actually present in the payload.
Message& Message::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("contentType"))
  {
    // Wire value is a string; map it onto the ContentType enum.
    m_contentType = ContentTypeMapper::GetContentTypeForName(jsonValue.GetString("contentType"));

    m_contentTypeHasBeenSet = true;
  }

  if(jsonValue.ValueExists("content"))
  {
    m_content = jsonValue.GetString("content");

    m_contentHasBeenSet = true;
  }

  if(jsonValue.ValueExists("groupNumber"))
  {
    m_groupNumber = jsonValue.GetInteger("groupNumber");

    m_groupNumberHasBeenSet = true;
  }

  return *this;
}
// Serializes only the fields whose "HasBeenSet" flag is raised, producing a
// sparse JSON payload (unset fields are omitted entirely).
JsonValue Message::Jsonize() const
{
  JsonValue payload;

  if(m_contentTypeHasBeenSet)
  {
    // The enum is written using its wire (string) name.
    payload.WithString("contentType", ContentTypeMapper::GetNameForContentType(m_contentType));
  }

  if(m_contentHasBeenSet)
  {
    payload.WithString("content", m_content);
  }

  if(m_groupNumberHasBeenSet)
  {
    payload.WithInteger("groupNumber", m_groupNumber);
  }

  return payload;
}
} // namespace Model
} // namespace LexModelBuildingService
} // namespace Aws
| apache-2.0 |
santais/iotivity_1.1 | cloud/account/src/main/java/org/iotivity/cloud/accountserver/AccountServerManager.java | 5380 | /*
* //******************************************************************
* //
* // Copyright 2016 Samsung Electronics All Rights Reserved.
* //
* //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
* //
* // Licensed under the Apache License, Version 2.0 (the "License");
* // you may not use this file except in compliance with the License.
* // You may obtain a copy of the License at
* //
* // http://www.apache.org/licenses/LICENSE-2.0
* //
* // Unless required by applicable law or agreed to in writing, software
* // distributed under the License is distributed on an "AS IS" BASIS,
* // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* // See the License for the specific language governing permissions and
* // limitations under the License.
* //
* //-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
*/
package org.iotivity.cloud.accountserver;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Random;

import org.iotivity.cloud.accountserver.db.AccountDBManager;
import org.iotivity.cloud.accountserver.oauth.GitHub;
import org.iotivity.cloud.util.Logger;
/**
*
* This class provides a set of APIs to handle requests about account
* information of authorized user.
*
*/
public class AccountServerManager {
/**
* API for requesting user account
*
* @param userId
* user identifier
* @param deviceId
* device identifier
* @return Boolean - true if registered, otherwise false
*/
public Boolean registerUserAccount(String userId, String deviceId) {
Boolean ret = false;
// store info to OAuthDBManager
ret = AccountDBManager.getInstance().registerUserDevice(userId,
deviceId);
return ret;
}
/**
* API for requesting user account and getting session code for registered
* user.
*
* @param userId
* user identifier
* @return String - session code for registered user
*/
public String registerUserAccount(String userId) {
String sessionCode = null;
sessionCode = generateSessionCode();
// store info to OAuthDBManager
AccountDBManager.getInstance().registerUserSessionCode(userId,
sessionCode);
return sessionCode;
}
/**
* API for requesting user identifier corresponding with authorization
* information.
*
* @param authCode
* authorization code
* @param authServer
* authorization server
* @return String - user identifier
*/
public String requestUserId(String authCode, String authServer) {
String userId = null;
String accessToken = getAccessToken(authCode, authServer);
userId = getUserId(accessToken, authServer);
return userId;
}
/**
* API for requesting user identifier corresponding with session code.
*
* @param sessionCode
* session code
* @return String - user identifier
*/
public String requestUserId(String sessionCode) {
String userId = null;
// get userId from MongDB
userId = AccountDBManager.getInstance().getUserId(sessionCode);
return userId;
}
/**
* API for getting devices corresponding with user identifier.
*
* @param userId
* user identifier
* @return ArrayList<String> - list of devices
*/
public ArrayList<String> requestAccountDevices(String userId) {
Logger.d("userId= " + userId);
ArrayList<String> deviceList = AccountDBManager.getInstance()
.getDevices(userId);
return deviceList;
}
private String getAccessToken(String authCode, String authServer) {
String accessToken = null;
if (authServer.equals(Constants.GITHUB)) {
GitHub gitHub = new GitHub();
accessToken = gitHub.requestAccessToken(authCode);
} else {
Logger.e("unsupported auth.server = " + authServer);
}
return accessToken;
}
private String getUserId(String accessToken, String authServer) {
String userId = null;
if (authServer.equals(Constants.GITHUB)) {
GitHub gitHub = new GitHub();
userId = gitHub.requestGetUserInfo(accessToken);
} else {
Logger.e("unsupported auth.server = " + authServer);
}
return userId;
}
private String generateSessionCode() {
StringBuffer sessionCode = new StringBuffer();
Random random = new Random();
int randomNum = random.nextInt(122);
char code;
// generate 16byte key with 0-9, A-Z, a-z
for (int k = 0; k < 16; k++) {
while (true) {
if ((randomNum >= 48 && randomNum <= 57)
|| (randomNum >= 65 && randomNum <= 90)
|| (randomNum >= 97 && randomNum <= 122)) {
code = (char) randomNum;
sessionCode.append(code);
randomNum = random.nextInt(122);
break;
} else {
randomNum = random.nextInt(122);
}
}
}
return sessionCode.toString();
}
}
| apache-2.0 |
pjulien/flatbuffers | src/util.cpp | 8627 | /*
* Copyright 2016 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// clang-format off
// Dont't remove `format off`, it prevent reordering of win-includes.
#define _POSIX_C_SOURCE 200112L // For stat from stat/stat.h and fseeko() (POSIX extensions).
#ifdef _WIN32
# ifndef WIN32_LEAN_AND_MEAN
# define WIN32_LEAN_AND_MEAN
# endif
# ifndef NOMINMAX
# define NOMINMAX
# endif
# ifdef _MSC_VER
# include <crtdbg.h>
# endif
# include <windows.h> // Must be included before <direct.h>
# include <direct.h>
# include <winbase.h>
# undef interface // This is also important because of reasons
#else
# define _XOPEN_SOURCE 600 // For PATH_MAX from limits.h (SUSv2 extension)
# include <limits.h>
#endif
// clang-format on
#include "flatbuffers/base.h"
#include "flatbuffers/util.h"
#include <sys/stat.h>
#include <clocale>
#include <fstream>
namespace flatbuffers {
// True when `name` can be opened for reading via an ifstream. Note this is
// an "openable" test, not a strict regular-file test.
bool FileExistsRaw(const char *name) {
  return std::ifstream(name).good();
}
// Reads the entire file `name` into *buf. Returns false when the path is a
// directory or cannot be opened; otherwise returns whether the stream stayed
// healthy after the read. Binary mode slurps the raw bytes in one read; text
// mode goes through the stream buffer so newline translation is applied.
bool LoadFileRaw(const char *name, bool binary, std::string *buf) {
  if (DirExists(name)) return false;
  std::ifstream ifs(name, binary ? std::ifstream::binary : std::ifstream::in);
  if (!ifs.is_open()) return false;
  if (binary) {
    // The fastest way to read a file into a string.
    ifs.seekg(0, std::ios::end);
    auto size = ifs.tellg();
    (*buf).resize(static_cast<size_t>(size));
    ifs.seekg(0, std::ios::beg);
    ifs.read(&(*buf)[0], (*buf).size());
  } else {
    // This is slower, but works correctly on all platforms for text files.
    std::ostringstream oss;
    oss << ifs.rdbuf();
    *buf = oss.str();
  }
  return !ifs.bad();
}
// Pluggable hooks so embedders can virtualize file access; they default to
// the raw implementations above and are swapped via Set*Function below.
static LoadFileFunction g_load_file_function = LoadFileRaw;
static FileExistsFunction g_file_exists_function = FileExistsRaw;
// Loads a file through the currently installed hook (LoadFileRaw by default);
// see SetLoadFileFunction.
bool LoadFile(const char *name, bool binary, std::string *buf) {
  FLATBUFFERS_ASSERT(g_load_file_function);
  return g_load_file_function(name, binary, buf);
}
// Checks file existence through the currently installed hook (FileExistsRaw
// by default); see SetFileExistsFunction.
bool FileExists(const char *name) {
  FLATBUFFERS_ASSERT(g_file_exists_function);
  return g_file_exists_function(name);
}
// Returns true when `name` exists and is a directory, using stat()/_stat()
// depending on the platform. Any stat failure (missing path, permissions)
// reports false.
bool DirExists(const char *name) {
  // clang-format off

  #ifdef _WIN32
    #define flatbuffers_stat _stat
    #define FLATBUFFERS_S_IFDIR _S_IFDIR
  #else
    #define flatbuffers_stat stat
    #define FLATBUFFERS_S_IFDIR S_IFDIR
  #endif
  // clang-format on
  struct flatbuffers_stat file_info;
  if (flatbuffers_stat(name, &file_info) != 0) return false;
  return (file_info.st_mode & FLATBUFFERS_S_IFDIR) != 0;
}
// Installs a custom file loader; passing nullptr restores the default
// (LoadFileRaw). Returns the hook that was previously installed.
LoadFileFunction SetLoadFileFunction(LoadFileFunction load_file_function) {
  const LoadFileFunction previous = g_load_file_function;
  g_load_file_function = load_file_function ? load_file_function : LoadFileRaw;
  return previous;
}
// Installs a custom existence check; passing nullptr restores the default
// (FileExistsRaw). Returns the hook that was previously installed.
FileExistsFunction SetFileExistsFunction(
    FileExistsFunction file_exists_function) {
  const FileExistsFunction previous = g_file_exists_function;
  g_file_exists_function =
      file_exists_function ? file_exists_function : FileExistsRaw;
  return previous;
}
// Writes `len` bytes of `buf` to `name`, replacing any existing contents.
// `binary` selects binary vs. text (newline-translating) mode. Returns false
// when the file cannot be opened or the write leaves the stream bad.
bool SaveFile(const char *name, const char *buf, size_t len, bool binary) {
  std::ofstream ofs(name, binary ? std::ofstream::binary : std::ofstream::out);
  if (!ofs.is_open()) return false;
  ofs.write(buf, len);
  return !ofs.bad();
}
// We internally store paths in posix format ('/'). Paths supplied
// by the user should go through PosixPath to ensure correct behavior
// on Windows when paths are string-compared.
static const char kPathSeparatorWindows = '\\';
// Characters treated as separators when splitting paths; the drive-letter
// ':' is deliberately excluded so "C:foo" isn't split.
static const char *PathSeparatorSet = "\\/";  // Intentionally no ':'
// "dir/file.ext" -> "dir/file"; returned unchanged when there is no '.'.
// Only the last extension is stripped ("a.tar.gz" -> "a.tar").
std::string StripExtension(const std::string &filepath) {
  const size_t dot = filepath.find_last_of('.');
  if (dot == std::string::npos) return filepath;
  return filepath.substr(0, dot);
}
// Returns the text after the last '.' ("a/b.txt" -> "txt"), or "" when the
// path contains no '.'.
std::string GetExtension(const std::string &filepath) {
  const size_t dot = filepath.find_last_of('.');
  if (dot == std::string::npos) return "";
  return filepath.substr(dot + 1);
}
// Returns the component after the last '/' or '\\' separator ("a/b/c.txt"
// -> "c.txt"); the whole input when it contains no separator.
std::string StripPath(const std::string &filepath) {
  const size_t sep = filepath.find_last_of(PathSeparatorSet);
  if (sep == std::string::npos) return filepath;
  return filepath.substr(sep + 1);
}
// Returns everything before the last '/' or '\\' separator ("a/b/c.txt"
// -> "a/b"); "" when the input contains no separator.
std::string StripFileName(const std::string &filepath) {
  const size_t sep = filepath.find_last_of(PathSeparatorSet);
  if (sep == std::string::npos) return "";
  return filepath.substr(0, sep);
}
// Joins `path` and `filename` with exactly one posix separator. A trailing
// Windows '\\' on `path` is normalized to '/', and a leading "./" on the
// combined result is dropped.
std::string ConCatPathFileName(const std::string &path,
                               const std::string &filename) {
  std::string filepath = path;
  if (filepath.length()) {
    // Mutate the last character in place via the reference: normalize a
    // Windows separator, or append a separator when one is missing.
    char &filepath_last_character = string_back(filepath);
    if (filepath_last_character == kPathSeparatorWindows) {
      filepath_last_character = kPathSeparator;
    } else if (filepath_last_character != kPathSeparator) {
      filepath += kPathSeparator;
    }
  }
  filepath += filename;
  // Ignore './' at the start of filepath.
  if (filepath[0] == '.' && filepath[1] == kPathSeparator) {
    filepath.erase(0, 2);
  }
  return filepath;
}
// Returns `path` with every Windows '\\' separator replaced by posix '/'.
std::string PosixPath(const char *path) {
  std::string result(path);
  for (char &c : result) {
    if (c == '\\') c = '/';
  }
  return result;
}
// Recursively creates every missing directory on `filepath` (mkdir -p
// semantics). mkdir errors are deliberately ignored: "already exists" is the
// common, harmless case, and real failures surface when the path is used.
void EnsureDirExists(const std::string &filepath) {
  auto parent = StripFileName(filepath);
  if (parent.length()) EnsureDirExists(parent);
  // clang-format off
  #ifdef _WIN32
    (void)_mkdir(filepath.c_str());
  #else
    mkdir(filepath.c_str(), S_IRWXU|S_IRGRP|S_IXGRP);
  #endif
  // clang-format on
}
// Resolves `filepath` to an absolute path via GetFullPathNameA (Windows) or
// realpath (POSIX), falling back to the input unchanged on failure — or when
// resolution is compiled out with FLATBUFFERS_NO_ABSOLUTE_PATH_RESOLUTION.
std::string AbsolutePath(const std::string &filepath) {
  // clang-format off
  #ifdef FLATBUFFERS_NO_ABSOLUTE_PATH_RESOLUTION
  return filepath;
  #else
  #ifdef _WIN32
  char abs_path[MAX_PATH];
  return GetFullPathNameA(filepath.c_str(), MAX_PATH, abs_path, nullptr)
  #else
  char abs_path[PATH_MAX];
  return realpath(filepath.c_str(), abs_path)
  #endif
  // Both APIs signal failure with a null/zero return; keep the input then.
  ? abs_path
  : filepath;
  #endif // FLATBUFFERS_NO_ABSOLUTE_PATH_RESOLUTION
  // clang-format on
}
// Locale-independent code.
#if defined(FLATBUFFERS_LOCALE_INDEPENDENT) && \
    (FLATBUFFERS_LOCALE_INDEPENDENT > 0)
// clang-format off
// Allocate locale instance at startup of application.
ClassicLocale ClassicLocale::instance_;

// Per-platform creation/destruction of the cached "C" locale handle
// (_create_locale on MSVC, newlocale elsewhere).
#ifdef _MSC_VER
  ClassicLocale::ClassicLocale()
    : locale_(_create_locale(LC_ALL, "C")) {}
  ClassicLocale::~ClassicLocale() { _free_locale(locale_); }
#else
  ClassicLocale::ClassicLocale()
    : locale_(newlocale(LC_ALL, "C", nullptr)) {}
  ClassicLocale::~ClassicLocale() { freelocale(locale_); }
#endif
// clang-format on
#endif  // !FLATBUFFERS_LOCALE_INDEPENDENT
// Strips one matching pair of surrounding quotes (single or double).
// Strings that are too short, or whose first and last characters don't form
// a matching quote pair, are returned unchanged.
std::string RemoveStringQuotes(const std::string &s) {
  if (s.size() < 2) return s;
  const char first = s.front();
  if ((first != '\"' && first != '\'') || s.back() != first) return s;
  return s.substr(1, s.length() - 2);
}
// Sets the process-wide locale (LC_ALL). On success, optionally stores the
// applied locale name into *_value and returns true; returns false when the
// locale is unknown to the C runtime.
bool SetGlobalTestLocale(const char *locale_name, std::string *_value) {
  const char *applied = setlocale(LC_ALL, locale_name);
  if (applied == nullptr) return false;
  if (_value != nullptr) *_value = applied;
  return true;
}
// Reads environment variable `var_name`. Returns false when it is unset;
// otherwise optionally copies its value into *_value and returns true.
bool ReadEnvironmentVariable(const char *var_name, std::string *_value) {
#ifdef _MSC_VER
  __pragma(warning(disable : 4996));  // _CRT_SECURE_NO_WARNINGS
#endif
  const char *env_value = std::getenv(var_name);
  if (env_value == nullptr) return false;
  if (_value != nullptr) *_value = env_value;
  return true;
}
// MSVC-only: routes CRT debug reports (warnings, errors, asserts) to STDOUT
// so CI runs don't hang on modal dialogs, keeping the dialog only when a
// debugger is attached. Compiles to a no-op on other toolchains.
void SetupDefaultCRTReportMode() {
  // clang-format off

  #ifdef _MSC_VER
    // By default, send all reports to STDOUT to prevent CI hangs.
    // Enable assert report box [Abort|Retry|Ignore] if a debugger is present.
    const int dbg_mode = (_CRTDBG_MODE_FILE | _CRTDBG_MODE_DEBUG) |
                         (IsDebuggerPresent() ? _CRTDBG_MODE_WNDW : 0);
    (void)dbg_mode; // release mode fix
    // CrtDebug reports to _CRT_WARN channel.
    _CrtSetReportMode(_CRT_WARN, dbg_mode);
    _CrtSetReportFile(_CRT_WARN, _CRTDBG_FILE_STDOUT);
    // The assert from <assert.h> reports to _CRT_ERROR channel
    _CrtSetReportMode(_CRT_ERROR, dbg_mode);
    _CrtSetReportFile(_CRT_ERROR, _CRTDBG_FILE_STDOUT);
    // Internal CRT assert channel?
    _CrtSetReportMode(_CRT_ASSERT, dbg_mode);
    _CrtSetReportFile(_CRT_ASSERT, _CRTDBG_FILE_STDOUT);
  #endif
  // clang-format on
}
} // namespace flatbuffers
| apache-2.0 |
bazelbuild/bazel | src/main/java/com/google/devtools/build/lib/analysis/EventHandlingErrorReporter.java | 4140 | // Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.analysis;
import com.google.devtools.build.lib.analysis.config.BuildConfigurationValue;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.packages.Attribute;
import net.starlark.java.syntax.Location;
/**
* Base class for implementations of {@link
* com.google.devtools.build.lib.analysis.RuleErrorConsumer}.
*
* <p>Do not create new implementations of this class - instead, use {@link RuleContext} in Native
* rule definitions, and {@link StarlarkErrorReporter} in Starlark API definitions. For use in
* testing, implement {@link RuleErrorConsumer} instead.
*/
public abstract class EventHandlingErrorReporter implements RuleErrorConsumer {
  private final String ruleClassNameForLogging;
  private final AnalysisEnvironment env;

  protected EventHandlingErrorReporter(String ruleClassNameForLogging, AnalysisEnvironment env) {
    this.ruleClassNameForLogging = ruleClassNameForLogging;
    this.env = env;
  }

  @Override
  public boolean hasErrors() {
    return env.hasErrors();
  }

  /** Reports a rule-level error, prefixed with the rule class and label. */
  @Override
  public void ruleError(String message) {
    reportError(getRuleLocation(), prefixRuleMessage(message));
  }

  /** Reports an error attached to a specific attribute of the rule. */
  @Override
  public void attributeError(String attrName, String message) {
    reportError(getRuleLocation(), completeAttributeMessage(attrName, message));
  }

  /** Reports a rule-level warning, prefixed with the rule class and label. */
  @Override
  public void ruleWarning(String message) {
    env.getEventHandler().handle(Event.warn(getRuleLocation(), prefixRuleMessage(message)));
  }

  /** Reports a warning attached to a specific attribute of the rule. */
  @Override
  public void attributeWarning(String attrName, String message) {
    reportWarning(getRuleLocation(), completeAttributeMessage(attrName, message));
  }

  /** Emits a warning event at the given location. */
  public void reportWarning(Location location, String message) {
    env.getEventHandler().handle(Event.warn(location, message));
  }

  private void reportError(Location location, String message) {
    // TODO(ulfjack): Consider generating the error message from the root cause event rather than
    // the other way round.
    // Only the first error posts an analysis root cause, so consumers never
    // see duplicate root-cause events for the same target.
    if (!hasErrors()) {
      env.getEventHandler()
          .post(new AnalysisRootCauseEvent(getConfiguration(), getLabel(), message));
    }
    env.getEventHandler().handle(Event.error(location, message));
  }

  private String prefixRuleMessage(String message) {
    return String.format("in %s rule %s: %s", ruleClassNameForLogging, getLabel(), message);
  }

  private String maskInternalAttributeNames(String name) {
    // Implicit (tool-added) attribute names would only confuse users.
    return Attribute.isImplicit(name) ? "(an implicit dependency)" : name;
  }

  /**
   * Prefixes the given message with details about the rule and appends details about the macro that
   * created this rule, if applicable.
   */
  private String completeAttributeMessage(String attrName, String message) {
    // Appends a note to the given message if the offending rule was created by a macro.
    return String.format(
        "in %s attribute of %s rule %s: %s%s",
        maskInternalAttributeNames(attrName),
        ruleClassNameForLogging,
        getLabel(),
        message,
        getMacroMessageAppendix(attrName));
  }

  /** Returns a string describing the macro that created this rule, or an empty string. */
  protected abstract String getMacroMessageAppendix(String attrName);

  protected abstract Label getLabel();

  protected abstract BuildConfigurationValue getConfiguration();

  protected abstract Location getRuleLocation();
}
| apache-2.0 |
mani-monaj/ros_buildfarm | scripts/doc/run_doc_reconfigure_job.py | 1657 | #!/usr/bin/env python3
import argparse
import copy
import sys
from ros_buildfarm.argument import add_argument_build_name
from ros_buildfarm.argument import add_argument_config_url
from ros_buildfarm.argument import \
add_argument_distribution_repository_key_files
from ros_buildfarm.argument import add_argument_distribution_repository_urls
from ros_buildfarm.argument import add_argument_dockerfile_dir
from ros_buildfarm.argument import add_argument_groovy_script
from ros_buildfarm.argument import add_argument_rosdistro_name
from ros_buildfarm.common import get_distribution_repository_keys
from ros_buildfarm.common import get_user_id
from ros_buildfarm.templates import create_dockerfile
def main(argv=sys.argv[1:]):
    """Parse job arguments and generate the 'doc' reconfigure Dockerfile."""
    parser = argparse.ArgumentParser(
        description="Run the 'doc' job")
    add_argument_config_url(parser)
    add_argument_rosdistro_name(parser)
    add_argument_build_name(parser, 'doc')
    add_argument_distribution_repository_urls(parser)
    add_argument_distribution_repository_key_files(parser)
    add_argument_groovy_script(parser)
    add_argument_dockerfile_dir(parser)
    args = parser.parse_args(argv)

    # Template inputs: every parsed argument, plus the resolved repository
    # keys and the invoking user's uid.
    data = copy.deepcopy(vars(args))
    data['distribution_repository_urls'] = args.distribution_repository_urls
    data['distribution_repository_keys'] = get_distribution_repository_keys(
        args.distribution_repository_urls,
        args.distribution_repository_key_files)
    data['uid'] = get_user_id()

    create_dockerfile(
        'doc/doc_create_reconfigure_task.Dockerfile.em',
        data, args.dockerfile_dir)


if __name__ == '__main__':
    main()
| apache-2.0 |
AleksNeStu/ggrc-core | src/ggrc/assets/javascripts/components/questions-link/questions-link.js | 885 | /*!
Copyright (C) 2017 Google Inc.
Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
*/
(function (can, GGRCQ) {
  'use strict';

  // Registers the <questions-link> component, which renders a link to the
  // questionnaire service for the model instance passed in via `instance`.
  GGRC.Components('questionsLink', {
    tag: 'questions-link',
    template: can.view(
      GGRC.mustache_path +
      '/components/questions-link/questions-link.mustache'
    ),
    viewModel: {
      define: {
        // Computed: whether the questionnaire service recognizes this
        // instance's model type (looked up by its singular title).
        hasQuestions: {
          type: Boolean,
          get: function () {
            var instance = this.attr('instance');
            return GGRCQ.hasQuestions(instance.class.title_singular);
          }
        },
        // Computed: URL of the questionnaire page for this instance.
        questionsUrl: {
          type: String,
          get: function () {
            var instance = this.attr('instance');
            return GGRCQ.getQuestionsUrl(instance);
          }
        }
      },
      // Model instance the link points to; supplied by the parent template.
      instance: null
    }
  });
})(window.can, window.GGRC.Utils.GGRCQ);
| apache-2.0 |
IllusionRom-deprecated/android_platform_tools_idea | plugins/git4idea/src/git4idea/history/wholeTree/GitLogAssembler.java | 3190 | /*
* Copyright 2000-2010 JetBrains s.r.o.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.history.wholeTree;
import com.intellij.openapi.application.ModalityState;
import com.intellij.openapi.progress.BackgroundTaskQueue;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.vfs.VirtualFile;
import javax.swing.*;
import java.util.List;
/**
* @author irengrig
*/
public class GitLogAssembler implements GitLog {
  private final Project myProject;
  // True when the log covers the whole project (as opposed to explicit roots).
  private final boolean myProjectScope;
  private GitLogUI myGitLogUI;
  private MediatorImpl myMediator;
  private DetailsLoaderImpl myDetailsLoader;
  private DetailsCache myDetailsCache;
  private LoadController myLoadController;
  private BigTableTableModel myTableModel;
  // Set once the first rootsChanged() has applied the persisted settings.
  private boolean myInitialized;

  //@CalledInAwt
  public GitLogAssembler(final Project project, boolean projectScope, final GitCommitsSequentially gitCommitsSequentially) {
    myProject = project;
    myProjectScope = projectScope;
    // Construction/wiring sequence kept as-is: the loader and cache reference
    // each other, so they are linked only after both exist, and the mediator
    // is fully wired after the UI has been created.
    myMediator = new MediatorImpl(myProject, gitCommitsSequentially);
    myGitLogUI = new GitLogUI(myProject, myMediator);
    myTableModel = myGitLogUI.getTableModel();
    final BackgroundTaskQueue queue = new BackgroundTaskQueue(project, "Git log details");
    myDetailsLoader = new DetailsLoaderImpl(myProject, queue);
    myDetailsCache = new DetailsCache(myProject, myGitLogUI.getUIRefresh(), myDetailsLoader, queue);
    myDetailsLoader.setDetailsCache(myDetailsCache);
    myGitLogUI.setDetailsCache(myDetailsCache);
    myGitLogUI.createMe();
    myGitLogUI.setProjectScope(projectScope);
    // modality state?
    myLoadController = new LoadController(myProject, myMediator, myDetailsCache, gitCommitsSequentially);
    myMediator.setLoader(myLoadController);
    myMediator.setTableModel(myTableModel);
    myMediator.setUIRefresh(myGitLogUI.getRefreshObject());
    myMediator.setDetailsLoader(myDetailsLoader);
    myTableModel.setCache(myDetailsCache);
    // Dispose the UI together with this assembler.
    Disposer.register(this, myGitLogUI);
  }

  @Override
  public JComponent getVisualComponent() {
    return myGitLogUI.getPanel();
  }

  @Override
  public void setModalityState(ModalityState state) {
    myDetailsCache.setModalityState(state);
    myDetailsLoader.setModalityState(state);
  }

  @Override
  public void selectCommit(String commitId) {
    myGitLogUI.selectCommit(commitId);
  }

  @Override
  public void rootsChanged(List<VirtualFile> roots) {
    myGitLogUI.rootsChanged(roots);
    // On the first notification for a project-scoped log, restore the
    // previously persisted UI settings.
    if (myProjectScope && ! myInitialized) {
      myInitialized = true;
      myGitLogUI.initFromSettings();
    }
  }

  @Override
  public void dispose() {
  }
}
| apache-2.0 |
bryanl/doit | vendor/github.com/bryanl/godomock/account.go | 193 | package godomock
import "github.com/digitalocean/godo"
// AccountService is the godo AccountService interface.
type AccountService interface {
	// Get retrieves the account of the authenticated client.
	Get() (*godo.Account, *godo.Response, error)
}
| apache-2.0 |
RachelTucker/ds3_net_sdk | Ds3/ResponseParsers/ListMultiPartUploadPartsResponseParser.cs | 1739 | /*
* ******************************************************************************
* Copyright 2014-2017 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
// This code is auto-generated, do not modify
using Ds3.Calls;
using Ds3.Models;
using Ds3.Runtime;
using System.Linq;
using System.Net;
using System.Xml.Linq;
namespace Ds3.ResponseParsers
{
/// <summary>
/// Parses the HTTP response of a ListMultiPartUploadParts request into a
/// typed <see cref="ListMultiPartUploadPartsResponse"/>.
/// </summary>
internal class ListMultiPartUploadPartsResponseParser : IResponseParser<ListMultiPartUploadPartsRequest, ListMultiPartUploadPartsResponse>
{
    public ListMultiPartUploadPartsResponse Parse(ListMultiPartUploadPartsRequest request, IWebResponse response)
    {
        // The response (and its stream) are disposed even when parsing throws.
        using (response)
        {
            // Anything other than HTTP 200 is rejected here.
            ResponseParseUtilities.HandleStatusCode(response, (HttpStatusCode)200);
            using (var stream = response.GetResponseStream())
            {
                // Payload is an XML document rooted at <ListPartsResult>.
                return new ListMultiPartUploadPartsResponse(
                    ModelParsers.ParseListPartsResult(
                        XmlExtensions.ReadDocument(stream).ElementOrThrow("ListPartsResult"))
                );
            }
        }
    }
}
} | apache-2.0 |
ricardoquesada/c64-the-muni-race | tools/convert.py | 1323 | import os
def convert(src="mainscreen.vchar64proj", dst="mainscreen2.vchar64proj"):
    """Copy a VChar64 project file, toggling the high bit of every byte in
    a 40x14 character region of the map (C64 reversed-video chars).

    The file layout consumed here (VChar64 v2, per the original notes):
      32-byte header:
        char id[5];              # must be "VChar"
        char version;            # must be 2
        char colors[4];          # BGR, MC1, MC2, RAM
        char vic_res;            # 0 = hi-res, 1 = multicolor
        quint16 num_chars;       # number of chars - 1 (low, high)
        quint8 tile_width;       # 1-8
        quint8 tile_height;      # 1-8
        quint8 char_interleaved; # 1-128
        char color_mode;         # 0 = global, 1 = per tile
        quint16 map_width;       # low, high
        quint16 map_height;      # low, high
        char reserved[11];       # pad to 32 bytes total
      then 8*256 bytes of charset data and 256 further bytes -- all copied
      through unchanged (presumably per-char colors; not interpreted here).

    :param src: path of the input project file.
    :param dst: path the converted copy is written to.
    """
    HEADER_BYTES = 32 + 8 * 256 + 256  # header + charset + trailing 256 bytes
    INVERT_BYTES = 40 * 14             # region whose chars get bit 7 toggled
    TAIL_BYTES = 40 * 11               # rest of the 40x25 map, copied as-is

    with open(src, "rb") as orig, open(dst, "wb") as new:
        new.write(orig.read(HEADER_BYTES))
        # Original expression was `b | 0x80 if b < 0x80 else b & 0x7f`,
        # which is exactly "toggle bit 7", i.e. b ^ 0x80.
        new.write(bytes(b ^ 0x80 for b in orig.read(INVERT_BYTES)))
        new.write(orig.read(TAIL_BYTES))


if __name__ == "__main__":
    convert()
| apache-2.0 |
alexryndin/ambari | ambari-server/src/main/java/org/apache/ambari/server/api/services/ActionService.java | 4745 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.api.services;
import org.apache.ambari.server.api.resources.ResourceInstance;
import org.apache.ambari.server.controller.spi.Resource;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriInfo;
import java.util.Collections;
/**
* Service responsible for action definition resource requests.
*/
@Path("/actions/")
public class ActionService extends BaseService {

    /**
     * Handles: GET /actions/{actionName}
     * Returns the single action definition identified by the path parameter.
     *
     * @param body       request body (ignored for GET, passed through to the handler)
     * @param headers    http headers
     * @param ui         uri info
     * @param actionName action name
     * @return action definition instance representation
     */
    @GET
    @Path("{actionName}")
    @Produces("text/plain")
    public Response getActionDefinition(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                                        @PathParam("actionName") String actionName) {
        final ResourceInstance resource = createActionDefinitionResource(actionName);
        return handleRequest(headers, body, ui, Request.Type.GET, resource);
    }

    /**
     * Handles: GET /actions
     * Returns the collection of all action definitions.
     *
     * @param body    request body (ignored for GET, passed through to the handler)
     * @param headers http headers
     * @param ui      uri info
     * @return action definition collection resource representation
     */
    @GET
    @Produces("text/plain")
    public Response getActionDefinitions(String body, @Context HttpHeaders headers, @Context UriInfo ui) {
        // A null action name addresses the whole collection.
        final ResourceInstance resource = createActionDefinitionResource(null);
        return handleRequest(headers, body, ui, Request.Type.GET, resource);
    }

    /**
     * Handles: POST /actions/{actionName}
     * Creates the named action definition.
     *
     * @param body       request body describing the definition to create
     * @param headers    http headers
     * @param ui         uri info
     * @param actionName action name
     * @return information regarding the action definition being created
     */
    @POST
    @Path("{actionName}")
    @Produces("text/plain")
    public Response createActionDefinition(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                                           @PathParam("actionName") String actionName) {
        final ResourceInstance resource = createActionDefinitionResource(actionName);
        return handleRequest(headers, body, ui, Request.Type.POST, resource);
    }

    /**
     * Handles: PUT /actions/{actionName}
     * Updates the named action definition.
     *
     * @param body       request body describing the update
     * @param headers    http headers
     * @param ui         uri info
     * @param actionName action name
     * @return information regarding the updated action
     */
    @PUT
    @Path("{actionName}")
    @Produces("text/plain")
    public Response updateActionDefinition(String body, @Context HttpHeaders headers, @Context UriInfo ui,
                                           @PathParam("actionName") String actionName) {
        final ResourceInstance resource = createActionDefinitionResource(actionName);
        return handleRequest(headers, body, ui, Request.Type.PUT, resource);
    }

    /**
     * Handles: DELETE /actions/{actionName}
     * Deletes the named action definition.
     *
     * @param headers    http headers
     * @param ui         uri info
     * @param actionName action name
     * @return information regarding the deleted action definition
     */
    @DELETE
    @Path("{actionName}")
    @Produces("text/plain")
    public Response deleteActionDefinition(@Context HttpHeaders headers, @Context UriInfo ui,
                                           @PathParam("actionName") String actionName) {
        // DELETE carries no body.
        final ResourceInstance resource = createActionDefinitionResource(actionName);
        return handleRequest(headers, null, ui, Request.Type.DELETE, resource);
    }

    /**
     * Builds an action-definition resource instance for the given name
     * (null addresses the whole collection).
     *
     * @param actionName action name
     * @return an action definition resource instance
     */
    ResourceInstance createActionDefinitionResource(String actionName) {
        return createResource(Resource.Type.Action,
                Collections.singletonMap(Resource.Type.Action, actionName));
    }
}
| apache-2.0 |
dump247/aws-sdk-java | aws-java-sdk-s3/src/main/java/com/amazonaws/services/s3/internal/crypto/EncryptionUtils.java | 47937 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.s3.internal.crypto;
import static com.amazonaws.util.LengthCheckInputStream.EXCLUDE_SKIPPED_BYTES;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.security.Key;
import java.security.NoSuchAlgorithmException;
import java.security.Provider;
import java.security.SecureRandom;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import javax.crypto.Cipher;
import javax.crypto.KeyGenerator;
import javax.crypto.SecretKey;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.SecretKeySpec;
import com.amazonaws.AmazonClientException;
import com.amazonaws.services.s3.Headers;
import com.amazonaws.services.s3.internal.InputSubstream;
import com.amazonaws.services.s3.internal.Mimetypes;
import com.amazonaws.services.s3.internal.RepeatableCipherInputStream;
import com.amazonaws.services.s3.internal.RepeatableFileInputStream;
import com.amazonaws.services.s3.model.DeleteObjectRequest;
import com.amazonaws.services.s3.model.EncryptionMaterials;
import com.amazonaws.services.s3.model.EncryptionMaterialsAccessor;
import com.amazonaws.services.s3.model.EncryptionMaterialsProvider;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
import com.amazonaws.services.s3.model.ObjectMetadata;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectId;
import com.amazonaws.services.s3.model.S3ObjectInputStream;
import com.amazonaws.services.s3.model.StaticEncryptionMaterialsProvider;
import com.amazonaws.services.s3.model.UploadPartRequest;
import com.amazonaws.util.Base64;
import com.amazonaws.util.LengthCheckInputStream;
import com.amazonaws.util.StringUtils;
import com.amazonaws.util.json.JSONException;
import com.amazonaws.util.json.JSONObject;
/**
* This internal class is no longer used, and will be removed in the future.
*
* The EncryptionUtils class encrypts and decrypts data stored in S3. It can be used to prepare
* requests for encryption before they are stored in S3 and to decrypt objects that are retrieved from S3.
*/
@Deprecated
public class EncryptionUtils {
/** Suffix appended to the end of instruction file names */
@Deprecated
static final String INSTRUCTION_SUFFIX = ".instruction";
/**
* Returns an updated request where the metadata contains encryption information and the input stream contains
* the encrypted object contents. The specified encryption materials will be used to encrypt and decrypt data.
*
* @param request
* The request whose contents are to be encrypted.
* @param materials
* The encryption materials to be used to encrypt and decrypt data.
* @param cryptoProvider
* The crypto provider whose encryption implementation will be used to encrypt data
* @return
* The updated request where the metadata is set up for encryption and input stream contains
* the encrypted contents.
*
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static PutObjectRequest encryptRequestUsingMetadata(PutObjectRequest request, EncryptionMaterials materials, Provider cryptoProvider) {
    // Build a one-time envelope key and cipher for this upload.
    EncryptionInstruction instruction = EncryptionUtils.generateInstruction(materials, cryptoProvider);
    // Swap the request's content for an encrypting stream.
    PutObjectRequest encryptedRequest = EncryptionUtils.encryptRequestUsingInstruction(request, instruction);
    // Record the wrapped key / IV / materials description in the object metadata.
    EncryptionUtils.updateMetadataWithEncryptionInstruction(request, instruction);
    return encryptedRequest;
}
/**
* Returns an updated object where the object content input stream contains the decrypted contents.
*
* @param object
* The object whose contents are to be decrypted.
* @param materials
* The encryption materials to be used to encrypt and decrypt data.
* @param cryptoProvider
* The crypto provider whose encryption implementation will be used to decrypt data
* @return
* The updated object where the object content input stream contains the decrypted contents.
*
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static S3Object decryptObjectUsingMetadata(S3Object object, EncryptionMaterials materials, Provider cryptoProvider) {
    // Reconstruct the decryption instruction from the object's own metadata
    // headers, then decrypt the content stream with it.
    EncryptionInstruction instruction =
            EncryptionUtils.buildInstructionFromObjectMetadata(object, materials, cryptoProvider);
    return EncryptionUtils.decryptObjectUsingInstruction(object, instruction);
}
/**
* Generates an instruction that will be used to encrypt an object.
*
* @param materials
* The encryption materials to be used to encrypt and decrypt data.
* @param cryptoProvider
* The crypto provider whose encryption implementation will be used to encrypt and decrypt data.
* @return
* The instruction that will be used to encrypt an object.
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static EncryptionInstruction generateInstruction(EncryptionMaterials materials, Provider cryptoProvider) {
    // Wrap the fixed materials in a provider and reuse the provider-based overload.
    EncryptionMaterialsProvider provider = new StaticEncryptionMaterialsProvider(materials);
    return generateInstruction(provider, cryptoProvider);
}
/**
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static EncryptionInstruction generateInstruction(EncryptionMaterialsProvider materialsProvider,
        Provider cryptoProvider) {
    // Use the provider's current (description-less) materials.
    EncryptionMaterials materials = materialsProvider.getEncryptionMaterials();
    return buildInstruction(materials, cryptoProvider);
}
/**
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static EncryptionInstruction generateInstruction(EncryptionMaterialsProvider materialsProvider,
        Map<String, String> materialsDescription, Provider cryptoProvider) {
    // Look up the materials matching the given description, then build the instruction.
    EncryptionMaterials materials = materialsProvider.getEncryptionMaterials(materialsDescription);
    return buildInstruction(materials, cryptoProvider);
}
/**
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static EncryptionInstruction buildInstruction(EncryptionMaterials materials, Provider cryptoProvider) {
    // Fresh one-time symmetric key for this object's content.
    SecretKey envelopeKey = generateOneTimeUseSymmetricKey();
    // Cipher factory that will encrypt the object data with that key.
    CipherFactory cipherFactory = new CipherFactory(envelopeKey, Cipher.ENCRYPT_MODE, null, cryptoProvider);
    // Wrap (encrypt) the envelope key under the caller's long-term materials.
    byte[] wrappedKey = getEncryptedSymmetricKey(envelopeKey, materials, cryptoProvider);
    return new EncryptionInstruction(materials.getMaterialsDescription(), wrappedKey,
            envelopeKey, cipherFactory);
}
/**
* Builds an instruction object from the contents of an instruction file.
*
* @param instructionFile
* A non-null instruction file retrieved from S3 that contains encryption information
* @param materials
* The non-null encryption materials to be used to encrypt and decrypt data.
* @param cryptoProvider
* The crypto provider whose encryption implementation will be used to encrypt and decrypt data. Null is ok and uses the
* preferred provider from Security.getProviders().
* @return
* A non-null instruction object containing encryption information
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static EncryptionInstruction buildInstructionFromInstructionFile(S3Object instructionFile, EncryptionMaterials materials, Provider cryptoProvider) {
    // Adapt the fixed materials to a provider and reuse the provider-based overload.
    StaticEncryptionMaterialsProvider provider = new StaticEncryptionMaterialsProvider(materials);
    return buildInstructionFromInstructionFile(instructionFile, provider, cryptoProvider);
}
/**
* Builds an instruction object from the contents of an instruction file.
*
* @param instructionFile
* A non-null instruction file retrieved from S3 that contains encryption information
* @param materialsProvider
* The non-null encryption materials provider to be used to encrypt and decrypt data.
* @param cryptoProvider
* The crypto provider whose encryption implementation will be used to encrypt and decrypt data. Null is ok and uses the
* preferred provider from Security.getProviders().
* @return
* A non-null instruction object containing encryption information
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static EncryptionInstruction buildInstructionFromInstructionFile(S3Object instructionFile, EncryptionMaterialsProvider materialsProvider, Provider cryptoProvider) {
    JSONObject instructionJSON = parseJSONInstruction(instructionFile);
    try {
        // Get fields from instruction object
        String encryptedSymmetricKeyB64 = instructionJSON.getString(Headers.CRYPTO_KEY);
        String ivB64 = instructionJSON.getString(Headers.CRYPTO_IV);
        // Materials description is optional (tryGetString may return null).
        String materialsDescriptionString = instructionJSON.tryGetString(Headers.MATERIALS_DESCRIPTION);
        Map<String, String> materialsDescription = convertJSONToMap(materialsDescriptionString);
        // Decode from Base 64 to standard binary bytes
        byte[] encryptedSymmetricKey = Base64.decode(encryptedSymmetricKeyB64);
        byte[] iv = Base64.decode(ivB64);
        if (encryptedSymmetricKey == null || iv == null) {
            // If necessary encryption info was not found in the instruction file, throw an exception.
            throw new AmazonClientException(
                String.format("Necessary encryption info not found in the instruction file '%s' in bucket '%s'",
                    instructionFile.getKey(), instructionFile.getBucketName()));
        }
        EncryptionMaterials materials = retrieveOriginalMaterials(materialsDescription, materialsProvider);
        // If we're unable to retrieve the original encryption materials, we can't decrypt the object, so
        // throw an exception.
        if (materials == null) {
            throw new AmazonClientException(
                String.format("Unable to retrieve the encryption materials that originally " +
                    "encrypted object corresponding to instruction file '%s' in bucket '%s'.",
                    instructionFile.getKey(), instructionFile.getBucketName()));
        }
        // Decrypt (unwrap) the symmetric key and create the symmetric cipher for DECRYPT mode.
        SecretKey symmetricKey = getDecryptedSymmetricKey(encryptedSymmetricKey, materials, cryptoProvider);
        CipherFactory cipherFactory = new CipherFactory(symmetricKey, Cipher.DECRYPT_MODE, iv, cryptoProvider);
        return new EncryptionInstruction(materialsDescription, encryptedSymmetricKey, symmetricKey, cipherFactory);
    } catch (JSONException e) {
        // NOTE(review): the JSONException cause is not chained here; only its message survives.
        throw new AmazonClientException("Unable to parse retrieved instruction file : " + e.getMessage());
    }
}
/**
* Builds an instruction object from the object metadata.
*
* @param object
* A non-null object that contains encryption information in its headers
* @param materials
* The non-null encryption materials to be used to encrypt and decrypt data.
* @param cryptoProvider
* The crypto provider whose encryption implementation will be used to encrypt and decrypt data. Null is ok and uses the
* preferred provider from Security.getProviders().
* @return
* A non-null instruction object containing encryption information
*
* @throws AmazonClientException
* if encryption information is missing in the metadata, or the encryption
* materials used to encrypt the object are not available via the materials Accessor
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static EncryptionInstruction buildInstructionFromObjectMetadata(S3Object object, EncryptionMaterials materials, Provider cryptoProvider) {
    // Adapt the fixed materials to a provider and reuse the provider-based overload.
    StaticEncryptionMaterialsProvider provider = new StaticEncryptionMaterialsProvider(materials);
    return buildInstructionFromObjectMetadata(object, provider, cryptoProvider);
}
/**
* Builds an instruction object from the object metadata.
*
* @param object
* A non-null object that contains encryption information in its headers
* @param materialsProvider
* The non-null encryption materials provider to be used to encrypt and decrypt data.
* @param cryptoProvider
* The crypto provider whose encryption implementation will be used to encrypt and decrypt data. Null is ok and uses the
* preferred provider from Security.getProviders().
* @return
* A non-null instruction object containing encryption information
*
* @throws AmazonClientException
* if encryption information is missing in the metadata, or the encryption
* materials used to encrypt the object are not available via the materials Accessor
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static EncryptionInstruction buildInstructionFromObjectMetadata(S3Object object, EncryptionMaterialsProvider materialsProvider, Provider cryptoProvider) {
    ObjectMetadata metadata = object.getObjectMetadata();
    // Get encryption info from metadata: wrapped key, IV, and the
    // (optional) materials description, all stored as object headers.
    byte[] encryptedSymmetricKeyBytes = getCryptoBytesFromMetadata(Headers.CRYPTO_KEY, metadata);
    byte[] initVectorBytes = getCryptoBytesFromMetadata(Headers.CRYPTO_IV, metadata);
    String materialsDescriptionString = getStringFromMetadata(Headers.MATERIALS_DESCRIPTION, metadata);
    Map<String, String> materialsDescription = convertJSONToMap(materialsDescriptionString);
    if (encryptedSymmetricKeyBytes == null || initVectorBytes == null) {
        // If necessary encryption info was not found in the instruction file, throw an exception.
        throw new AmazonClientException(
            String.format("Necessary encryption info not found in the headers of file '%s' in bucket '%s'",
                object.getKey(), object.getBucketName()));
    }
    EncryptionMaterials materials = retrieveOriginalMaterials(materialsDescription, materialsProvider);
    // If we're unable to retrieve the original encryption materials, we can't decrypt the object, so
    // throw an exception.
    if (materials == null) {
        throw new AmazonClientException(
            String.format("Unable to retrieve the encryption materials that originally " +
                "encrypted file '%s' in bucket '%s'.",
                object.getKey(), object.getBucketName()));
    }
    // Decrypt (unwrap) the symmetric key and create the symmetric cipher for DECRYPT mode.
    SecretKey symmetricKey = getDecryptedSymmetricKey(encryptedSymmetricKeyBytes, materials, cryptoProvider);
    CipherFactory cipherFactory = new CipherFactory(symmetricKey, Cipher.DECRYPT_MODE, initVectorBytes, cryptoProvider);
    return new EncryptionInstruction(materialsDescription, encryptedSymmetricKeyBytes, symmetricKey, cipherFactory);
}
/**
* Returns an updated request where the input stream contains the encrypted object contents.
* The specified instruction will be used to encrypt data.
*
* @param request
* The request whose contents are to be encrypted.
* @param instruction
* The instruction that will be used to encrypt the object data.
* @return
* The updated request where the input stream contains the encrypted contents.
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static PutObjectRequest encryptRequestUsingInstruction(PutObjectRequest request, EncryptionInstruction instruction) {
    // Create a new metadata object if there is no metadata already.
    ObjectMetadata metadata = request.getMetadata();
    if (metadata == null) {
        metadata = new ObjectMetadata();
    }
    // Record the original Content MD5, if present, for the unencrypted data
    if (metadata.getContentMD5() != null) {
        metadata.addUserMetadata(Headers.UNENCRYPTED_CONTENT_MD5, metadata.getContentMD5());
    }
    // Removes the original content MD5 if present from the meta data
    // (it would not match the encrypted bytes being uploaded).
    metadata.setContentMD5(null);
    // Record the original, unencrypted content-length so it can be accessed later.
    // NOTE(review): a negative value appears to mean "length unknown" -- confirm
    // against getUnencryptedContentLength, which is defined elsewhere in this file.
    final long plaintextLength = getUnencryptedContentLength(request, metadata);
    if (plaintextLength >= 0) {
        metadata.addUserMetadata(Headers.UNENCRYPTED_CONTENT_LENGTH,
            Long.toString(plaintextLength));
    }
    // Put the calculated length of the encrypted contents in the metadata;
    // again, a negative result leaves the content length unset.
    long cryptoContentLength = calculateCryptoContentLength(instruction.getSymmetricCipher(), request, metadata);
    if (cryptoContentLength >= 0) {
        metadata.setContentLength(cryptoContentLength);
    }
    request.setMetadata(metadata);
    // Create encrypted input stream
    request.setInputStream(getEncryptedInputStream(request, instruction.getCipherFactory(), plaintextLength));
    // Treat all encryption requests as input stream upload requests, not as file upload requests.
    request.setFile(null);
    return request;
}
/**
* Returns an updated object where the object content input stream contains the decrypted contents.
*
* @param object
* The object whose contents are to be decrypted.
* @param instruction
* The instruction that will be used to decrypt the object data.
* @return
* The updated object where the object content input stream contains the decrypted contents.
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static S3Object decryptObjectUsingInstruction(S3Object object, EncryptionInstruction instruction) {
    // Replace the content stream with one that decrypts on the fly, keeping
    // the original HTTP request handle attached to the new stream.
    S3ObjectInputStream cipherText = object.getObjectContent();
    InputStream plainText = new RepeatableCipherInputStream(cipherText, instruction.getCipherFactory());
    object.setObjectContent(new S3ObjectInputStream(plainText, cipherText.getHttpRequest()));
    return object;
}
/**
* Creates a put request to store the specified instruction object in S3.
*
* @param request
* The put request for the original object to be stored in S3.
* @param instruction
* The instruction object to be stored in S3.
* @return
* A put request to store the specified instruction object in S3.
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static PutObjectRequest createInstructionPutRequest(PutObjectRequest request, EncryptionInstruction instruction) {
    // Serialize the instruction to UTF-8 JSON bytes; these become the object content.
    byte[] content = convertInstructionToJSONObject(instruction).toString().getBytes(StringUtils.UTF8);
    ObjectMetadata metadata = request.getMetadata();
    // Content-length of the upload is the JSON payload size.
    metadata.setContentLength(content.length);
    // Mark this object as a crypto instruction file.
    metadata.addUserMetadata(Headers.CRYPTO_INSTRUCTION_FILE, "");
    // Re-target the request at the instruction-file key and attach the payload.
    request.setKey(request.getKey() + INSTRUCTION_SUFFIX);
    request.setMetadata(metadata);
    request.setInputStream(new ByteArrayInputStream(content));
    return request;
}
/**
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static PutObjectRequest createInstructionPutRequest(String bucketName, String key, EncryptionInstruction instruction) {
    // Serialize the instruction to UTF-8 JSON bytes; these become the object content.
    byte[] content = convertInstructionToJSONObject(instruction).toString().getBytes(StringUtils.UTF8);
    // Fresh metadata: payload length plus the instruction-file marker header.
    ObjectMetadata metadata = new ObjectMetadata();
    metadata.setContentLength(content.length);
    metadata.addUserMetadata(Headers.CRYPTO_INSTRUCTION_FILE, "");
    return new PutObjectRequest(bucketName, key + INSTRUCTION_SUFFIX,
            new ByteArrayInputStream(content), metadata);
}
/**
* Creates a get object request for an instruction file using
* the default instruction file suffix.
*
* @param id
* an S3 object id (not the instruction file id)
* @return
* A get request to retrieve an instruction file from S3.
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static GetObjectRequest createInstructionGetRequest(S3ObjectId id) {
    // A null suffix selects the default instruction file suffix.
    return createInstructionGetRequest(id, null);
}
/**
* Creates and return a get object request for an instruction file.
*
* @param s3objectId
* an S3 object id (not the instruction file id)
* @param instFileSuffix
* suffix of the specific instruction file to be used, or null if
* the default instruction file is to be used.
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static GetObjectRequest createInstructionGetRequest(
        S3ObjectId s3objectId, String instFileSuffix) {
    // instructionFileId() maps the object id to its side-car instruction
    // file id; a null suffix selects the default instruction file.
    return new GetObjectRequest(
            s3objectId.instructionFileId(instFileSuffix));
}
/**
* Creates a delete request to delete an instruction file in S3.
*
* @param request
* The delete request for the original object to be deleted from S3.
* @return
* A delete request to delete an instruction file in S3.
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static DeleteObjectRequest createInstructionDeleteObjectRequest(DeleteObjectRequest request) {
    // The instruction file lives next to the object, under key + suffix.
    String instructionKey = request.getKey() + INSTRUCTION_SUFFIX;
    return new DeleteObjectRequest(request.getBucketName(), instructionKey);
}
/**
* Returns true if the specified S3Object contains encryption info in its
* metadata, false otherwise.
*
* @param retrievedObject
* An S3Object
* @return
* True if the specified S3Object contains encryption info in its
* metadata, false otherwise.
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static boolean isEncryptionInfoInMetadata(S3Object retrievedObject) {
    // Both the IV and the wrapped key must be present in the user metadata.
    Map<String, String> userMetadata = retrievedObject.getObjectMetadata().getUserMetadata();
    if (userMetadata == null) {
        return false;
    }
    return userMetadata.containsKey(Headers.CRYPTO_IV)
            && userMetadata.containsKey(Headers.CRYPTO_KEY);
}
/**
* Returns true if the specified S3Object is an instruction file containing
* encryption info, false otherwise.
*
* @param instructionFile
* An S3Object that may potentially be an instruction file
* @return
* True if the specified S3Object is an instruction file containing
* encryption info, false otherwise.
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static boolean isEncryptionInfoInInstructionFile(S3Object instructionFile) {
    if (instructionFile == null) {
        return false;
    }
    // An instruction file is recognized purely by its marker user-metadata header.
    Map<String, String> userMetadata = instructionFile.getObjectMetadata().getUserMetadata();
    return userMetadata != null
            && userMetadata.containsKey(Headers.CRYPTO_INSTRUCTION_FILE);
}
/**
* Adjusts a user specified range to retrieve all of the cipher blocks (each of size 16 bytes) that
* contain the specified range.
*
* For Chained Block Cipher decryption to function properly, we need to retrieve the cipher block that precedes
* the range, all of the cipher blocks that contain the range, and the cipher block that follows the range.
*
* @param range
* A two-element array of longs corresponding to the start and finish (inclusive) of a desired
* range of bytes.
* @return
* A two-element array of longs corresponding to the start and finish of the cipher blocks to
* be retrieved. If the range is invalid, then return null.
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static long[] getAdjustedCryptoRange(long[] range) {
    // A null or inverted range cannot be adjusted.
    if (range == null || range[0] > range[1]) {
        return null;
    }
    // Expand each end outward to a cipher-block boundary
    // (see the block-bound helper methods for the exact rounding).
    return new long[] {
            getCipherBlockLowerBound(range[0]),
            getCipherBlockUpperBound(range[1])
    };
}
/**
* Adjusts the retrieved S3Object so that the object contents contain only the range of bytes
* desired by the user. Since encrypted contents can only be retrieved in CIPHER_BLOCK_SIZE
* (16 bytes) chunks, the S3Object potentially contains more bytes than desired, so this method
* adjusts the contents range.
*
* @param object
* The S3Object retrieved from S3 that could possibly contain more bytes than desired
* by the user.
* @param range
* A two-element array of longs corresponding to the start and finish (inclusive) of a desired
* range of bytes.
* @return
* The S3Object with adjusted object contents containing only the range desired by the user.
* If the range specified is invalid, then the S3Object is returned without any modifications.
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static S3Object adjustOutputToDesiredRange(S3Object object, long[] range) {
    // A null or inverted range passes the object through untouched.
    if (range == null || range[0] > range[1]) {
        return object;
    }
    try {
        // Trim the (cipher-block-aligned) stream back down to the requested bytes.
        S3ObjectInputStream content = object.getObjectContent();
        InputStream trimmed = new AdjustedRangeInputStream(content, range[0], range[1]);
        object.setObjectContent(new S3ObjectInputStream(trimmed, content.getHttpRequest()));
        return object;
    } catch (IOException e) {
        throw new AmazonClientException("Error adjusting output to desired byte range: " + e.getMessage());
    }
}
/**
* Generates a one-time use Symmetric Key on-the-fly for use in envelope encryption.
*/
public static SecretKey generateOneTimeUseSymmetricKey() {
    try {
        // Algorithm and key length come from the shared JCE constants.
        KeyGenerator keyGen = KeyGenerator.getInstance(JceEncryptionConstants.SYMMETRIC_KEY_ALGORITHM);
        keyGen.init(JceEncryptionConstants.SYMMETRIC_KEY_LENGTH, new SecureRandom());
        return keyGen.generateKey();
    } catch (NoSuchAlgorithmException e) {
        throw new AmazonClientException("Unable to generate envelope symmetric key:" + e.getMessage(), e);
    }
}
/**
* Creates a symmetric cipher in the specified mode from the given symmetric key and IV. The given
* crypto provider will provide the encryption implementation. If the crypto provider is null, then
* the default JCE crypto provider will be used.
* @deprecated no longer used and will be removed in the future
*/
@Deprecated
public static Cipher createSymmetricCipher(SecretKey symmetricCryptoKey, int encryptMode, Provider cryptoProvider, byte[] initVector) {
    try {
        // Use the explicit provider when given, otherwise the JVM's preferred one.
        Cipher cipher = (cryptoProvider != null)
                ? Cipher.getInstance(JceEncryptionConstants.SYMMETRIC_CIPHER_METHOD, cryptoProvider)
                : Cipher.getInstance(JceEncryptionConstants.SYMMETRIC_CIPHER_METHOD);
        // Initialize with the caller's IV when supplied; otherwise let the
        // cipher pick its own parameters.
        if (initVector != null) {
            cipher.init(encryptMode, symmetricCryptoKey, new IvParameterSpec(initVector));
        } else {
            cipher.init(encryptMode, symmetricCryptoKey);
        }
        return cipher;
    } catch (Exception e) {
        throw new AmazonClientException("Unable to build cipher: " + e.getMessage() +
                "\nMake sure you have the JCE unlimited strength policy files installed and " +
                "configured for your JVM: http://www.ngs.ac.uk/tools/jcepolicyfiles", e);
    }
}
/**
 * Encrypts a symmetric key using the provided encryption materials and returns
 * it in raw byte array form.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
public static byte[] getEncryptedSymmetricKey(SecretKey toBeEncrypted, EncryptionMaterials materials, Provider cryptoProvider) {
    // Envelope-encrypt with the key pair's public key when present,
    // otherwise with the materials' symmetric key.
    Key keyToDoEncryption = (materials.getKeyPair() != null)
            ? materials.getKeyPair().getPublic()
            : materials.getSymmetricKey();
    try {
        byte[] toBeEncryptedBytes = toBeEncrypted.getEncoded();
        Cipher cipher = (cryptoProvider == null)
                ? Cipher.getInstance(keyToDoEncryption.getAlgorithm()) // Use default JCE Provider
                : Cipher.getInstance(keyToDoEncryption.getAlgorithm(), cryptoProvider);
        cipher.init(Cipher.ENCRYPT_MODE, keyToDoEncryption);
        return cipher.doFinal(toBeEncryptedBytes);
    } catch (Exception e) {
        throw new AmazonClientException("Unable to encrypt symmetric key: " + e.getMessage(), e);
    }
}
/**
 * Decrypts an encrypted symmetric key using the provided encryption materials and returns
 * it as a SecretKey object.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static SecretKey getDecryptedSymmetricKey(byte[] encryptedSymmetricKeyBytes, EncryptionMaterials materials, Provider cryptoProvider) {
    // Mirror of getEncryptedSymmetricKey: prefer the key pair's private key,
    // falling back to the materials' symmetric key.
    Key keyToDoDecryption = (materials.getKeyPair() != null)
            ? materials.getKeyPair().getPrivate()
            : materials.getSymmetricKey();
    try {
        Cipher cipher = (cryptoProvider == null)
                ? Cipher.getInstance(keyToDoDecryption.getAlgorithm())
                : Cipher.getInstance(keyToDoDecryption.getAlgorithm(), cryptoProvider);
        cipher.init(Cipher.DECRYPT_MODE, keyToDoDecryption);
        byte[] decryptedSymmetricKeyBytes = cipher.doFinal(encryptedSymmetricKeyBytes);
        return new SecretKeySpec(decryptedSymmetricKeyBytes, JceEncryptionConstants.SYMMETRIC_KEY_ALGORITHM);
    } catch (Exception e) {
        throw new AmazonClientException("Unable to decrypt symmetric key from object metadata : " + e.getMessage(), e);
    }
}
/**
 * Builds an encrypting input stream for a PUT request: the request's file (if
 * set) takes precedence over its raw input stream, an optional length check is
 * applied, and the result is wrapped in a repeatable cipher stream.
 *
 * @param plaintextLength
 *            the expected total number of bytes of the plaintext; or -1 if
 *            not available.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static InputStream getEncryptedInputStream(
        PutObjectRequest request, CipherFactory cipherFactory,
        long plaintextLength) {
    try {
        InputStream is = request.getInputStream();
        if (request.getFile() != null) {
            // Historically file takes precedence over the original input
            // stream
            is = new RepeatableFileInputStream(request.getFile());
        }
        if (plaintextLength > -1) {
            // This ensures the plain-text read from the underlying data
            // stream has the same length as the expected total
            is = new LengthCheckInputStream(is, plaintextLength,
                    EXCLUDE_SKIPPED_BYTES);
        }
        return new RepeatableCipherInputStream(is, cipherFactory);
    } catch (Exception e) {
        throw new AmazonClientException("Unable to create cipher input stream: " + e.getMessage(), e);
    }
}
/**
 * Builds an encrypting input stream for a multipart upload part. The part's
 * file sub-range (if a file is set) takes precedence over the raw stream; for
 * non-final parts the cipher padding is trimmed so it is not sent mid-upload.
 *
 * @param request       the upload-part request supplying a stream or a file
 * @param cipherFactory factory producing the symmetric cipher for this upload
 * @return a stream over the encrypted part that captures its trailing cipher block
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
public static ByteRangeCapturingInputStream getEncryptedInputStream(UploadPartRequest request, CipherFactory cipherFactory) {
    try {
        InputStream originalInputStream = request.getInputStream();
        if (request.getFile() != null) {
            originalInputStream = new InputSubstream(new RepeatableFileInputStream(request.getFile()),
                    request.getFileOffset(), request.getPartSize(), request.isLastPart());
        }
        originalInputStream = new RepeatableCipherInputStream(originalInputStream, cipherFactory);
        if (!request.isLastPart()) {
            // We want to prevent the final padding from being sent on the stream...
            originalInputStream = new InputSubstream(originalInputStream, 0, request.getPartSize(), false);
        }
        long partSize = request.getPartSize();
        int cipherBlockSize = cipherFactory.createCipher().getBlockSize();
        // Capture the last cipher block so it can seed the next part's cipher.
        return new ByteRangeCapturingInputStream(originalInputStream, partSize - cipherBlockSize, partSize);
    } catch (Exception e) {
        throw new AmazonClientException("Unable to create cipher input stream: " + e.getMessage(), e);
    }
}
/**
 * Retrieves the byte[] value of either the crypto key or crypto IV. If these fields are not found in
 * the metadata, returns null.
 *
 * Note: The bytes are transported in Base64-encoding, so they are decoded before they are returned.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static byte[] getCryptoBytesFromMetadata(String headerName, ObjectMetadata metadata) throws NullPointerException {
    Map<String, String> userMetadata = metadata.getUserMetadata();
    if (userMetadata != null && userMetadata.containsKey(headerName)) {
        // Convert Base64 bytes to binary data.
        return Base64.decode(userMetadata.get(headerName));
    }
    return null;
}
/**
 * Retrieves the String value of the given header from the metadata. Returns null if the field is not
 * found in the metadata.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static String getStringFromMetadata(String headerName, ObjectMetadata metadata) throws NullPointerException {
    Map<String, String> userMetadata = metadata.getUserMetadata();
    if (userMetadata == null) {
        return null;
    }
    // Map.get already yields null for absent keys.
    return userMetadata.get(headerName);
}
/**
 * Converts the JSON encoded materials description to a Map&lt;String, String&gt;.
 *
 * @param descriptionJSONString the JSON document to parse; may be null
 * @return the parsed key/value pairs, or null if the input was null
 * @throws AmazonClientException if the string is not valid JSON
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
@SuppressWarnings("unchecked") // Suppresses Iterator<String> type warning
private static Map<String, String> convertJSONToMap(String descriptionJSONString) {
    if (descriptionJSONString == null) {
        return null;
    }
    try {
        JSONObject descriptionJSON = new JSONObject(descriptionJSONString);
        Iterator<String> keysIterator = descriptionJSON.keys();
        Map<String, String> materialsDescription = new HashMap<String, String>();
        while (keysIterator.hasNext()) {
            String key = keysIterator.next();
            materialsDescription.put(key, descriptionJSON.getString(key));
        }
        return materialsDescription;
    } catch (JSONException e) {
        // Chain the cause so callers can diagnose the underlying parse failure.
        throw new AmazonClientException("Unable to parse encryption materials description from metadata :" + e.getMessage(), e);
    }
}
/**
 * Update the request's ObjectMetadata with the necessary information for decrypting the object
 *
 * @param request
 *      Non-null PUT request encrypted using the given instruction
 * @param instruction
 *      Non-null instruction used to encrypt the data in this PUT request.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
public static void updateMetadataWithEncryptionInstruction(PutObjectRequest request, EncryptionInstruction instruction){
    ObjectMetadata metadata = request.getMetadata();
    if (metadata == null) {
        metadata = new ObjectMetadata();
    }
    // Derive the content type from the file when one was supplied.
    if (request.getFile() != null) {
        metadata.setContentType(Mimetypes.getInstance().getMimetype(request.getFile()));
    }
    updateMetadata(metadata,
            instruction.getEncryptedSymmetricKey(),
            instruction.getSymmetricCipher(),
            instruction.getMaterialsDescription());
    request.setMetadata(metadata);
}
/**
 * Stores the encryption artifacts — the (optional) encrypted envelope key, the
 * cipher IV (both Base64-encoded) and the materials description (as JSON) —
 * as user metadata on the given ObjectMetadata.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static void updateMetadata(ObjectMetadata metadata, byte[] keyBytesToStoreInMetadata, Cipher symmetricCipher, Map<String, String> materialsDescription) {
    // If we generated a symmetric key to encrypt the data, store it in the object metadata.
    if (keyBytesToStoreInMetadata != null) {
        metadata.addUserMetadata(Headers.CRYPTO_KEY,
                Base64.encodeAsString(keyBytesToStoreInMetadata));
    }
    // Put the cipher initialization vector (IV) into the object metadata
    metadata.addUserMetadata(Headers.CRYPTO_IV,
            Base64.encodeAsString(symmetricCipher.getIV()));
    // Put the materials description into the object metadata as JSON
    JSONObject descriptionJSON = new JSONObject(materialsDescription);
    metadata.addUserMetadata(Headers.MATERIALS_DESCRIPTION, descriptionJSON.toString());
}
/**
 * Adds the encryption artifacts to the multipart-initiation request's metadata
 * (creating fresh metadata if the request carried none) and returns it.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
public static ObjectMetadata updateMetadataWithEncryptionInfo(InitiateMultipartUploadRequest request, byte[] keyBytesToStoreInMetadata, Cipher symmetricCipher, Map<String, String> materialsDescription) {
    ObjectMetadata metadata = request.getObjectMetadata();
    if (metadata == null) {
        metadata = new ObjectMetadata();
    }
    updateMetadata(metadata, keyBytesToStoreInMetadata, symmetricCipher, materialsDescription);
    return metadata;
}
/**
 * Retrieve the original materials corresponding to the specified materials description.
 * Returns null if unable to retrieve the original materials.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static EncryptionMaterials retrieveOriginalMaterials(Map<String, String> materialsDescription, EncryptionMaterialsAccessor accessor) {
    return (accessor == null) ? null : accessor.getEncryptionMaterials(materialsDescription);
}
/**
 * Calculates the length of the encrypted file given the original plaintext
 * file length and the cipher that will be used for encryption.
 *
 * @return
 *      The size of the encrypted file in bytes, or -1 if no content length
 *      has been set yet.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static long calculateCryptoContentLength(Cipher symmetricCipher, PutObjectRequest request, ObjectMetadata metadata) {
    long plaintextLength = getUnencryptedContentLength(request, metadata);
    if (plaintextLength < 0) {
        // Unknown plaintext size: report unknown ciphertext size as well.
        return -1;
    }
    long blockSize = symmetricCipher.getBlockSize();
    // Padding always rounds up to the next full cipher block.
    return plaintextLength + (blockSize - plaintextLength % blockSize);
}
/**
 * Calculates the encrypted length of a single upload part from the part size
 * (or file length when no part size is set) and the cipher's block size.
 * Returns -1 when the request carries neither a file nor an input stream.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
public static long calculateCryptoContentLength(Cipher symmetricCipher, UploadPartRequest request) {
    long plaintextLength;
    if (request.getFile() != null) {
        // A positive part size wins; otherwise fall back to the whole file.
        plaintextLength = (request.getPartSize() > 0)
                ? request.getPartSize()
                : request.getFile().length();
    } else if (request.getInputStream() != null) {
        plaintextLength = request.getPartSize();
    } else {
        return -1;
    }
    long blockSize = symmetricCipher.getBlockSize();
    // Padding rounds up to the next full cipher block.
    return plaintextLength + (blockSize - plaintextLength % blockSize);
}
/**
 * Returns the content length of the unencrypted data in a PutObjectRequest,
 * or -1 if the original content-length isn't known.
 *
 * @param request
 *      The request to examine.
 * @param metadata
 *      The metadata for the request.
 *
 * @return The content length of the unencrypted data in the request, or -1
 *      if it isn't known.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static long getUnencryptedContentLength(PutObjectRequest request, ObjectMetadata metadata) {
    if (request.getFile() != null) {
        return request.getFile().length();
    }
    boolean streamWithDeclaredLength = request.getInputStream() != null
            && metadata.getRawMetadataValue(Headers.CONTENT_LENGTH) != null;
    return streamWithDeclaredLength ? metadata.getContentLength() : -1;
}
/**
 * Returns a JSONObject representation of the instruction object, carrying the
 * materials description (as nested JSON text), the Base64-encoded encrypted
 * symmetric key and the Base64-encoded cipher IV.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static JSONObject convertInstructionToJSONObject(EncryptionInstruction instruction) {
    JSONObject instructionJSON = new JSONObject();
    try {
        JSONObject materialsDescriptionJSON = new JSONObject(
                instruction.getMaterialsDescription());
        instructionJSON.put(Headers.MATERIALS_DESCRIPTION,
                materialsDescriptionJSON.toString());
        instructionJSON.put(Headers.CRYPTO_KEY,
                Base64.encodeAsString(instruction.getEncryptedSymmetricKey()));
        byte[] iv = instruction.getSymmetricCipher().getIV();
        instructionJSON.put(Headers.CRYPTO_IV, Base64.encodeAsString(iv));
    } catch (JSONException e) {} // Keys are never null, so JSONException will never be thrown.
    return instructionJSON;
}
/**
 * Parses instruction data retrieved from S3 and returns a JSONObject representing the instruction.
 *
 * @param instructionObject the S3 object whose content holds the JSON instruction
 * @throws AmazonClientException if the content cannot be read or parsed
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static JSONObject parseJSONInstruction(S3Object instructionObject) {
    try {
        String instructionString = convertStreamToString(instructionObject.getObjectContent());
        return new JSONObject(instructionString);
    } catch (Exception e) {
        // Chain the cause instead of discarding it, matching the sibling helpers.
        throw new AmazonClientException("Error parsing JSON instruction file: " + e.getMessage(), e);
    }
}
/**
 * Converts the contents of an input stream to a String. Lines are concatenated
 * without separators; the stream is always closed before returning.
 *
 * @param inputStream the stream to drain; null yields the empty string
 * @return the stream's contents with line terminators removed
 * @throws IOException if reading fails
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static String convertStreamToString(InputStream inputStream) throws IOException {
    if (inputStream == null) {
        return "";
    }
    StringBuilder stringBuilder = new StringBuilder();
    // try-with-resources closes the reader (and thus the underlying stream);
    // the original only closed the raw stream, leaving the reader unclosed.
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(inputStream, StringUtils.UTF8))) {
        String line;
        while ((line = reader.readLine()) != null) {
            stringBuilder.append(line);
        }
    }
    return stringBuilder.toString();
}
/**
 * Takes the position of the leftmost desired byte of a user specified range and returns the
 * position of the start of the previous cipher block, or returns 0 if the leftmost byte is in
 * the first cipher block.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static long getCipherBlockLowerBound(long leftmostBytePosition) {
    long cipherBlockSize = JceEncryptionConstants.SYMMETRIC_CIPHER_BLOCK_SIZE;
    // Snap down to the start of the containing block, then back one more block.
    long previousBlockStart =
            leftmostBytePosition - (leftmostBytePosition % cipherBlockSize) - cipherBlockSize;
    return Math.max(previousBlockStart, 0);
}
/**
 * Takes the position of the rightmost desired byte of a user specified range and returns the
 * position of the end of the following cipher block.
 * @deprecated no longer used and will be removed in the future
 */
@Deprecated
private static long getCipherBlockUpperBound(long rightmostBytePosition) {
    long cipherBlockSize = JceEncryptionConstants.SYMMETRIC_CIPHER_BLOCK_SIZE;
    // Round up to the end of the containing block, then forward one more block.
    long distanceToBlockEnd = cipherBlockSize - (rightmostBytePosition % cipherBlockSize);
    return rightmostBytePosition + distanceToBlockEnd + cipherBlockSize;
}
}
| apache-2.0 |
radicalbit/ambari | contrib/views/hawq/src/main/resources/ui/tests/unit/utils/utils-test.js | 4058 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*jshint node:true*/
/* global sinon */
import Utils from 'hawq-view/utils/utils';
import {module, test} from 'qunit';
import ENV from 'hawq-view/config/environment';
module('Unit | Utility | utils');
// --- Utils.computeClientAddress -------------------------------------------
// Joins host and port as "host:port"; degrades to 'local' for missing host
// or a -1 port (as exercised below).
test('#computeClientAddress: compute clientAddress', function (assert) {
  assert.equal(Utils.computeClientAddress('host', '9999'), 'host:9999');
});
test('#computeClientAddress: computed host address', function (assert) {
  assert.equal(Utils.computeClientAddress('host', 7777), 'host:7777', 'invalid computed client address');
});
test('#computeClientAddress: computed host address, -1 port', function (assert) {
  assert.equal(Utils.computeClientAddress('host', -1), 'local', 'invalid computed client address');
});
test('#computeClientAddress: computed host address, Undefined/Null/Empty Host', function (assert) {
  assert.equal(Utils.computeClientAddress(undefined, 7777), 'local', 'Invalid hostname: Undefined clientHost');
  assert.equal(Utils.computeClientAddress(null, 7777), 'local', 'Invalid hostname: Null clientHost');
  assert.equal(Utils.computeClientAddress('', 7777), 'local', 'Invalid hostname: Empty clientHost');
});
// --- Utils.formatDuration -------------------------------------------------
// Formats a duration in seconds as zero-padded "HH:MM:SS"; hours may exceed
// two digits (see the 100-hour case below).
test('#formatDuration returns 00:00:00 for null or undefined duration', function (assert) {
  assert.equal(Utils.formatDuration(null), "00:00:00");
  assert.equal(Utils.formatDuration(), "00:00:00");
});
test('#formatDuration returns correct string for query running for seconds', function (assert) {
  assert.equal(Utils.formatDuration(0), "00:00:00");
  assert.equal(Utils.formatDuration(32), "00:00:32");
  assert.equal(Utils.formatDuration(59), "00:00:59");
});
test('#formatDuration returns correct string for query running for minutes', function (assert) {
  assert.equal(Utils.formatDuration(60), "00:01:00");
  assert.equal(Utils.formatDuration(72), "00:01:12");
  assert.equal(Utils.formatDuration(3599), "00:59:59");
});
test('#formatDuration returns correct string for query running for hours', function (assert) {
  assert.equal(Utils.formatDuration(3600), "01:00:00");
  assert.equal(Utils.formatDuration(7272), "02:01:12");
  assert.equal(Utils.formatDuration(363599), "100:59:59");
});
// --- Utils.getNamespace ---------------------------------------------------
test('#getNamespace returns namespace for testing', function (assert) {
  assert.equal(Utils.getNamespace(), ENV.apiURL);
});
test('#getNamespace returns namespace for production', function (assert) {
  var baseNamespace = '/views/HAWQ/1.0.0/HAWQView/';
  var oldEnvironment = ENV.environment;
  // Flip to production and stub the window path so getNamespace derives the
  // API URL from the view path; both are restored before the test ends.
  ENV.environment = 'production';
  sinon.stub(Utils, 'getWindowPathname').returns(baseNamespace);
  assert.equal(Utils.getNamespace(), '/api/v1/views/HAWQ/versions/1.0.0/instances/HAWQView');
  Utils.getWindowPathname.restore();
  ENV.environment = oldEnvironment;
});
// --- Utils.generateStatusString -------------------------------------------
// Signature appears to be (waiting, waitingResource); the 'Queued' case below
// passes waiting=true as well, so waitingResource presumably takes precedence
// — NOTE(review): confirm against the Utils implementation.
test('#generateStatusString returns "Running" when waiting and waitingResource are both false', function (assert) {
  assert.equal(Utils.generateStatusString(false, false), 'Running');
});
test('#generateStatusString returns "Waiting on Lock" when waiting is true', function (assert) {
  assert.equal(Utils.generateStatusString(true, false), 'Waiting on Lock');
});
test('#generateStatusString returns "Queued" when waitingResource is true', function (assert) {
assert.equal(Utils.generateStatusString(true, true), 'Queued');
}); | apache-2.0 |
adit-chandra/tensorflow | tensorflow/compiler/mlir/lite/transforms/legalize_tf.cc | 17007 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// This transformation pass converts operations in TensorFlow dialect into
// operations that are legal in the TensorFlow Lite dialect. Operations that
// can be legalized to TensorFlow Lite dialect with simple replacements are part
// of this pass and other operations that may create extra ops should be part of
// the PrepareTF pass which should be run before this pass. That way any
// constant folding opportunities from the extra ops can be exploited by the
// constant folding support for the TensorFlow ops.
#include <climits>
#include <cstdint>
#include "llvm/ADT/APInt.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/StringSwitch.h"
#include "mlir/Dialect/QuantOps/FakeQuantSupport.h" // TF:local_config_mlir
#include "mlir/Dialect/QuantOps/UniformSupport.h" // TF:local_config_mlir
#include "mlir/IR/Attributes.h" // TF:local_config_mlir
#include "mlir/IR/Operation.h" // TF:local_config_mlir
#include "mlir/IR/PatternMatch.h" // TF:local_config_mlir
#include "mlir/IR/StandardTypes.h" // TF:local_config_mlir
#include "mlir/Pass/Pass.h" // TF:local_config_mlir
#include "mlir/Support/Functional.h" // TF:local_config_mlir
#include "mlir/Support/LLVM.h" // TF:local_config_mlir
#include "tensorflow/compiler/mlir/lite/ir/tfl_ops.h"
#include "tensorflow/compiler/mlir/lite/quantization/quantization_utils.h"
#include "tensorflow/compiler/mlir/lite/transforms/passes.h"
#include "tensorflow/compiler/mlir/lite/utils/attribute_utils.h"
#include "tensorflow/compiler/mlir/lite/utils/validators.h"
#include "tensorflow/compiler/mlir/tensorflow/ir/tf_ops.h"
namespace mlir {
namespace TFL {
//===----------------------------------------------------------------------===//
// The actual LegalizeTF Pass.
namespace {
// Legalize operations in functions.
// Pass that rewrites TF-dialect ops in each function into TFL-dialect ops.
struct LegalizeTF : public FunctionPass<LegalizeTF> {
  void runOnFunction() override;
};

// Table-generated rewrite patterns (from legalize_tf td rules).
#include "tensorflow/compiler/mlir/lite/transforms/generated_legalize_tf.inc"

// Declares a hand-written RewritePattern class named ConvertTF<op>Op that
// matches the TF op of the same name; the matchAndRewrite bodies follow below.
#define DECL_CONVERT_OP(tf_op)                                             \
  struct ConvertTF##tf_op##Op : public RewritePattern {                    \
    explicit ConvertTF##tf_op##Op(MLIRContext* context)                    \
        : RewritePattern(TF::tf_op##Op::getOperationName(), 1, context) {} \
    PatternMatchResult matchAndRewrite(                                    \
        Operation* op, PatternRewriter& rewriter) const override;          \
  }

// TODO(antiagainst): Define this pattern in a table-driven manner once variadic
// operands are properly supported in declarative rewrite rule specification.
DECL_CONVERT_OP(Concat);
DECL_CONVERT_OP(ConcatV2);
DECL_CONVERT_OP(MatMul);
DECL_CONVERT_OP(MatrixDiagV2);
DECL_CONVERT_OP(MatrixDiagV3);
DECL_CONVERT_OP(Pack);
DECL_CONVERT_OP(Reshape);
DECL_CONVERT_OP(Split);
DECL_CONVERT_OP(SplitV);
DECL_CONVERT_OP(StridedSlice);
DECL_CONVERT_OP(Unpack);
#undef DECL_CONVERT_OP
// Lowers tf.Concat to tfl.concatenation. Requires the concat dimension to be
// a constant so it can become the TFL axis attribute.
PatternMatchResult ConvertTFConcatOp::matchAndRewrite(
    Operation* op, PatternRewriter& rewriter) const {
  auto tf_concat_op = cast<TF::ConcatOp>(op);

  SmallVector<Value*, 4> values(tf_concat_op.values());
  auto output_type = tf_concat_op.output()->getType();
  // Extract axis attribute from constant concat_dims tensor
  ElementsAttr axis;
  if (!matchPattern(tf_concat_op.concat_dim(), m_Constant(&axis)))
    return matchFailure();

  // TFL concatenation carries a fused activation; none is present in TF.
  StringAttr fused_activation_function =
      StringAttr::get("NONE", rewriter.getContext());
  rewriter.replaceOpWithNewOp<TFL::ConcatenationOp>(
      op, output_type, values, mlir::TFL::ExtractSingleElementAsInteger(axis),
      fused_activation_function);
  return matchSuccess();
}

// Lowers tf.ConcatV2 (axis is the trailing operand) to tfl.concatenation,
// again requiring a constant axis.
PatternMatchResult ConvertTFConcatV2Op::matchAndRewrite(
    Operation* op, PatternRewriter& rewriter) const {
  auto tf_concat_op = cast<TF::ConcatV2Op>(op);

  SmallVector<Value*, 4> values(tf_concat_op.values());
  auto output_type = tf_concat_op.output()->getType();
  // Extract axis attribute from constant axis tensor
  ElementsAttr axis;
  if (!matchPattern(tf_concat_op.axis(), m_Constant(&axis)))
    return matchFailure();

  StringAttr fused_activation_function =
      StringAttr::get("NONE", rewriter.getContext());
  rewriter.replaceOpWithNewOp<ConcatenationOp>(
      op, output_type, values, ExtractSingleElementAsInteger(axis),
      fused_activation_function);
  return matchSuccess();
}
// The following is effectively:
// def : Pat<
//   (TF_MatMulOp $a, $b, ConstBoolAttrFalse:$transpose_a,
//      ConstBoolAttrTrue:$transpose_b),
//   (TFL_FullyConnectedOp:$__0 $a, $b,
//     NoInput.pattern, TFL_AF_None, TFL_FCWO_Default, ConstBoolAttrFalse)>;
// i.e. only MatMuls with transpose_a=false, transpose_b=true map onto
// tfl.fully_connected (whose weights are implicitly transposed).
PatternMatchResult ConvertTFMatMulOp::matchAndRewrite(
    Operation* op, PatternRewriter& rewriter) const {
  auto tf_matmul_op = cast<TF::MatMulOp>(op);
  if (tf_matmul_op.transpose_a()) return matchFailure();
  if (!tf_matmul_op.transpose_b()) return matchFailure();

  Type output_type = tf_matmul_op.getResult()->getType();
  // TODO(jpienaar): Follow up post shuffle discussion.
  // Fully-connected takes a bias operand; supply a none-typed placeholder.
  auto no_input = rewriter.create<ConstantOp>(
      op->getLoc(), rewriter.getNoneType(), rewriter.getUnitAttr());
  auto fc_op = rewriter.create<FullyConnectedOp>(
      op->getLoc(), ArrayRef<Type>{output_type}, op->getOperand(0),
      op->getOperand(1), no_input, rewriter.getStringAttr("NONE"),
      rewriter.getStringAttr("DEFAULT"), rewriter.getBoolAttr(false));
  rewriter.replaceOp(op, {fc_op.getResult(0)});
  return matchSuccess();
}

// Lowers tf.Pack to tfl.pack, forwarding N and the (possibly negative) axis.
PatternMatchResult ConvertTFPackOp::matchAndRewrite(
    Operation* op, PatternRewriter& rewriter) const {
  auto tf_pack_op = cast<TF::PackOp>(op);

  SmallVector<Value*, 4> values(tf_pack_op.values());
  auto output_type = tf_pack_op.output()->getType();
  auto values_count = rewriter.getI32IntegerAttr(tf_pack_op.N());
  // Axis can be negative.
  auto axis = rewriter.getI32IntegerAttr(tf_pack_op.axis().getSExtValue());

  rewriter.replaceOpWithNewOp<PackOp>(op, output_type, values, values_count,
                                      axis);
  return matchSuccess();
}
// Lowers tf.Reshape to tfl.reshape, inserting an i32 cast of the shape
// operand when needed (TFL requires an i32 shape tensor).
PatternMatchResult ConvertTFReshapeOp::matchAndRewrite(
    Operation* op, PatternRewriter& rewriter) const {
  auto tf_reshape_op = cast<TF::ReshapeOp>(op);

  auto* input = tf_reshape_op.tensor();
  auto* shape = tf_reshape_op.shape();

  ShapedType shape_type = shape->getType().cast<ShapedType>();
  // The tfl reshape's #2 operand needs to i32 tensor type, so we have to cast.
  if (!shape_type.getElementType().isInteger(32)) {
    auto new_shape = shape_type.getShape();
    IntegerType new_ele_type = rewriter.getIntegerType(32);
    ShapedType new_type = RankedTensorType::get(new_shape, new_ele_type);
    // Uses TF::CastOp to be folded if the shape input is a constant.
    shape = rewriter
                .create<TF::CastOp>(op->getLoc(), new_type, shape,
                                    rewriter.getBoolAttr(false))
                .y();
  }
  rewriter.replaceOpWithNewOp<ReshapeOp>(op, tf_reshape_op.output()->getType(),
                                         input, shape);
  return matchSuccess();
}

// Lowers tf.Split to tfl.split; output types are forwarded per result.
PatternMatchResult ConvertTFSplitOp::matchAndRewrite(
    Operation* op, PatternRewriter& rewriter) const {
  auto tf_split_op = cast<TF::SplitOp>(op);

  auto output_types = functional::map([](Value* v) { return v->getType(); },
                                      tf_split_op.output());
  // Number of splits cannot be negative.
  auto num_split = rewriter.getI32IntegerAttr(tf_split_op.num_split());

  rewriter.replaceOpWithNewOp<TFL::SplitOp>(op, output_types,
                                            tf_split_op.split_dim(),
                                            tf_split_op.value(), num_split);
  return matchSuccess();
}

// Lowers tf.SplitV (variable-sized splits) to tfl.split_v.
PatternMatchResult ConvertTFSplitVOp::matchAndRewrite(
    Operation* op, PatternRewriter& rewriter) const {
  auto tf_splitv_op = cast<TF::SplitVOp>(op);

  auto output_types = functional::map([](Value* v) { return v->getType(); },
                                      tf_splitv_op.output());
  // Number of splits cannot be negative.
  auto num_split = rewriter.getI32IntegerAttr(tf_splitv_op.num_split());

  rewriter.replaceOpWithNewOp<TFL::SplitVOp>(
      op, output_types, tf_splitv_op.value(), tf_splitv_op.size_splits(),
      tf_splitv_op.split_dim(), num_split);
  return matchSuccess();
}
// Pads a strided-slice begin/end/strides constant up to the input's rank with
// the provided per-dimension padding values, setting the corresponding bit in
// `mask` (when non-null) for each dimension added. If the attribute is not a
// ranked constant it is returned unchanged (no padding possible).
Value* PadStridedSliceAttributeArray(Operation* op, PatternRewriter& rewriter,
                                     Value* attribute,
                                     ArrayRef<int32_t> padding_val, int* mask) {
  DenseIntElementsAttr dense_elem_attr;
  SmallVector<int32_t, 8> padded_val;

  auto ranked_attr_type = attribute->getType().dyn_cast<RankedTensorType>();
  if (!ranked_attr_type ||
      !matchPattern(attribute, m_Constant(&dense_elem_attr))) {
    // If the input attribute is neither ranked type nor constant, we
    // can't do any padding. Instead we just return it.
    return attribute;
  }
  // Copy the existing entries, then append padding for the missing dims.
  for (auto idx : dense_elem_attr.getIntValues()) {
    padded_val.push_back(idx.getSExtValue());
  }
  auto attr_dim_count = ranked_attr_type.getShape()[0];
  int full_dim_count = padding_val.size();
  for (int i = attr_dim_count; i < full_dim_count; ++i) {
    padded_val.push_back(padding_val[i]);
    if (mask) *mask |= 1 << i;
  }
  auto type =
      RankedTensorType::get({full_dim_count}, rewriter.getIntegerType(32));
  auto attr = DenseElementsAttr::get<int32_t>(type, padded_val);
  return rewriter.create<ConstantOp>(op->getLoc(), type, attr);
}
// Lowers tf.StridedSlice to tfl.strided_slice. When the input rank is known,
// the begin/end/strides operands are padded out to full rank (begin with 0s,
// end with the input shape, strides with 1s) and the begin/end masks are
// updated for the padded dimensions; otherwise the operands pass through.
PatternMatchResult ConvertTFStridedSliceOp::matchAndRewrite(
    Operation* op, PatternRewriter& rewriter) const {
  auto tf_strided_slice_op = cast<TF::StridedSliceOp>(op);
  auto ranked_input_type =
      tf_strided_slice_op.input()->getType().dyn_cast<RankedTensorType>();
  if (!ranked_input_type) {
    // If input is not a ranked tensor, we can't deduce the padding dimensions
    // from it, so we just do a plain conversion here.
    rewriter.replaceOpWithNewOp<TFL::StridedSliceOp>(
        op, tf_strided_slice_op.output()->getType(),
        tf_strided_slice_op.input(), tf_strided_slice_op.begin(),
        tf_strided_slice_op.end(), tf_strided_slice_op.strides(),
        rewriter.getI32IntegerAttr(
            tf_strided_slice_op.begin_mask().getSExtValue()),
        rewriter.getI32IntegerAttr(
            tf_strided_slice_op.end_mask().getSExtValue()),
        rewriter.getI32IntegerAttr(
            tf_strided_slice_op.ellipsis_mask().getSExtValue()),
        rewriter.getI32IntegerAttr(
            tf_strided_slice_op.new_axis_mask().getSExtValue()),
        rewriter.getI32IntegerAttr(
            tf_strided_slice_op.shrink_axis_mask().getSExtValue()));
    return matchSuccess();
  }

  int num_input_dims = ranked_input_type.getRank();
  // Pad `begin` array with zero values and update the `begin_mask`.
  SmallVector<int32_t, 8> begin_pad_val(num_input_dims, 0);
  int begin_mask = tf_strided_slice_op.begin_mask().getSExtValue();
  Value* padded_begin = PadStridedSliceAttributeArray(
      op, rewriter, tf_strided_slice_op.begin(), begin_pad_val, &begin_mask);
  // Pad `end` array with `input_shape` and update the `end_mask`.
  int end_mask = tf_strided_slice_op.end_mask().getSExtValue();
  auto input_shape = ranked_input_type.getShape();
  SmallVector<int32_t, 8> end_pad_val(input_shape.begin(), input_shape.end());
  Value* padded_end = PadStridedSliceAttributeArray(
      op, rewriter, tf_strided_slice_op.end(), end_pad_val, &end_mask);
  // Pad `strides` array with ones.
  SmallVector<int32_t, 8> strides_pad_val(num_input_dims, 1);
  Value* padded_strides = PadStridedSliceAttributeArray(
      op, rewriter, tf_strided_slice_op.strides(), strides_pad_val, nullptr);
  rewriter.replaceOpWithNewOp<TFL::StridedSliceOp>(
      op, tf_strided_slice_op.output()->getType(), tf_strided_slice_op.input(),
      padded_begin, padded_end, padded_strides,
      rewriter.getI32IntegerAttr(begin_mask),
      rewriter.getI32IntegerAttr(end_mask),
      rewriter.getI32IntegerAttr(
          tf_strided_slice_op.ellipsis_mask().getSExtValue()),
      rewriter.getI32IntegerAttr(
          tf_strided_slice_op.new_axis_mask().getSExtValue()),
      rewriter.getI32IntegerAttr(
          tf_strided_slice_op.shrink_axis_mask().getSExtValue()));
  return matchSuccess();
}
// Lowers tf.Unpack to tfl.unpack, forwarding num and the (possibly negative)
// axis.
PatternMatchResult ConvertTFUnpackOp::matchAndRewrite(
    Operation* op, PatternRewriter& rewriter) const {
  auto tf_unpack_op = cast<TF::UnpackOp>(op);

  auto* input = tf_unpack_op.value();
  auto output_types = functional::map([](Value* v) { return v->getType(); },
                                      tf_unpack_op.output());
  auto num = rewriter.getI32IntegerAttr(tf_unpack_op.num());
  // Axis can be negative.
  auto axis = rewriter.getI32IntegerAttr(tf_unpack_op.axis().getSExtValue());

  rewriter.replaceOpWithNewOp<UnpackOp>(op, output_types, input, num, axis);
  return matchSuccess();
}
// MatrixDiagV3 is MatrixDiagV2 with an alignment attribute. This attribute
// only has effects when processing multiple diagonals. Since TFLite converts
// MatrixDiagV{2,3} to MatrixDiag, which only takes single-diagonal inputs, we
// can safely ignore this V3 attribute.
// We can't pass `rewriter` by reference because clang-tidy will want it to be
// constant (`const PatternRewriter& rewriter`). If we do that, we won't be able
// to call `rewriter::replaceOpWithNewOp`, which is not a const member function.
//
// Returns true (after rewriting the op to a TFLite MatrixDiagOp) only when all
// optional inputs carry their default values; otherwise leaves the op intact
// and returns false so the caller can report a match failure.
template <typename MatrixDiagV2OrV3Op>
bool ConvertTFMatrixDiagV2orV3(Operation* op, PatternRewriter* rewriter) {
  auto tf_matrix_diag_v2_or_v3_op = cast<MatrixDiagV2OrV3Op>(op);

  // MatrixDiagV2/V3 take exactly 5 operands (diagonal, k, num_rows, num_cols,
  // padding_value); anything else is not convertible.
  if (tf_matrix_diag_v2_or_v3_op.getNumOperands() != 5) return false;

  auto input = tf_matrix_diag_v2_or_v3_op.diagonal();
  auto output_type = tf_matrix_diag_v2_or_v3_op.output()->getType();

  // Extract k constant tensor and check value = 0.
  ElementsAttr k;
  if (!matchPattern(tf_matrix_diag_v2_or_v3_op.k(), m_Constant(&k)))
    return false;
  if (ExtractSingleElementAsInteger(k).getInt() != 0) return false;

  // Extract num_rows constant tensor and check value = -1.
  ElementsAttr num_rows;
  if (!matchPattern(tf_matrix_diag_v2_or_v3_op.num_rows(),
                    m_Constant(&num_rows)))
    return false;
  if (ExtractSingleElementAsInteger(num_rows).getInt() != -1) return false;

  // Extract num_cols constant tensor and check value = -1.
  ElementsAttr num_cols;
  if (!matchPattern(tf_matrix_diag_v2_or_v3_op.num_cols(),
                    m_Constant(&num_cols)))
    return false;
  if (ExtractSingleElementAsInteger(num_cols).getInt() != -1) return false;

  // Verify padding_value is an integer tensor with all 0s.
  ElementsAttr padding_value;
  if (!matchPattern(tf_matrix_diag_v2_or_v3_op.padding_value(),
                    m_Constant(&padding_value)))
    return false;
  for (auto value : padding_value.getValues<APInt>()) {
    if (value != 0) return false;
  }

  rewriter->replaceOpWithNewOp<MatrixDiagOp>(op, output_type, input);
  return true;
}
PatternMatchResult ConvertTFMatrixDiagV2Op::matchAndRewrite(
    Operation* op, PatternRewriter& rewriter) const {
  // Delegate to the shared V2/V3 helper; it rewrites the op on success.
  return ConvertTFMatrixDiagV2orV3<TF::MatrixDiagV2Op>(op, &rewriter)
             ? matchSuccess()
             : matchFailure();
}
PatternMatchResult ConvertTFMatrixDiagV3Op::matchAndRewrite(
    Operation* op, PatternRewriter& rewriter) const {
  // Delegate to the shared V2/V3 helper; it rewrites the op on success.
  return ConvertTFMatrixDiagV2orV3<TF::MatrixDiagV3Op>(op, &rewriter)
             ? matchSuccess()
             : matchFailure();
}
// Entry point of the pass: collects all TF->TFLite conversion patterns and
// applies them to the current function until no more patterns match.
void LegalizeTF::runOnFunction() {
  OwningRewritePatternList patterns;
  auto* ctx = &getContext();
  auto func = getFunction();

  // Add the generated patterns to the list.
  populateWithGenerated(ctx, &patterns);
  // Hand-written conversions (defined above) that need C++ logic beyond what
  // declarative patterns can express.
  patterns
      .insert<ConvertTFConcatOp, ConvertTFConcatV2Op, ConvertTFMatMulOp,
              ConvertTFMatrixDiagV2Op, ConvertTFMatrixDiagV3Op, ConvertTFPackOp,
              ConvertTFReshapeOp, ConvertTFSplitOp, ConvertTFSplitVOp,
              ConvertTFStridedSliceOp, ConvertTFUnpackOp>(ctx);
  applyPatternsGreedily(func, patterns);
}
} // namespace
// Creates an instance of the TensorFlow Lite dialect LegalizeTF pass.
std::unique_ptr<OpPassBase<FuncOp>> CreateLegalizeTFPass() {
  return std::make_unique<LegalizeTF>();
}

// Registers the pass under "tfl-legalize-tf" so it can be invoked from the
// command line / pass pipelines.
static PassRegistration<LegalizeTF> pass(
    "tfl-legalize-tf", "Legalize from TensorFlow to TensorFlow Lite dialect");
} // namespace TFL
} // namespace mlir
| apache-2.0 |
nishantmonu51/druid | server/src/main/java/org/apache/druid/segment/realtime/appenderator/AppenderatorsManager.java | 6464 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.segment.realtime.appenderator;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.druid.client.cache.Cache;
import org.apache.druid.client.cache.CacheConfig;
import org.apache.druid.client.cache.CachePopulatorStats;
import org.apache.druid.java.util.emitter.service.ServiceEmitter;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryProcessingPool;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.query.SegmentDescriptor;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.IndexMerger;
import org.apache.druid.segment.incremental.ParseExceptionHandler;
import org.apache.druid.segment.incremental.RowIngestionMeters;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.join.JoinableFactory;
import org.apache.druid.segment.loading.DataSegmentPusher;
import org.apache.druid.segment.realtime.FireDepartmentMetrics;
import org.apache.druid.server.coordination.DataSegmentAnnouncer;
import org.joda.time.Interval;
/**
 * This interface defines entities that create and manage potentially multiple {@link Appenderator} instances.
 *
 * The AppenderatorsManager should be used by tasks running in a Peon or a CliIndexer process when it needs
 * an Appenderator.
 *
 * The AppenderatorsManager also provides methods for creating {@link QueryRunner} instances that read the data
 * held by the Appenderators created through the AppenderatorsManager.
 *
 * In later updates, this interface will be used to manage memory usage across multiple Appenderators,
 * useful for the Indexer where all Tasks run in the same process.
 *
 * The methods on AppenderatorsManager can be called by multiple threads.
 *
 * This class provides similar functionality to the {@link org.apache.druid.server.coordination.ServerManager} and
 * {@link org.apache.druid.server.SegmentManager} on the Historical processes.
 */
public interface AppenderatorsManager
{
  /**
   * Creates an Appenderator suited for realtime ingestion. Note that this method's parameters include objects
   * used for query processing.
   */
  Appenderator createRealtimeAppenderatorForTask(
      String taskId,
      DataSchema schema,
      AppenderatorConfig config,
      FireDepartmentMetrics metrics,
      DataSegmentPusher dataSegmentPusher,
      ObjectMapper objectMapper,
      IndexIO indexIO,
      IndexMerger indexMerger,
      QueryRunnerFactoryConglomerate conglomerate,
      DataSegmentAnnouncer segmentAnnouncer,
      ServiceEmitter emitter,
      QueryProcessingPool queryProcessingPool,
      JoinableFactory joinableFactory,
      Cache cache,
      CacheConfig cacheConfig,
      CachePopulatorStats cachePopulatorStats,
      RowIngestionMeters rowIngestionMeters,
      ParseExceptionHandler parseExceptionHandler
  );

  /**
   * Creates an Appenderator suited for batch ingestion.
   */
  Appenderator createOpenSegmentsOfflineAppenderatorForTask(
      String taskId,
      DataSchema schema,
      AppenderatorConfig config,
      FireDepartmentMetrics metrics,
      DataSegmentPusher dataSegmentPusher,
      ObjectMapper objectMapper,
      IndexIO indexIO,
      IndexMerger indexMerger,
      RowIngestionMeters rowIngestionMeters,
      ParseExceptionHandler parseExceptionHandler
  );

  /**
   * Creates an Appenderator suited for batch ingestion.
   * NOTE(review): presumably differs from {@link #createOpenSegmentsOfflineAppenderatorForTask} in how segments
   * are kept open while indexing — confirm against the implementations before relying on this.
   */
  Appenderator createClosedSegmentsOfflineAppenderatorForTask(
      String taskId,
      DataSchema schema,
      AppenderatorConfig config,
      FireDepartmentMetrics metrics,
      DataSegmentPusher dataSegmentPusher,
      ObjectMapper objectMapper,
      IndexIO indexIO,
      IndexMerger indexMerger,
      RowIngestionMeters rowIngestionMeters,
      ParseExceptionHandler parseExceptionHandler
  );

  /**
   * Creates an Appenderator suited for batch ingestion (no query processing objects are required).
   */
  Appenderator createOfflineAppenderatorForTask(
      String taskId,
      DataSchema schema,
      AppenderatorConfig config,
      FireDepartmentMetrics metrics,
      DataSegmentPusher dataSegmentPusher,
      ObjectMapper objectMapper,
      IndexIO indexIO,
      IndexMerger indexMerger,
      RowIngestionMeters rowIngestionMeters,
      ParseExceptionHandler parseExceptionHandler
  );

  /**
   * Removes any internal Appenderator-tracking state associated with the provided taskId.
   *
   * This method should be called when a task is finished using its Appenderators that were previously created by
   * createRealtimeAppenderatorForTask or createOfflineAppenderatorForTask.
   *
   * The method can be called by the entity managing Tasks when the Tasks finish, such as ThreadingTaskRunner.
   */
  void removeAppenderatorsForTask(String taskId, String dataSource);

  /**
   * Returns a query runner for the given intervals over the Appenderators managed by this AppenderatorsManager.
   */
  <T> QueryRunner<T> getQueryRunnerForIntervals(
      Query<T> query,
      Iterable<Interval> intervals
  );

  /**
   * Returns a query runner for the given segment specs over the Appenderators managed by this AppenderatorsManager.
   */
  <T> QueryRunner<T> getQueryRunnerForSegments(
      Query<T> query,
      Iterable<SegmentDescriptor> specs
  );

  /**
   * As AppenderatorsManager implementations are service dependent (i.e., Peons and Indexers have different
   * implementations), this method allows Tasks to know whether they should announce themselves as nodes and
   * segment servers to the rest of the cluster.
   *
   * Only Tasks running in Peons (i.e., as separate processes) should make their own individual node announcements.
   */
  boolean shouldTaskMakeNodeAnnouncements();

  /**
   * Shut down the AppenderatorsManager.
   */
  void shutdown();
}
| apache-2.0 |
nakomis/cassandra | tools/stress/src/org/apache/cassandra/stress/StressMetrics.java | 7180 | package org.apache.cassandra.stress;
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
import java.io.PrintStream;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ThreadFactory;
import org.apache.commons.lang3.time.DurationFormatUtils;
import org.apache.cassandra.concurrent.NamedThreadFactory;
import org.apache.cassandra.stress.util.Timing;
import org.apache.cassandra.stress.util.TimingInterval;
import org.apache.cassandra.stress.util.Uncertainty;
/**
 * Periodically samples and prints throughput/latency statistics for a running stress
 * session, and feeds the observed op rate into an {@link Uncertainty} estimator so
 * callers can block until the measured rate has converged.
 *
 * A single background thread does the sampling; {@link #stop()} waits for it to print
 * a final row before returning.
 */
public class StressMetrics
{

    private static final ThreadFactory tf = new NamedThreadFactory("StressMetrics");

    private final PrintStream output;
    // Background sampling thread; created in the constructor, started by start().
    private final Thread thread;
    // Flags are volatile: written by the controlling thread, read by the sampler.
    private volatile boolean stop = false;
    private volatile boolean cancelled = false;
    private final Uncertainty opRateUncertainty = new Uncertainty();
    // Released by the sampler thread once it has finished its final update.
    private final CountDownLatch stopped = new CountDownLatch(1);
    private final Timing timing = new Timing();

    public StressMetrics(PrintStream output, final long logIntervalMillis)
    {
        this.output = output;
        printHeader("", output);
        thread = tf.newThread(new Runnable()
        {
            @Override
            public void run()
            {
                timing.start();
                try {
                    while (!stop)
                    {
                        try
                        {
                            // Sleep until the next interval boundary, measured from the
                            // end of the last recorded interval.
                            long sleep = timing.getHistory().endMillis() + logIntervalMillis - System.currentTimeMillis();
                            if (sleep < logIntervalMillis >>> 3)
                                // if had a major hiccup, sleep full interval
                                Thread.sleep(logIntervalMillis);
                            else
                                Thread.sleep(sleep);
                            update();
                        } catch (InterruptedException e)
                        {
                            break;
                        }
                    }
                    // Print one final row covering whatever accumulated since the last tick.
                    update();
                }
                catch (InterruptedException e)
                {}
                catch (Exception e)
                {
                    // Unexpected failure: mark the run cancelled and surface the trace.
                    cancel();
                    e.printStackTrace(StressMetrics.this.output);
                }
                finally
                {
                    stopped.countDown();
                }
            }
        });
    }

    /** Starts the background sampling thread. */
    public void start()
    {
        thread.start();
    }

    /** Blocks until the op rate uncertainty drops below the target (bounded by the measurement limits). */
    public void waitUntilConverges(double targetUncertainty, int minMeasurements, int maxMeasurements) throws InterruptedException
    {
        opRateUncertainty.await(targetUncertainty, minMeasurements, maxMeasurements);
    }

    /** Aborts sampling immediately and wakes any threads waiting on convergence. */
    public void cancel()
    {
        cancelled = true;
        stop = true;
        thread.interrupt();
        opRateUncertainty.wakeAll();
    }

    /** Requests a clean stop and waits for the sampler to print its final row. */
    public void stop() throws InterruptedException
    {
        stop = true;
        thread.interrupt();
        stopped.await();
    }

    // Snapshots the current interval, prints it, and updates the rate uncertainty.
    private void update() throws InterruptedException
    {
        TimingInterval interval = timing.snapInterval();
        printRow("", interval, timing.getHistory(), opRateUncertainty, output);
        opRateUncertainty.update(interval.adjustedOpRate());
    }


    // PRINT FORMATTING

    // Column layouts: HEADFORMAT and ROWFORMAT must stay in sync field-for-field.
    public static final String HEADFORMAT = "%-10s,%8s,%8s,%8s,%8s,%8s,%8s,%8s,%8s,%7s,%9s";
    public static final String ROWFORMAT = "%-10d,%8.0f,%8.0f,%8.1f,%8.1f,%8.1f,%8.1f,%8.1f,%8.1f,%7.1f,%9.5f";

    private static void printHeader(String prefix, PrintStream output)
    {
        output.println(prefix + String.format(HEADFORMAT, "ops","op/s", "key/s","mean","med",".95",".99",".999","max","time","stderr"));
    }

    private static void printRow(String prefix, TimingInterval interval, TimingInterval total, Uncertainty opRateUncertainty, PrintStream output)
    {
        output.println(prefix + String.format(ROWFORMAT,
                total.operationCount,
                interval.realOpRate(),
                interval.keyRate(),
                interval.meanLatency(),
                interval.medianLatency(),
                interval.rankLatency(0.95f),
                interval.rankLatency(0.99f),
                interval.rankLatency(0.999f),
                interval.maxLatency(),
                total.runTime() / 1000f,
                opRateUncertainty.getUncertainty()));
    }

    /** Prints a human-readable summary of the whole run to the configured output. */
    public void summarise()
    {
        output.println("\n");
        output.println("Results:");
        TimingInterval history = timing.getHistory();
        output.println(String.format("real op rate : %.0f", history.realOpRate()));
        output.println(String.format("adjusted op rate stderr : %.0f", opRateUncertainty.getUncertainty()));
        output.println(String.format("key rate : %.0f", history.keyRate()));
        output.println(String.format("latency mean : %.1f", history.meanLatency()));
        output.println(String.format("latency median : %.1f", history.medianLatency()));
        output.println(String.format("latency 95th percentile : %.1f", history.rankLatency(.95f)));
        output.println(String.format("latency 99th percentile : %.1f", history.rankLatency(0.99f)));
        output.println(String.format("latency 99.9th percentile : %.1f", history.rankLatency(0.999f)));
        output.println(String.format("latency max : %.1f", history.maxLatency()));
        output.println("Total operation time : " + DurationFormatUtils.formatDuration(
                history.runTime(), "HH:mm:ss", true));
    }

    /**
     * Prints one summary row per StressMetrics instance, left-padded with its id so
     * multiple runs can be compared side by side.
     */
    public static final void summarise(List<String> ids, List<StressMetrics> summarise, PrintStream out)
    {
        // Pad all ids to the width of the longest one.
        int idLen = 0;
        for (String id : ids)
            idLen = Math.max(id.length(), idLen);
        String formatstr = "%" + idLen + "s, ";
        printHeader(String.format(formatstr, "id"), out);
        for (int i = 0 ; i < ids.size() ; i++)
            printRow(String.format(formatstr, ids.get(i)),
                    summarise.get(i).timing.getHistory(),
                    summarise.get(i).timing.getHistory(),
                    summarise.get(i).opRateUncertainty,
                    out
            );
    }

    public Timing getTiming()
    {
        return timing;
    }

    public boolean wasCancelled()
    {
        return cancelled;
    }

}
| apache-2.0 |
sibok666/flowable-engine | modules/flowable-form-engine/src/main/java/org/flowable/form/engine/impl/io/ResourceStreamSource.java | 1411 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.form.engine.impl.io;
import java.io.BufferedInputStream;
import java.io.InputStream;
import org.flowable.engine.common.api.FlowableException;
import org.flowable.engine.common.impl.util.io.StreamSource;
/**
 * {@link StreamSource} that reads its content from a classpath resource, resolved
 * through this class's own class loader.
 *
 * @author Joram Barrez
 */
public class ResourceStreamSource implements StreamSource {

    // Classpath location of the resource to read; set once at construction.
    protected final String resource;

    public ResourceStreamSource(String resource) {
        this.resource = resource;
    }

    /**
     * Opens the resource as a buffered stream.
     *
     * @throws FlowableException if no resource exists at the configured location
     */
    public InputStream getInputStream() {
        InputStream inputStream = this.getClass().getClassLoader().getResourceAsStream(resource);
        if (inputStream == null) {
            throw new FlowableException("resource '" + resource + "' doesn't exist");
        }
        // Wrap in a buffer so downstream parsers can do small reads cheaply.
        return new BufferedInputStream(inputStream);
    }

    @Override
    public String toString() {
        return "Resource[" + resource + "]";
    }
}
| apache-2.0 |
grzesuav/gjpf-core | main/src/main/java/gov/nasa/jpf/jvm/bytecode/CALOAD.java | 1470 | /*
* Copyright (C) 2014, United States Government, as represented by the
* Administrator of the National Aeronautics and Space Administration.
* All rights reserved.
*
* The Java Pathfinder core (jpf-core) platform is licensed under the
* Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gov.nasa.jpf.jvm.bytecode;
import gov.nasa.jpf.vm.ArrayIndexOutOfBoundsExecutiveException;
import gov.nasa.jpf.vm.ElementInfo;
import gov.nasa.jpf.vm.StackFrame;
/**
 * Load char from array
 * ..., arrayref, index => ..., value
 */
public class CALOAD extends ArrayLoadInstruction {

    /**
     * Pushes the char element at {@code index} of array {@code e} onto the operand
     * stack of {@code frame}.
     *
     * @throws ArrayIndexOutOfBoundsExecutiveException if {@code index} is out of bounds
     */
    @Override
    protected void push (StackFrame frame, ElementInfo e, int index) throws ArrayIndexOutOfBoundsExecutiveException {
        // Bounds check first so the exception is raised before the stack is modified.
        e.checkArrayBounds(index);
        frame.push( e.getCharElement(index), isReference());
    }

    /** Returns the JVM opcode for caload (0x34). */
    @Override
    public int getByteCode () {
        return 0x34;
    }

    @Override
    public void accept(JVMInstructionVisitor insVisitor) {
        insVisitor.visit(this);
    }
}
| apache-2.0 |
apache/curator | curator-x-discovery/src/main/java/org/apache/curator/x/discovery/ServiceProviderBuilder.java | 3143 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.curator.x.discovery;
import org.apache.curator.x.discovery.strategies.RoundRobinStrategy;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ThreadFactory;
/**
 * Builder for {@link ServiceProvider} instances. All setters return {@code this}
 * for chaining; call {@link #build()} to produce the provider.
 */
public interface ServiceProviderBuilder<T>
{
    /**
     * Allocate a new service provider based on the current builder settings
     *
     * @return provider
     */
    ServiceProvider<T> build();

    /**
     * required - set the name of the service to be provided
     *
     * @param serviceName the name of the service
     * @return this
     */
    ServiceProviderBuilder<T> serviceName(String serviceName);

    /**
     * optional - set the provider strategy. The default is {@link RoundRobinStrategy}
     *
     * @param providerStrategy strategy to use
     * @return this
     */
    ServiceProviderBuilder<T> providerStrategy(ProviderStrategy<T> providerStrategy);

    /**
     * optional - the thread factory to use for creating internal threads. The specified ThreadFactory overrides
     * any prior ThreadFactory or ClosableExecutorService set on the ServiceProviderBuilder
     *
     * @param threadFactory factory to use
     * @return this
     * @deprecated use {@link #executorService(ExecutorService)} instead
     */
    @Deprecated
    ServiceProviderBuilder<T> threadFactory(ThreadFactory threadFactory);

    /**
     * Set the down instance policy
     *
     * @param downInstancePolicy new policy
     * @return this
     */
    ServiceProviderBuilder<T> downInstancePolicy(DownInstancePolicy downInstancePolicy);

    /**
     * Add an instance filter. NOTE: this does not remove previously added filters. i.e.
     * a list is created of all added filters. Filters are called in the order they were
     * added.
     *
     * @param filter filter to add
     * @return this
     */
    ServiceProviderBuilder<T> additionalFilter(InstanceFilter<T> filter);

    /**
     * Optional ExecutorService to use for the cache's background thread. The specified ExecutorService
     * will be wrapped in a CloseableExecutorService and overrides any prior ThreadFactory or CloseableExecutorService
     * set on the ServiceProviderBuilder.
     *
     * @param executorService executor service
     * @return this
     */
    ServiceProviderBuilder<T> executorService(ExecutorService executorService);
}
| apache-2.0 |
spandanb/horizon | openstack_dashboard/dashboards/admin/domains/urls.py | 1044 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf.urls import patterns # noqa
from django.conf.urls import url # noqa
from openstack_dashboard.dashboards.admin.domains import views
# URL routing for the admin "domains" panel: index (list), create, and
# per-domain update views.
urlpatterns = patterns('',
    url(r'^$', views.IndexView.as_view(), name='index'),
    url(r'^create$', views.CreateDomainView.as_view(), name='create'),
    # domain_id captures any characters except a forward slash
    url(r'^(?P<domain_id>[^/]+)/update/$',
        views.UpdateDomainView.as_view(), name='update')
)
| apache-2.0 |
pombredanne/SourceForge-Allura | ForgeDiscussion/forgediscussion/tests/test_forum_roles.py | 927 | from pylons import c
from alluratest.controller import setup_basic_test, setup_global_objects
from allura import model as M
from allura.lib import security
from allura.tests import decorators as td
def setUp():
    # Module-level nose setup: bootstrap the test environment and register
    # the global objects (e.g. c.app) that the tests below rely on.
    setup_basic_test()
    setup_global_objects()
@td.with_discussion
def test_role_assignments():
    """Verify per-role permission grants on the discussion app."""
    # Resolve the three principals whose access we compare, in a fixed order.
    admin = M.User.by_username('test-admin')
    user = M.User.by_username('test-user')
    anon = M.User.anonymous()
    principals = (admin, user, anon)

    def check_access(perm):
        # Evaluate the permission predicate once per principal, in
        # (admin, user, anon) order.
        pred = security.has_access(c.app, perm)
        return tuple(pred(user=p) for p in principals)

    assert check_access('configure') == (True, False, False)
    assert check_access('read') == (True, True, True)
    assert check_access('unmoderated_post') == (True, True, False)
    assert check_access('post') == (True, True, False)
    assert check_access('moderate') == (True, False, False)
    assert check_access('admin') == (True, False, False)
| apache-2.0 |
remi/getting-started-ruby | optional-container-engine/structured_data/active_record/app/models/book.rb | 1832 | # Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# A book record whose cover image lives in a Cloud Storage bucket and whose
# missing metadata is filled in asynchronously by LookupBookDetailsJob.
class Book < ActiveRecord::Base
  validates :title, presence: true

  # Uploaded cover image file; not persisted directly — stored in the bucket
  # by the callbacks below, with the public URL saved in image_url.
  attr_accessor :cover_image

  # Keep the stored image in sync with the record's lifecycle.
  after_create :upload_image, if: :cover_image
  before_update :update_image, if: :cover_image
  before_destroy :delete_image, if: :image_url

  # [START enqueue_job]
  after_create :lookup_book_details

  private

  # Enqueue a background lookup when any metadata field is missing.
  def lookup_book_details
    if [author, description, published_on, image_url].any? {|attr| attr.blank? }
      LookupBookDetailsJob.perform_later self
    end
  end
  # [END enqueue_job]

  # Upload the cover image to the storage bucket and record its public URL.
  # update_columns skips validations/callbacks to avoid re-triggering hooks.
  def upload_image
    image = StorageBucket.files.new(
      key: "cover_images/#{id}/#{cover_image.original_filename}",
      body: cover_image.read,
      public: true
    )
    image.save
    update_columns image_url: image.public_url
  end

  # Delete the stored image, but only when the URL points at our own bucket.
  def delete_image
    bucket_name = StorageBucket.key
    image_uri = URI.parse image_url
    if image_uri.host == "#{bucket_name}.storage.googleapis.com"
      # Remove leading forward slash from image path
      # The result will be the image key, eg. "cover_images/:id/:filename"
      image_key = image_uri.path.sub("/", "")
      image = StorageBucket.files.new key: image_key
      image.destroy
    end
  end

  # Replace any previously stored image with the newly uploaded one.
  def update_image
    delete_image if image_url?
    upload_image
  end
end
| apache-2.0 |
DEVSENSE/Phalanger | Testing/Tests/Arrays/array_diff_intersect2.php | 1596 | [expect php]
[file]
<?
// Three-way key comparator: 0 on strict equality, otherwise the sign of ($a - $b).
function key_compare_func($a, $b)
{
    if ($a === $b) {
        return 0;
    } elseif ($a > $b) {
        return 1;
    } else {
        return -1;
    }
}
// Wrapper holding a single comparison value; used to exercise the u*-family
// array functions with object elements and method-array callbacks.
class cr
{
    // Only reachable through the comparator methods below.
    private $priv_member;

    // PHP4-style constructor (same name as the class) — kept deliberately.
    function cr($val)
    {
        $this->priv_member = $val;
    }

    // Three-way comparison of two cr instances by their private members.
    function comp_func_cr($a, $b)
    {
        if ($a->priv_member === $b->priv_member) return 0;
        return ($a->priv_member > $b->priv_member)? 1:-1;
    }

    // Three-way comparison of two raw keys.
    function comp_func_key($a, $b)
    {
        if ($a === $b) return 0;
        return ($a > $b)? 1:-1;
    }
}
// Object fixtures: mixed string/int keys, cr-wrapped integer values.
$a = array("0.1" => new cr(9), "0.5" => new cr(12), 0 => new cr(23), 1=> new cr(4), 2 => new cr(-15),);
$b = array("0.2" => new cr(9), "0.5" => new cr(22), 0 => new cr(3), 1=> new cr(4), 2 => new cr(-15),);
// Scalar fixtures for the *_uassoc variants with a plain key comparator.
$array1 = array("a" => "green", "b" => "brown", "c" => "blue", "red");
$array2 = array("a" => "green", "yellow", "red");

// Difference family: by value, by value+key, and by value+custom key comparator.
print_r(array_udiff($a, $b, array("cr", "comp_func_cr")));
print_r(array_udiff_assoc($a, $b, array("cr", "comp_func_cr")));
print_r(array_udiff_uassoc($a, $b, array("cr", "comp_func_cr"), array("cr", "comp_func_key")));
print_r(array_diff_uassoc($array1, $array2, "key_compare_func"));
print("------------------------------------\n");
// Intersection family: same comparator combinations as above.
print_r(array_uintersect($a, $b, array("cr", "comp_func_cr")));
print_r(array_uintersect_assoc($a, $b, array("cr", "comp_func_cr")));
print_r(array_uintersect_uassoc($a, $b, array("cr", "comp_func_cr"), array("cr", "comp_func_key")));
print_r(array_intersect_uassoc($array1, $array2, "key_compare_func"));
?> | apache-2.0 |
eribeiro/kafka | core/src/main/scala/kafka/log/TimeIndex.scala | 9359 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.log
import java.io.File
import java.nio.ByteBuffer
import kafka.common.InvalidOffsetException
import kafka.utils.CoreUtils._
import kafka.utils.Logging
import org.apache.kafka.common.record.Record
/**
* An index that maps from the timestamp to the logical offsets of the messages in a segment. This index might be
* sparse, i.e. it may not hold an entry for all the messages in the segment.
*
* The index is stored in a file that is preallocated to hold a fixed maximum amount of 12-byte time index entries.
* The file format is a series of time index entries. The physical format is a 8 bytes timestamp and a 4 bytes "relative"
* offset used in the [[OffsetIndex]]. A time index entry (TIMESTAMP, OFFSET) means that the biggest timestamp seen
* before OFFSET is TIMESTAMP. i.e. Any message whose timestamp is greater than TIMESTAMP must come after OFFSET.
*
* All external APIs translate from relative offsets to full offsets, so users of this class do not interact with the internal
* storage format.
*
* The timestamps in the same time index file are guaranteed to be monotonically increasing.
*
* The index support timestamp lookup for a memory map of this file. The lookup is done using a binary search to find
* the offset of the message whose indexed timestamp is closest but smaller or equals to the target timestamp.
*
* Time index files can be opened in two ways: either as an empty, mutable index that allows appends or
* an immutable read-only index file that has previously been populated. The makeReadOnly method will turn a mutable file into an
* immutable one and truncate off any extra bytes. This is done when the index file is rolled over.
*
* No attempt is made to checksum the contents of this file, in the event of a crash it is rebuilt.
*
*/
class TimeIndex(file: File,
baseOffset: Long,
maxIndexSize: Int = -1)
extends AbstractIndex[Long, Long](file, baseOffset, maxIndexSize) with Logging {
// Each entry is an 8 byte timestamp followed by a 4 byte relative offset.
override def entrySize = 12

// We override the full check to reserve the last time index entry slot for the on roll call.
override def isFull: Boolean = entries >= maxEntries - 1

// Raw accessors for the nth entry of the given buffer.
private def timestamp(buffer: ByteBuffer, n: Int): Long = buffer.getLong(n * entrySize)
private def relativeOffset(buffer: ByteBuffer, n: Int): Int = buffer.getInt(n * entrySize + 8)
/**
 * The last entry in the index, or (NO_TIMESTAMP, baseOffset) when the index is empty.
 */
def lastEntry: TimestampOffset = {
  inLock(lock) {
    _entries match {
      case 0 => TimestampOffset(Record.NO_TIMESTAMP, baseOffset)
      case s => parseEntry(mmap, s - 1).asInstanceOf[TimestampOffset]
    }
  }
}
/**
 * Get the nth timestamp mapping from the time index
 * @param n The entry number in the time index
 * @return The timestamp/offset pair at that entry
 * @throws IllegalArgumentException if n is not smaller than the number of entries
 */
def entry(n: Int): TimestampOffset = {
  maybeLock(lock) {
    if(n >= _entries)
      throw new IllegalArgumentException("Attempt to fetch the %dth entry from a time index of size %d.".format(n, _entries))
    // Duplicate so this read gets its own buffer position/limit.
    val idx = mmap.duplicate
    TimestampOffset(timestamp(idx, n), relativeOffset(idx, n))
  }
}
// Materializes entry n, converting the stored relative offset back to a full
// offset by adding baseOffset.
override def parseEntry(buffer: ByteBuffer, n: Int): IndexEntry = {
  TimestampOffset(timestamp(buffer, n), baseOffset + relativeOffset(buffer, n))
}
/**
 * Attempt to append a time index entry to the time index.
 * The new entry is appended only if both the timestamp and offsets are greater than the last appended timestamp and
 * the last appended offset.
 *
 * @param timestamp The timestamp of the new time index entry
 * @param offset The offset of the new time index entry
 * @param skipFullCheck To skip checking whether the segment is full or not. We only skip the check when the segment
 *                      gets rolled or the segment is closed.
 * @throws InvalidOffsetException if the offset is smaller than the last appended offset
 * @throws IllegalStateException if the timestamp is smaller than the last appended timestamp
 */
def maybeAppend(timestamp: Long, offset: Long, skipFullCheck: Boolean = false) {
  inLock(lock) {
    if (!skipFullCheck)
      require(!isFull, "Attempt to append to a full time index (size = " + _entries + ").")
    // We do not throw exception when the offset equals to the offset of last entry. That means we are trying
    // to insert the same time index entry as the last entry.
    // If the timestamp index entry to be inserted is the same as the last entry, we simply ignore the insertion
    // because that could happen in the following two scenarios:
    // 1. An log segment is closed.
    // 2. LogSegment.onBecomeInactiveSegment() is called when an active log segment is rolled.
    // NOTE: messages fixed to describe the actual failing condition — these throw when the
    // new value is strictly *smaller* than the last appended one (equal values are allowed).
    if (_entries != 0 && offset < lastEntry.offset)
      throw new InvalidOffsetException("Attempt to append an offset (%d) to slot %d smaller than the last offset appended (%d) to %s."
        .format(offset, _entries, lastEntry.offset, file.getAbsolutePath))
    if (_entries != 0 && timestamp < lastEntry.timestamp)
      throw new IllegalStateException("Attempt to append a timestamp (%d) to slot %d smaller than the last timestamp appended (%d) to %s."
        .format(timestamp, _entries, lastEntry.timestamp, file.getAbsolutePath))
    // We only append to the time index when the timestamp is greater than the last inserted timestamp.
    // If all the messages are in message format v0, the timestamp will always be NoTimestamp. In that case, the time
    // index will be empty.
    if (timestamp > lastEntry.timestamp) {
      debug("Adding index entry %d => %d to %s.".format(timestamp, offset, file.getName))
      mmap.putLong(timestamp)
      mmap.putInt((offset - baseOffset).toInt)
      _entries += 1
      require(_entries * entrySize == mmap.position, _entries + " entries but file position in index is " + mmap.position + ".")
    }
  }
}
/**
 * Find the time index entry whose timestamp is less than or equal to the given timestamp.
 * If the target timestamp is smaller than the least timestamp in the time index, (NoTimestamp, baseOffset) is
 * returned.
 *
 * @param targetTimestamp The timestamp to look up.
 * @return The time index entry found.
 */
def lookup(targetTimestamp: Long): TimestampOffset = {
  maybeLock(lock) {
    // Duplicate so the binary search gets its own buffer position/limit.
    val idx = mmap.duplicate
    val slot = indexSlotFor(idx, targetTimestamp, IndexSearchType.KEY)
    if (slot == -1)
      // Target is before the first indexed timestamp.
      TimestampOffset(Record.NO_TIMESTAMP, baseOffset)
    else {
      val entry = parseEntry(idx, slot).asInstanceOf[TimestampOffset]
      TimestampOffset(entry.timestamp, entry.offset)
    }
  }
}

// Remove all entries from the index.
override def truncate() = truncateToEntries(0)
  /**
   * Remove all entries from the index which have an offset greater than or equal to the given offset.
   * Truncating to an offset larger than the largest in the index has no effect.
   */
  override def truncateTo(offset: Long) {
    inLock(lock) {
      val idx = mmap.duplicate
      // slot is the position of the greatest indexed offset <= the target (per the cases below).
      val slot = indexSlotFor(idx, offset, IndexSearchType.VALUE)

      /* There are 3 cases for choosing the new size
       * 1) if there is no entry in the index <= the offset, delete everything
       * 2) if there is an entry for this exact offset, delete it and everything larger than it
       * 3) if there is no entry for this offset, delete everything larger than the next smallest
       */
      val newEntries =
        if(slot < 0)
          0
        else if(relativeOffset(idx, slot) == offset - baseOffset)
          slot
        else
          slot + 1
      truncateToEntries(newEntries)
    }
  }
  /**
   * Truncates index to a known number of entries.
   */
  private def truncateToEntries(entries: Int) {
    inLock(lock) {
      _entries = entries
      // Moving the mmap position is what logically discards the trailing entries;
      // subsequent appends overwrite the truncated region.
      mmap.position(_entries * entrySize)
    }
  }
  /**
   * Sanity-check the index: for a non-empty index the last timestamp must not precede the
   * first one, the last offset must not precede the base offset, and the file length must be
   * a whole number of entries.
   */
  override def sanityCheck() {
    val entry = lastEntry
    val lastTimestamp = entry.timestamp
    val lastOffset = entry.offset
    // NOTE(review): the message says "no larger than", but the condition uses '>=',
    // so an index whose last timestamp equals the first is accepted.
    require(_entries == 0 || (lastTimestamp >= timestamp(mmap, 0)),
      s"Corrupt time index found, time index file (${file.getAbsolutePath}) has non-zero size but the last timestamp " +
          s"is $lastTimestamp which is no larger than the first timestamp ${timestamp(mmap, 0)}")
    require(_entries == 0 || lastOffset >= baseOffset,
      s"Corrupt time index found, time index file (${file.getAbsolutePath}) has non-zero size but the last offset " +
          s"is $lastOffset which is smaller than the first offset $baseOffset")
    val len = file.length()
    // NOTE(review): the message mentions "not positive", but only divisibility by
    // entrySize is actually checked here; a zero-length file passes.
    require(len % entrySize == 0,
      "Time index file " + file.getAbsolutePath + " is corrupt, found " + len +
          " bytes which is not positive or not a multiple of 12.")
  }
} | apache-2.0 |
k0l0ssus/codesicles | trustline-demo/src/test/java/com/hundredpercent/sample/TrustlineDemoApplicationTests.java | 349 | package com.hundredpercent.sample;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.test.context.junit4.SpringRunner;
// Spring Boot smoke test: boots the full application context for the test run.
@RunWith(SpringRunner.class)
@SpringBootTest
public class TrustlineDemoApplicationTests {
	/** Passes if the Spring application context starts; fails on any bean wiring error. */
	@Test
	public void contextLoads() {
	}
}
| apache-2.0 |
bgrozev/jitsi-meet | react/features/base/react/constants.js | 194 | // @flow
/**
 * Z-index for components that are to be rendered like an overlay, to be over
 * everything, such as modal-type of components, or dialogs. Components that
 * must stack above regular UI should use (or exceed) this value.
 */
export const OVERLAY_Z_INDEX = 1000;
| apache-2.0 |
Tycheo/coffeemud | com/planet_ink/coffee_mud/Abilities/Songs/Skill_MarkDisguise.java | 4682 | package com.planet_ink.coffee_mud.Abilities.Songs;
import com.planet_ink.coffee_mud.Abilities.Thief.Thief_Mark;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings("rawtypes")
/**
 * A variant of Skill_Disguise that disguises the caster as his current thief's
 * "mark" (the target tracked by the Thief_Mark effect). The disguise copies the
 * mark's physical and character statistics once the mark has been observed for
 * long enough.
 */
public class Skill_MarkDisguise extends Skill_Disguise
{
	@Override public String ID() { return "Skill_MarkDisguise"; }
	private final static String localizedName = CMLib.lang().L("Mark Disguise");
	@Override public String name() { return localizedName; }
	private static final String[] triggerStrings =I(new String[] {"MARKDISGUISE"});
	@Override public String[] triggerStrings(){return triggerStrings;}

	// Remembers the last mark disguised as, so the "!" argument can reuse it
	// even after the Thief_Mark effect changes or expires.
	private MOB mark=null;

	/** @return the MOB currently marked by the caster's Thief_Mark effect, or null if none. */
	public MOB getMark(MOB mob)
	{
		final Thief_Mark A=(Thief_Mark)mob.fetchEffect("Thief_Mark");
		if(A!=null)
			return A.mark;
		return null;
	}

	/** @return how many ticks the caster has held the current mark, or -1 if there is no mark. */
	public int getMarkTicks(MOB mob)
	{
		final Thief_Mark A=(Thief_Mark)mob.fetchEffect("Thief_Mark");
		if((A!=null)&&(A.mark!=null))
			return A.ticks;
		return -1;
	}

	/**
	 * Toggles the disguise: removes an existing (mark-)disguise if present, otherwise
	 * disguises the caster as his mark after at least 15 ticks of observation.
	 * Archons may not be impersonated.
	 */
	@Override
	public boolean invoke(MOB mob, Vector commands, Physical givenTarget, boolean auto, int asLevel)
	{
		// An active disguise (of either flavor) is removed instead of re-applied.
		Skill_Disguise A=(Skill_Disguise)mob.fetchEffect("Skill_Disguise");
		if(A==null)
			A=(Skill_Disguise)mob.fetchEffect("Skill_MarkDisguise");
		if(A!=null)
		{
			A.unInvoke();
			mob.tell(L("You remove your disguise."));
			return true;
		}
		MOB target=getMark(mob);
		// "!" reuses the previously cached mark rather than the current one.
		if(CMParms.combine(commands,0).equalsIgnoreCase("!"))
			target=mark;
		if(target==null)
		{
			mob.tell(L("You need to have marked someone before you can disguise yourself as him or her."));
			return false;
		}
		if(target.charStats().getClassLevel("Archon")>=0)
		{
			mob.tell(L("You may not disguise yourself as an Archon."));
			return false;
		}
		// A minimum of 15 ticks of observation applies only to the *current* mark;
		// a cached ("!") mark that is no longer the current mark skips this check.
		final int ticksWaited=getMarkTicks(mob);
		if(ticksWaited<15)
		{
			if(target==getMark(mob))
			{
				mob.tell(L("You'll need to observe your mark a little longer (@x1/15 ticks) before you can get the disguise right.",""+ticksWaited));
				return false;
			}
		}
		mark=target;
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;
		final boolean success=proficiencyCheck(mob,0,auto);
		if(success)
		{
			final CMMsg msg=CMClass.getMsg(mob,mob,null,CMMsg.MSG_DELICATE_HANDS_ACT|(auto?CMMsg.MASK_ALWAYS:0),L("<S-NAME> turn(s) away for a second."));
			if(mob.location().okMessage(mob,msg))
			{
				mob.location().send(mob,msg);
				beneficialAffect(mob,mob,asLevel,0);
				// NOTE(review): assumes beneficialAffect() just attached the effect;
				// A would be null here if the affect was blocked — confirm upstream.
				A=(Skill_Disguise)mob.fetchEffect("Skill_MarkDisguise");
				// Copy the mark's observable statistics into the disguise effect's value slots.
				A.values[0]=""+target.basePhyStats().weight();
				A.values[1]=""+target.basePhyStats().level();
				A.values[2]=target.charStats().genderName();
				A.values[3]=target.charStats().raceName();
				A.values[4]=""+target.phyStats().height();
				A.values[5]=target.name();
				A.values[6]=target.charStats().displayClassName();
				if(CMLib.flags().isGood(target))
					A.values[7]="good";
				else
				if(CMLib.flags().isEvil(target))
					A.values[7]="evil";
				A.makeLongLasting();
				mob.recoverCharStats();
				mob.recoverPhyStats();
				mob.location().recoverRoomStats();
			}
		}
		else
			return beneficialVisualFizzle(mob,null,L("<S-NAME> turn(s) away and then back, but look(s) the same."));
		return success;
	}
}
| apache-2.0 |
foryou2030/incubator-carbondata | core/src/main/java/org/apache/carbondata/core/datastorage/store/compression/type/UnCompressMaxMinFloat.java | 3505 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.carbondata.core.datastorage.store.compression.type;
import java.nio.ByteBuffer;
import org.apache.carbondata.common.logging.LogService;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.datastorage.store.compression.Compressor;
import org.apache.carbondata.core.datastorage.store.compression.SnappyCompression;
import org.apache.carbondata.core.datastorage.store.compression.ValueCompressonHolder.UnCompressValue;
import org.apache.carbondata.core.datastorage.store.dataholder.CarbonReadDataHolder;
import org.apache.carbondata.core.util.ValueCompressionUtil;
import org.apache.carbondata.core.util.ValueCompressionUtil.DataType;
/**
 * Uncompressed holder for float values in the max-min value compression scheme.
 * Each stored float is the delta from the column's max value; a stored zero is
 * the sentinel meaning "exactly the max value" (see {@link #getValues}).
 */
public class UnCompressMaxMinFloat implements UnCompressValue<float[]> {
  /**
   * Attribute for Carbon LOGGER
   */
  private static final LogService LOGGER =
      LogServiceFactory.getLogService(UnCompressMaxMinFloat.class.getName());
  /**
   * Snappy-based compressor used to pack the float array into bytes.
   */
  private static Compressor<float[]> floatCompressor =
      SnappyCompression.SnappyFloatCompression.INSTANCE;
  /**
   * The raw (uncompressed) float deltas held by this instance.
   */
  private float[] value;

  @Override public void setValue(float[] value) {
    // The parameter is already a float[]; the previous redundant cast was removed.
    this.value = value;
  }

  /**
   * Returns a fresh copy of this holder via clone(), or null if cloning is
   * unsupported (which is logged and should not normally happen).
   */
  @Override public UnCompressValue getNew() {
    try {
      return (UnCompressValue) clone();
    } catch (CloneNotSupportedException e) {
      LOGGER.error(e, e.getMessage());
    }
    return null;
  }

  /** Snappy-compresses the float values into a byte-backed holder. */
  @Override public UnCompressValue compress() {
    UnCompressMaxMinByte byte1 = new UnCompressMaxMinByte();
    byte1.setValue(floatCompressor.compress(value));
    return byte1;
  }

  /** Not supported for this holder: values are already uncompressed. */
  @Override public UnCompressValue uncompress(DataType dTypeVal) {
    return null;
  }

  @Override public byte[] getBackArrayData() {
    return ValueCompressionUtil.convertToBytes(value);
  }

  @Override public void setValueInBytes(byte[] value) {
    ByteBuffer buffer = ByteBuffer.wrap(value);
    this.value = ValueCompressionUtil.convertToFloatArray(buffer, value.length);
  }

  /**
   * @see ValueCompressonHolder.UnCompressValue#getCompressorObject()
   */
  @Override public UnCompressValue getCompressorObject() {
    return new UnCompressMaxMinByte();
  }

  /**
   * Reconstructs the original doubles: each stored delta d maps to (max - d),
   * with the stored value 0 acting as a sentinel for the max itself.
   */
  @Override public CarbonReadDataHolder getValues(int decimal, Object maxValueObject) {
    double maxValue = (double) maxValueObject;
    double[] vals = new double[value.length];
    CarbonReadDataHolder dataHolderVal = new CarbonReadDataHolder();
    for (int i = 0; i < vals.length; i++) {
      if (value[i] == 0) {
        vals[i] = maxValue;
      } else {
        vals[i] = maxValue - value[i];
      }
    }
    dataHolderVal.setReadableDoubleValues(vals);
    return dataHolderVal;
  }
}
| apache-2.0 |
1and1/camunda-bpm-platform | engine/src/test/java/org/camunda/bpm/engine/test/concurrency/CompetingJobAcquisitionTest.java | 2920 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.concurrency;
import java.util.logging.Logger;
import org.camunda.bpm.engine.OptimisticLockingException;
import org.camunda.bpm.engine.impl.cmd.AcquireJobsCmd;
import org.camunda.bpm.engine.impl.jobexecutor.AcquiredJobs;
import org.camunda.bpm.engine.impl.jobexecutor.JobExecutor;
import org.camunda.bpm.engine.impl.test.PluggableProcessEngineTestCase;
import org.camunda.bpm.engine.test.Deployment;
/**
* @author Tom Baeyens
*/
/**
 * Verifies optimistic locking during concurrent job acquisition: when two
 * threads acquire the same jobs, the second commit must fail with an
 * {@link OptimisticLockingException}.
 *
 * @author Tom Baeyens
 */
public class CompetingJobAcquisitionTest extends PluggableProcessEngineTestCase {

  // Fixed copy-paste bug: the logger was previously created for CompetingSignalsTest.
  private static Logger log = Logger.getLogger(CompetingJobAcquisitionTest.class.getName());

  Thread testThread = Thread.currentThread();
  static ControllableThread activeThread;
  static String jobId;

  /** Runs a controlled AcquireJobsCmd and records any optimistic-locking failure. */
  public class JobAcquisitionThread extends ControllableThread {
    OptimisticLockingException exception;

    @Override
    public synchronized void startAndWaitUntilControlIsReturned() {
      activeThread = this;
      super.startAndWaitUntilControlIsReturned();
    }

    public void run() {
      try {
        JobExecutor jobExecutor = processEngineConfiguration.getJobExecutor();
        // The command result is not needed; only whether the commit throws matters.
        processEngineConfiguration
          .getCommandExecutorTxRequired()
          .execute(new ControlledCommand(activeThread, new AcquireJobsCmd(jobExecutor)));

      } catch (OptimisticLockingException e) {
        this.exception = e;
      }
      log.fine(getName()+" ends");
    }
  }

  @Deployment
  public void testCompetingJobAcquisitions() throws Exception {
    runtimeService.startProcessInstanceByKey("CompetingJobAcquisitionProcess");

    log.fine("test thread starts thread one");
    JobAcquisitionThread threadOne = new JobAcquisitionThread();
    threadOne.startAndWaitUntilControlIsReturned();

    log.fine("test thread continues to start thread two");
    JobAcquisitionThread threadTwo = new JobAcquisitionThread();
    threadTwo.startAndWaitUntilControlIsReturned();

    log.fine("test thread notifies thread 1");
    threadOne.proceedAndWaitTillDone();
    // First acquisition commits cleanly.
    assertNull(threadOne.exception);

    log.fine("test thread notifies thread 2");
    threadTwo.proceedAndWaitTillDone();
    // Second acquisition must collide with the first thread's update.
    assertNotNull(threadTwo.exception);
    assertTextPresent("was updated by another transaction concurrently", threadTwo.exception.getMessage());
  }
}
| apache-2.0 |
realazthat/noisepp.extended | editor/modules/EditorCurveModule.cpp | 5824 | // This file is part of the Noise++ Editor.
// Copyright (c) 2008, Urs C. Hanselmann
//
// The Noise++ Editor is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The Noise++ Editor is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with the Noise++ Editor. If not, see <http://www.gnu.org/licenses/>.
//
#include "EditorCurveModule.h"
#define MIN_POINTS 4
#define MAX_POINTS 30
std::string EditorCurveModule::FACTORY_NAME = "Curve";
// Construct a curve module with one source-module slot and the minimum
// number of control points (MIN_POINTS).
EditorCurveModule::EditorCurveModule() : EditorModule(1), mPointCount(MIN_POINTS)
{
}
// Populate the property grid with the source-module selector, the point-count
// field and a "Points" category holding one In/Out editor pair per control point.
void EditorCurveModule::fillPropertyGrid (wxPropertyGrid *pg)
{
	mControlPointIDs.clear ();
	pg->Append( wxPropertyCategory(wxT("Source modules")) );
	appendSourceModuleProperty (pg, wxT("Source module"), mSourceModules[0]);
	pg->Append( wxPropertyCategory(wxT("Parameters")) );
	pg->Append( wxIntProperty(wxT("Control points"), wxPG_LABEL, mPointCount) );
	mPointCategoryID = pg->Append( wxPropertyCategory(wxT("Points")) );
	// Create the per-point editors to match mPointCount.
	changeNumberOfControlPoints(pg);
}
// Grow or shrink the per-point property editors so their number matches
// mPointCount, clamped to [MIN_POINTS, MAX_POINTS]. Newly added editors are
// pre-filled from the module's existing control points where available.
void EditorCurveModule::changeNumberOfControlPoints (wxPropertyGrid *pg)
{
	if (mControlPointIDs.size() > mPointCount)
	{
		// Shrink: drop trailing point editors, but never below MIN_POINTS.
		while (mControlPointIDs.size() > MIN_POINTS && mControlPointIDs.size() > mPointCount)
		{
			pg->Delete (mControlPointIDs.back().parent);
			mControlPointIDs.pop_back ();
		}
	}
	else if (mControlPointIDs.size() < mPointCount)
	{
		// Grow: append point editors, but never above MAX_POINTS.
		noisepp::CurveControlPointVector &points = mModule.getControlPoints ();
		while (mControlPointIDs.size() < MAX_POINTS && mControlPointIDs.size() < mPointCount)
		{
			wxString name = wxT("Point ");
			name << mControlPointIDs.size() + 1;
			ControlPoint point;
			point.parent = pg->AppendIn( mPointCategoryID, wxParentProperty(name,wxPG_LABEL) );
			double inval = 0.0;
			double outval = 0.0;
			// Seed the editor from an existing control point if one exists at this index.
			if (mControlPointIDs.size() < points.size())
			{
				inval = points[mControlPointIDs.size()].inValue;
				outval = points[mControlPointIDs.size()].outValue;
			}
			point.in = pg->AppendIn( point.parent, wxFloatProperty(wxT("In value"),wxPG_LABEL,inval) );
			point.out = pg->AppendIn( point.parent, wxFloatProperty(wxT("Out value"),wxPG_LABEL,outval) );
			mControlPointIDs.push_back (point);
		}
	}
}
// React to edits in the property grid: update the point count (rebuilding the
// per-point editors and clamping the value into range) or the source module name.
void EditorCurveModule::onPropertyChange (wxPropertyGrid *pg, wxPropertyGridEvent& event)
{
	const wxString &name = event.GetPropertyName();
	if (name == _("Control points"))
	{
		int val = pg->GetPropertyValueAsInt (name);
		mPointCount = val;
		changeNumberOfControlPoints (pg);
		// Clamp out-of-range input and write the clamped value back into the grid.
		if (mPointCount < MIN_POINTS || mPointCount > MAX_POINTS)
		{
			if (mPointCount < MIN_POINTS)
				mPointCount = MIN_POINTS;
			if (mPointCount > MAX_POINTS)
				mPointCount = MAX_POINTS;
			pg->SetPropertyValue (name, mPointCount);
		}
	}
	else if (name == _("Source module"))
	{
		wxString val = pg->GetPropertyValueAsString (name);
		mSourceModules[0] = val;
	}
}
// Validate the module: the source module (and its whole tree) must be valid and
// each control point's "in" value must be unique. When a grid is supplied, the
// control points are rebuilt from the editors and invalid entries are highlighted.
bool EditorCurveModule::validate (wxPropertyGrid *pg)
{
	bool valid = true;
	EditorModule *module = NULL;
	module = getSourceModule(0);
	if (module && module->validateTree(this))
	{
		mModule.setSourceModule(0, module->getModule());
	}
	// NOTE(review): validateTree(this) is evaluated twice (above and here) —
	// confirm it is side-effect-free/cheap before simplifying.
	valid = setValid (pg, "Source module", module != NULL && module->validateTree(this) && module->validate(NULL)) && valid;
	if (pg)
	{
		// Rebuild the module's control points from the current editor values.
		mModule.clearControlPoints ();
		for (ControlPointList::iterator it=mControlPointIDs.begin();it!=mControlPointIDs.end();++it)
		{
			mModule.addControlPoint (pg->GetPropertyValueAsDouble(it->in), pg->GetPropertyValueAsDouble(it->out));
		}
		// Flag any point whose "in" value duplicates another point's.
		for (ControlPointList::iterator it=mControlPointIDs.begin();it!=mControlPointIDs.end();++it)
		{
			bool v = getNumberOfMatches(pg->GetPropertyValueAsDouble(it->in)) == 1;
			wxString name = pg->GetPropertyName(it->parent);
			wxString outname = name+wxT(".")+pg->GetPropertyName(it->out);
			valid = setValid (pg, name.mb_str(), v) && valid;
			setValid (pg, outname.mb_str(), true);
		}
		if (!valid)
			mModule.clearControlPoints ();
	}
	else
	{
		// Without a grid, just require that some control points exist.
		valid = !mModule.getControlPoints().empty() && valid;
	}
	return valid;
}
// Serialize this module to XML: the source-module references plus one
// <Point in=".." out=".."/> element per control point.
void EditorCurveModule::writeProperties (TiXmlElement *element)
{
	TiXmlElement *prop;
	writeSourceModules (element);
	noisepp::CurveControlPointVector &points = mModule.getControlPoints ();
	for (noisepp::CurveControlPointVector::iterator it=points.begin();it!=points.end();++it)
	{
		prop = new TiXmlElement ("Point");
		prop->SetDoubleAttribute ("in", it->inValue);
		prop->SetDoubleAttribute ("out", it->outValue);
		element->LinkEndChild (prop);
	}
}
// Deserialize this module from XML. Returns false if the source modules or any
// <Point> element is malformed; otherwise rebuilds the control-point list and
// syncs mPointCount with it.
bool EditorCurveModule::readProperties (TiXmlElement *element)
{
	TiXmlElement *prop = NULL;
	TiXmlNode *child = NULL;
	if (!readSourceModules (element))
		return false;
	mModule.clearControlPoints ();
	while( (child = element->IterateChildren( "Point", child )) != NULL )
	{
		prop = child->ToElement();
		if (prop)
		{
			double in, out;
			// Both attributes are required; reject the whole document if either is missing.
			if (prop->QueryDoubleAttribute ("in", &in) != TIXML_SUCCESS || prop->QueryDoubleAttribute ("out", &out) != TIXML_SUCCESS)
				return false;
			mModule.addControlPoint (in, out);
		}
	}
	mPointCount = mModule.getControlPoints().size();
	return true;
}
// Count how many control points have an input value equal to v
// (within a small absolute tolerance of 1e-7).
int EditorCurveModule::getNumberOfMatches (const noisepp::Real &v)
{
	noisepp::CurveControlPointVector &points = mModule.getControlPoints ();
	int hits = 0;
	for (std::size_t i = 0; i < points.size(); ++i)
	{
		if (fabs(points[i].inValue - v) < 1.0e-7)
			++hits;
	}
	return hits;
}
| bsd-2-clause |
sebastienros/jint | Jint.Tests.Test262/test/built-ins/TypedArrayConstructors/of/argument-is-symbol-throws.js | 1248 | // Copyright (C) 2016 the V8 project authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
esid: sec-%typedarray%.of
description: >
Throws a TypeError if argument is a Symbol
info: |
IntegerIndexedElementSet ( O, index, value )
Assert: O is an Integer-Indexed exotic object.
If O.[[ContentType]] is BigInt, let numValue be ? ToBigInt(value).
Otherwise, let numValue be ? ToNumber(value).
Let buffer be O.[[ViewedArrayBuffer]].
If IsDetachedBuffer(buffer) is false and ! IsValidIntegerIndex(O, index) is true, then
Let offset be O.[[ByteOffset]].
Let arrayTypeName be the String value of O.[[TypedArrayName]].
Let elementSize be the Element Size value specified in Table 62 for arrayTypeName.
Let indexedPosition be (ℝ(index) × elementSize) + offset.
Let elementType be the Element Type value in Table 62 for arrayTypeName.
Perform SetValueInBuffer(buffer, indexedPosition, elementType, numValue, true, Unordered).
Return NormalCompletion(undefined).
includes: [testTypedArray.js]
features: [Symbol, TypedArray]
---*/
// A Symbol cannot be converted to a number/bigint, so %TypedArray%.of must
// throw a TypeError for every TypedArray constructor.
var s = Symbol("1");

testWithTypedArrayConstructors(function(TA) {
  assert.throws(TypeError, function() {
    TA.of(s);
  });
});
| bsd-2-clause |
wgerlach/Shock | shock-server/node/file/format/multi/multi.go | 2279 | // Package to read and auto-detect format of fasta & fastq files
package multi
import (
"errors"
e "github.com/MG-RAST/Shock/shock-server/errors"
"github.com/MG-RAST/Shock/shock-server/node/file"
"github.com/MG-RAST/Shock/shock-server/node/file/format/fasta"
"github.com/MG-RAST/Shock/shock-server/node/file/format/fastq"
"github.com/MG-RAST/Shock/shock-server/node/file/format/sam"
"github.com/MG-RAST/Shock/shock-server/node/file/format/seq"
"io"
"regexp"
)
// validators maps a format name to the regex that recognizes its header bytes.
// NOTE(review): Go map iteration order is randomized, so ranging over this map
// does NOT give a stable checking order; code that needs deterministic format
// detection must impose an explicit order (e.g. iterate a fixed slice of keys).
var validators = map[string]*regexp.Regexp{
	"fasta": fasta.Regex,
	"fastq": fastq.Regex,
	"sam":   sam.Regex,
}

// readers maps a detected format name to the constructor for its reader.
var readers = map[string]func(f file.SectionReader) seq.ReadRewinder{
	"fasta": fasta.NewReader,
	"fastq": fastq.NewReader,
	"sam":   sam.NewReader,
}
// Reader auto-detects the sequence format (fasta/fastq/sam) of the underlying
// section reader and delegates all operations to the matching format reader.
type Reader struct {
	f      file.SectionReader // raw input
	r      seq.ReadRewinder   // format-specific reader; nil until detection runs
	format string             // detected format name; "" until detection runs
}
// NewReader returns a Reader for f. The sequence format is detected lazily on
// the first operation that needs it.
func NewReader(f file.SectionReader) *Reader {
	// r (nil) and format ("") are left at their zero values until
	// DetermineFormat fills them in.
	rdr := new(Reader)
	rdr.f = f
	return rdr
}
// DetermineFormat sniffs the first 32 KiB of the input and selects the sequence
// format. It is idempotent: once format and reader are set, it returns nil.
// Returns e.InvalidFileTypeForFilter when no known format matches.
func (r *Reader) DetermineFormat() error {
	if r.format != "" && r.r != nil {
		return nil
	}
	reader := io.NewSectionReader(r.f, 0, 32768)
	buf := make([]byte, 32768)
	if _, err := reader.Read(buf); err != nil && err != io.EOF {
		return err
	}
	// Check the formats in a fixed order. Ranging over the `validators` map
	// directly would be nondeterministic (Go randomizes map iteration order),
	// which could make detection flip between formats whose regexes both match.
	for _, format := range []string{"fasta", "fastq", "sam"} {
		if validators[format].Match(buf) {
			r.format = format
			r.r = readers[format](r.f)
			return nil
		}
	}
	return errors.New(e.InvalidFileTypeForFilter)
}
// Read returns the next sequence, running format detection first if needed.
func (r *Reader) Read() (*seq.Seq, error) {
	if r.r == nil {
		err := r.DetermineFormat()
		if err != nil {
			return nil, err
		}
	}
	return r.r.Read()
}
// GetReadOffset reports the format reader's current read offset, running
// format detection first if needed.
func (r *Reader) GetReadOffset() (n int, err error) {
	if r.r == nil {
		err := r.DetermineFormat()
		if err != nil {
			return 0, err
		}
	}
	return r.r.GetReadOffset()
}
// SeekChunk delegates chunk seeking to the format reader, running format
// detection first if needed.
func (r *Reader) SeekChunk(carryOver int64, lastIndex bool) (n int64, err error) {
	if r.r == nil {
		err := r.DetermineFormat()
		if err != nil {
			return 0, err
		}
	}
	return r.r.SeekChunk(carryOver, lastIndex)
}
// Format writes s to w using whichever sequence format was detected for this
// reader; it fails if detection has not produced a known format.
func (r *Reader) Format(s *seq.Seq, w io.Writer) (n int, err error) {
	switch r.format {
	case "fastq":
		return fastq.Format(s, w)
	case "fasta":
		return fasta.Format(s, w)
	case "sam":
		return sam.Format(s, w)
	}
	return 0, errors.New("unknown sequence format")
}
| bsd-2-clause |
tangestani/homebrew-cask | Casks/openwebstart.rb | 1256 | cask 'openwebstart' do
version '1.1.8'
sha256 'f36af59cc9229e90a71dd6940ef14416824e65e4214ae7b61a705cc11023d23c'
# github.com/karakun/OpenWebStart/ was verified as official when first introduced to the cask
url "https://github.com/karakun/OpenWebStart/releases/download/v#{version}/OpenWebStart_macos_#{version.dots_to_underscores}.dmg"
appcast 'https://github.com/karakun/OpenWebStart/releases.atom'
name 'OpenWebStart'
homepage 'https://openwebstart.com/'
installer script: {
executable: "#{staged_path}/OpenWebStart Installer.app/Contents/MacOS/JavaApplicationStub",
args: ['-q'],
sudo: true,
print_stderr: false,
}
uninstall_preflight do
set_ownership '/Applications/OpenWebStart'
end
uninstall script: {
executable: '/Applications/OpenWebStart/OpenWebStart Uninstaller.app/Contents/MacOS/JavaApplicationStub',
args: ['-c'],
sudo: true,
}
zap trash: [
'~/.config/icedtea-web',
'~/.cache/icedtea-web',
'/Applications/OpenWebStart',
]
end
| bsd-2-clause |
ucbtrans/sumo-project | examples/timingPlan_simulation/Throughput/data/2minAccel1.8-Tau/tau_plots.py | 1144 | import sys
import optparse
import subprocess
import random
import pdb
import matplotlib.pyplot as plt
import math
import numpy as np
import scipy.io
def _load_throughput(rct):
    """Load (throughput, time) integer arrays for one red-clearance-time (RCT) value.

    Files follow the naming scheme '2min<rct>RCT_taus' / '2min<rct>RCT_taus_time'.
    """
    a = np.loadtxt('2min%dRCT_taus' % rct, dtype=int)
    t = np.loadtxt('2min%dRCT_taus_time' % rct, dtype=int)
    return a, t


# Load every dataset (RCT = 0..5 s) up front, as the original script did, so a
# missing file fails loudly even if that series is not drawn.
data = {rct: _load_throughput(rct) for rct in range(6)}

plt.figure(1)
# Only RCT = 0, 3, 5 are drawn; the 1/2/4 series were plotted previously but
# are currently left out of the final figure.
handles = []
for rct in (0, 3, 5):
    a, t = data[rct]
    line, = plt.plot(t, a, label='RCT=%d' % rct)
    handles.append(line)

plt.legend(handles=handles, loc='upper left')
plt.xlabel("Time (s)")
plt.ylabel("Throughput per hr")
plt.title("Instantaneous Throughput vs time for 2 min cycle")
plt.axis([240, 480, 400, 2000])  # x: 240-480 s window, y: 400-2000 veh/hr
plt.show()
| bsd-2-clause |
JCount/brew | Library/Homebrew/test/support/fixtures/cask/Casks/with-depends-on-x11-false.rb | 290 | cask 'with-depends-on-x11-false' do
version '1.2.3'
sha256 '67cdb8a02803ef37fdbf7e0be205863172e41a561ca446cd84f0d7ab35a99d94'
url "file://#{TEST_FIXTURE_DIR}/cask/caffeine.zip"
homepage 'https://brew.sh/with-depends-on-x11-false'
depends_on x11: false
app 'Caffeine.app'
end
| bsd-2-clause |
miho/iNumerics | odeint/boost/geometry/algorithms/length.hpp | 5791 | // Boost.Geometry (aka GGL, Generic Geometry Library)
// Copyright (c) 2007-2011 Barend Gehrels, Amsterdam, the Netherlands.
// Copyright (c) 2008-2011 Bruno Lalande, Paris, France.
// Copyright (c) 2009-2011 Mateusz Loskot, London, UK.
// Parts of Boost.Geometry are redesigned from Geodan's Geographic Library
// (geolib/GGL), copyright (c) 1995-2010 Geodan, Amsterdam, the Netherlands.
// Use, modification and distribution is subject to the Boost Software License,
// Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_GEOMETRY_ALGORITHMS_LENGTH_HPP
#define BOOST_GEOMETRY_ALGORITHMS_LENGTH_HPP
#include <iterator>
#include <boost/range.hpp>
#include <boost/mpl/if.hpp>
#include <boost/type_traits.hpp>
#include <boost/geometry/core/cs.hpp>
#include <boost/geometry/core/closure.hpp>
#include <boost/geometry/geometries/concepts/check.hpp>
#include <boost/geometry/algorithms/assign.hpp>
#include <boost/geometry/algorithms/detail/calculate_null.hpp>
#include <boost/geometry/views/closeable_view.hpp>
#include <boost/geometry/strategies/distance.hpp>
#include <boost/geometry/strategies/default_length_result.hpp>
namespace boost { namespace geometry
{
#ifndef DOXYGEN_NO_DETAIL
namespace detail { namespace length
{
// Length of a single segment: extract both endpoints and apply the
// point-to-point distance strategy.
template<typename Segment, typename Strategy>
struct segment_length
{
    static inline typename default_length_result<Segment>::type apply(
            Segment const& segment, Strategy const& strategy)
    {
        typedef typename point_type<Segment>::type point_type;
        point_type p1, p2;
        geometry::detail::assign_point_from_index<0>(segment, p1);
        geometry::detail::assign_point_from_index<1>(segment, p2);
        return strategy.apply(p1, p2);
    }
};
/*!
\brief Internal, calculates length of a linestring using iterator pairs and
    specified strategy
\note for_each could be used here, now that point_type is changed by boost
    range iterator
*/
template<typename Range, typename Strategy, closure_selector Closure>
struct range_length
{
    typedef typename default_length_result<Range>::type return_type;

    static inline return_type apply(
            Range const& range, Strategy const& strategy)
    {
        // The closeable_view transparently appends the closing point for open
        // rings, so the summation below needs no special casing.
        typedef typename closeable_view<Range const, Closure>::type view_type;
        typedef typename boost::range_iterator
            <
                view_type const
            >::type iterator_type;

        return_type sum = return_type();
        view_type view(range);
        iterator_type it = boost::begin(view), end = boost::end(view);
        if(it != end)
        {
            // Sum the distances between each pair of consecutive points.
            for(iterator_type previous = it++;
                    it != end;
                    ++previous, ++it)
            {
                // Add point-point distance using the return type belonging
                // to strategy
                sum += strategy.apply(*previous, *it);
            }
        }
        return sum;
    }
};
}} // namespace detail::length
#endif // DOXYGEN_NO_DETAIL
#ifndef DOXYGEN_NO_DISPATCH
namespace dispatch
{

// Primary template: geometries without a defined length (points, polygons, ...)
// yield zero via calculate_null.
template <typename Tag, typename Geometry, typename Strategy>
struct length : detail::calculate_null
    <
        typename default_length_result<Geometry>::type,
        Geometry,
        Strategy
    >
{};

// Linestrings: sum consecutive point distances over the (closed) range.
template <typename Geometry, typename Strategy>
struct length<linestring_tag, Geometry, Strategy>
    : detail::length::range_length<Geometry, Strategy, closed>
{};

// RING: length is currently 0; it might be argued that it is the "perimeter"

// Segments: single point-to-point distance.
template <typename Geometry, typename Strategy>
struct length<segment_tag, Geometry, Strategy>
    : detail::length::segment_length<Geometry, Strategy>
{};

} // namespace dispatch
#endif // DOXYGEN_NO_DISPATCH
/*!
\brief \brief_calc{length}
\ingroup length
\details \details_calc{length, length (the sum of distances between consecutive points)}. \details_default_strategy
\tparam Geometry \tparam_geometry
\param geometry \param_geometry
\return \return_calc{length}

\qbk{[include reference/algorithms/length.qbk]}
\qbk{[length] [length_output]}
*/
template<typename Geometry>
inline typename default_length_result<Geometry>::type length(
        Geometry const& geometry)
{
    concept::check<Geometry const>();

    // Select the default point-to-point distance strategy for the geometry's
    // point type, then forward to the two-argument overload's dispatcher.
    typedef typename strategy::distance::services::default_strategy
        <
            point_tag, typename point_type<Geometry>::type
        >::type strategy_type;

    return dispatch::length
        <
            typename tag<Geometry>::type,
            Geometry,
            strategy_type
        >::apply(geometry, strategy_type());
}
/*!
\brief \brief_calc{length} \brief_strategy
\ingroup length
\details \details_calc{length, length (the sum of distances between consecutive points)} \brief_strategy. \details_strategy_reasons
\tparam Geometry \tparam_geometry
\tparam Strategy \tparam_strategy{distance}
\param geometry \param_geometry
\param strategy \param_strategy{distance}
\return \return_calc{length}

\qbk{distinguish,with strategy}
\qbk{[include reference/algorithms/length.qbk]}
\qbk{[length_with_strategy] [length_with_strategy_output]}
*/
template<typename Geometry, typename Strategy>
inline typename default_length_result<Geometry>::type length(
        Geometry const& geometry, Strategy const& strategy)
{
    concept::check<Geometry const>();

    // Tag-dispatch on the geometry type; the caller-supplied strategy is used as-is.
    return dispatch::length
        <
            typename tag<Geometry>::type,
            Geometry,
            Strategy
        >::apply(geometry, strategy);
}
}} // namespace boost::geometry
#endif // BOOST_GEOMETRY_ALGORITHMS_LENGTH_HPP
| bsd-2-clause |
gab1one/imagej-ops | src/main/java/net/imagej/ops/math/divide/DivideHandleZeroMap.java | 3243 | /*
* #%L
* ImageJ software for multidimensional image processing and analysis.
* %%
* Copyright (C) 2014 - 2017 Board of Regents of the University of
* Wisconsin-Madison, University of Konstanz and Brian Northan.
* %%
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package net.imagej.ops.math.divide;
import net.imagej.ops.Contingent;
import net.imagej.ops.Ops;
import net.imagej.ops.map.MapBinaryInplace1s;
import net.imagej.ops.special.inplace.AbstractBinaryInplace1Op;
import net.imagej.ops.special.inplace.BinaryInplace1Op;
import net.imagej.ops.special.inplace.Inplaces;
import net.imglib2.IterableInterval;
import net.imglib2.type.numeric.RealType;
import org.scijava.Priority;
import org.scijava.plugin.Plugin;
/**
 * Element-wise division of one {@link IterableInterval} by another, writing
 * the result back into the first interval (binary inplace op). The per-sample
 * work is delegated to {@code DivideHandleZeroOp}, which — per its name —
 * applies a special policy for zero divisors; see that op for the exact
 * zero-handling behavior (not visible here).
 *
 * @param <T> the real sample type of both intervals
 */
@Plugin(type = Ops.Math.Divide.class, priority = Priority.NORMAL_PRIORITY)
public class DivideHandleZeroMap<T extends RealType<T>> extends
    AbstractBinaryInplace1Op<IterableInterval<T>, IterableInterval<T>> implements
    Ops.Math.Divide, Contingent
{

    // Per-sample divide op applied to each pair of values.
    private BinaryInplace1Op<T, T, T> divide;

    // Map op that walks both intervals and applies |divide| pairwise.
    private BinaryInplace1Op<IterableInterval<T>, IterableInterval<T>, IterableInterval<T>> map;

    @Override
    @SuppressWarnings("unchecked")
    public void initialize() {
        super.initialize();
        // Raw-typed casts are unavoidable here: Inplaces.binary1 cannot
        // express the generic bounds, so the matched ops are cast back.
        divide = (BinaryInplace1Op) Inplaces.binary1(ops(),
            DivideHandleZeroOp.class, RealType.class, RealType.class);
        map = (BinaryInplace1Op) Inplaces.binary1(ops(),
            MapBinaryInplace1s.IIAndII.class, IterableInterval.class,
            IterableInterval.class, divide);
    }

    @Override
    public boolean conforms() {
        // NOTE(review): the dimension / iteration-order checks below are
        // commented out, so this op currently claims to conform to ANY pair
        // of intervals. Presumably intentional, but confirm that mismatched
        // intervals are safe before relying on matching here.
        return true;
        /* if (!Intervals.equalDimensions(in1(), in2())) return false;
        if (!in1().iterationOrder().equals(in2().iterationOrder())) return false;
        if (out() == null) return true;
        return Intervals.equalDimensions(in1(), out()) && in1().iterationOrder()
        .equals(out().iterationOrder());*/
    }

    @Override
    public void mutate1(final IterableInterval<T> input1,
        final IterableInterval<T> input2)
    {
        // Delegate to the map op, which mutates input1 in place using the
        // zero-aware divide op on each sample pair.
        map.mutate1(input1, input2);
    }
}
| bsd-2-clause |
dsanders11/mezzanine | mezzanine/pages/migrations/0002_auto_20141227_0224.py | 740 | from django.db import migrations, models
import mezzanine.core.fields
import mezzanine.pages.fields
class Migration(migrations.Migration):
    # Auto-generated schema migration: converts Page's ``_order`` and
    # ``in_menus`` columns to Mezzanine's custom field classes. Field options
    # must mirror the model definitions in mezzanine.pages.models — do not
    # edit by hand unless the model changed too.

    dependencies = [
        ("pages", "0001_initial"),
    ]

    operations = [
        migrations.AlterField(
            model_name="page",
            name="_order",
            # OrderField stores the page's position among its siblings.
            field=mezzanine.core.fields.OrderField(null=True, verbose_name="Order"),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name="page",
            name="in_menus",
            # MenusField records which site menus the page appears in.
            field=mezzanine.pages.fields.MenusField(
                max_length=100, null=True, verbose_name="Show in menus", blank=True
            ),
            preserve_default=True,
        ),
    ]
| bsd-2-clause |
wmakeev/jsonix | nodejs/demos/user/tests/tests.js | 141 | process.on('uncaughtException', function(err) {
console.error(err.stack);
});
module.exports = {
"user-tests" : require('./user-tests')
};
| bsd-2-clause |
C2SM-RCM/serialbox | python/test.py | 343 | #!/usr/bin/env python
#This file is released under terms of BSD license`
#See LICENSE.txt for more information
import unittest
class Test(unittest.TestCase):
    # Smoke tests for the serialbox Python bindings: each test passes iff the
    # corresponding class can be imported, i.e. the native extension was built
    # and is on the path. No behavior beyond the import itself is exercised.
    def test_load_serializer(self):
        from serialbox import Serializer
    def test_load_visualizer(self):
        from serialbox import Visualizer
# Allow running this file directly as a test script.
if __name__ == '__main__':
    unittest.main()
| bsd-2-clause |
NanoHttpd/nanohttpd | fileupload/src/test/java/org/nanohttpd/junit/fileupload/TestNanoFileUpLoad.java | 10553 | package org.nanohttpd.junit.fileupload;
/*
* #%L
* NanoHttpd-apache file upload integration
* %%
* Copyright (C) 2012 - 2015 nanohttpd
* %%
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of the nanohttpd nor the names of its contributors
* may be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
* BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.InetAddress;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.fileupload.FileItemIterator;
import org.apache.commons.fileupload.FileItemStream;
import org.apache.commons.fileupload.disk.DiskFileItemFactory;
import org.apache.commons.fileupload.util.Streams;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.client.ClientProtocolException;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpTrace;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.entity.mime.content.FileBody;
import org.apache.http.entity.mime.content.StringBody;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.FixMethodOrder;
import org.junit.Test;
import org.nanohttpd.fileupload.NanoFileUpload;
import org.nanohttpd.protocols.http.HTTPSession;
import org.nanohttpd.protocols.http.IHTTPSession;
import org.nanohttpd.protocols.http.NanoHTTPD;
import org.nanohttpd.protocols.http.request.Method;
import org.nanohttpd.protocols.http.response.Response;
import org.nanohttpd.protocols.http.response.Status;
import org.nanohttpd.protocols.http.tempfiles.ITempFileManager;
/**
 * Integration tests for {@code NanoFileUpload}: boots a real NanoHTTPD server
 * on port 8192 and posts multipart requests at it with Apache HttpClient,
 * exercising the three commons-fileupload parsing styles (parameter map,
 * item list, streaming iterator).
 *
 * Original author's note: "very strange but if the file upload is the first
 * request the test fails" — hence {@link FixMethodOrder}, which pins method
 * order so the plain request runs first.
 *
 * @author ritchieGitHub
 */
@FixMethodOrder
public class TestNanoFileUpLoad {

    /** A real file guaranteed to exist on disk: this test's own source file. */
    private static final String UPLOAD_JAVA_FILE = "src/test/java/" + TestNanoFileUpLoad.class.getName().replace('.', '/') + ".java";

    protected TestServer testServer;

    /**
     * Fixture server: records everything about the most recent request in
     * public fields for the tests to assert on, and parses multipart bodies
     * differently depending on the request URI.
     */
    public static class TestServer extends NanoHTTPD {

        public Response response = Response.newFixedLengthResponse("");

        // Snapshot of the last request, captured by serve().
        public String uri;

        public Method method;

        public Map<String, String> header;

        public Map<String, String> parms;

        // Parsed uploads, keyed by form-field name.
        public Map<String, List<FileItem>> files;

        public Map<String, List<String>> decodedParamters;

        public Map<String, List<String>> decodedParamtersFromParameter;

        public String queryParameterString;

        public TestServer() {
            super(8192); // fixed port; the HttpClient calls below hard-code it
            uploader = new NanoFileUpload(new DiskFileItemFactory());
        }

        public HTTPSession createSession(ITempFileManager tempFileManager, InputStream inputStream, OutputStream outputStream) {
            return new HTTPSession(this, tempFileManager, inputStream, outputStream);
        }

        public HTTPSession createSession(ITempFileManager tempFileManager, InputStream inputStream, OutputStream outputStream, InetAddress inetAddress) {
            return new HTTPSession(this, tempFileManager, inputStream, outputStream, inetAddress);
        }

        NanoFileUpload uploader;

        @Override
        public Response serve(IHTTPSession session) {
            // Record the request basics for later inspection by the tests.
            this.uri = session.getUri();
            this.method = session.getMethod();
            this.header = session.getHeaders();
            this.parms = session.getParms();
            if (NanoFileUpload.isMultipartContent(session)) {
                try {
                    if ("/uploadFile1".equals(this.uri)) {
                        // Deliberately corrupts content-length to a non-numeric
                        // value before parsing — presumably to exercise the
                        // parser's tolerance of a bad header; TODO confirm intent.
                        session.getHeaders().put("content-length", "AA");
                        files = uploader.parseParameterMap(session);
                    }
                    if ("/uploadFile2".equals(this.uri)) {
                        // Variant 2: parseRequest() returns a flat item list;
                        // key the map by the first item's field name.
                        files = new HashMap<String, List<FileItem>>();
                        List<FileItem> parseRequest = uploader.parseRequest(session);
                        files.put(parseRequest.get(0).getFieldName(), parseRequest);
                    }
                    if ("/uploadFile3".equals(this.uri)) {
                        // Variant 3: streaming API — copy each item's bytes
                        // into a factory-created FileItem by hand.
                        files = new HashMap<String, List<FileItem>>();
                        FileItemIterator iter = uploader.getItemIterator(session);
                        while (iter.hasNext()) {
                            FileItemStream item = iter.next();
                            final String fileName = item.getName();
                            FileItem fileItem = uploader.getFileItemFactory().createItem(item.getFieldName(), item.getContentType(), item.isFormField(), fileName);
                            files.put(fileItem.getFieldName(), Arrays.asList(new FileItem[]{
                                fileItem
                            }));
                            try {
                                Streams.copy(item.openStream(), fileItem.getOutputStream(), true);
                            } catch (Exception e) {
                                // NOTE(review): copy failures are swallowed; a
                                // short copy would still fail the size asserts
                                // in the tests, so this is best-effort only.
                            }
                            fileItem.setHeaders(item.getHeaders());
                        }
                    }
                } catch (Exception e) {
                    // Parsing failed: surface as HTTP 500 so the test asserts fail.
                    this.response.setStatus(Status.INTERNAL_ERROR);
                    e.printStackTrace();
                }
            }
            this.queryParameterString = session.getQueryParameterString();
            this.decodedParamtersFromParameter = decodeParameters(this.queryParameterString);
            this.decodedParamters = decodeParameters(session.getQueryParameterString());
            return this.response;
        }
    }

    /** Plain (non-upload) request; also serves as the warm-up (see class note). */
    @Test
    public void testNormalRequest() throws Exception {
        CloseableHttpClient httpclient = HttpClients.createDefault();
        HttpTrace httphead = new HttpTrace("http://localhost:8192/index.html");
        CloseableHttpResponse response = httpclient.execute(httphead);
        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
        response.close();
    }

    /** Upload parsed via parseParameterMap() (with corrupted content-length). */
    @Test
    public void testPostWithMultipartFormUpload1() throws Exception {
        CloseableHttpClient httpclient = HttpClients.createDefault();
        String textFileName = UPLOAD_JAVA_FILE;
        HttpPost post = new HttpPost("http://localhost:8192/uploadFile1");
        executeUpload(httpclient, textFileName, post);
        FileItem file = this.testServer.files.get("upfile").get(0);
        // The received item must match the on-disk file byte count exactly.
        Assert.assertEquals(file.getSize(), new File(textFileName).length());
    }

    /** Upload parsed via parseRequest(). */
    @Test
    public void testPostWithMultipartFormUpload2() throws Exception {
        CloseableHttpClient httpclient = HttpClients.createDefault();
        String textFileName = UPLOAD_JAVA_FILE;
        HttpPost post = new HttpPost("http://localhost:8192/uploadFile2");
        executeUpload(httpclient, textFileName, post);
        FileItem file = this.testServer.files.get("upfile").get(0);
        Assert.assertEquals(file.getSize(), new File(textFileName).length());
    }

    /** Upload parsed via the streaming getItemIterator() API. */
    @Test
    public void testPostWithMultipartFormUpload3() throws Exception {
        CloseableHttpClient httpclient = HttpClients.createDefault();
        String textFileName = UPLOAD_JAVA_FILE;
        HttpPost post = new HttpPost("http://localhost:8192/uploadFile3");
        executeUpload(httpclient, textFileName, post);
        FileItem file = this.testServer.files.get("upfile").get(0);
        Assert.assertEquals(file.getSize(), new File(textFileName).length());
    }

    /**
     * Posts a browser-style multipart body containing the given file (field
     * "upfile") plus one text field ("text1"), and asserts HTTP 200.
     */
    private void executeUpload(CloseableHttpClient httpclient, String textFileName, HttpPost post) throws IOException, ClientProtocolException {
        FileBody fileBody = new FileBody(new File(textFileName), ContentType.DEFAULT_BINARY);
        StringBody stringBody1 = new StringBody("Message 1", ContentType.MULTIPART_FORM_DATA);
        MultipartEntityBuilder builder = MultipartEntityBuilder.create();
        builder.setMode(HttpMultipartMode.BROWSER_COMPATIBLE);
        builder.addPart("upfile", fileBody);
        builder.addPart("text1", stringBody1);
        HttpEntity entity = builder.build();
        // Send and verify the server accepted the upload.
        post.setEntity(entity);
        HttpResponse response = httpclient.execute(post);
        Assert.assertEquals(200, response.getStatusLine().getStatusCode());
    }

    /** Starts the fixture server and waits (max ~2s) for it to come up. */
    @Before
    public void setUp() throws IOException {
        this.testServer = new TestServer();
        this.testServer.start();
        try {
            long start = System.currentTimeMillis();
            Thread.sleep(100L);
            while (!this.testServer.wasStarted()) {
                Thread.sleep(100L);
                if (System.currentTimeMillis() - start > 2000) {
                    Assert.fail("could not start server");
                }
            }
        } catch (InterruptedException e) {
            // Interruption simply aborts the wait; the first request in the
            // test will then fail fast if the server never started.
        }
    }

    @After
    public void tearDown() {
        this.testServer.stop();
    }
}
| bsd-3-clause |
r3ap3r2004/spraycan | app/controllers/spraycan/files_controller.rb | 1114 | class Spraycan::FilesController < Spraycan::BaseController
respond_to :json
before_filter :set_theme, :only => [:index, :create]
def index
@files = @theme.files
respond_with @files
end
#sets id to preference passed and returns url
def create
if @file = Spraycan::File.where(:name => params[:file][:file].original_filename).first
@file.destroy
@file = @theme.files.create params[:file]
else
@file = @theme.files.create params[:file]
end
if !@file.new_record?
if params.key? :preference
Spraycan::Config.send "#{params[:preference]}=", @file.guid
end
render :json => {:id => @file.id, :guid => @file.guid, :url => @file.url }.to_json
else
render :json => {:id => false }.to_json
end
end
def update
@file = Spraycan::File.where(:id => params.delete(:id)).first
@file.update_attributes params[:file]
respond_with @file
end
def destroy
render :js => Spraycan::File.destroy(params[:id])
end
private
def set_theme
@theme = Spraycan::Theme.find(params[:theme_id])
end
end
| bsd-3-clause |
toontownfunserver/Panda3D-1.9.0 | direct/wxwidgets/ViewPort.py | 8280 | """
Contains classes useful for 3D viewports.
Originally written by pro-rsoft,
Modified by gjeon.
Modified by Summer 2010 Carnegie Mellon University ETC PandaLE team: fixed a bug in Viewport.Close
"""
__all__ = ["Viewport", "ViewportManager"]
from direct.showbase.DirectObject import DirectObject
from direct.directtools.DirectGrid import DirectGrid
from direct.showbase.ShowBase import WindowControls
from direct.directtools.DirectGlobals import *
from WxPandaWindow import WxPandaWindow
from pandac.PandaModules import WindowProperties, OrthographicLens, Point3, Plane, CollisionPlane, CollisionNode, NodePath
import wx
# Splitter orientations, re-exported from wx for caller convenience.
HORIZONTAL = wx.SPLIT_HORIZONTAL
VERTICAL = wx.SPLIT_VERTICAL
# Viewport type identifiers accepted by Viewport.make().
CREATENEW = 99
VPLEFT = 10
VPFRONT = 11
VPTOP = 12
VPPERSPECTIVE = 13
class ViewportManager:
  """Registry of every live viewport plus the GSG they all share.

  The three *All helpers fan a single call out to each registered viewport.
  """

  # All Viewport instances currently alive, in creation order.
  viewports = []
  # GraphicsStateGuardian shared by every viewport window (set by the first).
  gsg = None

  @staticmethod
  def initializeAll(*args, **kwargs):
    """Calls initialize() on all the viewports."""
    for each in ViewportManager.viewports:
      each.initialize(*args, **kwargs)

  @staticmethod
  def updateAll(*args, **kwargs):
    """Calls Update() on all the viewports."""
    for each in ViewportManager.viewports:
      each.Update(*args, **kwargs)

  @staticmethod
  def layoutAll(*args, **kwargs):
    """Calls Layout() on all the viewports."""
    for each in ViewportManager.viewports:
      each.Layout(*args, **kwargs)
class Viewport(WxPandaWindow, DirectObject):
  """Class representing a 3D Viewport.

  Embeds a Panda3D window in a wx widget and owns the camera, grid and
  collision plane for one editor view.  Build instances through the static
  factories (make/makeLeft/makeFront/makeTop/makePerspective) and call
  initialize() once the underlying window has been opened.

  Fixes vs. the previous revision (behavior-compatible):
  - zoomOut() referenced undefined MOUSE_ZOO_SPEED (typo) and raised
    NameError whenever invoked; it now uses MOUSE_ZOOM_SPEED like zoomIn().
  - identity comparisons with None use "is"/"is not".
  - make() raises TypeError via the call form (valid on Python 2 and 3).
  """
  # Re-exported module constants so callers can say e.g. Viewport.VPTOP.
  CREATENEW = CREATENEW
  VPLEFT = VPLEFT
  VPFRONT = VPFRONT
  VPTOP = VPTOP
  VPPERSPECTIVE = VPPERSPECTIVE

  def __init__(self, name, *args, **kwargs):
    self.name = name
    DirectObject.__init__(self)
    # Share one GSG across all viewports; the first viewport donates its own.
    kwargs['gsg'] = ViewportManager.gsg
    WxPandaWindow.__init__(self, *args, **kwargs)
    ViewportManager.viewports.append(self)
    if ViewportManager.gsg is None:
      ViewportManager.gsg = self.win.getGsg()
    # Filled in by initialize() / the factories.
    self.camera = None
    self.lens = None
    self.camPos = None
    self.camLookAt = None
    self.initialized = False
    self.grid = None
    self.collPlane = None

  def initialize(self):
    """Creates cameras, mouse watcher and window controls for this viewport.

    Must be called once the underlying Panda window exists; applies any
    lens/camera placement the factory stashed on the instance.
    """
    self.Update()
    if self.win:
      self.cam2d = base.makeCamera2d(self.win)
      self.cam2d.node().setCameraMask(LE_CAM_MASKS[self.name])
      self.cam = base.camList[-1]
      self.camera = render.attachNewNode(self.name)
      #self.camera.setName(self.name)
      #self.camera.reparentTo(render)
      self.cam.reparentTo(self.camera)
      self.camNode = self.cam.node()
      self.camNode.setCameraMask(LE_CAM_MASKS[self.name])
      # Give this viewport its own mouse watcher with a unique event prefix.
      self.bt = base.setupMouse(self.win, True)
      self.bt.node().setPrefix('_le_%s_'%self.name[:3])
      mw = self.bt.getParent()
      mk = mw.getParent()
      winCtrl = WindowControls(
        self.win, mouseWatcher=mw,
        cam=self.camera,
        camNode = self.camNode,
        cam2d=None,
        mouseKeyboard =mk,
        grid = self.grid)
      base.setupWindowControls(winCtrl)
      self.initialized = True
    # The lines below assume the window opened (self.cam/self.camNode exist);
    # initialize() is only invoked on fully created viewports.
    if self.lens is not None: self.cam.node().setLens(self.lens)
    if self.camPos is not None: self.camera.setPos(self.camPos)
    if self.camLookAt is not None: self.camera.lookAt(self.camLookAt)
    self.camLens = self.camNode.getLens()
    if self.name in ['top', 'front', 'left']:
      # Orthographic views: film size tracks the client area at a 1:10 scale.
      x = self.ClientSize.GetWidth() * 0.1
      y = self.ClientSize.GetHeight() * 0.1
      self.camLens.setFilmSize(x, y)
    self.Bind(wx.EVT_SIZE, self.onSize)
    ## self.accept("wheel_down", self.zoomOut)
    ## self.accept("wheel_up", self.zoomIn)
    ## self.accept("page_down", self.zoomOut)
    ## self.accept("page_down-repeat", self.zoomOut)
    ## self.accept("page_up", self.zoomIn)
    ## self.accept("page_up-repeat", self.zoomIn)
    #self.accept("mouse3", self.onRightDown)

  def Close(self):
    """Closes the viewport and deregisters it from the ViewportManager."""
    if self.initialized:
      wx.Window.Close(self)
      #base.closeWindow(self.win)
    ViewportManager.viewports.remove(self)

  def onSize(self, evt):
    """Invoked when the viewport is resized; keeps DIRECT's film size in sync."""
    WxPandaWindow.onSize(self, evt)
    if self.win is not None:
      newWidth = self.ClientSize.GetWidth()
      newHeight = self.ClientSize.GetHeight()
      if hasattr(base, "direct") and base.direct:
        for dr in base.direct.drList:
          if dr.camNode == self.camNode:
            dr.updateFilmSize(newWidth, newHeight)
            break

  def onRightDown(self, evt = None):
    """Invoked when the viewport is right-clicked."""
    if evt is None:
      # Called outside a wx event: derive the position from the global mouse.
      mpos = wx.GetMouseState()
      mpos = self.ScreenToClient((mpos.x, mpos.y))
    else:
      mpos = evt.GetPosition()
    self.Update()
    #self.PopupMenu(self.menu, mpos)
    #self.menu.Destroy()

  def zoomOut(self):
    """Moves the camera backwards along its own Y axis."""
    # BUG FIX: previously referenced undefined MOUSE_ZOO_SPEED (typo), which
    # raised NameError whenever zoomOut ran. MOUSE_ZOOM_SPEED is the constant
    # zoomIn() already uses (from DirectGlobals).
    self.camera.setY(self.camera, -MOUSE_ZOOM_SPEED)

  def zoomIn(self):
    """Moves the camera forwards along its own Y axis."""
    self.camera.setY(self.camera, MOUSE_ZOOM_SPEED)

  @staticmethod
  def make(parent, vpType = None):
    """Safe constructor that also takes CREATENEW, VPLEFT, VPTOP, etc."""
    if vpType is None or vpType == CREATENEW:
      # NOTE(review): 'parent' is passed as the viewport *name* here
      # (Viewport.__init__(name, ...)); looks suspicious but preserved —
      # confirm against WxPandaWindow's expectations.
      return Viewport(parent)
    if isinstance(vpType, Viewport): return vpType
    if vpType == VPLEFT: return Viewport.makeLeft(parent)
    if vpType == VPFRONT: return Viewport.makeFront(parent)
    if vpType == VPTOP: return Viewport.makeTop(parent)
    if vpType == VPPERSPECTIVE: return Viewport.makePerspective(parent)
    raise TypeError("Unknown viewport type: %s" % vpType)

  @staticmethod
  def makeOrthographic(parent, name, campos):
    """Builds one of the axis-aligned ortho views ('left'/'front'/'top')."""
    v = Viewport(name, parent)
    v.lens = OrthographicLens()
    v.lens.setFilmSize(30)
    v.camPos = campos
    v.camLookAt = Point3(0, 0, 0)
    v.grid = DirectGrid(parent=render)
    # Each view gets a grid oriented to its plane plus a collision plane
    # (into-mask bit 21) so picking can hit the grid's plane.
    if name == 'left':
      v.grid.setHpr(0, 0, 90)
      collPlane = CollisionNode('LeftGridCol')
      collPlane.addSolid(CollisionPlane(Plane(1, 0, 0, 0)))
      collPlane.setIntoCollideMask(BitMask32.bit(21))
      v.collPlane = NodePath(collPlane)
      v.collPlane.wrtReparentTo(v.grid)
      #v.grid.gridBack.findAllMatches("**/+GeomNode")[0].setName("_leftViewGridBack")
      LE_showInOneCam(v.grid, name)
    elif name == 'front':
      v.grid.setHpr(90, 0, 90)
      collPlane = CollisionNode('FrontGridCol')
      collPlane.addSolid(CollisionPlane(Plane(0, -1, 0, 0)))
      collPlane.setIntoCollideMask(BitMask32.bit(21))
      v.collPlane = NodePath(collPlane)
      v.collPlane.wrtReparentTo(v.grid)
      #v.grid.gridBack.findAllMatches("**/+GeomNode")[0].setName("_frontViewGridBack")
      LE_showInOneCam(v.grid, name)
    else:
      collPlane = CollisionNode('TopGridCol')
      collPlane.addSolid(CollisionPlane(Plane(0, 0, 1, 0)))
      collPlane.setIntoCollideMask(BitMask32.bit(21))
      v.collPlane = NodePath(collPlane)
      v.collPlane.reparentTo(v.grid)
      #v.grid.gridBack.findAllMatches("**/+GeomNode")[0].setName("_topViewGridBack")
      LE_showInOneCam(v.grid, name)
    return v

  @staticmethod
  def makePerspective(parent):
    """Builds the perspective view with grid planes facing both up and down."""
    v = Viewport('persp', parent)
    v.camPos = Point3(-19, -19, 19)
    v.camLookAt = Point3(0, 0, 0)
    v.grid = DirectGrid(parent=render)
    collPlane = CollisionNode('PerspGridCol')
    collPlane.addSolid(CollisionPlane(Plane(0, 0, 1, 0)))
    #oldBitmask = collPlane.getIntoCollideMask()
    #collPlane.setIntoCollideMask(BitMask32.bit(21)|oldBitmask)
    collPlane.setIntoCollideMask(BitMask32.bit(21))
    v.collPlane = NodePath(collPlane)
    v.collPlane.reparentTo(v.grid)
    # Second, downward-facing plane so picking works from below as well.
    collPlane2 = CollisionNode('PerspGridCol2')
    collPlane2.addSolid(CollisionPlane(Plane(0, 0, -1, 0)))
    #oldBitmask = collPlane2.getIntoCollideMask()
    #collPlane2.setIntoCollideMask(BitMask32.bit(21)|oldBitmask)
    collPlane2.setIntoCollideMask(BitMask32.bit(21))
    v.collPlane2 = NodePath(collPlane2)
    v.collPlane2.reparentTo(v.grid)
    #v.grid.gridBack.findAllMatches("**/+GeomNode")[0].setName("_perspViewGridBack")
    LE_showInOneCam(v.grid, 'persp')
    return v

  @staticmethod
  def makeLeft(parent): return Viewport.makeOrthographic(parent, 'left', Point3(600, 0, 0))
  @staticmethod
  def makeFront(parent): return Viewport.makeOrthographic(parent, 'front', Point3(0, -600, 0))
  @staticmethod
  def makeTop(parent): return Viewport.makeOrthographic(parent, 'top', Point3(0, 0, 600))
| bsd-3-clause |
guorendong/iridium-browser-ubuntu | cc/quads/draw_quad_unittest.cc | 46108 | // Copyright 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "cc/quads/draw_quad.h"
#include <algorithm>
#include "base/bind.h"
#include "base/compiler_specific.h"
#include "cc/base/math_util.h"
#include "cc/output/filter_operations.h"
#include "cc/quads/checkerboard_draw_quad.h"
#include "cc/quads/debug_border_draw_quad.h"
#include "cc/quads/io_surface_draw_quad.h"
#include "cc/quads/largest_draw_quad.h"
#include "cc/quads/picture_draw_quad.h"
#include "cc/quads/render_pass.h"
#include "cc/quads/render_pass_draw_quad.h"
#include "cc/quads/solid_color_draw_quad.h"
#include "cc/quads/stream_video_draw_quad.h"
#include "cc/quads/surface_draw_quad.h"
#include "cc/quads/texture_draw_quad.h"
#include "cc/quads/tile_draw_quad.h"
#include "cc/quads/yuv_video_draw_quad.h"
#include "cc/test/fake_picture_pile_impl.h"
#include "cc/test/geometry_test_utils.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/skia/include/effects/SkBlurImageFilter.h"
#include "ui/gfx/transform.h"
namespace cc {
namespace {
// SharedQuadState::CopyFrom() must duplicate every field set via SetAll().
TEST(DrawQuadTest, CopySharedQuadState) {
  const gfx::Transform transform(1.0, 0.0, 0.5, 1.0, 0.5, 0.0);
  const gfx::Size bounds(26, 28);
  const gfx::Rect visible_rect(10, 12, 14, 16);
  const gfx::Rect clip(19, 21, 23, 25);
  const bool clipped = true;
  const float opacity = 0.25f;
  const SkXfermode::Mode blend = SkXfermode::kMultiply_Mode;
  const int sorting_context = 65536;

  scoped_ptr<SharedQuadState> original(new SharedQuadState);
  original->SetAll(transform,
                   bounds,
                   visible_rect,
                   clip,
                   clipped,
                   opacity,
                   blend,
                   sorting_context);

  scoped_ptr<SharedQuadState> duplicate(new SharedQuadState);
  duplicate->CopyFrom(original.get());

  EXPECT_EQ(transform, duplicate->content_to_target_transform);
  EXPECT_EQ(visible_rect, duplicate->visible_content_rect);
  EXPECT_EQ(opacity, duplicate->opacity);
  EXPECT_EQ(clip, duplicate->clip_rect);
  EXPECT_EQ(clipped, duplicate->is_clipped);
  EXPECT_EQ(blend, duplicate->blend_mode);
}
// Appends a canonical SharedQuadState to |render_pass| for the quad-copy
// tests below; every field gets a distinctive, non-default value.
SharedQuadState* CreateSharedQuadState(RenderPass* render_pass) {
  SharedQuadState* state = render_pass->CreateAndAppendSharedQuadState();
  const gfx::Transform transform(1.0, 0.0, 0.5, 1.0, 0.5, 0.0);
  const gfx::Size bounds(26, 28);
  const gfx::Rect visible_rect(10, 12, 14, 16);
  const gfx::Rect clip(19, 21, 23, 25);
  state->SetAll(transform,
                bounds,
                visible_rect,
                clip,
                false,                      // is_clipped
                1.f,                        // opacity
                SkXfermode::kSrcOver_Mode,  // blend_mode
                65536);                     // sorting_context_id
  return state;
}
// Asserts that |copy| matches |quad| on the base DrawQuad fields and that it
// points at the expected copied shared state (type-specific fields are
// checked by each caller after MaterialCast-ing).
void CompareDrawQuad(DrawQuad* quad,
                     DrawQuad* copy,
                     SharedQuadState* copy_shared_state) {
  EXPECT_EQ(quad->material, copy->material);
  EXPECT_EQ(quad->rect, copy->rect);
  EXPECT_EQ(quad->visible_rect, copy->visible_rect);
  EXPECT_EQ(quad->opaque_rect, copy->opaque_rect);
  EXPECT_EQ(quad->needs_blending, copy->needs_blending);
  EXPECT_EQ(copy_shared_state, copy->shared_quad_state);
}
#define CREATE_SHARED_STATE() \
scoped_ptr<RenderPass> render_pass = RenderPass::Create(); \
SharedQuadState* shared_state(CreateSharedQuadState(render_pass.get())); \
SharedQuadState* copy_shared_state = \
render_pass->CreateAndAppendSharedQuadState(); \
copy_shared_state->CopyFrom(shared_state);
#define QUAD_DATA \
gfx::Rect quad_rect(30, 40, 50, 60); \
gfx::Rect quad_visible_rect(40, 50, 30, 20); \
gfx::Rect quad_opaque_rect(60, 55, 10, 10); \
ALLOW_UNUSED_LOCAL(quad_opaque_rect); \
bool needs_blending = true; \
ALLOW_UNUSED_LOCAL(needs_blending);
#define SETUP_AND_COPY_QUAD_NEW(Type, quad) \
DrawQuad* copy_new = \
render_pass->CopyFromAndAppendDrawQuad(quad_new, copy_shared_state); \
CompareDrawQuad(quad_new, copy_new, copy_shared_state); \
const Type* copy_quad = Type::MaterialCast(copy_new); \
ALLOW_UNUSED_LOCAL(copy_quad);
#define SETUP_AND_COPY_QUAD_ALL(Type, quad) \
DrawQuad* copy_all = \
render_pass->CopyFromAndAppendDrawQuad(quad_all, copy_shared_state); \
CompareDrawQuad(quad_all, copy_all, copy_shared_state); \
copy_quad = Type::MaterialCast(copy_all);
#define SETUP_AND_COPY_QUAD_NEW_RP(Type, quad, a) \
DrawQuad* copy_new = render_pass->CopyFromAndAppendRenderPassDrawQuad( \
quad_new, copy_shared_state, a); \
CompareDrawQuad(quad_new, copy_new, copy_shared_state); \
const Type* copy_quad = Type::MaterialCast(copy_new); \
ALLOW_UNUSED_LOCAL(copy_quad);
#define SETUP_AND_COPY_QUAD_ALL_RP(Type, quad, a) \
DrawQuad* copy_all = render_pass->CopyFromAndAppendRenderPassDrawQuad( \
quad_all, copy_shared_state, a); \
CompareDrawQuad(quad_all, copy_all, copy_shared_state); \
copy_quad = Type::MaterialCast(copy_all);
#define CREATE_QUAD_1_NEW(Type, a) \
Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ QUAD_DATA quad_new->SetNew(shared_state, quad_rect, a); } \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
#define CREATE_QUAD_1_ALL(Type, a) \
Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_all->SetAll(shared_state, \
quad_rect, \
quad_opaque_rect, \
quad_visible_rect, \
needs_blending, \
a); \
} \
SETUP_AND_COPY_QUAD_ALL(Type, quad_all);
#define CREATE_QUAD_2_NEW(Type, a, b) \
Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ QUAD_DATA quad_new->SetNew(shared_state, quad_rect, a, b); } \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
#define CREATE_QUAD_2_ALL(Type, a, b) \
Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_all->SetAll(shared_state, \
quad_rect, \
quad_opaque_rect, \
quad_visible_rect, \
needs_blending, \
a, \
b); \
} \
SETUP_AND_COPY_QUAD_ALL(Type, quad_all);
#define CREATE_QUAD_3_NEW(Type, a, b, c) \
Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ QUAD_DATA quad_new->SetNew(shared_state, quad_rect, a, b, c); } \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
#define CREATE_QUAD_3_ALL(Type, a, b, c) \
Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_all->SetAll(shared_state, \
quad_rect, \
quad_opaque_rect, \
quad_visible_rect, \
needs_blending, \
a, \
b, \
c); \
} \
SETUP_AND_COPY_QUAD_ALL(Type, quad_all);
#define CREATE_QUAD_4_NEW(Type, a, b, c, d) \
Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ QUAD_DATA quad_new->SetNew(shared_state, quad_rect, a, b, c, d); } \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
#define CREATE_QUAD_4_ALL(Type, a, b, c, d) \
Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_all->SetAll(shared_state, \
quad_rect, \
quad_opaque_rect, \
quad_visible_rect, \
needs_blending, \
a, \
b, \
c, \
d); \
} \
SETUP_AND_COPY_QUAD_ALL(Type, quad_all);
#define CREATE_QUAD_5_NEW(Type, a, b, c, d, e) \
Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ QUAD_DATA quad_new->SetNew(shared_state, quad_rect, a, b, c, d, e); } \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
#define CREATE_QUAD_5_ALL(Type, a, b, c, d, e) \
Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_all->SetAll(shared_state, \
quad_rect, \
quad_opaque_rect, \
quad_visible_rect, \
needs_blending, \
a, \
b, \
c, \
d, \
e); \
} \
SETUP_AND_COPY_QUAD_ALL(Type, quad_all);
#define CREATE_QUAD_6_NEW(Type, a, b, c, d, e, f) \
Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ QUAD_DATA quad_new->SetNew(shared_state, quad_rect, a, b, c, d, e, f); } \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
#define CREATE_QUAD_6_ALL(Type, a, b, c, d, e, f) \
Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_all->SetAll(shared_state, \
quad_rect, \
quad_opaque_rect, \
quad_visible_rect, \
needs_blending, \
a, \
b, \
c, \
d, \
e, \
f); \
} \
SETUP_AND_COPY_QUAD_ALL(Type, quad_all);
#define CREATE_QUAD_7_NEW(Type, a, b, c, d, e, f, g) \
Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_new->SetNew(shared_state, quad_rect, a, b, c, d, e, f, g); \
} \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
#define CREATE_QUAD_7_ALL(Type, a, b, c, d, e, f, g) \
Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_all->SetAll(shared_state, \
quad_rect, \
quad_opaque_rect, \
quad_visible_rect, \
needs_blending, \
a, \
b, \
c, \
d, \
e, \
f, \
g); \
} \
SETUP_AND_COPY_QUAD_ALL(Type, quad_all);
#define CREATE_QUAD_8_NEW(Type, a, b, c, d, e, f, g, h) \
Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_new->SetNew( \
shared_state, quad_rect, a, b, c, d, e, f, g, h); \
} \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
#define CREATE_QUAD_8_ALL(Type, a, b, c, d, e, f, g, h) \
Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_all->SetAll(shared_state, \
quad_rect, \
quad_opaque_rect, \
quad_visible_rect, \
needs_blending, \
a, \
b, \
c, \
d, \
e, \
f, \
g, \
h); \
} \
SETUP_AND_COPY_QUAD_ALL(Type, quad_all);
#define CREATE_QUAD_9_NEW(Type, a, b, c, d, e, f, g, h, i) \
Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_new->SetNew( \
shared_state, quad_rect, a, b, c, d, e, f, g, h, i); \
} \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
#define CREATE_QUAD_9_ALL(Type, a, b, c, d, e, f, g, h, i) \
Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_all->SetAll(shared_state, quad_rect, quad_opaque_rect, \
quad_visible_rect, needs_blending, a, b, c, d, \
e, f, g, h, i); \
} \
SETUP_AND_COPY_QUAD_ALL(Type, quad_all);
#define CREATE_QUAD_10_NEW(Type, a, b, c, d, e, f, g, h, i, j) \
Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_new->SetNew( \
shared_state, quad_rect, a, b, c, d, e, f, g, h, i, j); \
} \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
#define CREATE_QUAD_11_NEW(Type, a, b, c, d, e, f, g, h, i, j, k) \
Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_new->SetNew(shared_state, quad_rect, a, b, c, d, e, f, g, \
h, i, j, k); \
} \
SETUP_AND_COPY_QUAD_NEW(Type, quad_new);
#define CREATE_QUAD_ALL_RP(Type, a, b, c, d, e, f, g, copy_a) \
Type* quad_all = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_all->SetAll(shared_state, \
quad_rect, \
quad_opaque_rect, \
quad_visible_rect, \
needs_blending, \
a, \
b, \
c, \
d, \
e, \
f, \
g); \
} \
SETUP_AND_COPY_QUAD_ALL_RP(Type, quad_all, copy_a);
#define CREATE_QUAD_NEW_RP(Type, a, b, c, d, e, f, g, h, copy_a) \
Type* quad_new = render_pass->CreateAndAppendDrawQuad<Type>(); \
{ \
QUAD_DATA quad_new->SetNew( \
shared_state, quad_rect, a, b, c, d, e, f, g, h); \
} \
SETUP_AND_COPY_QUAD_NEW_RP(Type, quad_new, copy_a);
// CheckerboardDrawQuad: fields set via SetNew()/SetAll() must survive a copy.
// The CREATE_* macros declare copy_quad in this scope (name capture).
TEST(DrawQuadTest, CopyCheckerboardDrawQuad) {
  gfx::Rect visible_rect(40, 50, 30, 20);
  SkColor color = 0xfabb0011;
  float scale = 2.3f;
  CREATE_SHARED_STATE();

  // SetNew() path: visible_rect, color and scale are caller-supplied.
  CREATE_QUAD_3_NEW(CheckerboardDrawQuad, visible_rect, color, scale);
  EXPECT_EQ(DrawQuad::CHECKERBOARD, copy_quad->material);
  EXPECT_EQ(visible_rect, copy_quad->visible_rect);
  EXPECT_EQ(color, copy_quad->color);
  EXPECT_EQ(scale, copy_quad->scale);

  // SetAll() path: geometry comes from QUAD_DATA inside the macro.
  CREATE_QUAD_2_ALL(CheckerboardDrawQuad, color, scale);
  EXPECT_EQ(DrawQuad::CHECKERBOARD, copy_quad->material);
  EXPECT_EQ(color, copy_quad->color);
  EXPECT_EQ(scale, copy_quad->scale);
}
// DebugBorderDrawQuad: fields set via SetNew()/SetAll() must survive a copy.
TEST(DrawQuadTest, CopyDebugBorderDrawQuad) {
  gfx::Rect visible_rect(40, 50, 30, 20);
  SkColor color = 0xfabb0011;
  int width = 99;
  CREATE_SHARED_STATE();

  CREATE_QUAD_3_NEW(DebugBorderDrawQuad, visible_rect, color, width);
  EXPECT_EQ(DrawQuad::DEBUG_BORDER, copy_quad->material);
  EXPECT_EQ(visible_rect, copy_quad->visible_rect);
  EXPECT_EQ(color, copy_quad->color);
  EXPECT_EQ(width, copy_quad->width);

  CREATE_QUAD_2_ALL(DebugBorderDrawQuad, color, width);
  EXPECT_EQ(DrawQuad::DEBUG_BORDER, copy_quad->material);
  EXPECT_EQ(color, copy_quad->color);
  EXPECT_EQ(width, copy_quad->width);
}
TEST(DrawQuadTest, CopyIOSurfaceDrawQuad) {
gfx::Rect opaque_rect(33, 47, 10, 12);
gfx::Rect visible_rect(40, 50, 30, 20);
gfx::Size size(58, 95);
ResourceProvider::ResourceId resource_id = 72;
IOSurfaceDrawQuad::Orientation orientation = IOSurfaceDrawQuad::UNFLIPPED;
CREATE_SHARED_STATE();
CREATE_QUAD_5_NEW(IOSurfaceDrawQuad,
opaque_rect,
visible_rect,
size,
resource_id,
orientation);
EXPECT_EQ(DrawQuad::IO_SURFACE_CONTENT, copy_quad->material);
EXPECT_EQ(visible_rect, copy_quad->visible_rect);
EXPECT_EQ(opaque_rect, copy_quad->opaque_rect);
EXPECT_EQ(size, copy_quad->io_surface_size);
EXPECT_EQ(resource_id, copy_quad->io_surface_resource_id);
EXPECT_EQ(orientation, copy_quad->orientation);
CREATE_QUAD_3_ALL(IOSurfaceDrawQuad, size, resource_id, orientation);
EXPECT_EQ(DrawQuad::IO_SURFACE_CONTENT, copy_quad->material);
EXPECT_EQ(size, copy_quad->io_surface_size);
EXPECT_EQ(resource_id, copy_quad->io_surface_resource_id);
EXPECT_EQ(orientation, copy_quad->orientation);
}
TEST(DrawQuadTest, CopyRenderPassDrawQuad) {
gfx::Rect visible_rect(40, 50, 30, 20);
RenderPassId render_pass_id(22, 64);
ResourceProvider::ResourceId mask_resource_id = 78;
gfx::Vector2dF mask_uv_scale(33.f, 19.f);
gfx::Size mask_texture_size(128, 134);
FilterOperations filters;
filters.Append(FilterOperation::CreateBlurFilter(1.f));
gfx::Vector2dF filters_scale;
FilterOperations background_filters;
background_filters.Append(
FilterOperation::CreateGrayscaleFilter(1.f));
RenderPassId copied_render_pass_id(235, 11);
CREATE_SHARED_STATE();
CREATE_QUAD_NEW_RP(RenderPassDrawQuad,
visible_rect,
render_pass_id,
mask_resource_id,
mask_uv_scale,
mask_texture_size,
filters,
filters_scale,
background_filters,
copied_render_pass_id);
EXPECT_EQ(DrawQuad::RENDER_PASS, copy_quad->material);
EXPECT_EQ(visible_rect, copy_quad->visible_rect);
EXPECT_EQ(copied_render_pass_id, copy_quad->render_pass_id);
EXPECT_EQ(mask_resource_id, copy_quad->mask_resource_id);
EXPECT_EQ(mask_uv_scale.ToString(), copy_quad->mask_uv_scale.ToString());
EXPECT_EQ(mask_texture_size.ToString(),
copy_quad->mask_texture_size.ToString());
EXPECT_EQ(filters, copy_quad->filters);
EXPECT_EQ(filters_scale, copy_quad->filters_scale);
EXPECT_EQ(background_filters, copy_quad->background_filters);
CREATE_QUAD_ALL_RP(RenderPassDrawQuad,
render_pass_id,
mask_resource_id,
mask_uv_scale,
mask_texture_size,
filters,
filters_scale,
background_filters,
copied_render_pass_id);
EXPECT_EQ(DrawQuad::RENDER_PASS, copy_quad->material);
EXPECT_EQ(copied_render_pass_id, copy_quad->render_pass_id);
EXPECT_EQ(mask_resource_id, copy_quad->mask_resource_id);
EXPECT_EQ(mask_uv_scale.ToString(), copy_quad->mask_uv_scale.ToString());
EXPECT_EQ(mask_texture_size.ToString(),
copy_quad->mask_texture_size.ToString());
EXPECT_EQ(filters, copy_quad->filters);
EXPECT_EQ(filters_scale, copy_quad->filters_scale);
EXPECT_EQ(background_filters, copy_quad->background_filters);
}
TEST(DrawQuadTest, CopySolidColorDrawQuad) {
gfx::Rect visible_rect(40, 50, 30, 20);
SkColor color = 0x49494949;
bool force_anti_aliasing_off = false;
CREATE_SHARED_STATE();
CREATE_QUAD_3_NEW(
SolidColorDrawQuad, visible_rect, color, force_anti_aliasing_off);
EXPECT_EQ(DrawQuad::SOLID_COLOR, copy_quad->material);
EXPECT_EQ(visible_rect, copy_quad->visible_rect);
EXPECT_EQ(color, copy_quad->color);
EXPECT_EQ(force_anti_aliasing_off, copy_quad->force_anti_aliasing_off);
CREATE_QUAD_2_ALL(SolidColorDrawQuad, color, force_anti_aliasing_off);
EXPECT_EQ(DrawQuad::SOLID_COLOR, copy_quad->material);
EXPECT_EQ(color, copy_quad->color);
EXPECT_EQ(force_anti_aliasing_off, copy_quad->force_anti_aliasing_off);
}
// Round-trips a StreamVideoDrawQuad (resource id + texture transform).
TEST(DrawQuadTest, CopyStreamVideoDrawQuad) {
  gfx::Rect opaque_rect(33, 47, 10, 12);
  gfx::Rect visible_rect(40, 50, 30, 20);
  ResourceProvider::ResourceId resource_id = 64;
  gfx::Transform matrix = gfx::Transform(0.5, 0.25, 1, 0.75, 0, 1);
  CREATE_SHARED_STATE();
  CREATE_QUAD_4_NEW(
      StreamVideoDrawQuad, opaque_rect, visible_rect, resource_id, matrix);
  EXPECT_EQ(DrawQuad::STREAM_VIDEO_CONTENT, copy_quad->material);
  EXPECT_EQ(visible_rect, copy_quad->visible_rect);
  EXPECT_EQ(opaque_rect, copy_quad->opaque_rect);
  EXPECT_EQ(resource_id, copy_quad->resource_id);
  EXPECT_EQ(matrix, copy_quad->matrix);
  CREATE_QUAD_2_ALL(StreamVideoDrawQuad, resource_id, matrix);
  EXPECT_EQ(DrawQuad::STREAM_VIDEO_CONTENT, copy_quad->material);
  EXPECT_EQ(resource_id, copy_quad->resource_id);
  EXPECT_EQ(matrix, copy_quad->matrix);
}
// Round-trips a SurfaceDrawQuad (surface id only).
TEST(DrawQuadTest, CopySurfaceDrawQuad) {
  gfx::Rect visible_rect(40, 50, 30, 20);
  SurfaceId surface_id(1234);
  CREATE_SHARED_STATE();
  CREATE_QUAD_2_NEW(SurfaceDrawQuad, visible_rect, surface_id);
  EXPECT_EQ(DrawQuad::SURFACE_CONTENT, copy_quad->material);
  EXPECT_EQ(visible_rect, copy_quad->visible_rect);
  EXPECT_EQ(surface_id, copy_quad->surface_id);
  CREATE_QUAD_1_ALL(SurfaceDrawQuad, surface_id);
  EXPECT_EQ(DrawQuad::SURFACE_CONTENT, copy_quad->material);
  EXPECT_EQ(surface_id, copy_quad->surface_id);
}
// Round-trips a TextureDrawQuad, including the 4-entry per-vertex opacity
// array (compared element-wise with EXPECT_FLOAT_ARRAY_EQ).
TEST(DrawQuadTest, CopyTextureDrawQuad) {
  gfx::Rect opaque_rect(33, 47, 10, 12);
  gfx::Rect visible_rect(40, 50, 30, 20);
  unsigned resource_id = 82;
  bool premultiplied_alpha = true;
  gfx::PointF uv_top_left(0.5f, 224.f);
  gfx::PointF uv_bottom_right(51.5f, 260.f);
  const float vertex_opacity[] = { 1.0f, 1.0f, 1.0f, 1.0f };
  bool y_flipped = true;
  bool nearest_neighbor = true;
  CREATE_SHARED_STATE();
  CREATE_QUAD_10_NEW(TextureDrawQuad,
                     opaque_rect,
                     visible_rect,
                     resource_id,
                     premultiplied_alpha,
                     uv_top_left,
                     uv_bottom_right,
                     SK_ColorTRANSPARENT,
                     vertex_opacity,
                     y_flipped,
                     nearest_neighbor);
  EXPECT_EQ(DrawQuad::TEXTURE_CONTENT, copy_quad->material);
  EXPECT_EQ(visible_rect, copy_quad->visible_rect);
  EXPECT_EQ(opaque_rect, copy_quad->opaque_rect);
  EXPECT_EQ(resource_id, copy_quad->resource_id);
  EXPECT_EQ(premultiplied_alpha, copy_quad->premultiplied_alpha);
  EXPECT_EQ(uv_top_left, copy_quad->uv_top_left);
  EXPECT_EQ(uv_bottom_right, copy_quad->uv_bottom_right);
  EXPECT_FLOAT_ARRAY_EQ(vertex_opacity, copy_quad->vertex_opacity, 4);
  EXPECT_EQ(y_flipped, copy_quad->y_flipped);
  EXPECT_EQ(nearest_neighbor, copy_quad->nearest_neighbor);
  CREATE_QUAD_8_ALL(TextureDrawQuad,
                    resource_id,
                    premultiplied_alpha,
                    uv_top_left,
                    uv_bottom_right,
                    SK_ColorTRANSPARENT,
                    vertex_opacity,
                    y_flipped,
                    nearest_neighbor);
  EXPECT_EQ(DrawQuad::TEXTURE_CONTENT, copy_quad->material);
  EXPECT_EQ(resource_id, copy_quad->resource_id);
  EXPECT_EQ(premultiplied_alpha, copy_quad->premultiplied_alpha);
  EXPECT_EQ(uv_top_left, copy_quad->uv_top_left);
  EXPECT_EQ(uv_bottom_right, copy_quad->uv_bottom_right);
  EXPECT_FLOAT_ARRAY_EQ(vertex_opacity, copy_quad->vertex_opacity, 4);
  EXPECT_EQ(y_flipped, copy_quad->y_flipped);
  EXPECT_EQ(nearest_neighbor, copy_quad->nearest_neighbor);
}
// Round-trips a TileDrawQuad (texture coords, size, swizzle, filtering).
TEST(DrawQuadTest, CopyTileDrawQuad) {
  gfx::Rect opaque_rect(33, 44, 22, 33);
  gfx::Rect visible_rect(40, 50, 30, 20);
  unsigned resource_id = 104;
  gfx::RectF tex_coord_rect(31.f, 12.f, 54.f, 20.f);
  gfx::Size texture_size(85, 32);
  bool swizzle_contents = true;
  bool nearest_neighbor = true;
  CREATE_SHARED_STATE();
  CREATE_QUAD_7_NEW(TileDrawQuad,
                    opaque_rect,
                    visible_rect,
                    resource_id,
                    tex_coord_rect,
                    texture_size,
                    swizzle_contents,
                    nearest_neighbor);
  EXPECT_EQ(DrawQuad::TILED_CONTENT, copy_quad->material);
  EXPECT_EQ(opaque_rect, copy_quad->opaque_rect);
  EXPECT_EQ(visible_rect, copy_quad->visible_rect);
  EXPECT_EQ(resource_id, copy_quad->resource_id);
  EXPECT_EQ(tex_coord_rect, copy_quad->tex_coord_rect);
  EXPECT_EQ(texture_size, copy_quad->texture_size);
  EXPECT_EQ(swizzle_contents, copy_quad->swizzle_contents);
  EXPECT_EQ(nearest_neighbor, copy_quad->nearest_neighbor);
  CREATE_QUAD_5_ALL(TileDrawQuad,
                    resource_id,
                    tex_coord_rect,
                    texture_size,
                    swizzle_contents,
                    nearest_neighbor);
  EXPECT_EQ(DrawQuad::TILED_CONTENT, copy_quad->material);
  EXPECT_EQ(resource_id, copy_quad->resource_id);
  EXPECT_EQ(tex_coord_rect, copy_quad->tex_coord_rect);
  EXPECT_EQ(texture_size, copy_quad->texture_size);
  EXPECT_EQ(swizzle_contents, copy_quad->swizzle_contents);
  EXPECT_EQ(nearest_neighbor, copy_quad->nearest_neighbor);
}
// Round-trips a YUVVideoDrawQuad with all four plane resource ids (Y, U, V,
// alpha) plus texture coordinate rects, sizes, and color space.
TEST(DrawQuadTest, CopyYUVVideoDrawQuad) {
  gfx::Rect opaque_rect(33, 47, 10, 12);
  gfx::Rect visible_rect(40, 50, 30, 20);
  gfx::RectF ya_tex_coord_rect(40, 50, 30, 20);
  gfx::RectF uv_tex_coord_rect(20, 25, 15, 10);
  gfx::Size ya_tex_size(32, 68);
  gfx::Size uv_tex_size(41, 51);
  ResourceProvider::ResourceId y_plane_resource_id = 45;
  ResourceProvider::ResourceId u_plane_resource_id = 532;
  ResourceProvider::ResourceId v_plane_resource_id = 4;
  ResourceProvider::ResourceId a_plane_resource_id = 63;
  YUVVideoDrawQuad::ColorSpace color_space = YUVVideoDrawQuad::JPEG;
  CREATE_SHARED_STATE();
  CREATE_QUAD_11_NEW(YUVVideoDrawQuad, opaque_rect, visible_rect,
                     ya_tex_coord_rect, uv_tex_coord_rect, ya_tex_size,
                     uv_tex_size, y_plane_resource_id, u_plane_resource_id,
                     v_plane_resource_id, a_plane_resource_id, color_space);
  EXPECT_EQ(DrawQuad::YUV_VIDEO_CONTENT, copy_quad->material);
  EXPECT_EQ(opaque_rect, copy_quad->opaque_rect);
  EXPECT_EQ(visible_rect, copy_quad->visible_rect);
  EXPECT_EQ(ya_tex_coord_rect, copy_quad->ya_tex_coord_rect);
  EXPECT_EQ(uv_tex_coord_rect, copy_quad->uv_tex_coord_rect);
  EXPECT_EQ(ya_tex_size, copy_quad->ya_tex_size);
  EXPECT_EQ(uv_tex_size, copy_quad->uv_tex_size);
  EXPECT_EQ(y_plane_resource_id, copy_quad->y_plane_resource_id);
  EXPECT_EQ(u_plane_resource_id, copy_quad->u_plane_resource_id);
  EXPECT_EQ(v_plane_resource_id, copy_quad->v_plane_resource_id);
  EXPECT_EQ(a_plane_resource_id, copy_quad->a_plane_resource_id);
  EXPECT_EQ(color_space, copy_quad->color_space);
  CREATE_QUAD_9_ALL(YUVVideoDrawQuad, ya_tex_coord_rect, uv_tex_coord_rect,
                    ya_tex_size, uv_tex_size, y_plane_resource_id,
                    u_plane_resource_id, v_plane_resource_id,
                    a_plane_resource_id, color_space);
  EXPECT_EQ(DrawQuad::YUV_VIDEO_CONTENT, copy_quad->material);
  EXPECT_EQ(ya_tex_coord_rect, copy_quad->ya_tex_coord_rect);
  EXPECT_EQ(uv_tex_coord_rect, copy_quad->uv_tex_coord_rect);
  EXPECT_EQ(ya_tex_size, copy_quad->ya_tex_size);
  EXPECT_EQ(uv_tex_size, copy_quad->uv_tex_size);
  EXPECT_EQ(y_plane_resource_id, copy_quad->y_plane_resource_id);
  EXPECT_EQ(u_plane_resource_id, copy_quad->u_plane_resource_id);
  EXPECT_EQ(v_plane_resource_id, copy_quad->v_plane_resource_id);
  EXPECT_EQ(a_plane_resource_id, copy_quad->a_plane_resource_id);
  EXPECT_EQ(color_space, copy_quad->color_space);
}
// Round-trips a PictureDrawQuad backed by a fake (empty) picture pile.
TEST(DrawQuadTest, CopyPictureDrawQuad) {
  gfx::Rect opaque_rect(33, 44, 22, 33);
  gfx::Rect visible_rect(40, 50, 30, 20);
  gfx::RectF tex_coord_rect(31.f, 12.f, 54.f, 20.f);
  gfx::Size texture_size(85, 32);
  bool nearest_neighbor = true;
  ResourceFormat texture_format = RGBA_8888;
  gfx::Rect content_rect(30, 40, 20, 30);
  float contents_scale = 3.141592f;
  scoped_refptr<RasterSource> raster_source =
      FakePicturePileImpl::CreateEmptyPile(gfx::Size(100, 100),
                                           gfx::Size(100, 100));
  CREATE_SHARED_STATE();
  CREATE_QUAD_9_NEW(PictureDrawQuad, opaque_rect, visible_rect, tex_coord_rect,
                    texture_size, nearest_neighbor, texture_format,
                    content_rect, contents_scale, raster_source);
  EXPECT_EQ(DrawQuad::PICTURE_CONTENT, copy_quad->material);
  EXPECT_EQ(opaque_rect, copy_quad->opaque_rect);
  EXPECT_EQ(visible_rect, copy_quad->visible_rect);
  EXPECT_EQ(tex_coord_rect, copy_quad->tex_coord_rect);
  EXPECT_EQ(texture_size, copy_quad->texture_size);
  EXPECT_EQ(nearest_neighbor, copy_quad->nearest_neighbor);
  EXPECT_EQ(texture_format, copy_quad->texture_format);
  EXPECT_EQ(content_rect, copy_quad->content_rect);
  EXPECT_EQ(contents_scale, copy_quad->contents_scale);
  EXPECT_EQ(raster_source, copy_quad->raster_source);
  CREATE_QUAD_7_ALL(PictureDrawQuad, tex_coord_rect, texture_size,
                    nearest_neighbor, texture_format, content_rect,
                    contents_scale, raster_source);
  EXPECT_EQ(DrawQuad::PICTURE_CONTENT, copy_quad->material);
  EXPECT_EQ(tex_coord_rect, copy_quad->tex_coord_rect);
  EXPECT_EQ(texture_size, copy_quad->texture_size);
  EXPECT_EQ(nearest_neighbor, copy_quad->nearest_neighbor);
  EXPECT_EQ(texture_format, copy_quad->texture_format);
  EXPECT_EQ(content_rect, copy_quad->content_rect);
  EXPECT_EQ(contents_scale, copy_quad->contents_scale);
  EXPECT_EQ(raster_source, copy_quad->raster_source);
}
// Fixture for testing DrawQuad::IterateResources(): the callback increments
// each visited resource id (so mutation is observable) and counts visits.
class DrawQuadIteratorTest : public testing::Test {
 protected:
  // Resource-iteration callback: bumps the visit count and returns a new
  // (incremented) id, which IterateResources writes back into the quad.
  ResourceProvider::ResourceId IncrementResourceId(
      ResourceProvider::ResourceId id) {
    ++num_resources_;
    return id + 1;
  }

  // Runs the iterator over |quad| and returns how many resources it visited.
  // Side effect: every resource id in |quad| is incremented by one.
  int IterateAndCount(DrawQuad* quad) {
    num_resources_ = 0;
    quad->IterateResources(base::Bind(
        &DrawQuadIteratorTest::IncrementResourceId, base::Unretained(this)));
    return num_resources_;
  }

 private:
  // Number of resources visited during the most recent IterateAndCount().
  int num_resources_;
};
// Quads with no texture resources must report zero iterated resources.
TEST_F(DrawQuadIteratorTest, CheckerboardDrawQuad) {
  gfx::Rect visible_rect(40, 50, 30, 20);
  SkColor color = 0xfabb0011;
  float scale = 3.2f;
  CREATE_SHARED_STATE();
  CREATE_QUAD_3_NEW(CheckerboardDrawQuad, visible_rect, color, scale);
  EXPECT_EQ(0, IterateAndCount(quad_new));
}
TEST_F(DrawQuadIteratorTest, DebugBorderDrawQuad) {
  gfx::Rect visible_rect(40, 50, 30, 20);
  SkColor color = 0xfabb0011;
  int width = 99;
  CREATE_SHARED_STATE();
  CREATE_QUAD_3_NEW(DebugBorderDrawQuad, visible_rect, color, width);
  EXPECT_EQ(0, IterateAndCount(quad_new));
}
// IOSurface quads expose exactly one resource, which must be rewritable.
TEST_F(DrawQuadIteratorTest, IOSurfaceDrawQuad) {
  gfx::Rect opaque_rect(33, 47, 10, 12);
  gfx::Rect visible_rect(40, 50, 30, 20);
  gfx::Size size(58, 95);
  ResourceProvider::ResourceId resource_id = 72;
  IOSurfaceDrawQuad::Orientation orientation = IOSurfaceDrawQuad::UNFLIPPED;
  CREATE_SHARED_STATE();
  CREATE_QUAD_5_NEW(IOSurfaceDrawQuad,
                    opaque_rect,
                    visible_rect,
                    size,
                    resource_id,
                    orientation);
  EXPECT_EQ(resource_id, quad_new->io_surface_resource_id);
  EXPECT_EQ(1, IterateAndCount(quad_new));
  EXPECT_EQ(resource_id + 1, quad_new->io_surface_resource_id);
}
// RenderPass quads iterate the mask resource only when one is set; a zero
// mask id means "no mask" and must not be visited.
TEST_F(DrawQuadIteratorTest, RenderPassDrawQuad) {
  gfx::Rect visible_rect(40, 50, 30, 20);
  RenderPassId render_pass_id(22, 64);
  ResourceProvider::ResourceId mask_resource_id = 78;
  gfx::Vector2dF mask_uv_scale(33.f, 19.f);
  gfx::Size mask_texture_size(128, 134);
  FilterOperations filters;
  filters.Append(FilterOperation::CreateBlurFilter(1.f));
  gfx::Vector2dF filters_scale(2.f, 3.f);
  FilterOperations background_filters;
  background_filters.Append(
      FilterOperation::CreateGrayscaleFilter(1.f));
  RenderPassId copied_render_pass_id(235, 11);
  CREATE_SHARED_STATE();
  CREATE_QUAD_NEW_RP(RenderPassDrawQuad,
                     visible_rect,
                     render_pass_id,
                     mask_resource_id,
                     mask_uv_scale,
                     mask_texture_size,
                     filters,
                     filters_scale,
                     background_filters,
                     copied_render_pass_id);
  EXPECT_EQ(mask_resource_id, quad_new->mask_resource_id);
  EXPECT_EQ(1, IterateAndCount(quad_new));
  EXPECT_EQ(mask_resource_id + 1, quad_new->mask_resource_id);
  quad_new->mask_resource_id = 0;
  EXPECT_EQ(0, IterateAndCount(quad_new));
  EXPECT_EQ(0u, quad_new->mask_resource_id);
}
TEST_F(DrawQuadIteratorTest, SolidColorDrawQuad) {
  gfx::Rect visible_rect(40, 50, 30, 20);
  SkColor color = 0x49494949;
  bool force_anti_aliasing_off = false;
  CREATE_SHARED_STATE();
  CREATE_QUAD_3_NEW(
      SolidColorDrawQuad, visible_rect, color, force_anti_aliasing_off);
  EXPECT_EQ(0, IterateAndCount(quad_new));
}
TEST_F(DrawQuadIteratorTest, StreamVideoDrawQuad) {
  gfx::Rect opaque_rect(33, 47, 10, 12);
  gfx::Rect visible_rect(40, 50, 30, 20);
  ResourceProvider::ResourceId resource_id = 64;
  gfx::Transform matrix = gfx::Transform(0.5, 0.25, 1, 0.75, 0, 1);
  CREATE_SHARED_STATE();
  CREATE_QUAD_4_NEW(
      StreamVideoDrawQuad, opaque_rect, visible_rect, resource_id, matrix);
  EXPECT_EQ(resource_id, quad_new->resource_id);
  EXPECT_EQ(1, IterateAndCount(quad_new));
  EXPECT_EQ(resource_id + 1, quad_new->resource_id);
}
// Surface quads reference a surface, not a resource: zero iterations.
TEST_F(DrawQuadIteratorTest, SurfaceDrawQuad) {
  gfx::Rect visible_rect(40, 50, 30, 20);
  SurfaceId surface_id(4321);
  CREATE_SHARED_STATE();
  CREATE_QUAD_2_NEW(SurfaceDrawQuad, visible_rect, surface_id);
  EXPECT_EQ(0, IterateAndCount(quad_new));
}
// Texture quads carry exactly one resource; iteration must rewrite it.
TEST_F(DrawQuadIteratorTest, TextureDrawQuad) {
  gfx::Rect opaque_rect(33, 47, 10, 12);
  gfx::Rect visible_rect(40, 50, 30, 20);
  unsigned resource_id = 82;
  bool premultiplied_alpha = true;
  gfx::PointF uv_top_left(0.5f, 224.f);
  gfx::PointF uv_bottom_right(51.5f, 260.f);
  const float vertex_opacity[] = { 1.0f, 1.0f, 1.0f, 1.0f };
  bool y_flipped = true;
  bool nearest_neighbor = true;
  CREATE_SHARED_STATE();
  CREATE_QUAD_10_NEW(TextureDrawQuad,
                     opaque_rect,
                     visible_rect,
                     resource_id,
                     premultiplied_alpha,
                     uv_top_left,
                     uv_bottom_right,
                     SK_ColorTRANSPARENT,
                     vertex_opacity,
                     y_flipped,
                     nearest_neighbor);
  EXPECT_EQ(resource_id, quad_new->resource_id);
  EXPECT_EQ(1, IterateAndCount(quad_new));
  EXPECT_EQ(resource_id + 1, quad_new->resource_id);
}
// Tile quads also carry exactly one resource.
TEST_F(DrawQuadIteratorTest, TileDrawQuad) {
  gfx::Rect opaque_rect(33, 44, 22, 33);
  gfx::Rect visible_rect(40, 50, 30, 20);
  unsigned resource_id = 104;
  gfx::RectF tex_coord_rect(31.f, 12.f, 54.f, 20.f);
  gfx::Size texture_size(85, 32);
  bool swizzle_contents = true;
  bool nearest_neighbor = true;
  CREATE_SHARED_STATE();
  CREATE_QUAD_7_NEW(TileDrawQuad,
                    opaque_rect,
                    visible_rect,
                    resource_id,
                    tex_coord_rect,
                    texture_size,
                    swizzle_contents,
                    nearest_neighbor);
  EXPECT_EQ(resource_id, quad_new->resource_id);
  EXPECT_EQ(1, IterateAndCount(quad_new));
  EXPECT_EQ(resource_id + 1, quad_new->resource_id);
}
// YUV video quads expose all four plane resources (Y, U, V, alpha); the
// iterator must visit and rewrite each of them.
TEST_F(DrawQuadIteratorTest, YUVVideoDrawQuad) {
  gfx::Rect opaque_rect(33, 47, 10, 12);
  gfx::Rect visible_rect(40, 50, 30, 20);
  gfx::RectF ya_tex_coord_rect(0.0f, 0.0f, 0.75f, 0.5f);
  gfx::RectF uv_tex_coord_rect(0.0f, 0.0f, 0.375f, 0.25f);
  gfx::Size ya_tex_size(32, 68);
  gfx::Size uv_tex_size(41, 51);
  ResourceProvider::ResourceId y_plane_resource_id = 45;
  ResourceProvider::ResourceId u_plane_resource_id = 532;
  ResourceProvider::ResourceId v_plane_resource_id = 4;
  ResourceProvider::ResourceId a_plane_resource_id = 63;
  YUVVideoDrawQuad::ColorSpace color_space = YUVVideoDrawQuad::JPEG;
  CREATE_SHARED_STATE();
  CREATE_QUAD_11_NEW(YUVVideoDrawQuad, opaque_rect, visible_rect,
                     ya_tex_coord_rect, uv_tex_coord_rect, ya_tex_size,
                     uv_tex_size, y_plane_resource_id, u_plane_resource_id,
                     v_plane_resource_id, a_plane_resource_id, color_space);
  // Check the quad under iteration (|quad_new|), consistent with the other
  // iterator tests; previously this inspected |copy_quad| instead.
  EXPECT_EQ(DrawQuad::YUV_VIDEO_CONTENT, quad_new->material);
  EXPECT_EQ(y_plane_resource_id, quad_new->y_plane_resource_id);
  EXPECT_EQ(u_plane_resource_id, quad_new->u_plane_resource_id);
  EXPECT_EQ(v_plane_resource_id, quad_new->v_plane_resource_id);
  EXPECT_EQ(a_plane_resource_id, quad_new->a_plane_resource_id);
  EXPECT_EQ(color_space, quad_new->color_space);
  EXPECT_EQ(4, IterateAndCount(quad_new));
  // Every plane id must have been rewritten by IncrementResourceId().
  EXPECT_EQ(y_plane_resource_id + 1, quad_new->y_plane_resource_id);
  EXPECT_EQ(u_plane_resource_id + 1, quad_new->u_plane_resource_id);
  EXPECT_EQ(v_plane_resource_id + 1, quad_new->v_plane_resource_id);
  EXPECT_EQ(a_plane_resource_id + 1, quad_new->a_plane_resource_id);
}
// Disabled until picture draw quad is supported for ubercomp: crbug.com/231715
TEST_F(DrawQuadIteratorTest, DISABLED_PictureDrawQuad) {
  gfx::Rect opaque_rect(33, 44, 22, 33);
  gfx::Rect visible_rect(40, 50, 30, 20);
  gfx::RectF tex_coord_rect(31.f, 12.f, 54.f, 20.f);
  gfx::Size texture_size(85, 32);
  bool nearest_neighbor = true;
  ResourceFormat texture_format = RGBA_8888;
  gfx::Rect content_rect(30, 40, 20, 30);
  float contents_scale = 3.141592f;
  scoped_refptr<RasterSource> raster_source =
      FakePicturePileImpl::CreateEmptyPile(gfx::Size(100, 100),
                                           gfx::Size(100, 100));
  CREATE_SHARED_STATE();
  CREATE_QUAD_9_NEW(PictureDrawQuad, opaque_rect, visible_rect, tex_coord_rect,
                    texture_size, nearest_neighbor, texture_format,
                    content_rect, contents_scale, raster_source);
  // Picture quads reference a raster source, not iterable resources.
  EXPECT_EQ(0, IterateAndCount(quad_new));
}
// Verifies LargestDrawQuadSize() equals the sizeof() of the biggest concrete
// DrawQuad subclass. The exhaustive switch (no default) forces a compile
// error here whenever a new material is added without updating this test.
TEST(DrawQuadTest, LargestQuadType) {
  size_t largest = 0;
  for (int i = 0; i <= DrawQuad::MATERIAL_LAST; ++i) {
    switch (static_cast<DrawQuad::Material>(i)) {
      case DrawQuad::CHECKERBOARD:
        largest = std::max(largest, sizeof(CheckerboardDrawQuad));
        break;
      case DrawQuad::DEBUG_BORDER:
        largest = std::max(largest, sizeof(DebugBorderDrawQuad));
        break;
      case DrawQuad::IO_SURFACE_CONTENT:
        largest = std::max(largest, sizeof(IOSurfaceDrawQuad));
        break;
      case DrawQuad::PICTURE_CONTENT:
        largest = std::max(largest, sizeof(PictureDrawQuad));
        break;
      case DrawQuad::TEXTURE_CONTENT:
        largest = std::max(largest, sizeof(TextureDrawQuad));
        break;
      case DrawQuad::RENDER_PASS:
        largest = std::max(largest, sizeof(RenderPassDrawQuad));
        break;
      case DrawQuad::SOLID_COLOR:
        largest = std::max(largest, sizeof(SolidColorDrawQuad));
        break;
      case DrawQuad::SURFACE_CONTENT:
        largest = std::max(largest, sizeof(SurfaceDrawQuad));
        break;
      case DrawQuad::TILED_CONTENT:
        largest = std::max(largest, sizeof(TileDrawQuad));
        break;
      case DrawQuad::STREAM_VIDEO_CONTENT:
        largest = std::max(largest, sizeof(StreamVideoDrawQuad));
        break;
      case DrawQuad::YUV_VIDEO_CONTENT:
        largest = std::max(largest, sizeof(YUVVideoDrawQuad));
        break;
      case DrawQuad::INVALID:
        break;
    }
  }
  EXPECT_EQ(LargestDrawQuadSize(), largest);
  if (!HasFailure())
    return;
  // On failure, output the size of all quads for debugging.
  LOG(ERROR) << "largest " << largest;
  LOG(ERROR) << "kLargestDrawQuad " << LargestDrawQuadSize();
  for (int i = 0; i <= DrawQuad::MATERIAL_LAST; ++i) {
    switch (static_cast<DrawQuad::Material>(i)) {
      case DrawQuad::CHECKERBOARD:
        LOG(ERROR) << "CheckerboardDrawQuad " << sizeof(CheckerboardDrawQuad);
        break;
      case DrawQuad::DEBUG_BORDER:
        LOG(ERROR) << "DebugBorderDrawQuad " << sizeof(DebugBorderDrawQuad);
        break;
      case DrawQuad::IO_SURFACE_CONTENT:
        LOG(ERROR) << "IOSurfaceDrawQuad " << sizeof(IOSurfaceDrawQuad);
        break;
      case DrawQuad::PICTURE_CONTENT:
        LOG(ERROR) << "PictureDrawQuad " << sizeof(PictureDrawQuad);
        break;
      case DrawQuad::TEXTURE_CONTENT:
        LOG(ERROR) << "TextureDrawQuad " << sizeof(TextureDrawQuad);
        break;
      case DrawQuad::RENDER_PASS:
        LOG(ERROR) << "RenderPassDrawQuad " << sizeof(RenderPassDrawQuad);
        break;
      case DrawQuad::SOLID_COLOR:
        LOG(ERROR) << "SolidColorDrawQuad " << sizeof(SolidColorDrawQuad);
        break;
      case DrawQuad::SURFACE_CONTENT:
        LOG(ERROR) << "SurfaceDrawQuad " << sizeof(SurfaceDrawQuad);
        break;
      case DrawQuad::TILED_CONTENT:
        LOG(ERROR) << "TileDrawQuad " << sizeof(TileDrawQuad);
        break;
      case DrawQuad::STREAM_VIDEO_CONTENT:
        LOG(ERROR) << "StreamVideoDrawQuad " << sizeof(StreamVideoDrawQuad);
        break;
      case DrawQuad::YUV_VIDEO_CONTENT:
        LOG(ERROR) << "YUVVideoDrawQuad " << sizeof(YUVVideoDrawQuad);
        break;
      case DrawQuad::INVALID:
        break;
    }
  }
}
} // namespace
} // namespace cc
| bsd-3-clause |
jhotta/documentation | code_snippets/api-dashboard-get-all.py | 214 | from datadog import initialize, api
# Fetch and display every timeboard in the Datadog account.
# SECURITY: credentials are hard-coded sample values; in real code load them
# from the environment (e.g. DD_API_KEY / DD_APP_KEY) instead of source.
options = {
    'api_key': '9775a026f1ca7d1c6c5af9d94d9595a4',
    'app_key': '87ce4a24b5553d2e482ea8a8500e71b8ad4554ff'
}

initialize(**options)

# Use the print() function form: the original `print expr` statement is
# Python-2-only syntax and raises SyntaxError on Python 3; print(expr) with a
# single argument behaves identically on both.
print(api.Timeboard.get_all())
| bsd-3-clause |
alexm92/sentry | src/sentry/api/serializers/models/activity.py | 1885 | from __future__ import absolute_import
import six
from sentry.api.serializers import Serializer, register, serialize
from sentry.models import Activity
@register(Activity)
class ActivitySerializer(Serializer):
    """Serializes Activity models, attaching the serialized acting user."""

    def get_attrs(self, item_list, user):
        # TODO(dcramer); assert on relations
        # Serialize each distinct actor once, then index by stringified id so
        # every activity row can look up its (possibly absent) user.
        distinct_users = set(item.user for item in item_list if item.user_id)
        users_by_id = {}
        for data in serialize(distinct_users, user):
            users_by_id[data['id']] = data

        attrs = {}
        for item in item_list:
            if item.user_id:
                item_user = users_by_id[six.text_type(item.user_id)]
            else:
                item_user = None
            attrs[item] = {'user': item_user}
        return attrs

    def serialize(self, obj, attrs, user):
        # camelCase keys match the API response convention.
        result = {
            'id': six.text_type(obj.id),
            'user': attrs['user'],
            'type': obj.get_type_display(),
            'data': obj.data,
            'dateCreated': obj.datetime,
        }
        return result
class OrganizationActivitySerializer(ActivitySerializer):
    """Extends ActivitySerializer with the related issue (group) and project."""

    def get_attrs(self, item_list, user):
        # TODO(dcramer); assert on relations
        attrs = super(OrganizationActivitySerializer, self).get_attrs(
            item_list, user,
        )
        # Serialize each distinct group/project once, indexed by stringified id.
        groups = {
            d['id']: d
            for d in serialize(set(i.group for i in item_list if i.group_id), user)
        }
        projects = {
            d['id']: d
            for d in serialize(set(i.project for i in item_list), user)
        }
        for item in item_list:
            # group_id may be absent for non-issue activity, hence the None fallback.
            attrs[item]['issue'] = groups[six.text_type(item.group_id)] if item.group_id else None
            attrs[item]['project'] = projects[six.text_type(item.project_id)]
        return attrs

    def serialize(self, obj, attrs, user):
        context = super(OrganizationActivitySerializer, self).serialize(
            obj, attrs, user,
        )
        context['issue'] = attrs['issue']
        context['project'] = attrs['project']
        return context
| bsd-3-clause |
rwatson/chromium-capsicum | chrome/browser/search_engines/keyword_editor_controller_unittest.cc | 4706 | // Copyright (c) 2009 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "app/table_model_observer.h"
#include "chrome/browser/profile.h"
#include "chrome/browser/search_engines/keyword_editor_controller.h"
#include "chrome/browser/search_engines/template_url.h"
#include "chrome/browser/search_engines/template_url_model.h"
#include "chrome/browser/search_engines/template_url_table_model.h"
#include "chrome/test/testing_profile.h"
#include "testing/gtest/include/gtest/gtest.h"
// Base class for keyword editor tests. Creates a profile containing an
// empty TemplateURLModel.
class KeywordEditorControllerTest : public testing::Test,
                                    public TableModelObserver {
 public:
  // Builds a fresh profile with an empty TemplateURLModel and wires this
  // test in as the table-model observer so change notifications are counted.
  virtual void SetUp() {
    model_changed_count_ = items_changed_count_ = added_count_ =
        removed_count_ = 0;

    profile_.reset(new TestingProfile());
    profile_->CreateTemplateURLModel();

    model_ = profile_->GetTemplateURLModel();

    controller_.reset(new KeywordEditorController(profile_.get()));
    controller_->table_model()->SetObserver(this);
  }

  // TableModelObserver implementation: each override just tallies the event.
  virtual void OnModelChanged() {
    model_changed_count_++;
  }

  virtual void OnItemsChanged(int start, int length) {
    items_changed_count_++;
  }

  virtual void OnItemsAdded(int start, int length) {
    added_count_++;
  }

  virtual void OnItemsRemoved(int start, int length) {
    removed_count_++;
  }

  // Asserts the exact number of each notification seen so far, then resets
  // the counters for the next phase of the test.
  void VerifyChangeCount(int model_changed_count, int item_changed_count,
                         int added_count, int removed_count) {
    ASSERT_EQ(model_changed_count, model_changed_count_);
    ASSERT_EQ(item_changed_count, items_changed_count_);
    ASSERT_EQ(added_count, added_count_);
    ASSERT_EQ(removed_count, removed_count_);
    ClearChangeCount();
  }

  void ClearChangeCount() {
    model_changed_count_ = items_changed_count_ = added_count_ =
        removed_count_ = 0;
  }

  TemplateURLTableModel* table_model() const {
    return controller_->table_model();
  }

 protected:
  MessageLoopForUI message_loop_;

  scoped_ptr<TestingProfile> profile_;
  scoped_ptr<KeywordEditorController> controller_;
  TemplateURLModel* model_;

  // Notification counters, reset by SetUp() and ClearChangeCount().
  int model_changed_count_;
  int items_changed_count_;
  int added_count_;
  int removed_count_;
};
// Tests adding a TemplateURL.
TEST_F(KeywordEditorControllerTest, Add) {
  controller_->AddTemplateURL(L"a", L"b", L"http://c");

  // Verify the observer was notified.
  VerifyChangeCount(0, 0, 1, 0);
  if (HasFatalFailure())
    return;

  // Verify the TableModel has the new data.
  ASSERT_EQ(1, table_model()->RowCount());

  // Verify the TemplateURLModel has the new entry.
  ASSERT_EQ(1U, model_->GetTemplateURLs().size());

  // Verify the entry is what we added.
  const TemplateURL* turl = model_->GetTemplateURLs()[0];
  EXPECT_EQ(L"a", turl->short_name());
  EXPECT_EQ(L"b", turl->keyword());
  EXPECT_TRUE(turl->url() != NULL);
  EXPECT_TRUE(turl->url()->url() == L"http://c");
}

// Tests modifying a TemplateURL.
TEST_F(KeywordEditorControllerTest, Modify) {
  controller_->AddTemplateURL(L"a", L"b", L"http://c");
  ClearChangeCount();

  // Modify the entry.
  const TemplateURL* turl = model_->GetTemplateURLs()[0];
  controller_->ModifyTemplateURL(turl, L"a1", L"b1", L"http://c1");

  // Make sure it was updated appropriately.
  VerifyChangeCount(0, 1, 0, 0);
  EXPECT_EQ(L"a1", turl->short_name());
  EXPECT_EQ(L"b1", turl->keyword());
  EXPECT_TRUE(turl->url() != NULL);
  EXPECT_TRUE(turl->url()->url() == L"http://c1");
}

// Tests making a TemplateURL the default search provider.
TEST_F(KeywordEditorControllerTest, MakeDefault) {
  controller_->AddTemplateURL(L"a", L"b", L"http://c{searchTerms}");
  ClearChangeCount();

  const TemplateURL* turl = model_->GetTemplateURLs()[0];
  controller_->MakeDefaultTemplateURL(0);
  // Making an item the default sends a handful of changes. Which are sent isn't
  // important, what is important is 'something' is sent.
  ASSERT_TRUE(items_changed_count_ > 0 || added_count_ > 0 ||
              removed_count_ > 0);
  ASSERT_TRUE(model_->GetDefaultSearchProvider() == turl);
}

// Mutates the TemplateURLModel and make sure table model is updating
// appropriately.
TEST_F(KeywordEditorControllerTest, MutateTemplateURLModel) {
  TemplateURL* turl = new TemplateURL();
  turl->set_keyword(L"a");
  turl->set_short_name(L"b");
  model_->Add(turl);

  // Table model should have updated.
  VerifyChangeCount(1, 0, 0, 0);

  // And should contain the newly added TemplateURL.
  ASSERT_EQ(1, table_model()->RowCount());
  ASSERT_EQ(0, table_model()->IndexOfTemplateURL(turl));
}
| bsd-3-clause |
ric2b/Vivaldi-browser | chromium/ui/display/util/display_util_unittest.cc | 16089 | // Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ui/display/util/display_util.h"
#include "base/test/metrics/histogram_tester.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "ui/display/util/edid_parser.h"
namespace display {
namespace {
// HP z32x monitor.
const unsigned char kHPz32x[] =
"\x00\xFF\xFF\xFF\xFF\xFF\xFF\x00\x22\xF0\x75\x32\x01\x01\x01\x01"
"\x1B\x1B\x01\x04\xB5\x46\x27\x78\x3A\x8D\x15\xAC\x51\x32\xB8\x26"
"\x0B\x50\x54\x21\x08\x00\xD1\xC0\xA9\xC0\x81\xC0\xD1\x00\xB3\x00"
"\x95\x00\xA9\x40\x81\x80\x4D\xD0\x00\xA0\xF0\x70\x3E\x80\x30\x20"
"\x35\x00\xB9\x88\x21\x00\x00\x1A\x00\x00\x00\xFD\x00\x18\x3C\x1E"
"\x87\x3C\x00\x0A\x20\x20\x20\x20\x20\x20\x00\x00\x00\xFC\x00\x48"
"\x50\x20\x5A\x33\x32\x78\x0A\x20\x20\x20\x20\x20\x00\x00\x00\xFF"
"\x00\x43\x4E\x43\x37\x32\x37\x30\x4D\x57\x30\x0A\x20\x20\x01\x46"
"\x02\x03\x18\xF1\x4B\x10\x1F\x04\x13\x03\x12\x02\x11\x01\x05\x14"
"\x23\x09\x07\x07\x83\x01\x00\x00\xA3\x66\x00\xA0\xF0\x70\x1F\x80"
"\x30\x20\x35\x00\xB9\x88\x21\x00\x00\x1A\x56\x5E\x00\xA0\xA0\xA0"
"\x29\x50\x30\x20\x35\x00\xB9\x88\x21\x00\x00\x1A\xEF\x51\x00\xA0"
"\xF0\x70\x19\x80\x30\x20\x35\x00\xB9\x88\x21\x00\x00\x1A\xE2\x68"
"\x00\xA0\xA0\x40\x2E\x60\x20\x30\x63\x00\xB9\x88\x21\x00\x00\x1C"
"\x28\x3C\x80\xA0\x70\xB0\x23\x40\x30\x20\x36\x00\xB9\x88\x21\x00"
"\x00\x1A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x3E";
// Chromebook Samus internal display.
const unsigned char kSamus[] =
"\x00\xff\xff\xff\xff\xff\xff\x00\x30\xe4\x2e\x04\x00\x00\x00\x00"
"\x00\x18\x01\x04\xa5\x1b\x12\x96\x02\x4f\xd5\xa2\x59\x52\x93\x26"
"\x17\x50\x54\x00\x00\x00\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01"
"\x01\x01\x01\x01\x01\x01\x6d\x6f\x00\x9e\xa0\xa4\x31\x60\x30\x20"
"\x3a\x00\x10\xb5\x10\x00\x00\x19\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\x00\x4c"
"\x47\x20\x44\x69\x73\x70\x6c\x61\x79\x0a\x20\x20\x00\x00\x00\xfe"
"\x00\x4c\x50\x31\x32\x39\x51\x45\x32\x2d\x53\x50\x41\x31\x00\x6c";
// Chromebook Eve internal display.
const unsigned char kEve[] =
"\x00\xff\xff\xff\xff\xff\xff\x00\x4d\x10\x8a\x14\x00\x00\x00\x00"
"\x16\x1b\x01\x04\xa5\x1a\x11\x78\x06\xde\x50\xa3\x54\x4c\x99\x26"
"\x0f\x50\x54\x00\x00\x00\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01"
"\x01\x01\x01\x01\x01\x01\xbb\x62\x60\xa0\x90\x40\x2e\x60\x30\x20"
"\x3a\x00\x03\xad\x10\x00\x00\x18\x00\x00\x00\x10\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc"
"\x00\x4c\x51\x31\x32\x33\x50\x31\x4a\x58\x33\x32\x0a\x20\x00\xb6";
// Invalid EDID: too short to contain chromaticity nor gamma information.
const unsigned char kInvalidEdid[] =
"\x00\xFF\xFF\xFF\xFF\xFF\xFF\x00\x22\xF0\x76\x26\x01\x01\x01\x01"
"\x02\x12\x01\x03\x80\x34\x21";
// Partially valid EDID: gamma information is marked as non existent.
const unsigned char kEdidWithNoGamma[] =
"\x00\xFF\xFF\xFF\xFF\xFF\xFF\x00\x22\xF0\x76\x26\x01\x01\x01\x01"
"\x02\x12\x01\x03\x80\x34\x21\xFF\xEE\xEF\x95\xA3\x54\x4C\x9B\x26"
"\x0F\x50\x54\xA5\x6B\x80\x81\x40\x81\x80\x81\x99\x71\x00\xA9\x00";
// Chromebook Samsung Galaxy (kohaku) that supports HDR metadata.
constexpr unsigned char kHDR[] =
"\x00\xff\xff\xff\xff\xff\xff\x00\x4c\x83\x42\x41\x00\x00\x00\x00"
"\x13\x1d\x01\x04\xb5\x1d\x11\x78\x02\x38\xd1\xae\x51\x3b\xb8\x23"
"\x0b\x50\x54\x00\x00\x00\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01"
"\x01\x01\x01\x01\x01\x01\xb9\xd5\x00\x40\xf1\x70\x20\x80\x30\x20"
"\x88\x00\x26\xa5\x10\x00\x00\x1b\xb9\xd5\x00\x40\xf1\x70\x20\x80"
"\x30\x20\x88\x00\x26\xa5\x10\x00\x00\x1b\x00\x00\x00\x0f\x00\xff"
"\x09\x3c\xff\x09\x3c\x2c\x80\x00\x00\x00\x00\x00\x00\x00\x00\xfe"
"\x00\x41\x54\x4e\x41\x33\x33\x54\x50\x30\x34\x2d\x30\x20\x01\xba"
"\x02\x03\x0f\x00\xe3\x05\x80\x00\xe6\x06\x05\x01\x73\x6d\x07\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xab";
// EDID collected in the wild: valid but with primaries in the wrong order.
const unsigned char kSST210[] =
"\xff\x00\xff\xff\xff\xff\x00\xff\x2d\x4c\x42\x52\x32\x31\x50\x43"
"\x0c\x2b\x03\x01\x33\x80\xa2\x20\x56\x2a\x9c\x4e\x50\x5b\x26\x95"
"\x50\x23\xbf\x59\x80\xef\x80\x81\x59\x61\x59\x45\x59\x31\x40\x31"
"\x01\x01\x01\x01\x01\x01\x32\x8c\xa0\x40\xb0\x60\x40\x19\x32\x1e"
"\x00\x13\x44\x06\x00\x21\x1e\x00\x00\x00\xfd\x00\x38\x00\x1e\x55"
"\x0f\x51\x0a\x00\x20\x20\x20\x20\x20\x20\x00\x00\xfc\x00\x32\x00"
"\x30\x31\x20\x54\x69\x44\x69\x67\x61\x74\x0a\x6c\x00\x00\xff\x00"
"\x48\x00\x4b\x34\x41\x54\x30\x30\x32\x38\x0a\x38\x20\x20\xf8\x00";
// EDID of |kSST210| with the order of the primaries corrected. Still invalid
// because the triangle of primaries is too small.
const unsigned char kSST210Corrected[] =
"\xff\x00\xff\xff\xff\xff\x00\xff\x2d\x4c\x42\x52\x32\x31\x50\x43"
"\x0c\x2b\x03\x01\x33\x80\xa2\x20\x56\x2a\x9c\x95\x50\x4e\x50\x5b"
"\x26\x23\xbf\x59\x80\xef\x80\x81\x59\x61\x59\x45\x59\x31\x40\x31"
"\x01\x01\x01\x01\x01\x01\x32\x8c\xa0\x40\xb0\x60\x40\x19\x32\x1e"
"\x00\x13\x44\x06\x00\x21\x1e\x00\x00\x00\xfd\x00\x38\x00\x1e\x55"
"\x0f\x51\x0a\x00\x20\x20\x20\x20\x20\x20\x00\x00\xfc\x00\x32\x00"
"\x30\x31\x20\x54\x69\x44\x69\x67\x61\x74\x0a\x6c\x00\x00\xff\x00"
"\x48\x00\x4b\x34\x41\x54\x30\x30\x32\x38\x0a\x38\x20\x20\xf8\x00";
// This EDID produces blue primary coordinates too far off the expected point,
// which would paint blue colors as purple. See https://crbug.com/809909.
const unsigned char kBrokenBluePrimaries[] =
"\x00\xff\xff\xff\xff\xff\xff\x00\x4c\x83\x4d\x83\x00\x00\x00\x00"
"\x00\x19\x01\x04\x95\x1d\x10\x78\x0a\xee\x25\xa3\x54\x4c\x99\x29"
"\x26\x50\x54\x00\x00\x00\x01\x01\x01\x01\x01\x01\x01\x01\x01\x01"
"\x01\x01\x01\x01\x01\x01\xd2\x37\x80\xa2\x70\x38\x40\x40\x30\x20"
"\x25\x00\x25\xa5\x10\x00\x00\x1a\xa6\x2c\x80\xa2\x70\x38\x40\x40"
"\x30\x20\x25\x00\x25\xa5\x10\x00\x00\x1a\x00\x00\x00\xfe\x00\x56"
"\x59\x54\x39\x36\x80\x31\x33\x33\x48\x4c\x0a\x20\x00\x00\x00\x00";
} // namespace
// Verifies that IsDisplaySizeValid() rejects a set of too-small or
// implausibly-proportioned physical sizes and accepts reasonable ones.
TEST(DisplayUtilTest, TestValidDisplaySize) {
  // Sizes expected to be rejected, checked in the same order as before.
  const gfx::Size kRejectedSizes[] = {
      gfx::Size(10, 10), gfx::Size(40, 30), gfx::Size(50, 40),
      gfx::Size(160, 90), gfx::Size(160, 100)};
  for (const gfx::Size& rejected : kRejectedSizes)
    EXPECT_FALSE(IsDisplaySizeValid(rejected));

  // Sizes expected to be accepted.
  const gfx::Size kAcceptedSizes[] = {gfx::Size(50, 60), gfx::Size(100, 70),
                                      gfx::Size(272, 181)};
  for (const gfx::Size& accepted : kAcceptedSizes)
    EXPECT_TRUE(IsDisplaySizeValid(accepted));
}
// Exercises GetColorSpaceFromEdid() on several valid EDID fixtures and checks
// both the resulting gfx::ColorSpace and the
// "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome" histogram. Note that the
// bucket counts asserted below are cumulative across this test body, so the
// sections must run in this exact order. The base::size(...) - 1 expressions
// drop the trailing '\0' appended to the string-literal EDID blobs.
TEST(DisplayUtilTest, GetColorSpaceFromEdid) {
  base::HistogramTester histogram_tester;

  // Test with HP z32x monitor. The primaries/white point below are the values
  // expected to be decoded from the EDID's chromaticity bytes.
  constexpr SkColorSpacePrimaries expected_hpz32x_primaries = {
      .fRX = 0.673828f,
      .fRY = 0.316406f,
      .fGX = 0.198242f,
      .fGY = 0.719727f,
      .fBX = 0.148438f,
      .fBY = 0.043945f,
      .fWX = 0.313477f,
      .fWY = 0.329102f};
  skcms_Matrix3x3 expected_hpz32x_toXYZ50_matrix;
  expected_hpz32x_primaries.toXYZD50(&expected_hpz32x_toXYZ50_matrix);
  const std::vector<uint8_t> hpz32x_edid(kHPz32x,
                                         kHPz32x + base::size(kHPz32x) - 1);
  // Pure power-2.2 transfer function (skcms parameters {g, a, b, c, d, e, f}).
  const gfx::ColorSpace expected_hpz32x_color_space =
      gfx::ColorSpace::CreateCustom(
          expected_hpz32x_toXYZ50_matrix,
          skcms_TransferFunction({2.2, 1, 0, 0, 0, 0, 0}));
  EXPECT_EQ(expected_hpz32x_color_space.ToString(),
            GetColorSpaceFromEdid(display::EdidParser(hpz32x_edid)).ToString());
  histogram_tester.ExpectBucketCount(
      "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome",
      static_cast<base::HistogramBase::Sample>(
          EdidColorSpaceChecksOutcome::kSuccess),
      1);

  // Test with Chromebook Samus internal display. Note the power-2.5 transfer,
  // unlike the 2.2 used by the other panels here.
  constexpr SkColorSpacePrimaries expected_samus_primaries = {.fRX = 0.633789f,
                                                              .fRY = 0.347656f,
                                                              .fGX = 0.323242f,
                                                              .fGY = 0.577148f,
                                                              .fBX = 0.151367f,
                                                              .fBY = 0.090820f,
                                                              .fWX = 0.313477f,
                                                              .fWY = 0.329102f};
  skcms_Matrix3x3 expected_samus_toXYZ50_matrix;
  expected_samus_primaries.toXYZD50(&expected_samus_toXYZ50_matrix);
  const std::vector<uint8_t> samus_edid(kSamus,
                                        kSamus + base::size(kSamus) - 1);
  const gfx::ColorSpace expected_samus_color_space =
      gfx::ColorSpace::CreateCustom(
          expected_samus_toXYZ50_matrix,
          skcms_TransferFunction({2.5, 1, 0, 0, 0, 0, 0}));
  EXPECT_EQ(expected_samus_color_space.ToString(),
            GetColorSpaceFromEdid(display::EdidParser(samus_edid)).ToString());
  // Second cumulative success.
  histogram_tester.ExpectBucketCount(
      "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome",
      static_cast<base::HistogramBase::Sample>(
          EdidColorSpaceChecksOutcome::kSuccess),
      2);

  // Test with Chromebook Eve internal display. The SkColorSpacePrimaries:
  // SkColorSpacePrimaries expected_eve_primaries = {.fRX = 0.639648f,
  //                                                 .fRY = 0.329102f,
  //                                                 .fGX = 0.299805f,
  //                                                 .fGY = 0.599609f,
  //                                                 .fBX = 0.149414f,
  //                                                 .fBY = 0.059570f,
  //                                                 .fWX = 0.312500f,
  //                                                 .fWY = 0.328125f};
  // are very close to the BT.709/sRGB ones, so they'll be rounded to those.
  // Hence the expected color space uses PrimaryID::BT709 with only the
  // transfer function kept custom.
  const skcms_TransferFunction eve_transfer({2.2, 1, 0, 0, 0, 0, 0});
  const gfx::ColorSpace expected_eve_color_space(
      gfx::ColorSpace::PrimaryID::BT709, gfx::ColorSpace::TransferID::CUSTOM,
      gfx::ColorSpace::MatrixID::RGB, gfx::ColorSpace::RangeID::FULL,
      /*custom_primary_matrix=*/nullptr, &eve_transfer);
  const std::vector<uint8_t> eve_edid(kEve, kEve + base::size(kEve) - 1);
  EXPECT_EQ(expected_eve_color_space.ToString(),
            GetColorSpaceFromEdid(display::EdidParser(eve_edid)).ToString());
  // Third cumulative success.
  histogram_tester.ExpectBucketCount(
      "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome",
      static_cast<base::HistogramBase::Sample>(
          EdidColorSpaceChecksOutcome::kSuccess),
      3);

  // Test with a display that supports HDR: Chromebook Samsung Galaxy (kohaku).
  // The expected transfer is SMPTEST2084 (PQ), so the result must be HDR.
  constexpr SkColorSpacePrimaries expected_hdr_primaries = {.fRX = 0.67960f,
                                                            .fRY = 0.31930f,
                                                            .fGX = 0.23240f,
                                                            .fGY = 0.71870f,
                                                            .fBX = 0.13965f,
                                                            .fBY = 0.04400f,
                                                            .fWX = 0.31250f,
                                                            .fWY = 0.32910f};
  skcms_Matrix3x3 expected_hdr_toXYZ50_matrix;
  expected_hdr_primaries.toXYZD50(&expected_hdr_toXYZ50_matrix);
  const std::vector<uint8_t> hdr_edid(kHDR, kHDR + base::size(kHDR) - 1);
  const gfx::ColorSpace expected_hdr_color_space =
      gfx::ColorSpace::CreateCustom(expected_hdr_toXYZ50_matrix,
                                    gfx::ColorSpace::TransferID::SMPTEST2084);
  EXPECT_TRUE(expected_hdr_color_space.IsHDR());
  EXPECT_EQ(expected_hdr_color_space.ToString(),
            GetColorSpaceFromEdid(display::EdidParser(hdr_edid)).ToString());
  // Fourth cumulative success.
  histogram_tester.ExpectBucketCount(
      "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome",
      static_cast<base::HistogramBase::Sample>(
          EdidColorSpaceChecksOutcome::kSuccess),
      4);

  // Test with gamma marked as non-existent: parsing must fail with an invalid
  // (default) color space and a kErrorBadGamma outcome.
  const std::vector<uint8_t> no_gamma_edid(
      kEdidWithNoGamma, kEdidWithNoGamma + base::size(kEdidWithNoGamma) - 1);
  const gfx::ColorSpace no_gamma_color_space =
      GetColorSpaceFromEdid(display::EdidParser(no_gamma_edid));
  EXPECT_FALSE(no_gamma_color_space.IsValid());
  histogram_tester.ExpectBucketCount(
      "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome",
      static_cast<base::HistogramBase::Sample>(
          EdidColorSpaceChecksOutcome::kErrorBadGamma),
      1);
  // Total: 4 successes + 1 bad-gamma error.
  histogram_tester.ExpectTotalCount(
      "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome", 5);
}
// Exercises GetColorSpaceFromEdid() on EDIDs that must be rejected, checking
// which EdidColorSpaceChecksOutcome bucket each failure lands in. As in the
// test above, the asserted bucket counts are cumulative across this body.
TEST(DisplayUtilTest, GetInvalidColorSpaceFromEdid) {
  base::HistogramTester histogram_tester;

  // An empty EDID yields the default (invalid) color space; the failure is
  // recorded as kErrorPrimariesAreaTooSmall.
  const std::vector<uint8_t> empty_edid;
  EXPECT_EQ(gfx::ColorSpace(),
            GetColorSpaceFromEdid(display::EdidParser(empty_edid)));
  histogram_tester.ExpectBucketCount(
      "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome",
      static_cast<base::HistogramBase::Sample>(
          EdidColorSpaceChecksOutcome::kErrorPrimariesAreaTooSmall),
      1);

  // An EDID too short to contain chromaticity/gamma data fails the same way
  // (second cumulative count in that bucket).
  const std::vector<uint8_t> invalid_edid(
      kInvalidEdid, kInvalidEdid + base::size(kInvalidEdid) - 1);
  const gfx::ColorSpace invalid_color_space =
      GetColorSpaceFromEdid(display::EdidParser(invalid_edid));
  EXPECT_FALSE(invalid_color_space.IsValid());
  histogram_tester.ExpectBucketCount(
      "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome",
      static_cast<base::HistogramBase::Sample>(
          EdidColorSpaceChecksOutcome::kErrorPrimariesAreaTooSmall),
      2);

  // |kSST210| has its primaries in the wrong order, which is detected as bad
  // coordinates.
  const std::vector<uint8_t> sst210_edid(kSST210,
                                         kSST210 + base::size(kSST210) - 1);
  const gfx::ColorSpace sst210_color_space =
      GetColorSpaceFromEdid(display::EdidParser(sst210_edid));
  EXPECT_FALSE(sst210_color_space.IsValid()) << sst210_color_space.ToString();
  histogram_tester.ExpectBucketCount(
      "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome",
      static_cast<base::HistogramBase::Sample>(
          EdidColorSpaceChecksOutcome::kErrorBadCoordinates),
      1);

  // With the primary order fixed, the EDID is still rejected because the
  // triangle of primaries is too small (third cumulative count).
  const std::vector<uint8_t> sst210_edid_2(
      kSST210Corrected, kSST210Corrected + base::size(kSST210Corrected) - 1);
  const gfx::ColorSpace sst210_color_space_2 =
      GetColorSpaceFromEdid(display::EdidParser(sst210_edid_2));
  EXPECT_FALSE(sst210_color_space_2.IsValid())
      << sst210_color_space_2.ToString();
  histogram_tester.ExpectBucketCount(
      "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome",
      static_cast<base::HistogramBase::Sample>(
          EdidColorSpaceChecksOutcome::kErrorPrimariesAreaTooSmall),
      3);

  // A blue primary too far from the expected point is rejected specifically
  // (would paint blue as purple otherwise, see https://crbug.com/809909).
  const std::vector<uint8_t> broken_blue_edid(
      kBrokenBluePrimaries,
      kBrokenBluePrimaries + base::size(kBrokenBluePrimaries) - 1);
  const gfx::ColorSpace broken_blue_color_space =
      GetColorSpaceFromEdid(display::EdidParser(broken_blue_edid));
  EXPECT_FALSE(broken_blue_color_space.IsValid())
      << broken_blue_color_space.ToString();
  histogram_tester.ExpectBucketCount(
      "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome",
      static_cast<base::HistogramBase::Sample>(
          EdidColorSpaceChecksOutcome::kErrorBluePrimaryIsBroken),
      1);
  // Total: 3 area-too-small + 1 bad-coordinates + 1 broken-blue.
  histogram_tester.ExpectTotalCount(
      "DrmUtil.GetColorSpaceFromEdid.ChecksOutcome", 5);
}
} // namespace display
| bsd-3-clause |