repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
mehdi149/OF_COMPILER_0.1 | gen-src/main/java/org/projectfloodlight/openflow/protocol/ver14/OFOxmTunnelIdMaskedVer14.java | 10644 | // Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
// LoxiGen-generated wire class for the masked "tunnel_id" OXM match field in
// OpenFlow 1.4 (wire version 5).  Immutable: holds a 64-bit tunnel id plus a
// 64-bit mask and knows how to read/write itself in OXM TLV wire format and
// how to funnel itself into a hash sink.
class OFOxmTunnelIdMaskedVer14 implements OFOxmTunnelIdMasked {
    private static final Logger logger = LoggerFactory.getLogger(OFOxmTunnelIdMaskedVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    // total TLV size on the wire: 4-byte OXM header + 8-byte value + 8-byte mask
    final static int LENGTH = 20;

    private final static U64 DEFAULT_VALUE = U64.ZERO;
    private final static U64 DEFAULT_VALUE_MASK = U64.ZERO;

    // OF message fields
    private final U64 value;
    private final U64 mask;
//
    // Immutable default instance
    final static OFOxmTunnelIdMaskedVer14 DEFAULT = new OFOxmTunnelIdMaskedVer14(
        DEFAULT_VALUE, DEFAULT_VALUE_MASK
    );

    // package private constructor - used by readers, builders, and factory
    OFOxmTunnelIdMaskedVer14(U64 value, U64 mask) {
        if(value == null) {
            throw new NullPointerException("OFOxmTunnelIdMaskedVer14: property value cannot be null");
        }
        if(mask == null) {
            throw new NullPointerException("OFOxmTunnelIdMaskedVer14: property mask cannot be null");
        }
        this.value = value;
        this.mask = mask;
    }

    // Accessors for OF message fields
    @Override
    public long getTypeLen() {
        // fixed OXM header: oxm_class / field id / hasMask bit / payload length
        // packed into one 32-bit word
        return 0x80004d10L;
    }

    @Override
    public U64 getValue() {
        return value;
    }

    @Override
    public U64 getMask() {
        return mask;
    }

    @Override
    public MatchField<U64> getMatchField() {
        return MatchField.TUNNEL_ID;
    }

    @Override
    public boolean isMasked() {
        // this class always represents the masked variant of the field
        return true;
    }

    // Reduces this OXM to canonical form: an unmasked (exact-match) OXM when the
    // mask selects every bit, null when the mask selects no bits (fully
    // wildcarded, i.e. the OXM is redundant), otherwise this instance unchanged.
    // NOTE(review): relies on the loxigen convention that U64.NO_MASK means
    // "no masking applied" (all bits significant) and U64.FULL_MASK means
    // "fully masked out" - confirm against the U64 type before relying on it.
    public OFOxm<U64> getCanonical() {
        if (U64.NO_MASK.equals(mask)) {
            return new OFOxmTunnelIdVer14(value);
        } else if(U64.FULL_MASK.equals(mask)) {
            return null;
        } else {
            return this;
        }
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

    // Returns a builder seeded with this instance's current field values.
    public OFOxmTunnelIdMasked.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder backed by an existing message: any property not explicitly set
    // falls back to the parent message's value in build().
    static class BuilderWithParent implements OFOxmTunnelIdMasked.Builder {
        final OFOxmTunnelIdMaskedVer14 parentMessage;

        // OF message fields ("xxxSet" flags record whether the setter was called)
        private boolean valueSet;
        private U64 value;
        private boolean maskSet;
        private U64 mask;

        BuilderWithParent(OFOxmTunnelIdMaskedVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public long getTypeLen() {
            return 0x80004d10L;
        }

        @Override
        public U64 getValue() {
            return value;
        }

        @Override
        public OFOxmTunnelIdMasked.Builder setValue(U64 value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public U64 getMask() {
            return mask;
        }

        @Override
        public OFOxmTunnelIdMasked.Builder setMask(U64 mask) {
            this.mask = mask;
            this.maskSet = true;
            return this;
        }

        @Override
        public MatchField<U64> getMatchField() {
            return MatchField.TUNNEL_ID;
        }

        @Override
        public boolean isMasked() {
            return true;
        }

        @Override
        public OFOxm<U64> getCanonical() throws UnsupportedOperationException {
            // canonicalization is only defined on built instances, not builders
            throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }

        @Override
        public OFOxmTunnelIdMasked build() {
            // fall back to the parent's values for properties never set
            U64 value = this.valueSet ? this.value : parentMessage.value;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            U64 mask = this.maskSet ? this.mask : parentMessage.mask;
            if(mask == null)
                throw new NullPointerException("Property mask must not be null");
            //
            return new OFOxmTunnelIdMaskedVer14(
                    value,
                    mask
                );
        }
    }

    // Stand-alone builder: unset properties fall back to the class defaults.
    static class Builder implements OFOxmTunnelIdMasked.Builder {
        // OF message fields ("xxxSet" flags record whether the setter was called)
        private boolean valueSet;
        private U64 value;
        private boolean maskSet;
        private U64 mask;

        @Override
        public long getTypeLen() {
            return 0x80004d10L;
        }

        @Override
        public U64 getValue() {
            return value;
        }

        @Override
        public OFOxmTunnelIdMasked.Builder setValue(U64 value) {
            this.value = value;
            this.valueSet = true;
            return this;
        }

        @Override
        public U64 getMask() {
            return mask;
        }

        @Override
        public OFOxmTunnelIdMasked.Builder setMask(U64 mask) {
            this.mask = mask;
            this.maskSet = true;
            return this;
        }

        @Override
        public MatchField<U64> getMatchField() {
            return MatchField.TUNNEL_ID;
        }

        @Override
        public boolean isMasked() {
            return true;
        }

        @Override
        public OFOxm<U64> getCanonical() throws UnsupportedOperationException {
            // canonicalization is only defined on built instances, not builders
            throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_14;
        }
//
        @Override
        public OFOxmTunnelIdMasked build() {
            U64 value = this.valueSet ? this.value : DEFAULT_VALUE;
            if(value == null)
                throw new NullPointerException("Property value must not be null");
            U64 mask = this.maskSet ? this.mask : DEFAULT_VALUE_MASK;
            if(mask == null)
                throw new NullPointerException("Property mask must not be null");
            return new OFOxmTunnelIdMaskedVer14(
                    value,
                    mask
                );
        }
    }

    final static Reader READER = new Reader();

    // Deserializes one masked tunnel_id OXM from the buffer, validating the
    // fixed header before reading value and mask.
    static class Reader implements OFMessageReader<OFOxmTunnelIdMasked> {
        @Override
        public OFOxmTunnelIdMasked readFrom(ByteBuf bb) throws OFParseError {
            // fixed value property typeLen == 0x80004d10L
            int typeLen = bb.readInt();
            if(typeLen != (int) 0x80004d10)
                throw new OFParseError("Wrong typeLen: Expected=0x80004d10L(0x80004d10L), got="+typeLen);
            U64 value = U64.ofRaw(bb.readLong());
            U64 mask = U64.ofRaw(bb.readLong());
            OFOxmTunnelIdMaskedVer14 oxmTunnelIdMaskedVer14 = new OFOxmTunnelIdMaskedVer14(
                    value,
                      mask
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", oxmTunnelIdMaskedVer14);
            return oxmTunnelIdMaskedVer14;
        }
    }

    // Feeds this object's identity-relevant bytes into a Guava hash sink.
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFOxmTunnelIdMaskedVer14Funnel FUNNEL = new OFOxmTunnelIdMaskedVer14Funnel();

    static class OFOxmTunnelIdMaskedVer14Funnel implements Funnel<OFOxmTunnelIdMaskedVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFOxmTunnelIdMaskedVer14 message, PrimitiveSink sink) {
            // fixed value property typeLen = 0x80004d10L
            sink.putInt((int) 0x80004d10);
            message.value.putTo(sink);
            message.mask.putTo(sink);
        }
    }

    // Serializes this OXM (header, value, mask) to the buffer.
    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    static class Writer implements OFMessageWriter<OFOxmTunnelIdMaskedVer14> {
        @Override
        public void write(ByteBuf bb, OFOxmTunnelIdMaskedVer14 message) {
            // fixed value property typeLen = 0x80004d10L
            bb.writeInt((int) 0x80004d10);
            bb.writeLong(message.value.getValue());
            bb.writeLong(message.mask.getValue());
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFOxmTunnelIdMaskedVer14(");
        b.append("value=").append(value);
        b.append(", ");
        b.append("mask=").append(mask);
        b.append(")");
        return b.toString();
    }

    // value/mask are guaranteed non-null by the constructor; the null checks
    // below are generated defensively anyway.
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFOxmTunnelIdMaskedVer14 other = (OFOxmTunnelIdMaskedVer14) obj;
        if (value == null) {
            if (other.value != null)
                return false;
        } else if (!value.equals(other.value))
            return false;
        if (mask == null) {
            if (other.mask != null)
                return false;
        } else if (!mask.equals(other.mask))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + ((value == null) ? 0 : value.hashCode());
        result = prime * result + ((mask == null) ? 0 : mask.hashCode());
        return result;
    }
}
| apache-2.0 |
zhichao-li/BigDL | dl/src/test/scala/com/intel/analytics/bigdl/optim/RefLocalOptimizer.scala | 2358 | /*
* Licensed to Intel Corporation under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* Intel Corporation licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.optim
import com.intel.analytics.bigdl.DataSet
import com.intel.analytics.bigdl.dataset.MiniBatch
import com.intel.analytics.bigdl._
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import scala.reflect.ClassTag
/**
 * Reference optimizer used by the local optimizer unit tests: a deliberately
 * simple, single-threaded training loop whose results the production local
 * optimizer is compared against.
 */
class RefLocalOptimizer[T: ClassTag](
  model: Module[T],
  dataset: DataSet[MiniBatch[T]],
  criterion: Criterion[T]
)(implicit ev: TensorNumeric[T]) extends Optimizer[T, MiniBatch[T]](model, dataset, criterion) {

  // Flattened views over the model's weights (w) and gradients (g); the
  // optimization method updates w in place through these views.
  val (w, g) = model.getParameters()

  override def optimize(): Module[T] = {
    this.assertEngineInited()

    val data = dataset.toLocal().data(train = true)
    // Number of samples consumed in the current epoch.
    var count = 0
    // Initialize bookkeeping on first entry; preserve it across re-entry.
    state("epoch") = state.get[Int]("epoch").getOrElse(1)
    state("neval") = state.get[Int]("neval").getOrElse(1)
    while (!endWhen(state)) {
      val batch = data.next()
      val input = batch.data
      val target = batch.labels
      model.training()
      model.zeroGradParameters()
      // Standard forward/backward pass followed by one optimizer step; the
      // closure passed to optimize returns the loss and the gradient view g.
      val output = model.forward(input).asInstanceOf[Tensor[T]]
      val loss = criterion.forward(output, target)
      model.backward(input, criterion.backward(output, target))
      optimMethod.optimize(_ => (loss, g), w, state)
      count += input.size(1)
      state("neval") = state[Int]("neval") + 1
      println(s"loss is $loss")
      // A full pass over the dataset completed: advance the epoch counter.
      if (count >= dataset.size()) {
        state("epoch") = state[Int]("epoch") + 1
        count = 0
      }
    }
    model
  }
}
| apache-2.0 |
nkolosnjaji/generator-jhipster | generators/server/templates/src/main/java/package/repository/search/_package-info.java | 815 | <%#
Copyright 2013-2018 the original author or authors from the JHipster project.
This file is part of the JHipster project, see http://www.jhipster.tech/
for more information.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-%>
/**
* Spring Data Elasticsearch repositories.
*/
package <%=packageName%>.repository.search;
| apache-2.0 |
iperdomo/keycloak | testsuite/integration-arquillian/tests/base/src/test/java/org/keycloak/testsuite/migration/MigrationTest.java | 8668 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.testsuite.migration;
import java.util.HashSet;
import org.junit.Test;
import org.keycloak.admin.client.resource.RealmResource;
import org.keycloak.representations.idm.RealmRepresentation;
import org.keycloak.testsuite.AbstractKeycloakTest;
import org.keycloak.testsuite.arquillian.migration.Migration;
import java.util.List;
import java.util.Set;
import org.junit.Before;
import org.keycloak.admin.client.resource.ClientResource;
import org.keycloak.admin.client.resource.RoleResource;
import org.keycloak.models.AdminRoles;
import org.keycloak.models.AuthenticationExecutionModel;
import org.keycloak.models.Constants;
import org.keycloak.models.UserModel;
import org.keycloak.models.utils.DefaultAuthenticationFlows;
import org.keycloak.representations.idm.AuthenticationExecutionExportRepresentation;
import org.keycloak.representations.idm.AuthenticationFlowRepresentation;
import org.keycloak.representations.idm.ClientRepresentation;
import org.keycloak.representations.idm.RequiredActionProviderRepresentation;
import org.keycloak.representations.idm.RoleRepresentation;
import static org.keycloak.testsuite.Assert.*;
import static org.keycloak.testsuite.auth.page.AuthRealm.MASTER;
/**
 * Verifies that realm data created under an older Keycloak version survives a
 * server migration.  No test realms are created by this class itself: the
 * {@code Migration} realm is expected to have been migrated from the previous
 * version, and is inspected (together with the {@code master} realm) through
 * the admin client.
 *
 * @author <a href="mailto:vramik@redhat.com">Vlastislav Ramik</a>
 */
public class MigrationTest extends AbstractKeycloakTest {

    // Name of the realm carried over from the pre-migration server.
    private static final String MIGRATION = "Migration";

    private RealmResource migrationRealm;
    private RealmResource masterRealm;

    @Override
    public void addTestRealms(List<RealmRepresentation> testRealms) {
        log.info("Adding no test realms for migration test. Test realm should be migrated from previous version.");
    }

    /** Looks up the realm resources and registers the migrated realm for cleanup. */
    @Before
    public void beforeMigrationTest() {
        migrationRealm = adminClient.realms().realm(MIGRATION);
        masterRealm = adminClient.realms().realm(MASTER);

        //add migration realm to testRealmReps to make the migration removed after test
        testRealmReps.add(adminClient.realms().realm(MIGRATION).toRepresentation());
    }

    /** Migration path from 1.9.8: data must survive and all 2.x migrators must have run. */
    @Test
    @Migration(versionFrom = "1.9.8.Final")
    public void migration1_9_8Test() {
        testMigratedData();
        testMigrationTo2_0_0();
        testMigrationTo2_1_0();
        testMigrationTo2_2_0();
    }

    /** Migration path from 2.2.1: only the data itself needs checking. */
    @Test
    @Migration(versionFrom = "2.2.1.Final")
    public void migration2_2_1Test() {
        testMigratedData();
    }

    /** Asserts that roles, clients, users and groups created before migration still exist. */
    private void testMigratedData() {
        //master realm
        assertNames(masterRealm.roles().list(), "offline_access", "uma_authorization", "create-realm", "master-test-realm-role", "admin");
        assertNames(masterRealm.clients().findAll(), "admin-cli", "security-admin-console", "broker", "account",
                "master-realm", "master-test-client", "Migration-realm");
        String id = masterRealm.clients().findByClientId("master-test-client").get(0).getId();
        assertNames(masterRealm.clients().get(id).roles().list(), "master-test-client-role");
        assertNames(masterRealm.users().search("", 0, 5), "admin", "master-test-user");
        assertNames(masterRealm.groups().groups(), "master-test-group");

        //migrationRealm
        assertNames(migrationRealm.roles().list(), "offline_access", "uma_authorization", "migration-test-realm-role");
        assertNames(migrationRealm.clients().findAll(), "account", "admin-cli", "broker", "migration-test-client", "realm-management", "security-admin-console");
        String id2 = migrationRealm.clients().findByClientId("migration-test-client").get(0).getId();
        assertNames(migrationRealm.clients().get(id2).roles().list(), "migration-test-client-role");
        assertNames(migrationRealm.users().search("", 0, 5), "migration-test-user");
        assertNames(migrationRealm.groups().groups(), "migration-test-group");
    }

    /**
     * @see org.keycloak.migration.migrators.MigrateTo2_0_0
     */
    private void testMigrationTo2_0_0() {
        testAuthorizationServices(masterRealm, migrationRealm);
    }

    /**
     * @see org.keycloak.migration.migrators.MigrateTo2_1_0
     */
    private void testMigrationTo2_1_0() {
        testNameOfOTPRequiredAction(masterRealm, migrationRealm);
    }

    /**
     * @see org.keycloak.migration.migrators.MigrateTo2_2_0
     */
    private void testMigrationTo2_2_0() {
        testIdentityProviderAuthenticator(masterRealm, migrationRealm);
        //MigrateTo2_2_0#migrateRolePolicies is not relevant any more
    }

    /** Checks that the 2.0.0 migrator set up authorization-services roles on each realm. */
    private void testAuthorizationServices(RealmResource... realms) {
        for (RealmResource realm : realms) {
            //test setup of authorization services
            for (String roleName : Constants.AUTHZ_DEFAULT_AUTHORIZATION_ROLES) {
                RoleResource role = realm.roles().get(roleName); //throws javax.ws.rs.NotFoundException if not found
                assertFalse("Role's scopeParamRequired should be false.", role.toRepresentation().isScopeParamRequired());
                assertFalse("Role shouldn't be composite should be false.", role.toRepresentation().isComposite());
                assertTrue("role should be added to default roles for new users", realm.toRepresentation().getDefaultRoles().contains(roleName));
            }
            //test admin roles - master admin client
            List<ClientRepresentation> clients = realm.clients().findByClientId(realm.toRepresentation().getRealm() + "-realm");
            if (!clients.isEmpty()) {
                ClientResource masterAdminClient = realm.clients().get(clients.get(0).getId());
                masterAdminClient.roles().get(AdminRoles.VIEW_AUTHORIZATION).toRepresentation();
                masterAdminClient.roles().get(AdminRoles.MANAGE_AUTHORIZATION).toRepresentation();
                //test admin roles - admin role composite
                Set<String> roleNames = new HashSet<>();
                for (RoleRepresentation role : realm.roles().get(AdminRoles.ADMIN).getRoleComposites()) {
                    roleNames.add(role.getName());
                }
                assertTrue(AdminRoles.VIEW_AUTHORIZATION + " should be composite role of " + AdminRoles.ADMIN, roleNames.contains(AdminRoles.VIEW_AUTHORIZATION));
                assertTrue(AdminRoles.MANAGE_AUTHORIZATION + " should be composite role of " + AdminRoles.ADMIN, roleNames.contains(AdminRoles.MANAGE_AUTHORIZATION));
            }
        }
    }

    /** Checks that the 2.1.0 migrator renamed the OTP required action to "Configure OTP". */
    private void testNameOfOTPRequiredAction(RealmResource... realms) {
        for (RealmResource realm : realms) {
            RequiredActionProviderRepresentation otpAction = realm.flows().getRequiredAction(UserModel.RequiredAction.CONFIGURE_TOTP.name());

            assertEquals("The name of CONFIGURE_TOTP required action should be 'Configure OTP'.", "Configure OTP", otpAction.getName());
        }
    }

    /** Checks that the 2.2.0 migrator inserted the identity-provider redirector into the browser flow. */
    private void testIdentityProviderAuthenticator(RealmResource... realms) {
        for (RealmResource realm : realms) {
            boolean success = false;
            for (AuthenticationFlowRepresentation flow : realm.flows().getFlows()) {
                if (flow.getAlias().equals(DefaultAuthenticationFlows.BROWSER_FLOW)) {
                    for (AuthenticationExecutionExportRepresentation execution : flow.getAuthenticationExecutions()) {
                        if ("identity-provider-redirector".equals(execution.getAuthenticator())) {
                            assertEquals("Requirement should be ALTERNATIVE.", AuthenticationExecutionModel.Requirement.ALTERNATIVE.name(), execution.getRequirement());
                            assertTrue("Priority should be 25.", execution.getPriority() == 25);
                            success = true;
                        }
                    }
                }
            }
            if (!success) {
                fail("BROWSER_FLOW should contain execution: 'identity-provider-redirector' authenticator.");
            }
        }
    }
}
| apache-2.0 |
peridotperiod/isis | tck/tck-fixture/src/main/java/org/apache/isis/core/tck/fixture/scalars/ApplibValuedEntityFixture.java | 1690 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.tck.fixture.scalars;
import org.apache.isis.applib.fixtures.AbstractFixture;
import org.apache.isis.core.tck.dom.scalars.ApplibValuedEntity;
import org.apache.isis.core.tck.dom.scalars.ApplibValuedEntityRepository;
/**
 * Fixture that seeds the repository with a fixed number of
 * {@link ApplibValuedEntity} instances for the TCK.
 */
public class ApplibValuedEntityFixture extends AbstractFixture {

    /** How many entities the fixture creates on install. */
    private static final int ENTITY_COUNT = 5;

    private ApplibValuedEntityRepository applibValuesEntityRepository;

    @Override
    public void install() {
        for (int i = 0; i < ENTITY_COUNT; i++) {
            newEntity();
        }
    }

    /** Creates one entity via the injected repository. */
    private ApplibValuedEntity newEntity() {
        return applibValuesEntityRepository.newEntity();
    }

    /** Injected by the framework. */
    public void setApplibValuesEntityRepository(final ApplibValuedEntityRepository applibValuesEntityRepository) {
        this.applibValuesEntityRepository = applibValuesEntityRepository;
    }
}
| apache-2.0 |
salyh/javamailspec | geronimo-jta_1.2_spec/src/main/java/javax/transaction/TransactionRolledbackException.java | 1391 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
//
// This source code implements specifications defined by the Java
// Community Process. In order to remain compliant with the specification
// DO NOT add / change / or delete method signatures!
//
package javax.transaction;
import java.rmi.RemoteException;
/**
 * Thrown when a remote invocation cannot complete because the transaction
 * associated with the call has been rolled back (or marked rollback-only).
 * Extends {@link java.rmi.RemoteException} so it can propagate across RMI
 * boundaries.  The signatures mirror the JTA specification exactly; per the
 * note above, they must not be changed.
 *
 * @version $Rev: 467742 $ $Date: 2006-10-25 21:30:38 +0200 (mer 25 oct 2006) $
 */
public class TransactionRolledbackException extends RemoteException {
    /** Constructs the exception with no detail message. */
    public TransactionRolledbackException() {
        super();
    }

    /**
     * Constructs the exception with a detail message.
     *
     * @param message description of the rollback condition
     */
    public TransactionRolledbackException(String message) {
        super(message);
    }
}
| apache-2.0 |
skinzer/governator | governator-annotations/src/main/java/com/netflix/governator/annotations/WarmUp.java | 1126 | /*
* Copyright 2013 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.governator.annotations;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
/**
 * Marks a method as a warm up method. Governator will execute warm up methods
 * in parallel when the com.netflix.governator.lifecycle.LifecycleManager is started.
 *
 * <p>Runtime retention is required so the lifecycle manager can discover the
 * annotation reflectively; it may be applied to methods only.</p>
 */
@Documented
@Retention(java.lang.annotation.RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface WarmUp
{
}
| apache-2.0 |
RohanHart/camel | camel-core/src/main/java/org/apache/camel/model/cloud/ServiceCallDefinition.java | 33347 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model.cloud;
import java.util.Set;
import java.util.function.Supplier;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlTransient;
import org.apache.camel.CamelContext;
import org.apache.camel.CamelContextAware;
import org.apache.camel.ExchangePattern;
import org.apache.camel.Expression;
import org.apache.camel.NoFactoryAvailableException;
import org.apache.camel.Processor;
import org.apache.camel.cloud.LoadBalancer;
import org.apache.camel.cloud.ServiceChooser;
import org.apache.camel.cloud.ServiceChooserAware;
import org.apache.camel.cloud.ServiceDiscovery;
import org.apache.camel.cloud.ServiceDiscoveryAware;
import org.apache.camel.cloud.ServiceFilter;
import org.apache.camel.cloud.ServiceFilterAware;
import org.apache.camel.impl.cloud.DefaultLoadBalancer;
import org.apache.camel.impl.cloud.DefaultServiceCallExpression;
import org.apache.camel.impl.cloud.DefaultServiceCallProcessor;
import org.apache.camel.impl.cloud.HealthyServiceFilter;
import org.apache.camel.impl.cloud.PassThroughServiceFilter;
import org.apache.camel.impl.cloud.RandomServiceChooser;
import org.apache.camel.impl.cloud.RoundRobinServiceChooser;
import org.apache.camel.model.NoOutputDefinition;
import org.apache.camel.spi.Metadata;
import org.apache.camel.spi.RouteContext;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.ObjectHelper;
/**
* Remote service call definition
*/
@Metadata(label = "eip,routing")
@XmlRootElement(name = "serviceCall")
@XmlAccessorType(XmlAccessType.FIELD)
public class ServiceCallDefinition extends NoOutputDefinition<ServiceCallDefinition> {
@XmlAttribute @Metadata(required = "true")
private String name;
@XmlAttribute @Metadata(defaultValue = "http")
private String uri;
@XmlAttribute
private String component;
@XmlAttribute
private ExchangePattern pattern;
@XmlAttribute
private String configurationRef;
@XmlAttribute
private String serviceDiscoveryRef;
@XmlTransient
private ServiceDiscovery serviceDiscovery;
@XmlAttribute
private String serviceFilterRef;
@XmlTransient
private ServiceFilter serviceFilter;
@XmlAttribute
private String serviceChooserRef;
@XmlTransient
private ServiceChooser serviceChooser;
@XmlAttribute
private String loadBalancerRef;
@XmlTransient
private LoadBalancer loadBalancer;
@XmlAttribute
private String expressionRef;
@XmlTransient
private Expression expression;
@XmlElements({
@XmlElement(name = "cachingServiceDiscovery", type = CachingServiceCallServiceDiscoveryConfiguration.class),
@XmlElement(name = "chainedServiceDiscovery", type = ChainedServiceCallServiceDiscoveryConfiguration.class),
@XmlElement(name = "consulServiceDiscovery", type = ConsulServiceCallServiceDiscoveryConfiguration.class),
@XmlElement(name = "dnsServiceDiscovery", type = DnsServiceCallServiceDiscoveryConfiguration.class),
@XmlElement(name = "etcdServiceDiscovery", type = EtcdServiceCallServiceDiscoveryConfiguration.class),
@XmlElement(name = "kubernetesServiceDiscovery", type = KubernetesServiceCallServiceDiscoveryConfiguration.class),
@XmlElement(name = "staticServiceDiscovery", type = StaticServiceCallServiceDiscoveryConfiguration.class)}
)
private ServiceCallServiceDiscoveryConfiguration serviceDiscoveryConfiguration;
@XmlElements({
@XmlElement(name = "blacklistServiceFilter", type = BlacklistServiceCallServiceFilterConfiguration.class),
@XmlElement(name = "chainedServiceFilter", type = ChainedServiceCallServiceFilterConfiguration.class),
@XmlElement(name = "customServiceFilter", type = CustomServiceCallServiceFilterConfiguration.class),
@XmlElement(name = "healthyServiceFilter", type = HealthyServiceCallServiceFilterConfiguration.class),
@XmlElement(name = "passThroughServiceFilter", type = PassThroughServiceCallServiceFilterConfiguration.class)}
)
private ServiceCallServiceFilterConfiguration serviceFilterConfiguration;
@XmlElements({
@XmlElement(name = "ribbonLoadBalancer", type = RibbonServiceCallLoadBalancerConfiguration.class),
@XmlElement(name = "defaultLoadBalancer", type = DefaultServiceCallLoadBalancerConfiguration.class) }
)
private ServiceCallLoadBalancerConfiguration loadBalancerConfiguration;
@XmlElements({
@XmlElement(name = "expressionConfiguration", type = ServiceCallExpressionConfiguration.class)}
)
private ServiceCallExpressionConfiguration expressionConfiguration;
public ServiceCallDefinition() {
}
@Override
public String toString() {
return "ServiceCall[" + name + "]";
}
@Override
public String getLabel() {
return "serviceCall";
}
// *****************************
// Properties
// *****************************
public String getName() {
return name;
}
/**
* Sets the name of the service to use
*/
public void setName(String name) {
this.name = name;
}
public ExchangePattern getPattern() {
return pattern;
}
/**
* Sets the optional {@link ExchangePattern} used to invoke this endpoint
*/
public void setPattern(ExchangePattern pattern) {
this.pattern = pattern;
}
public String getConfigurationRef() {
return configurationRef;
}
/**
* Refers to a ServiceCall configuration to use
*/
public void setConfigurationRef(String configurationRef) {
this.configurationRef = configurationRef;
}
public String getUri() {
return uri;
}
/**
* The uri of the endpoint to send to.
* The uri can be dynamic computed using the {@link org.apache.camel.language.simple.SimpleLanguage} expression.
*/
public void setUri(String uri) {
this.uri = uri;
}
public String getComponent() {
return component;
}
/**
* The component to use.
*/
public void setComponent(String component) {
this.component = component;
}
public String getServiceDiscoveryRef() {
return serviceDiscoveryRef;
}
/**
* Sets a reference to a custom {@link ServiceDiscovery} to use.
*/
public void setServiceDiscoveryRef(String serviceDiscoveryRef) {
this.serviceDiscoveryRef = serviceDiscoveryRef;
}
public ServiceDiscovery getServiceDiscovery() {
return serviceDiscovery;
}
/**
* Sets a custom {@link ServiceDiscovery} to use.
*/
public void setServiceDiscovery(ServiceDiscovery serviceDiscovery) {
this.serviceDiscovery = serviceDiscovery;
}
public String getServiceFilterRef() {
return serviceFilterRef;
}
/**
* Sets a reference to a custom {@link ServiceFilter} to use.
*/
public void setServiceFilterRef(String serviceFilterRef) {
this.serviceFilterRef = serviceFilterRef;
}
public ServiceFilter getServiceFilter() {
return serviceFilter;
}
/**
* Sets a custom {@link ServiceFilter} to use.
*/
public void setServiceFilter(ServiceFilter serviceFilter) {
this.serviceFilter = serviceFilter;
}
public String getServiceChooserRef() {
return serviceChooserRef;
}
/**
* Sets a reference to a custom {@link ServiceChooser} to use.
*/
public void setServiceChooserRef(String serviceChooserRef) {
this.serviceChooserRef = serviceChooserRef;
}
public ServiceChooser getServiceChooser() {
return serviceChooser;
}
/**
* Sets a custom {@link ServiceChooser} to use.
*/
public void setServiceChooser(ServiceChooser serviceChooser) {
this.serviceChooser = serviceChooser;
}
public String getLoadBalancerRef() {
return loadBalancerRef;
}
/**
* Sets a reference to a custom {@link LoadBalancer} to use.
*/
public void setLoadBalancerRef(String loadBalancerRef) {
this.loadBalancerRef = loadBalancerRef;
}
public LoadBalancer getLoadBalancer() {
return loadBalancer;
}
/**
* Sets a custom {@link LoadBalancer} to use.
*/
public void setLoadBalancer(LoadBalancer loadBalancer) {
this.loadBalancer = loadBalancer;
}
public String getExpressionRef() {
return expressionRef;
}
/**
* Set a reference to a custom {@link Expression} to use.
*/
public void setExpressionRef(String expressionRef) {
this.expressionRef = expressionRef;
}
public Expression getExpression() {
return expression;
}
/**
* Set a custom {@link Expression} to use.
*/
public void setExpression(Expression expression) {
this.expression = expression;
}
public ServiceCallServiceDiscoveryConfiguration getServiceDiscoveryConfiguration() {
return serviceDiscoveryConfiguration;
}
/**
* Configures the ServiceDiscovery using the given configuration.
*/
public void setServiceDiscoveryConfiguration(ServiceCallServiceDiscoveryConfiguration serviceDiscoveryConfiguration) {
this.serviceDiscoveryConfiguration = serviceDiscoveryConfiguration;
}
public ServiceCallServiceFilterConfiguration getServiceFilterConfiguration() {
return serviceFilterConfiguration;
}
/**
* Configures the ServiceFilter using the given configuration.
*/
public void setServiceFilterConfiguration(ServiceCallServiceFilterConfiguration serviceFilterConfiguration) {
this.serviceFilterConfiguration = serviceFilterConfiguration;
}
public ServiceCallLoadBalancerConfiguration getLoadBalancerConfiguration() {
return loadBalancerConfiguration;
}
/**
* Configures the LoadBalancer using the given configuration.
*/
public void setLoadBalancerConfiguration(ServiceCallLoadBalancerConfiguration loadBalancerConfiguration) {
this.loadBalancerConfiguration = loadBalancerConfiguration;
}
public ServiceCallExpressionConfiguration getExpressionConfiguration() {
return expressionConfiguration;
}
/**
* Configures the Expression using the given configuration.
*/
public void setExpressionConfiguration(ServiceCallExpressionConfiguration expressionConfiguration) {
this.expressionConfiguration = expressionConfiguration;
}
// *****************************
// Fluent API
// *****************************
/**
* Sets the optional {@link ExchangePattern} used to invoke this endpoint
*/
public ServiceCallDefinition pattern(ExchangePattern pattern) {
setPattern(pattern);
return this;
}
/**
* Sets the name of the service to use
*/
public ServiceCallDefinition name(String name) {
setName(name);
return this;
}
/**
* Sets the uri of the service to use
*/
public ServiceCallDefinition uri(String uri) {
setUri(uri);
return this;
}
/**
* Sets the component to use
*/
public ServiceCallDefinition component(String component) {
setComponent(component);
return this;
}
/**
 * Refers to a ServiceCall configuration to use.
 *
 * @param ref the id of a {@code ServiceCallConfigurationDefinition} to look up
 * @return this definition, for fluent chaining
 */
public ServiceCallDefinition serviceCallConfiguration(String ref) {
    // Go through the setter for consistency with the other fluent methods
    // (previously this assigned the field directly).
    setConfigurationRef(ref);
    return this;
}
/**
* Sets a reference to a custom {@link ServiceDiscovery} to use.
*/
public ServiceCallDefinition serviceDiscovery(String serviceDiscoveryRef) {
setServiceDiscoveryRef(serviceDiscoveryRef);
return this;
}
/**
* Sets a custom {@link ServiceDiscovery} to use.
*/
public ServiceCallDefinition serviceDiscovery(ServiceDiscovery serviceDiscovery) {
setServiceDiscovery(serviceDiscovery);
return this;
}
/**
 * Sets a reference to a custom {@link ServiceFilter} to use.
 *
 * @param serviceFilterRef the registry id of the {@link ServiceFilter}
 * @return this definition, for fluent chaining
 */
public ServiceCallDefinition serviceFilter(String serviceFilterRef) {
    // Bug fix: this previously called setServiceDiscoveryRef(serviceDiscoveryRef),
    // silently overwriting the discovery ref and never storing the filter ref.
    setServiceFilterRef(serviceFilterRef);
    return this;
}
/**
* Sets a custom {@link ServiceFilter} to use.
*/
public ServiceCallDefinition serviceFilter(ServiceFilter serviceFilter) {
setServiceFilter(serviceFilter);
return this;
}
/**
* Sets a reference to a custom {@link ServiceChooser} to use.
*/
public ServiceCallDefinition serviceChooser(String serviceChooserRef) {
setServiceChooserRef(serviceChooserRef);
return this;
}
/**
* Sets a custom {@link ServiceChooser} to use.
*/
public ServiceCallDefinition serviceChooser(ServiceChooser serviceChooser) {
setServiceChooser(serviceChooser);
return this;
}
/**
* Sets a reference to a custom {@link LoadBalancer} to use.
*/
public ServiceCallDefinition loadBalancer(String loadBalancerRef) {
setLoadBalancerRef(loadBalancerRef);
return this;
}
/**
* Sets a custom {@link LoadBalancer} to use.
*/
public ServiceCallDefinition loadBalancer(LoadBalancer loadBalancer) {
setLoadBalancer(loadBalancer);
return this;
}
/**
 * Sets a reference to a custom {@link Expression} to use.
 *
 * @param expressionRef the registry id of the {@link Expression}
 * @return this definition, for fluent chaining
 */
public ServiceCallDefinition expression(String expressionRef) {
    // Bug fix: this previously passed the unrelated loadBalancerRef field
    // instead of the expressionRef parameter.
    setExpressionRef(expressionRef);
    return this;
}
/**
* Sets a custom {@link Expression} to use.
*/
public ServiceCallDefinition expression(Expression expression) {
setExpression(expression);
return this;
}
/**
* Configures the ServiceDiscovery using the given configuration.
*/
public ServiceCallDefinition serviceDiscoveryConfiguration(ServiceCallServiceDiscoveryConfiguration serviceDiscoveryConfiguration) {
setServiceDiscoveryConfiguration(serviceDiscoveryConfiguration);
return this;
}
/**
* Configures the ServiceFilter using the given configuration.
*/
public ServiceCallDefinition serviceFilterConfiguration(ServiceCallServiceFilterConfiguration serviceFilterConfiguration) {
setServiceFilterConfiguration(serviceFilterConfiguration);
return this;
}
/**
* Configures the LoadBalancer using the given configuration.
*/
public ServiceCallDefinition loadBalancerConfiguration(ServiceCallLoadBalancerConfiguration loadBalancerConfiguration) {
setLoadBalancerConfiguration(loadBalancerConfiguration);
return this;
}
/**
* Configures the Expression using the given configuration.
*/
public ServiceCallDefinition expressionConfiguration(ServiceCallExpressionConfiguration expressionConfiguration) {
setExpressionConfiguration(expressionConfiguration);
return this;
}
// *****************************
// Shortcuts - ServiceDiscovery
// *****************************
public CachingServiceCallServiceDiscoveryConfiguration cachingServiceDiscovery() {
CachingServiceCallServiceDiscoveryConfiguration conf = new CachingServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public ConsulServiceCallServiceDiscoveryConfiguration consulServiceDiscovery() {
ConsulServiceCallServiceDiscoveryConfiguration conf = new ConsulServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public DnsServiceCallServiceDiscoveryConfiguration dnsServiceDiscovery() {
DnsServiceCallServiceDiscoveryConfiguration conf = new DnsServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public ServiceCallDefinition dnsServiceDiscovery(String domain) {
DnsServiceCallServiceDiscoveryConfiguration conf = new DnsServiceCallServiceDiscoveryConfiguration(this);
conf.setDomain(domain);
setServiceDiscoveryConfiguration(conf);
return this;
}
public ServiceCallDefinition dnsServiceDiscovery(String domain, String protocol) {
DnsServiceCallServiceDiscoveryConfiguration conf = new DnsServiceCallServiceDiscoveryConfiguration(this);
conf.setDomain(domain);
conf.setProto(protocol);
setServiceDiscoveryConfiguration(conf);
return this;
}
public EtcdServiceCallServiceDiscoveryConfiguration etcdServiceDiscovery() {
EtcdServiceCallServiceDiscoveryConfiguration conf = new EtcdServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public KubernetesServiceCallServiceDiscoveryConfiguration kubernetesServiceDiscovery() {
KubernetesServiceCallServiceDiscoveryConfiguration conf = new KubernetesServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public KubernetesServiceCallServiceDiscoveryConfiguration kubernetesClientServiceDiscovery() {
KubernetesServiceCallServiceDiscoveryConfiguration conf = new KubernetesServiceCallServiceDiscoveryConfiguration(this);
conf.setLookup("client");
setServiceDiscoveryConfiguration(conf);
return conf;
}
public ServiceCallDefinition kubernetesEnvServiceDiscovery() {
KubernetesServiceCallServiceDiscoveryConfiguration conf = new KubernetesServiceCallServiceDiscoveryConfiguration(this);
conf.setLookup("environment");
setServiceDiscoveryConfiguration(conf);
return this;
}
public ServiceCallDefinition kubernetesDnsServiceDiscovery(String namespace, String domain) {
KubernetesServiceCallServiceDiscoveryConfiguration conf = new KubernetesServiceCallServiceDiscoveryConfiguration(this);
conf.setLookup("dns");
conf.setNamespace(namespace);
conf.setDnsDomain(domain);
setServiceDiscoveryConfiguration(conf);
return this;
}
public ChainedServiceCallServiceDiscoveryConfiguration multiServiceDiscovery() {
ChainedServiceCallServiceDiscoveryConfiguration conf = new ChainedServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
public StaticServiceCallServiceDiscoveryConfiguration staticServiceDiscovery() {
StaticServiceCallServiceDiscoveryConfiguration conf = new StaticServiceCallServiceDiscoveryConfiguration(this);
setServiceDiscoveryConfiguration(conf);
return conf;
}
// *****************************
// Shortcuts - ServiceFilter
// *****************************
public ServiceCallDefinition healthyFilter() {
HealthyServiceCallServiceFilterConfiguration conf = new HealthyServiceCallServiceFilterConfiguration(this);
setServiceFilterConfiguration(conf);
return this;
}
public ServiceCallDefinition passThroughFilter() {
PassThroughServiceCallServiceFilterConfiguration conf = new PassThroughServiceCallServiceFilterConfiguration(this);
setServiceFilterConfiguration(conf);
return this;
}
public ChainedServiceCallServiceFilterConfiguration multiFilter() {
ChainedServiceCallServiceFilterConfiguration conf = new ChainedServiceCallServiceFilterConfiguration(this);
setServiceFilterConfiguration(conf);
return conf;
}
public ServiceCallDefinition customFilter(String serviceFilter) {
CustomServiceCallServiceFilterConfiguration conf = new CustomServiceCallServiceFilterConfiguration();
conf.setServiceFilterRef(serviceFilter);
setServiceFilterConfiguration(conf);
return this;
}
public ServiceCallDefinition customFilter(ServiceFilter serviceFilter) {
CustomServiceCallServiceFilterConfiguration conf = new CustomServiceCallServiceFilterConfiguration();
conf.setServiceFilter(serviceFilter);
setServiceFilterConfiguration(conf);
return this;
}
// *****************************
// Shortcuts - LoadBalancer
// *****************************
public ServiceCallDefinition defaultLoadBalancer() {
DefaultServiceCallLoadBalancerConfiguration conf = new DefaultServiceCallLoadBalancerConfiguration();
setLoadBalancerConfiguration(conf);
return this;
}
public ServiceCallDefinition ribbonLoadBalancer() {
RibbonServiceCallLoadBalancerConfiguration conf = new RibbonServiceCallLoadBalancerConfiguration(this);
setLoadBalancerConfiguration(conf);
return this;
}
public ServiceCallDefinition ribbonLoadBalancer(String clientName) {
RibbonServiceCallLoadBalancerConfiguration conf = new RibbonServiceCallLoadBalancerConfiguration(this);
conf.setClientName(clientName);
setLoadBalancerConfiguration(conf);
return this;
}
// *****************************
// Processor Factory
// *****************************
/**
 * Builds the {@code DefaultServiceCallProcessor} for this EIP.
 * <p>
 * Resolution order for every collaborator (discovery, filter, chooser,
 * load balancer, expression) is: this definition first, then the resolved
 * {@code ServiceCallConfigurationDefinition}, then type-based registry lookup,
 * then a built-in default (see the individual retrieveXxx helpers).
 */
@Override
public Processor createProcessor(RouteContext routeContext) throws Exception {
final CamelContext camelContext = routeContext.getCamelContext();
final ServiceCallConfigurationDefinition config = retrieveConfig(camelContext);
ServiceDiscovery serviceDiscovery = retrieveServiceDiscovery(camelContext, config);
ServiceFilter serviceFilter = retrieveServiceFilter(camelContext, config);
ServiceChooser serviceChooser = retrieveServiceChooser(camelContext, config);
LoadBalancer loadBalancer = retrieveLoadBalancer(camelContext, config);
Expression expression = retrieveExpression(camelContext, config);
// The discovery/filter/chooser are only handed to the load balancer when it
// advertises the corresponding *Aware interface; otherwise they go unused here.
if (loadBalancer instanceof CamelContextAware) {
((CamelContextAware) loadBalancer).setCamelContext(camelContext);
}
if (loadBalancer instanceof ServiceDiscoveryAware) {
((ServiceDiscoveryAware) loadBalancer).setServiceDiscovery(serviceDiscovery);
}
if (loadBalancer instanceof ServiceFilterAware) {
((ServiceFilterAware) loadBalancer).setServiceFilter(serviceFilter);
}
if (loadBalancer instanceof ServiceChooserAware) {
((ServiceChooserAware) loadBalancer).setServiceChooser(serviceChooser);
}
// The component is used to configure what the default scheme to use (eg camel component name).
// The component configured on EIP takes precedence vs configured on configuration.
String component = this.component;
if (component == null) {
component = config != null ? config.getComponent() : null;
}
return new DefaultServiceCallProcessor(camelContext, name, component, uri, pattern, loadBalancer, expression);
}
// *****************************
// Helpers
// *****************************
/**
 * Resolves the ServiceCall configuration to use, or null when none can be found.
 * <p>
 * Order: explicit configurationRef (registry lookup, then the CamelContext's named
 * service-call configuration), then the context's default configuration, and finally
 * a registry scan that is only used when exactly one candidate exists (ambiguity is
 * treated as "no configuration").
 */
private ServiceCallConfigurationDefinition retrieveConfig(CamelContext camelContext) {
ServiceCallConfigurationDefinition config = null;
if (configurationRef != null) {
// lookup in registry first
config = CamelContextHelper.lookup(camelContext, configurationRef, ServiceCallConfigurationDefinition.class);
if (config == null) {
// and fallback as service configuration
config = camelContext.getServiceCallConfiguration(configurationRef);
}
}
if (config == null) {
config = camelContext.getServiceCallConfiguration(null);
}
if (config == null) {
// if there is no default then try to find a configuration of the given type in the registry
Set<ServiceCallConfigurationDefinition> set = camelContext.getRegistry().findByType(ServiceCallConfigurationDefinition.class);
if (set.size() == 1) {
config = set.iterator().next();
}
}
return config;
}
/**
 * Resolves the {@link ServiceDiscovery} to use, or null if nothing is available.
 * <p>
 * Order: this definition's configuration, the shared configuration's, then
 * instance/ref lookup on this definition and on the shared configuration, then a
 * unique registry hit, and finally the first well-known configuration (from
 * ServiceCallConstants) whose factory is available on the classpath.
 */
private ServiceDiscovery retrieveServiceDiscovery(CamelContext camelContext, ServiceCallConfigurationDefinition config) throws Exception {
ServiceDiscovery answer;
if (serviceDiscoveryConfiguration != null) {
answer = serviceDiscoveryConfiguration.newInstance(camelContext);
} else if (config != null && config.getServiceDiscoveryConfiguration() != null) {
answer = config.getServiceDiscoveryConfiguration().newInstance(camelContext);
} else {
answer = retrieve(ServiceDiscovery.class, camelContext, this::getServiceDiscovery, this::getServiceDiscoveryRef);
if (answer == null && config != null) {
answer = retrieve(ServiceDiscovery.class, camelContext, config::getServiceDiscovery, config::getServiceDiscoveryRef);
}
if (answer == null) {
answer = findByType(camelContext, ServiceDiscovery.class);
}
}
// If there's no configuration, let's try to find a suitable implementation
if (answer == null) {
for (ServiceCallServiceDiscoveryConfiguration configuration : ServiceCallConstants.SERVICE_DISCOVERY_CONFIGURATIONS) {
try {
answer = configuration.newInstance(camelContext);
if (answer != null) {
break;
}
} catch (NoFactoryAvailableException e) {
// skip: factory for this discovery type is not on the classpath
}
}
}
return answer;
}
/**
 * Resolves the {@link ServiceFilter} to use; never returns null.
 * <p>
 * Order: this definition's configuration, the shared configuration's, instance/ref
 * lookup (with well-known ref names "healthy"/"pass-through"/"passthrough" mapped
 * to built-in filters), a unique registry hit, and finally the healthy filter as
 * the default strategy.
 */
private ServiceFilter retrieveServiceFilter(CamelContext camelContext, ServiceCallConfigurationDefinition config) throws Exception {
ServiceFilter answer;
if (serviceFilterConfiguration != null) {
answer = serviceFilterConfiguration.newInstance(camelContext);
} else if (config != null && config.getServiceFilterConfiguration() != null) {
answer = config.getServiceFilterConfiguration().newInstance(camelContext);
} else {
answer = retrieve(ServiceFilter.class, camelContext, this::getServiceFilter, this::getServiceFilterRef);
if (answer == null && config != null) {
answer = retrieve(ServiceFilter.class, camelContext, config::getServiceFilter, config::getServiceFilterRef);
// If the ServiceFilter is not found but a ref is set, try to determine
// the implementation according to the ref name.
if (answer == null) {
String ref = config.getServiceFilterRef();
if (ObjectHelper.equal("healthy", ref, true)) {
answer = new HealthyServiceFilter();
} else if (ObjectHelper.equal("pass-through", ref, true)) {
answer = new PassThroughServiceFilter();
} else if (ObjectHelper.equal("passthrough", ref, true)) {
answer = new PassThroughServiceFilter();
}
}
}
}
if (answer == null) {
answer = findByType(camelContext, ServiceFilter.class);
}
// If there's no configuration, let's use the healthy strategy
if (answer == null) {
answer = new HealthyServiceFilter();
}
return answer;
}
/**
 * Resolves the {@link ServiceChooser} to use; never returns null.
 * <p>
 * Order: instance/ref lookup on this definition, then on the shared configuration
 * (with well-known ref names "roundrobin"/"round-robin"/"random" mapped to built-in
 * choosers), then a unique registry hit, and finally round-robin as the default.
 */
private ServiceChooser retrieveServiceChooser(CamelContext camelContext, ServiceCallConfigurationDefinition config) {
ServiceChooser answer = retrieve(ServiceChooser.class, camelContext, this::getServiceChooser, this::getServiceChooserRef);
if (answer == null && config != null) {
answer = retrieve(ServiceChooser.class, camelContext, config::getServiceChooser, config::getServiceChooserRef);
// If the ServiceChooser is not found but a ref is set, try to determine
// the implementation according to the ref name.
if (answer == null) {
String ref = config.getServiceChooserRef();
if (ObjectHelper.equal("roundrobin", ref, true)) {
answer = new RoundRobinServiceChooser();
} else if (ObjectHelper.equal("round-robin", ref, true)) {
answer = new RoundRobinServiceChooser();
} else if (ObjectHelper.equal("random", ref, true)) {
answer = new RandomServiceChooser();
}
}
}
if (answer == null) {
answer = findByType(camelContext, ServiceChooser.class);
}
// If there's no configuration, let's use the round-robin strategy
if (answer == null) {
answer = new RoundRobinServiceChooser();
}
return answer;
}
/**
 * Resolves the {@link LoadBalancer} to use; never returns null.
 * <p>
 * Order: this definition's configuration, the shared configuration's, instance/ref
 * lookup on either, a unique registry hit, the first well-known configuration
 * (ServiceCallConstants) whose factory is available, and finally DefaultLoadBalancer.
 */
private LoadBalancer retrieveLoadBalancer(CamelContext camelContext, ServiceCallConfigurationDefinition config) throws Exception {
LoadBalancer answer;
if (loadBalancerConfiguration != null) {
answer = loadBalancerConfiguration.newInstance(camelContext);
} else if (config != null && config.getLoadBalancerConfiguration() != null) {
answer = config.getLoadBalancerConfiguration().newInstance(camelContext);
} else {
answer = retrieve(LoadBalancer.class, camelContext, this::getLoadBalancer, this::getLoadBalancerRef);
if (answer == null && config != null) {
answer = retrieve(LoadBalancer.class, camelContext, config::getLoadBalancer, config::getLoadBalancerRef);
}
if (answer == null) {
answer = findByType(camelContext, LoadBalancer.class);
}
}
// If there's no configuration, let's try to find a suitable implementation
if (answer == null) {
for (ServiceCallLoadBalancerConfiguration configuration : ServiceCallConstants.LOAD_BALANCER_CONFIGURATIONS) {
try {
answer = configuration.newInstance(camelContext);
if (answer != null) {
break;
}
} catch (NoFactoryAvailableException e) {
// skip: factory for this load balancer type is not on the classpath
}
}
}
if (answer == null) {
answer = new DefaultLoadBalancer();
}
return answer;
}
/**
 * Resolves the {@link Expression} used to compute the target URI; never returns null.
 * <p>
 * Order: this definition's configuration, the shared configuration's, instance/ref
 * lookup on either, a unique registry hit, and finally DefaultServiceCallExpression.
 */
private Expression retrieveExpression(CamelContext camelContext, ServiceCallConfigurationDefinition config) throws Exception {
Expression answer;
if (expressionConfiguration != null) {
answer = expressionConfiguration.newInstance(camelContext);
} else if (config != null && config.getExpressionConfiguration() != null) {
answer = config.getExpressionConfiguration().newInstance(camelContext);
} else {
answer = retrieve(Expression.class, camelContext, this::getExpression, this::getExpressionRef);
if (answer == null && config != null) {
answer = retrieve(Expression.class, camelContext, config::getExpression, config::getExpressionRef);
}
if (answer == null) {
answer = findByType(camelContext, Expression.class);
}
}
if (answer == null) {
answer = new DefaultServiceCallExpression();
}
return answer;
}
/**
 * Resolves a collaborator of the given type, preferring a directly supplied
 * instance over a bean reference looked up in the Camel registry.
 *
 * @return the resolved instance, or null when neither supplier yields one
 */
private <T> T retrieve(Class<T> type, CamelContext camelContext, Supplier<T> instanceSupplier, Supplier<String> refSupplier) {
    if (instanceSupplier != null) {
        T instance = instanceSupplier.get();
        if (instance != null) {
            return instance;
        }
    }
    if (refSupplier != null) {
        String ref = refSupplier.get();
        if (ref != null) {
            return CamelContextHelper.lookup(camelContext, ref, type);
        }
    }
    return null;
}
/**
 * Looks up a bean of the given type in the Camel registry, using it only when
 * the match is unambiguous (exactly one candidate); otherwise returns null.
 */
private <T> T findByType(CamelContext camelContext, Class<T> type) {
    Set<T> candidates = camelContext.getRegistry().findByType(type);
    return candidates.size() == 1 ? candidates.iterator().next() : null;
}
}
| apache-2.0 |
stewartpark/presto | presto-main/src/test/java/com/facebook/presto/cost/StatsCalculatorAssertion.java | 4683 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.cost;
import com.facebook.presto.Session;
import com.facebook.presto.cost.ComposableStatsCalculator.Rule;
import com.facebook.presto.sql.planner.TypeProvider;
import com.facebook.presto.sql.planner.iterative.Lookup;
import com.facebook.presto.sql.planner.optimizations.PlanNodeSearcher;
import com.facebook.presto.sql.planner.plan.PlanNode;
import com.facebook.presto.sql.planner.plan.PlanNodeId;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;
import static com.facebook.presto.sql.planner.iterative.Lookup.noLookup;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static java.util.Objects.requireNonNull;
public class StatsCalculatorAssertion
{
private final StatsCalculator statsCalculator;
private final Session session;
private final PlanNode planNode;
private final TypeProvider types;
private Map<PlanNode, PlanNodeStatsEstimate> sourcesStats;
public StatsCalculatorAssertion(StatsCalculator statsCalculator, Session session, PlanNode planNode, TypeProvider types)
{
this.statsCalculator = requireNonNull(statsCalculator, "statsCalculator can not be null");
this.session = requireNonNull(session, "sesssion can not be null");
this.planNode = requireNonNull(planNode, "planNode is null");
this.types = requireNonNull(types, "types is null");
sourcesStats = new HashMap<>();
planNode.getSources().forEach(child -> sourcesStats.put(child, PlanNodeStatsEstimate.unknown()));
}
public StatsCalculatorAssertion withSourceStats(PlanNodeStatsEstimate sourceStats)
{
checkState(planNode.getSources().size() == 1, "expected single source");
return withSourceStats(0, sourceStats);
}
public StatsCalculatorAssertion withSourceStats(int sourceIndex, PlanNodeStatsEstimate sourceStats)
{
checkArgument(sourceIndex < planNode.getSources().size(), "invalid sourceIndex %s; planNode has %s sources", sourceIndex, planNode.getSources().size());
sourcesStats.put(planNode.getSources().get(sourceIndex), sourceStats);
return this;
}
public StatsCalculatorAssertion withSourceStats(PlanNodeId planNodeId, PlanNodeStatsEstimate sourceStats)
{
PlanNode sourceNode = PlanNodeSearcher.searchFrom(planNode).where(node -> node.getId().equals(planNodeId)).findOnlyElement();
sourcesStats.put(sourceNode, sourceStats);
return this;
}
public StatsCalculatorAssertion withSourceStats(Map<PlanNode, PlanNodeStatsEstimate> stats)
{
sourcesStats.putAll(stats);
return this;
}
public StatsCalculatorAssertion check(Consumer<PlanNodeStatsAssertion> statisticsAssertionConsumer)
{
PlanNodeStatsEstimate statsEstimate = statsCalculator.calculateStats(planNode, this::getSourceStats, noLookup(), session, types);
statisticsAssertionConsumer.accept(PlanNodeStatsAssertion.assertThat(statsEstimate));
return this;
}
public StatsCalculatorAssertion check(Rule<?> rule, Consumer<PlanNodeStatsAssertion> statisticsAssertionConsumer)
{
Optional<PlanNodeStatsEstimate> statsEstimate = calculatedStats(rule, planNode, this::getSourceStats, noLookup(), session, types);
checkState(statsEstimate.isPresent(), "Expected stats estimates to be present");
statisticsAssertionConsumer.accept(PlanNodeStatsAssertion.assertThat(statsEstimate.get()));
return this;
}
private static <T extends PlanNode> Optional<PlanNodeStatsEstimate> calculatedStats(Rule<T> rule, PlanNode node, StatsProvider sourceStats, Lookup lookup, Session session, TypeProvider types)
{
return rule.calculate((T) node, sourceStats, lookup, session, types);
}
private PlanNodeStatsEstimate getSourceStats(PlanNode sourceNode)
{
checkArgument(sourcesStats.containsKey(sourceNode), "stats not found for source %s", sourceNode);
return sourcesStats.get(sourceNode);
}
}
| apache-2.0 |
tonybaloney/st2contrib | packs/orion/actions/node_remanage.py | 1372 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from lib.actions import OrionBaseAction
class NodeRemanage(OrionBaseAction):
    def run(self, node):
        """Remanage a node in Orion NPM.

        :param node: the node to remanage (resolved via ``get_node``).
        :return: True on success, otherwise whatever the Invoke returned.
        :raises ValueError: if the node is not known to Orion NPM.
        """
        self.connect()

        target = self.get_node(node)
        if not target.npm:
            raise ValueError("Node not found")

        node_id = "N:{}".format(target.npm_id)
        result = self.invoke("Orion.Nodes", "Remanage", node_id)

        # The Remanage Invoke always returns None on success, so normalise
        # that to True; pass any other value through unchanged.
        return True if result is None else result
| apache-2.0 |
brix-cms/brix-cms | brix-core/src/main/java/org/brixcms/web/admin/AdminPanel.java | 5576 | /**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.brixcms.web.admin;
import java.util.ArrayList;
import java.util.List;
import org.apache.wicket.extensions.markup.html.tabs.TabbedPanel;
import org.apache.wicket.markup.html.IHeaderContributor;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.image.Image;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.html.panel.Panel;
import org.apache.wicket.model.IModel;
import org.brixcms.Brix;
import org.brixcms.Plugin;
import org.brixcms.auth.Action.Context;
import org.brixcms.web.admin.res.AdminPanelResources;
import org.brixcms.web.generic.BrixGenericPanel;
import org.brixcms.web.tab.BrixNavbarPanel;
import org.brixcms.web.tab.IBrixTab;
import org.brixcms.workspace.Workspace;
import org.brixcms.workspace.WorkspaceManager;
import org.brixcms.workspace.WorkspaceModel;
@SuppressWarnings("serial")
/**
 * Top-level Brix administration panel. Renders a tabbed UI whose tabs are
 * contributed by the installed {@link Plugin}s, scoped to the workspace held
 * in this panel's model; shows a "no workspaces" container when no workspace
 * is available.
 */
public class AdminPanel extends BrixGenericPanel<Workspace> implements IHeaderContributor {
    private TabbedPanel<IBrixTab> tabbedPanel;
    private final WebMarkupContainer container;
    private WebMarkupContainer noWorkspacesContainer;
    public AdminPanel(String id, String workspace) {
        super(id);
        setModel(new WorkspaceModel(workspace));
        add(new AdminPanelResources());
        // the main container is only shown while a workspace is selected
        add(container = new WebMarkupContainer("container") {
            @Override
            public boolean isVisible() {
                return AdminPanel.this.getModelObject() != null;
            }
        });
    }
    /**
     * Collects the workspaces visible for administration from all plugins,
     * always including the currently selected workspace (when there is one).
     */
    private List<Workspace> getWorkspaces() {
        Brix brix = getBrix();
        List<Workspace> workspaces = new ArrayList<Workspace>();
        Workspace current = getModelObject();
        for (Plugin p : brix.getPlugins()) {
            List<Workspace> filtered = brix.filterVisibleWorkspaces(
                p.getWorkspaces(current, false), Context.ADMINISTRATION);
            for (Workspace w : filtered) {
                workspaces.add(w);
            }
        }
        // Bug fix: guard against a null current workspace, which previously got
        // added to the list and could then be "selected" by fixCurrentWorkspace().
        // This matches the null handling in getAvailableWorkspaces().
        if (current != null && !workspaces.contains(current)) {
            workspaces.add(current);
        }
        return workspaces;
    }
    private Brix getBrix() {
        return Brix.get();
    }
    // NOTE(review): near-duplicate of getWorkspaces() (differs only in the null
    // guard, now shared) and not referenced within this class — candidate for
    // removal once confirmed unused.
    private List<Workspace> getAvailableWorkspaces() {
        Brix brix = Brix.get();
        List<Workspace> workspaces = new ArrayList<Workspace>();
        Workspace current = getModelObject();
        for (Plugin p : brix.getPlugins()) {
            List<Workspace> filtered = brix.filterVisibleWorkspaces(
                p.getWorkspaces(current, false), Context.ADMINISTRATION);
            for (Workspace w : filtered) {
                workspaces.add(w);
            }
        }
        if (!workspaces.contains(current) && current != null) {
            workspaces.add(current);
        }
        return workspaces;
    }
    @Override
    protected void onConfigure() {
        fixCurrentWorkspace();
        // lazily create the "no workspaces" container and the tabbed UI on first configure
        if (noWorkspacesContainer == null) {
            add(noWorkspacesContainer = newNoWorkspacesContainer("no-workspaces"));
        }
        noWorkspacesContainer.setVisible(!container.determineVisibility());
        if (tabbedPanel == null && container.determineVisibility()) {
            container.add(newWorkspaceSwitcher("switcher", getModel()));
            container.add(new Image("logo", AdminPanelResources.LOGO));
            setupTabbedPanel();
        }
        super.onConfigure();
    }
    /**
     * Replaces an invalid (missing or deleted) current workspace with the first
     * available one, if any.
     */
    private void fixCurrentWorkspace() {
        if (!isCurrentWorkspaceValid()) {
            List<Workspace> workspaces = getWorkspaces();
            if (!workspaces.isEmpty()) {
                setModelObject(workspaces.iterator().next());
            }
        }
    }
    private boolean isCurrentWorkspaceValid() {
        WorkspaceManager manager = getBrix().getWorkspaceManager();
        Workspace workspace = getModelObject();
        return workspace != null && manager.workspaceExists(workspace.getId());
    }
    /**
     * Factory method for a container that will display the "no workspaces found" message. This component usually
     * provides its own markup so it is best to use a {@link Panel} or a {@link Fragment}
     *
     * @param id
     * @return
     */
    private WebMarkupContainer newNoWorkspacesContainer(String id) {
        return new WebMarkupContainer(id);
    }
    protected Panel newWorkspaceSwitcher(String id, IModel<Workspace> workspaceModel) {
        return new WorkspaceSwitcher(id, workspaceModel);
    }
    /**
     * (Re)builds the tabbed panel from the tabs contributed by all plugins.
     */
    private void setupTabbedPanel() {
        if (tabbedPanel != null) {
            tabbedPanel.remove();
        }
        List<IBrixTab> tabs = new ArrayList<IBrixTab>();
        Brix brix = Brix.get();
        for (Plugin p : brix.getPlugins()) {
            List<IBrixTab> pluginTabs = p.newTabs(getModel());
            if (pluginTabs != null) {
                tabs.addAll(pluginTabs);
            }
        }
        tabbedPanel = new BrixNavbarPanel("tabbedPanel", tabs);
        container.add(tabbedPanel);
    }
}
| apache-2.0 |
xoofx/roslyn | src/VisualStudio/Core/Impl/Options/Style/NamingPreferences/NamingStyleOptionPageViewModel.cs | 10402 | using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Linq;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.CodeStyle;
using Microsoft.CodeAnalysis.Diagnostics.Analyzers.NamingStyles;
using Microsoft.VisualStudio.Imaging;
using Microsoft.VisualStudio.LanguageServices.Implementation.Options.Style.NamingPreferences;
using Microsoft.VisualStudio.LanguageServices.Implementation.Utilities;
namespace Microsoft.VisualStudio.LanguageServices.Implementation.Options.Style
{
/// <summary>
/// View model for the naming-style options page. Owns the ordered list of naming
/// rules (<see cref="CodeStyleItems"/>) together with the shared pools of symbol
/// specifications and naming styles that each rule selects from.
/// </summary>
internal class NamingStyleOptionPageViewModel : AbstractNotifyPropertyChanged
{
    public string ManageSpecificationsButtonText => ServicesVSResources.Manage_specifications;
    public string ManageStylesButtonText => ServicesVSResources.Manage_styles;

    // Fixed set of enforcement severities a rule may use, paired with their icons.
    private readonly NotificationOptionViewModel[] _notifications = new[]
    {
        new NotificationOptionViewModel(NotificationOption.None, KnownMonikers.None),
        new NotificationOptionViewModel(NotificationOption.Suggestion, KnownMonikers.StatusInformation),
        new NotificationOptionViewModel(NotificationOption.Warning, KnownMonikers.StatusWarning),
        new NotificationOptionViewModel(NotificationOption.Error, KnownMonikers.StatusError)
    };

    public ObservableCollection<NamingRuleViewModel> CodeStyleItems { get; set; }
    public ObservableCollection<SymbolSpecification> Specifications { get; set; }
    public ObservableCollection<NamingStyle> NamingStyles { get; set; }

    /// <summary>
    /// Builds one <see cref="NamingRuleViewModel"/> per serialized rule, resolving
    /// each rule's specification, style and severity by ID from the given info.
    /// </summary>
    public NamingStyleOptionPageViewModel(SerializableNamingStylePreferencesInfo info)
    {
        var viewModels = new List<NamingRuleViewModel>();
        foreach (var namingRule in info.NamingRules)
        {
            var viewModel = new NamingRuleViewModel();
            viewModel.NamingStyles = new ObservableCollection<NamingStyle>(info.NamingStyles);
            viewModel.Specifications = new ObservableCollection<SymbolSpecification>(info.SymbolSpecifications);
            viewModel.NotificationPreferences = new List<NotificationOptionViewModel>(_notifications);
            // Single() intentionally throws if a serialized rule references an unknown ID.
            viewModel.SelectedSpecification = viewModel.Specifications.Single(s => s.ID == namingRule.SymbolSpecificationID);
            viewModel.SelectedStyle = viewModel.NamingStyles.Single(s => s.ID == namingRule.NamingStyleID);
            viewModel.SelectedNotificationPreference = viewModel.NotificationPreferences.Single(n => n.Notification.Value == namingRule.EnforcementLevel);
            viewModels.Add(viewModel);
        }
        CodeStyleItems = new ObservableCollection<NamingRuleViewModel>(viewModels);
        Specifications = new ObservableCollection<SymbolSpecification>(info.SymbolSpecifications);
        NamingStyles = new ObservableCollection<NamingStyle>(info.NamingStyles);
        SetMoveArrowStatuses();
    }

    private int _selectedIndex;
    public int SelectedIndex
    {
        get
        {
            return _selectedIndex;
        }
        set
        {
            if (value == _selectedIndex)
            {
                return;
            }
            // NOTE(review): unlike the other properties in this file, this assigns
            // the backing field directly instead of calling SetProperty, so no
            // PropertyChanged is raised for SelectedIndex — confirm no binding
            // relies on that notification.
            _selectedIndex = value;
        }
    }

    internal void AddItem(NamingRuleViewModel namingRuleViewModel)
    {
        CodeStyleItems.Add(namingRuleViewModel);
        SetMoveArrowStatuses();
    }

    internal void RemoveItem(NamingRuleViewModel namingRuleViewModel)
    {
        CodeStyleItems.Remove(namingRuleViewModel);
        SetMoveArrowStatuses();
    }

    /// <summary>
    /// Replaces the specification pool with the dialog's edited list and re-points
    /// every rule's selection at the matching new instance (matched by ID).
    /// </summary>
    internal void UpdateSpecificationList(ManageSymbolSpecificationsDialogViewModel viewModel)
    {
        var symbolSpecifications = viewModel.Items.Cast<SymbolSpecificationViewModel>().Select(n => new SymbolSpecification(
            n.ID,
            n.ItemName,
            n.SymbolKindList.Where(s => s.IsChecked).Select(k => k.CreateSymbolKindOrTypeKind()).ToList(),
            n.AccessibilityList.Where(s => s.IsChecked).Select(a => new SymbolSpecification.AccessibilityKind(a._accessibility)).ToList(),
            n.ModifierList.Where(s => s.IsChecked).Select(m => new SymbolSpecification.ModifierKind(m._modifier)).ToList()));
        Specifications.Clear();
        foreach (var specification in symbolSpecifications)
        {
            Specifications.Add(specification);
        }
        // The existing rules have had their Specifications pulled out from underneath them, so
        // this goes through and resets them.
        foreach (var rule in CodeStyleItems)
        {
            var selectedSpecification = rule.SelectedSpecification;
            rule.Specifications.Clear();
            foreach (var specification in symbolSpecifications)
            {
                rule.Specifications.Add(specification);
            }
            // Set the SelectedSpecification to null and then back to the actual selected
            // specification to trigger the INotifyPropertyChanged event.
            rule.SelectedSpecification = null;
            if (selectedSpecification != null)
            {
                rule.SelectedSpecification = rule.Specifications.Single(s => s.ID == selectedSpecification.ID);
            }
        }
    }

    internal void MoveItem(int oldSelectedIndex, int newSelectedIndex)
    {
        CodeStyleItems.Move(oldSelectedIndex, newSelectedIndex);
        SetMoveArrowStatuses();
    }

    // First row cannot move up, last row cannot move down; all others move both ways.
    private void SetMoveArrowStatuses()
    {
        for (int i = 0; i < CodeStyleItems.Count; i++)
        {
            CodeStyleItems[i].CanMoveUp = true;
            CodeStyleItems[i].CanMoveDown = true;
            if (i == 0)
            {
                CodeStyleItems[i].CanMoveUp = false;
            }
            if (i == CodeStyleItems.Count - 1)
            {
                CodeStyleItems[i].CanMoveDown = false;
            }
        }
    }

    /// <summary>
    /// Replaces the naming-style pool with the dialog's edited list and re-points
    /// every rule's selected style at the matching new instance (matched by ID).
    /// </summary>
    internal void UpdateStyleList(ManageNamingStylesDialogViewModel viewModel)
    {
        var namingStyles = viewModel.Items.Cast<NamingStyleViewModel>().Select(n => new NamingStyle
        {
            ID = n.ID,
            Name = n.ItemName,
            Prefix = n.RequiredPrefix,
            Suffix = n.RequiredSuffix,
            WordSeparator = n.WordSeparator,
            CapitalizationScheme = n.CapitalizationSchemes[n.CapitalizationSchemeIndex].Capitalization
        });
        NamingStyles.Clear();
        foreach (var style in namingStyles)
        {
            NamingStyles.Add(style);
        }
        // The existing rules have had their Styles pulled out from underneath them, so
        // this goes through and resets them.
        foreach (var rule in CodeStyleItems)
        {
            var selectedStyle = rule.SelectedStyle;
            rule.NamingStyles.Clear();
            foreach (var style in namingStyles)
            {
                rule.NamingStyles.Add(style);
            }
            // Set the SelectedStyle to null and then back to the actual selected
            // style to trigger the INotifyPropertyChanged event.
            rule.SelectedStyle = null;
            if (selectedStyle != null)
            {
                rule.SelectedStyle = rule.NamingStyles.Single(n => n.ID == selectedStyle.ID);
            }
        }
    }

    /// <summary>
    /// One row on the options page: a (specification, style, severity) triple.
    /// </summary>
    internal class NamingRuleViewModel : AbstractNotifyPropertyChanged
    {
        public NamingRuleViewModel()
        {
            Specifications = new ObservableCollection<SymbolSpecification>();
            NamingStyles = new ObservableCollection<NamingStyle>();
            NotificationPreferences = new List<NotificationOptionViewModel>();
        }

        private SymbolSpecification _selectedSpecification;
        private NamingStyle _selectedNamingStyle;
        private NotificationOptionViewModel _selectedNotification;

        public ObservableCollection<SymbolSpecification> Specifications { get; set; }
        public ObservableCollection<NamingStyle> NamingStyles { get; set; }
        public IEnumerable<NotificationOptionViewModel> NotificationPreferences { get; set; }

        public SymbolSpecification SelectedSpecification
        {
            get
            {
                return _selectedSpecification;
            }
            set
            {
                SetProperty(ref _selectedSpecification, value);
            }
        }

        public NamingStyle SelectedStyle
        {
            get
            {
                return _selectedNamingStyle;
            }
            set
            {
                SetProperty(ref _selectedNamingStyle, value);
            }
        }

        public NotificationOptionViewModel SelectedNotificationPreference
        {
            get
            {
                return _selectedNotification;
            }
            set
            {
                SetProperty(ref _selectedNotification, value);
            }
        }

        private bool _canMoveUp;
        public bool CanMoveUp
        {
            get
            {
                return _canMoveUp;
            }
            set
            {
                SetProperty(ref _canMoveUp, value);
            }
        }

        private bool _canMoveDown;
        public bool CanMoveDown
        {
            get
            {
                return _canMoveDown;
            }
            set
            {
                SetProperty(ref _canMoveDown, value);
            }
        }

        // A rule is usable only once all three selections have been made.
        public bool IsComplete()
        {
            return SelectedSpecification != null && SelectedStyle != null && SelectedNotificationPreference != null;
        }
    }
}
}
| apache-2.0 |
Cyan3/oodt | catalog/src/main/java/org/apache/oodt/cas/catalog/server/channel/rmi/RmiCommunicationChannelClientInterface.java | 4565 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.cas.catalog.server.channel.rmi;
//JDK imports
import java.rmi.Remote;
import java.rmi.RemoteException;
import java.util.List;
import java.util.Properties;
import java.util.Set;
//OODT imports
import org.apache.oodt.cas.catalog.mapping.IngestMapper;
import org.apache.oodt.cas.catalog.metadata.TransactionalMetadata;
import org.apache.oodt.cas.catalog.page.QueryPager;
import org.apache.oodt.cas.catalog.query.QueryExpression;
import org.apache.oodt.cas.catalog.repository.CatalogRepository;
import org.apache.oodt.cas.catalog.struct.TransactionId;
import org.apache.oodt.cas.catalog.system.Catalog;
import org.apache.oodt.cas.metadata.Metadata;
/**
*
* @author bfoster
* @version $Revision$
*
*/
public interface RmiCommunicationChannelClientInterface extends Remote {

    // NOTE(review): the RMI specification requires every method of a
    // java.rmi.Remote interface to declare java.rmi.RemoteException, yet
    // setCatalogRepository declares no exception and getCatalogRepository
    // declares the broader Exception — confirm these two methods behave as
    // intended when this interface is exported over RMI.

    /** Installs the repository backing this catalog service. */
    public void setCatalogRepository(CatalogRepository catalogRepository);

    /** Returns the repository backing this catalog service. */
    public CatalogRepository getCatalogRepository() throws Exception;

    /** Returns the mapper used to track ingested transaction ids per catalog. */
    public IngestMapper getIngestMapper() throws RemoteException;

    public void setIngestMapper(IngestMapper ingestMapper) throws RemoteException;

    // ---- permission flags -------------------------------------------------

    public boolean isRestrictQueryPermissions() throws RemoteException;

    public void setRestrictQueryPermissions(boolean restrictQueryPermissions) throws RemoteException;

    public boolean isHasIngestPermissions() throws RemoteException;

    public void setHasIngestPermissions(boolean restrictIngestPermissions) throws RemoteException;

    public Class<? extends TransactionId<?>> getTransactionIdClass() throws RemoteException;

    public void setTransactionIdClass(Class<? extends TransactionId<?>> transactionIdClass) throws RemoteException;

    // ---- catalog management -----------------------------------------------

    public void addCatalog(Catalog catalog) throws RemoteException;

    /** As {@link #addCatalog(Catalog)}, optionally replacing an existing catalog. */
    public void addCatalog(Catalog catalog, boolean allowOverride) throws RemoteException;

    public void removeCatalog(String catalogUrn) throws RemoteException;

    /** Removes a catalog, optionally preserving its ingest mapping. */
    public void removeCatalog(String catalogUrn, boolean preserveMapping) throws RemoteException;

    public Set<Catalog> getCurrentCatalogList() throws RemoteException;

    public Catalog getCatalog(String catalogUrn) throws RemoteException;

    public Set<String> getCurrentCatalogIds() throws RemoteException;

    // ---- ingest / delete / properties -------------------------------------

    public TransactionId<?> ingest(Metadata metadata) throws RemoteException;

    public void delete(Metadata metadata) throws RemoteException;

    public List<String> getProperty(String key) throws RemoteException;

    // NOTE(review): "Calalog" looks like a typo for "Catalog"; left unchanged
    // because renaming an interface method would break implementers and callers.
    public Properties getCalalogProperties() throws RemoteException;

    public Properties getCalalogProperties(String catalogUrn) throws RemoteException;

    // ---- query / paging ----------------------------------------------------

    public QueryPager query(QueryExpression queryExpression) throws RemoteException;

    public QueryPager query(QueryExpression queryExpression, boolean sortResults) throws RemoteException;

    public Set<TransactionalMetadata> getNextPage(QueryPager queryPager) throws RemoteException;

    public Set<TransactionId<?>> getTransactionIdsForAllPages(QueryPager queryPager) throws RemoteException;

    public Set<TransactionalMetadata> getAllPages(QueryPager queryPager) throws RemoteException;

    // ---- transaction id translation ----------------------------------------

    public Set<TransactionalMetadata> getMetadataFromTransactionIdStrings(List<String> catalogServiceTransactionIdStrings) throws RemoteException;

    public Set<TransactionalMetadata> getMetadataFromTransactionIds(List<TransactionId<?>> catalogServiceTransactionIds) throws RemoteException;

    public Set<TransactionId<?>> getCatalogServiceTransactionIds(List<TransactionId<?>> catalogTransactionIds, String catalogUrn) throws RemoteException;

    public TransactionId<?> getCatalogServiceTransactionId(TransactionId<?> catalogTransactionId, String catalogUrn) throws RemoteException;

    /** As above, optionally generating a new service-level id when none is mapped. */
    public TransactionId<?> getCatalogServiceTransactionId(TransactionId<?> catalogTransactionId, String catalogUrn, boolean generateNew) throws RemoteException;
| apache-2.0 |
meggermo/jackrabbit-oak | oak-segment-tar/src/test/java/org/apache/jackrabbit/oak/segment/SegmentParserTest.java | 17820 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.segment;
import static com.google.common.base.Strings.repeat;
import static com.google.common.collect.Lists.newArrayListWithCapacity;
import static com.google.common.collect.Maps.newHashMap;
import static junitx.framework.ComparableAssert.assertEquals;
import static org.apache.jackrabbit.oak.api.Type.BINARY;
import static org.apache.jackrabbit.oak.api.Type.LONGS;
import static org.apache.jackrabbit.oak.api.Type.NAME;
import static org.apache.jackrabbit.oak.api.Type.NAMES;
import static org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState.EMPTY_NODE;
import static org.apache.jackrabbit.oak.segment.Segment.MEDIUM_LIMIT;
import static org.apache.jackrabbit.oak.segment.Segment.SMALL_LIMIT;
import static org.apache.jackrabbit.oak.segment.SegmentParser.BlobType.LONG;
import static org.apache.jackrabbit.oak.segment.SegmentParser.BlobType.MEDIUM;
import static org.apache.jackrabbit.oak.segment.SegmentParser.BlobType.SMALL;
import static org.apache.jackrabbit.oak.segment.SegmentWriterBuilder.segmentWriterBuilder;
import static org.apache.jackrabbit.oak.segment.TestUtils.newRecordId;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
import com.google.common.collect.ImmutableList;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.memory.ArrayBasedBlob;
import org.apache.jackrabbit.oak.segment.SegmentParser.BlobInfo;
import org.apache.jackrabbit.oak.segment.SegmentParser.ListInfo;
import org.apache.jackrabbit.oak.segment.SegmentParser.MapInfo;
import org.apache.jackrabbit.oak.segment.SegmentParser.NodeInfo;
import org.apache.jackrabbit.oak.segment.SegmentParser.ValueInfo;
import org.apache.jackrabbit.oak.segment.memory.MemoryStore;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
/**
 * Tests for {@link SegmentParser}: every test writes a record through
 * {@link SegmentWriter}, re-parses it, and checks the reported record ids,
 * counts and types. The {@link TestParser} helper fails fast on any parser
 * callback a test did not explicitly whitelist, so unexpected record traversal
 * shows up as a test failure rather than passing silently.
 */
public class SegmentParserTest {
    private MemoryStore store;
    private SegmentWriter writer;

    /**
     * Parser whose callbacks all throw {@link UnsupportedOperationException};
     * each test overrides exactly the callbacks it expects to be invoked.
     */
    private static class TestParser extends SegmentParser {
        private final String name;

        private TestParser(SegmentReader reader, String name) {
            super(reader);
            this.name = name;
        }

        private void throwUOE(String method) {
            throw new UnsupportedOperationException(name + " must not call " + method);
        }

        @Override
        protected void onNode(RecordId parentId, RecordId nodeId) {
            throwUOE("onNode");
        }

        @Override
        protected void onTemplate(RecordId parentId, RecordId templateId) {
            throwUOE("onTemplate");
        }

        @Override
        protected void onMap(RecordId parentId, RecordId mapId, MapRecord map) {
            throwUOE("onMap");
        }

        @Override
        protected void onMapDiff(RecordId parentId, RecordId mapId, MapRecord map) {
            throwUOE("onMapDiff");
        }

        @Override
        protected void onMapLeaf(RecordId parentId, RecordId mapId, MapRecord map) {
            throwUOE("onMapLeaf");
        }

        @Override
        protected void onMapBranch(RecordId parentId, RecordId mapId, MapRecord map) {
            throwUOE("onMapBranch");
        }

        @Override
        protected void onProperty(RecordId parentId, RecordId propertyId, PropertyTemplate template) {
            throwUOE("onProperty");
        }

        @Override
        protected void onValue(RecordId parentId, RecordId valueId, Type<?> type) {
            throwUOE("onValue");
        }

        @Override
        protected void onBlob(RecordId parentId, RecordId blobId) {
            throwUOE("onBlob");
        }

        @Override
        protected void onString(RecordId parentId, RecordId stringId) {
            throwUOE("onString");
        }

        @Override
        protected void onList(RecordId parentId, RecordId listId, int count) {
            throwUOE("onList");
        }

        @Override
        protected void onListBucket(RecordId parentId, RecordId listId, int index, int count, int capacity) {
            throwUOE("onListBucket");
        }
    }

    @Before
    public void setup() throws IOException {
        store = new MemoryStore();
        writer = segmentWriterBuilder("").build(store);
    }

    @Test
    public void emptyNode() throws IOException {
        SegmentNodeState node = writer.writeNode(EMPTY_NODE);
        NodeInfo info = new TestParser(store.getReader(), "emptyNode") {
            @Override protected void onTemplate(RecordId parentId, RecordId templateId) { }
        }.parseNode(node.getRecordId());
        assertEquals(node.getRecordId(), info.nodeId);
        assertEquals(0, info.nodeCount);
        assertEquals(0, info.propertyCount);
        assertEquals(info.nodeId.toString10(), info.stableId);
    }

    @Test
    public void singleChildNode() throws IOException {
        NodeBuilder builder = EMPTY_NODE.builder();
        builder.setChildNode("child");
        SegmentNodeState node = writer.writeNode(builder.getNodeState());
        NodeInfo info = new TestParser(store.getReader(), "singleChildNode") {
            @Override protected void onNode(RecordId parentId, RecordId nodeId) { }
            @Override protected void onTemplate(RecordId parentId, RecordId templateId) { }
        }.parseNode(node.getRecordId());
        assertEquals(node.getRecordId(), info.nodeId);
        assertEquals(1, info.nodeCount);
        assertEquals(0, info.propertyCount);
        assertEquals(info.nodeId.toString10(), info.stableId);
    }

    @Test
    public void node() throws IOException {
        final NodeBuilder builder = EMPTY_NODE.builder();
        builder.setChildNode("one");
        builder.setChildNode("two");
        builder.setProperty("three", 42);
        SegmentNodeState node = writer.writeNode(builder.getNodeState());
        NodeInfo info = new TestParser(store.getReader(), "node") {
            @Override protected void onNode(RecordId parentId, RecordId nodeId) { }
            @Override protected void onTemplate(RecordId parentId, RecordId templateId) { }
            @Override protected void onMap(RecordId parentId, RecordId mapId, MapRecord map) { }
            @Override protected void onProperty(RecordId parentId, RecordId propertyId, PropertyTemplate template) { }
            @Override protected void onList(RecordId parentId, RecordId listId, int count) { }
        }.parseNode(node.getRecordId());
        assertEquals(node.getRecordId(), info.nodeId);
        assertEquals(2, info.nodeCount);
        assertEquals(1, info.propertyCount);
        assertEquals(info.nodeId.toString10(), info.stableId);
    }

    @Test
    public void template() throws IOException {
        NodeBuilder builder = EMPTY_NODE.builder();
        builder.setChildNode("n");
        builder.setProperty("p", 1);
        builder.setProperty("jcr:primaryType", "type", NAME);
        builder.setProperty("jcr:mixinTypes", ImmutableList.of("type1", "type2"), NAMES);
        SegmentNodeState node = writer.writeNode(builder.getNodeState());
        // The parse result is irrelevant here (formerly assigned to an unused
        // local); all assertions run inside the onTemplate callback.
        new TestParser(store.getReader(), "template") {
            @Override
            protected void onTemplate(RecordId parentId, RecordId templateId) {
                TemplateInfo info = parseTemplate(templateId);
                assertEquals(templateId, info.templateId);
                assertTrue(info.hasPrimaryType);
                assertTrue(info.hasMixinType);
                assertFalse(info.zeroChildNodes);
                assertFalse(info.manyChildNodes);
                assertEquals(2, info.mixinCount);
                assertEquals(1, info.propertyCount);
            }
            @Override protected void onString(RecordId parentId, RecordId stringId) { }
            @Override protected void onNode(RecordId parentId, RecordId nodeId) { }
            @Override protected void onProperty(RecordId parentId, RecordId propertyId, PropertyTemplate template) { }
            @Override protected void onList(RecordId parentId, RecordId listId, int count) { }
        }.parseNode(node.getRecordId());
    }

    @Test
    public void emptyMap() throws IOException {
        Map<String, RecordId> empty = newHashMap();
        MapRecord map = writer.writeMap(null, empty);
        MapInfo mapInfo = new TestParser(store.getReader(), "emptyMap") {
            @Override protected void onMapLeaf(RecordId parentId, RecordId mapId, MapRecord map) { }
        }.parseMap(null, map.getRecordId(), map);
        assertEquals(map.getRecordId(), mapInfo.mapId);
    }

    @Test
    public void nonEmptyMap() throws IOException {
        Random rnd = new Random();
        // 33 base entries plus a 1-entry diff forces branch/leaf/diff traversal.
        MapRecord base = writer.writeMap(null, createMap(33, rnd));
        MapRecord map = writer.writeMap(base, createMap(1, rnd));
        MapInfo mapInfo = new TestParser(store.getReader(), "nonEmptyMap") {
            @Override
            protected void onMapDiff(RecordId parentId, RecordId mapId, MapRecord map) {
                MapInfo mapInfo = parseMapDiff(mapId, map);
                assertEquals(mapId, mapInfo.mapId);
            }
            @Override
            protected void onMap(RecordId parentId, RecordId mapId, MapRecord map) {
                MapInfo mapInfo = parseMap(parentId, mapId, map);
                assertEquals(mapId, mapInfo.mapId);
            }
            @Override
            protected void onMapBranch(RecordId parentId, RecordId mapId, MapRecord map) {
                MapInfo mapInfo = parseMapBranch(mapId, map);
                assertEquals(mapId, mapInfo.mapId);
            }
            @Override
            protected void onMapLeaf(RecordId parentId, RecordId mapId, MapRecord map) {
                MapInfo mapInfo = parseMapLeaf(mapId, map);
                assertEquals(mapId, mapInfo.mapId);
            }
            @Override protected void onString(RecordId parentId, RecordId stringId) { }
        }.parseMap(null, map.getRecordId(), map);
        assertEquals(map.getRecordId(), mapInfo.mapId);
    }

    /** Creates a map of {@code size} random string records keyed k0..k(size-1). */
    private Map<String, RecordId> createMap(int size, Random rnd) throws IOException {
        Map<String, RecordId> map = newHashMap();
        for (int k = 0; k < size; k++) {
            map.put("k" + k, writer.writeString("string" + rnd.nextLong()));
        }
        return map;
    }

    @Test
    public void singleValueProperty() throws IOException {
        NodeBuilder builder = EMPTY_NODE.builder();
        builder.setProperty("p", 1);
        SegmentNodeState node = writer.writeNode(builder.getNodeState());
        // Single-valued properties report count == -1; assertions run in the
        // callback, so the parse result (formerly an unused local) is dropped.
        new TestParser(store.getReader(), "singleValueProperty") {
            @Override
            protected void onProperty(RecordId parentId, RecordId propertyId, PropertyTemplate template) {
                PropertyInfo propertyInfo = parseProperty(parentId, propertyId, template);
                assertEquals(propertyId, propertyInfo.propertyId);
                assertEquals(-1, propertyInfo.count);
            }
            @Override protected void onTemplate(RecordId parentId, RecordId templateId) { }
            @Override protected void onValue(RecordId parentId, RecordId valueId, Type<?> type) { }
            @Override protected void onList(RecordId parentId, RecordId listId, int count) { }
        }.parseNode(node.getRecordId());
    }

    @Test
    public void multiValueProperty() throws IOException {
        NodeBuilder builder = EMPTY_NODE.builder();
        builder.setProperty("p", ImmutableList.of(1L, 2L, 3L, 4L), LONGS);
        SegmentNodeState node = writer.writeNode(builder.getNodeState());
        // Multi-valued properties report their cardinality; assertions run in
        // the callback, so the parse result (formerly an unused local) is dropped.
        new TestParser(store.getReader(), "multiValueProperty") {
            @Override
            protected void onProperty(RecordId parentId, RecordId propertyId, PropertyTemplate template) {
                PropertyInfo propertyInfo = parseProperty(parentId, propertyId, template);
                assertEquals(propertyId, propertyInfo.propertyId);
                assertEquals(4, propertyInfo.count);
            }
            @Override protected void onTemplate(RecordId parentId, RecordId templateId) { }
            @Override protected void onValue(RecordId parentId, RecordId valueId, Type<?> type) { }
            @Override protected void onList(RecordId parentId, RecordId listId, int count) { }
        }.parseNode(node.getRecordId());
    }

    @Test
    public void smallBlob() throws IOException {
        SegmentBlob blob = writer.writeBlob(createRandomBlob(4));
        ValueInfo valueInfo = new TestParser(store.getReader(), "smallBlob") {
            @Override
            protected void onBlob(RecordId parentId, RecordId blobId) {
                BlobInfo blobInfo = parseBlob(blobId);
                assertEquals(blobId, blobInfo.blobId);
                assertEquals(SMALL, blobInfo.blobType);
            }
        }.parseValue(null, blob.getRecordId(), BINARY);
        assertEquals(blob.getRecordId(), valueInfo.valueId);
        assertEquals(BINARY, valueInfo.type);
    }

    @Test
    public void mediumBlob() throws IOException {
        // SMALL_LIMIT bytes is the smallest blob stored as a medium record.
        SegmentBlob blob = writer.writeBlob(createRandomBlob(SMALL_LIMIT));
        ValueInfo valueInfo = new TestParser(store.getReader(), "mediumBlob") {
            @Override
            protected void onBlob(RecordId parentId, RecordId blobId) {
                BlobInfo blobInfo = parseBlob(blobId);
                assertEquals(blobId, blobInfo.blobId);
                assertEquals(MEDIUM, blobInfo.blobType);
            }
        }.parseValue(null, blob.getRecordId(), BINARY);
        assertEquals(blob.getRecordId(), valueInfo.valueId);
        assertEquals(BINARY, valueInfo.type);
    }

    @Test
    public void longBlob() throws IOException {
        // MEDIUM_LIMIT bytes is the smallest blob stored as a long record,
        // which is backed by a list (hence onList is whitelisted).
        SegmentBlob blob = writer.writeBlob(createRandomBlob(MEDIUM_LIMIT));
        ValueInfo valueInfo = new TestParser(store.getReader(), "longBlob") {
            @Override
            protected void onBlob(RecordId parentId, RecordId blobId) {
                BlobInfo blobInfo = parseBlob(blobId);
                assertEquals(blobId, blobInfo.blobId);
                assertEquals(LONG, blobInfo.blobType);
            }
            @Override protected void onList(RecordId parentId, RecordId listId, int count) { }
        }.parseValue(null, blob.getRecordId(), BINARY);
        assertEquals(blob.getRecordId(), valueInfo.valueId);
        assertEquals(BINARY, valueInfo.type);
    }

    /** Returns an in-memory blob of {@code size} random bytes. */
    private static Blob createRandomBlob(int size) {
        byte[] bytes = new byte[size];
        new Random().nextBytes(bytes);
        return new ArrayBasedBlob(bytes);
    }

    @Test
    public void shortString() throws IOException {
        RecordId stringId = writer.writeString("short");
        BlobInfo blobInfo = new TestParser(store.getReader(), "shortString").parseString(stringId);
        assertEquals(stringId, blobInfo.blobId);
        assertEquals(SMALL, blobInfo.blobType);
    }

    @Test
    public void mediumString() throws IOException {
        RecordId stringId = writer.writeString(repeat("s", SMALL_LIMIT));
        BlobInfo blobInfo = new TestParser(store.getReader(), "mediumString").parseString(stringId);
        assertEquals(stringId, blobInfo.blobId);
        assertEquals(MEDIUM, blobInfo.blobType);
    }

    @Test
    public void longString() throws IOException {
        RecordId stringId = writer.writeString(repeat("s", MEDIUM_LIMIT));
        BlobInfo blobInfo = new TestParser(store.getReader(), "longString") {
            @Override protected void onList(RecordId parentId, RecordId listId, int count) { }
        }.parseString(stringId);
        assertEquals(stringId, blobInfo.blobId);
        assertEquals(LONG, blobInfo.blobType);
    }

    @Test
    public void emptyList() {
        // An empty list has no backing record, so any id works for parsing.
        RecordId listId = newRecordId(store, new Random());
        ListInfo listInfo = new TestParser(store.getReader(), "emptyList").parseList(null, listId, 0);
        assertEquals(listId, listInfo.listId);
        assertEquals(0, listInfo.count);
    }

    @Test
    public void nonEmptyList() throws IOException {
        // Large enough to force multiple list buckets.
        int count = 100000;
        Random rnd = new Random();
        List<RecordId> list = newArrayListWithCapacity(count);
        for (int k = 0; k < count; k++) {
            list.add(writer.writeString("string " + rnd.nextLong()));
        }
        RecordId listId = writer.writeList(list);
        ListInfo listInfo = new TestParser(store.getReader(), "nonEmptyList") {
            @Override
            protected void onListBucket(RecordId parentId, RecordId listId, int index, int count, int capacity) {
                parseListBucket(listId, index, count, capacity);
            }
        }.parseList(null, listId, count);
        assertEquals(listId, listInfo.listId);
        assertEquals(count, listInfo.count);
    }
}
| apache-2.0 |
gemxd/gemfirexd-oss | gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/impl/sql/compile/ResultColumn.java | 60459 | /*
Derby - Class com.pivotal.gemfirexd.internal.impl.sql.compile.ResultColumn
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/*
* Changes for GemFireXD distributed data platform (some marked by "GemStone changes")
*
* Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.impl.sql.compile;
import java.util.Vector;
// GemStone changes BEGIN
import com.pivotal.gemfirexd.internal.engine.distributed.utils.GemFireXDUtils;
import com.pivotal.gemfirexd.internal.engine.sql.catalog.ExtraTableInfo;
import com.pivotal.gemfirexd.internal.engine.sql.compile.CollectExpressionOperandsVisitor;
import com.pivotal.gemfirexd.internal.engine.store.GemFireContainer;
// GemStone changes END
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.jdbc.EngineConnection;
import com.pivotal.gemfirexd.internal.iapi.reference.ClassName;
import com.pivotal.gemfirexd.internal.iapi.reference.SQLState;
import com.pivotal.gemfirexd.internal.iapi.services.compiler.MethodBuilder;
import com.pivotal.gemfirexd.internal.iapi.services.sanity.SanityManager;
import com.pivotal.gemfirexd.internal.iapi.sql.ResultColumnDescriptor;
import com.pivotal.gemfirexd.internal.iapi.sql.compile.C_NodeTypes;
import com.pivotal.gemfirexd.internal.iapi.sql.compile.Visitable;
import com.pivotal.gemfirexd.internal.iapi.sql.compile.Visitor;
import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.ColumnDescriptor;
import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.ReferencedKeyConstraintDescriptor;
import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.TableDescriptor;
import com.pivotal.gemfirexd.internal.iapi.store.access.Qualifier;
import com.pivotal.gemfirexd.internal.iapi.types.DataTypeDescriptor;
import com.pivotal.gemfirexd.internal.iapi.types.DataValueDescriptor;
import com.pivotal.gemfirexd.internal.iapi.types.DataValueFactory;
import com.pivotal.gemfirexd.internal.iapi.types.StringDataValue;
import com.pivotal.gemfirexd.internal.iapi.types.TypeId;
import com.pivotal.gemfirexd.internal.iapi.util.StringUtil;
import com.pivotal.gemfirexd.internal.shared.common.StoredFormatIds;
/**
* A ResultColumn represents a result column in a SELECT, INSERT, or UPDATE
* statement. In a SELECT statement, the result column just represents an
* expression in a row being returned to the client. For INSERT and UPDATE
* statements, the result column represents an column in a stored table.
* So, a ResultColumn has to be bound differently depending on the type of
* statement it appears in.
* <P>
* The type of the ResultColumn can differ from its underlying expression,
* for example in certain joins the ResultColumn can be nullable even if
* its underlying column is not. In an INSERT or UPDATE the ResultColumn
* will represent the type of the column in the table, the type of
 * the underlying expression will be the type of the source of the
 * value to be inserted or updated. The method columnTypeAndLengthMatch()
 * can be used to detect when normalization is required between
 * the expression and the type of ResultColumn. This class does
* not implement any type normalization (conversion), this is
* typically handled by a NormalizeResultSetNode.
*
*/
public class ResultColumn extends ValueNode
implements ResultColumnDescriptor, Comparable
{
/* name and exposedName should point to the same string, unless there is a
* derived column list, in which case name will point to the underlying name
* and exposedName will point to the name from the derived column list.
*/
String name;
String exposedName;
String tableName; // table name exposed via getTableName(), if explicitly set
String sourceTableName; // underlying base table name (from the ColumnReference)
//Used by metadata api ResultSetMetaData.getSchemaName to get a column's table's schema.
String sourceSchemaName;
ValueNode expression; // the expression this result column represents
ColumnDescriptor columnDescriptor; // catalog descriptor, set when bound to a table column
boolean isGenerated; // true if this RC was generated by the compiler (implies referenced)
boolean isGeneratedForUnmatchedColumnInInsert; // generated for a column missing from an INSERT column list
boolean isGroupingColumn; // true if this RC is a GROUP BY column in the SELECT list
boolean isReferenced; // true if some other node refers to this RC
boolean isRedundant; // true if bind processing found this RC to be a duplicate
boolean isNameGenerated; // true if exposedName was generated (e.g. "SQLCol7")
boolean updated; // true if an UPDATE statement updates this column
boolean updatableByCursor; // true if this column belongs to a cursor's FOR UPDATE list
//GemStone changes BEGIN
boolean isExpanded;
//GemStone changes END
private boolean defaultColumn;
// tells us if this ResultColumn is a placeholder for a generated
// autoincrement value for an insert statement.
boolean autoincrementGenerated;
// tells us if this ResultColumn represents an autoincrement column in a
// base table.
boolean autoincrement;
/* ResultSetNumber for the ResultSet (at generate() time) that we belong to */
private int resultSetNumber = -1;
ColumnReference reference; // used to verify quals at bind time, if given.
/* virtualColumnId is the ResultColumn's position (1-based) within the ResultSet */
private int virtualColumnId;
/**
 * Initialize this ResultColumn. What the two arguments mean depends on
 * the runtime type of the first:
 *
 * <ul>
 * <li>String (or null): the column name; arg2 is the expression</li>
 * <li>ColumnReference: supplies the name; arg2 is the expression</li>
 * <li>ColumnDescriptor: supplies name, type and autoincrement flag;
 *     arg2 is the expression</li>
 * <li>DataTypeDescriptor: the column's type; arg2 is the expression</li>
 * </ul>
 *
 * @exception StandardException Thrown on error
 */
public void init(Object arg1, Object arg2) throws StandardException
{
    // RESOLVE: This is something of a hack - it is not obvious that a
    // null first argument should be treated as a (missing) String name.
    if (arg1 == null || arg1 instanceof String)
    {
        this.name = (String) arg1;
        this.exposedName = this.name;
        this.expression = (ValueNode) arg2;
    }
    else if (arg1 instanceof ColumnReference)
    {
        ColumnReference columnRef = (ColumnReference) arg1;
        this.name = columnRef.getColumnName();
        this.exposedName = columnRef.getColumnName();
        // Kept so that bind time can verify the reference carries the
        // right table name.
        this.reference = columnRef;
        this.expression = (ValueNode) arg2;
    }
    else if (arg1 instanceof ColumnDescriptor)
    {
        ColumnDescriptor colDesc = (ColumnDescriptor) arg1;
        this.name = colDesc.getColumnName();
        this.exposedName = this.name;
        setType(colDesc.getType());
        this.columnDescriptor = colDesc;
        this.expression = (ValueNode) arg2;
        this.autoincrement = colDesc.isAutoincrement();
    }
    else
    {
        setType((DataTypeDescriptor) arg1);
        this.expression = (ValueNode) arg2;
        if (arg2 instanceof ColumnReference)
        {
            this.reference = (ColumnReference) arg2;
        }
    }
    // Remember when this RC stands for a <default> keyword in an
    // INSERT or UPDATE statement.
    if (this.expression != null
            && this.expression.isInstanceOf(C_NodeTypes.DEFAULT_NODE))
    {
        this.defaultColumn = true;
    }
}
/**
 * @return true when this ResultColumn stands in for a DEFAULT keyword
 *         in an INSERT or UPDATE statement.
 */
public boolean isDefaultColumn()
{
    return this.defaultColumn;
}
/**
 * Mark (or unmark) this ResultColumn as standing in for a DEFAULT keyword.
 *
 * @param value the new value of the flag
 */
public void setDefaultColumn(boolean value)
{
    this.defaultColumn = value;
}
/**
 * Return TRUE if this result column matches the provided column name.
 *
 * Used by ORDER BY column resolution: Derby prefers to match on the
 * column's alias (exposedName) but also matches the underlying column
 * name, so these are all equivalent (see DERBY-2351):
 *   select name from person order by name;
 *   select name as person_name from person order by name;
 *   select name as person_name from person order by person_name;
 */
boolean columnNameMatches(String columnName)
{
    if (columnName.equals(exposedName))
        return true;
    if (columnName.equals(name))
        return true;
    return columnName.equals(getSourceColumnName());
}
/**
 * Returns the underlying source column name if this ResultColumn is a
 * simple direct reference to a table column, or null otherwise.
 */
// Gemstone changes BEGIN
// increase accessibility from package to public
public String getSourceColumnName()
// GemStone changes END
{
    if (!(expression instanceof ColumnReference))
        return null;
    return ((ColumnReference) expression).getColumnName();
}
/**
 * The following methods implement the ResultColumnDescriptor
 * interface. See the Language Module Interface for details.
 */
public String getName()
{
    // The exposed (possibly aliased) name is the externally visible one.
    return this.exposedName;
}
/**
 * Return the schema name of this column's table, preferring the bound
 * column descriptor, then the underlying expression; null if unknown.
 */
public String getSchemaName()
{
    if (columnDescriptor != null
            && columnDescriptor.getTableDescriptor() != null)
    {
        return columnDescriptor.getTableDescriptor().getSchemaName();
    }
    // REMIND: could look in reference, if set.
    return (expression != null) ? expression.getSchemaName() : null;
}
/**
 * Return the table name for this column: the explicitly set name if any,
 * else the bound table descriptor's name, else the expression's table
 * name, else null.
 */
public String getTableName()
{
    if (tableName != null)
    {
        return tableName;
    }
    if ((columnDescriptor != null) &&
        (columnDescriptor.getTableDescriptor() != null))
    {
        return columnDescriptor.getTableDescriptor().getName();
    }
    // Guard against a null expression; the sibling accessor
    // getSchemaName() already tolerates it, this one previously threw
    // a NullPointerException instead.
    return (expression != null) ? expression.getTableName() : null;
}
/**
 * @see ResultColumnDescriptor#getSourceTableName
 */
public String getSourceTableName()
{
    return this.sourceTableName;
}
/**
 * @see ResultColumnDescriptor#getSourceSchemaName
 */
public String getSourceSchemaName()
{
    return this.sourceSchemaName;
}
/**
 * Clear the table name for the underlying ColumnReference.
 * See UpdateNode.scrubResultColumns() for the full explanation.
 */
public void clearTableName()
{
    if (!(expression instanceof ColumnReference))
        return;
    ((ColumnReference) expression).setTableNameNode((TableName) null);
}
/**
 * @return this column's type (delegates to getTypeServices()).
 */
public DataTypeDescriptor getType()
{
    return getTypeServices();
}
/**
 * @return the 1-based position of this column: the catalog position when
 *         bound to a column descriptor, otherwise the virtual column id.
 */
public int getColumnPosition()
{
    return (columnDescriptor != null)
            ? columnDescriptor.getPosition()
            : virtualColumnId;
}
/**
 * Set the expression in this ResultColumn. Useful when the expression is
 * not known in advance, e.g. INSERT statements with column lists, where
 * the column list and the SELECT or VALUES clause are parsed separately
 * and hooked up afterwards.
 *
 * @param expression The expression to be set in this ResultColumn
 */
public void setExpression(ValueNode expression)
{
    this.expression = expression;
}
/**
 * Get the expression in this ResultColumn.
 *
 * @return the expression this column represents
 */
public ValueNode getExpression()
{
    return this.expression;
}
/**
 * Replace the expression with a null node of this column's type.
 *
 * @exception StandardException Thrown on error
 */
void setExpressionToNullNode()
        throws StandardException
{
    this.expression = getNullNode(getTypeServices());
}
/**
 * Set the name in this ResultColumn. Useful when the name is unknown at
 * creation time — e.g. in an insert-select, where the result column names
 * should match the table being inserted into, not the source table.
 *
 * @param name The name to set in this ResultColumn
 */
public void setName(String name)
{
    if (this.name != null)
    {
        // Never rename away from the name of an underlying reference.
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(reference == null ||
                    name.equals(reference.getColumnName()),
                    "don't change name from reference name");
    }
    else
    {
        this.name = name;
    }
    this.exposedName = name;
}
/**
 * @return whether the name for this ResultColumn was generated.
 */
public boolean isNameGenerated()
{
    return this.isNameGenerated;
}
/**
 * Record whether this result column's name was generated.
 *
 * @param value the new value of the flag
 */
public void setNameGenerated(boolean value)
{
    this.isNameGenerated = value;
}
/**
 * Set the resultSetNumber of the ResultSet we belong to. Needed for
 * generate() since there is no back pointer to the result set node.
 *
 * @param resultSetNumber The resultSetNumber.
 */
public void setResultSetNumber(int resultSetNumber)
{
    this.resultSetNumber = resultSetNumber;
}
/**
 * For NCJ Usage: set the result set number here and propagate it down a
 * VirtualColumnNode expression, if that is what we wrap.
 *
 * @see com.pivotal.gemfirexd.internal.impl.sql.compile.ResultColumn.setResultSetNumber(int)
 */
public void ncjSetResultSetNumber(int resultSetNumber)
{
    setResultSetNumber(resultSetNumber);
    // instanceof is null-safe, so no separate null check is needed.
    if (this.expression instanceof VirtualColumnNode)
    {
        ((VirtualColumnNode) this.expression).ncjSetResultSetNumber(resultSetNumber);
    }
}
/**
 * Get the resultSetNumber for this ResultColumn.
 *
 * @return the resultSetNumber (-1 if not yet assigned)
 */
public int getResultSetNumber()
{
    return this.resultSetNumber;
}
/**
 * Shift the virtualColumnId for this ResultColumn by the given amount.
 *
 * @param adjust The adjustment for the virtualColumnId
 */
public void adjustVirtualColumnId(int adjust)
{
    this.virtualColumnId = this.virtualColumnId + adjust;
}
/**
 * Set the virtualColumnId for this ResultColumn.
 *
 * @param id The virtualColumnId for this ResultColumn
 */
public void setVirtualColumnId(int id)
{
    this.virtualColumnId = id;
}
/**
 * Get the virtualColumnId for this ResultColumn.
 *
 * @return virtualColumnId for this ResultColumn
 */
public int getVirtualColumnId()
{
    return this.virtualColumnId;
}
/**
 * Adjust this virtualColumnId to account for the removal of a column.
 *
 * Called when bind processing removes duplicate columns that were pulled
 * up for the ORDER BY clause. If this is a virtual column positioned after
 * the removed one, shift it logically left by decrementing its id.
 *
 * @param removedColumnId id of the column being removed.
 */
public void collapseVirtualColumnIdGap(int removedColumnId)
{
    // Only virtual columns (no catalog descriptor) to the right of the
    // removed position need to move.
    if (columnDescriptor != null)
        return;
    if (virtualColumnId > removedColumnId)
        virtualColumnId--;
}
/**
 * Generate a unique (across the entire statement) column name for
 * unnamed ResultColumns.
 *
 * @exception StandardException Thrown on error
 */
public void guaranteeColumnName() throws StandardException
{
    if (exposedName != null)
        return;
    /* Unions may also need generated names, if both sides' names don't match */
    this.exposedName = "SQLCol" + getCompilerContext().getNextColumnNumber();
    this.isNameGenerated = true;
}
/**
 * Convert this object to a String. See comments in QueryTreeNode.java
 * for how this should be done for tree printing.
 *
 * @return this object as a String (empty outside of SanityManager.DEBUG)
 */
public String toString()
{
    if (!SanityManager.DEBUG)
    {
        return "";
    }
    StringBuilder sb = new StringBuilder();
    sb.append("exposedName: ").append(exposedName).append('\n');
    sb.append("name: ").append(name).append('\n');
    sb.append("tableName: ").append(tableName).append('\n');
    sb.append("isNameGenerated: ").append(isNameGenerated).append('\n');
    sb.append("sourceTableName: ").append(sourceTableName).append('\n');
    sb.append("type: ").append(getTypeServices()).append('\n');
    sb.append("columnDescriptor: ").append(columnDescriptor).append('\n');
    sb.append("isGenerated: ").append(isGenerated).append('\n');
    sb.append("isGeneratedForUnmatchedColumnInInsert: ")
            .append(isGeneratedForUnmatchedColumnInInsert).append('\n');
    sb.append("isGroupingColumn: ").append(isGroupingColumn).append('\n');
    sb.append("isReferenced: ").append(isReferenced).append('\n');
    sb.append("isRedundant: ").append(isRedundant).append('\n');
    sb.append("virtualColumnId: ").append(virtualColumnId).append('\n');
    sb.append("resultSetNumber: ").append(resultSetNumber).append('\n');
    // GemStone changes BEGIN
    // Identity (not value) of the expression, to avoid recursive printing.
    sb.append("expression: ");
    if (expression != null)
    {
        sb.append(expression.getClass().getName()).append('@')
                .append(Integer.toHexString(System.identityHashCode(this.expression)));
    }
    else
    {
        sb.append("(null)");
    }
    sb.append('\n');
    // GemStone changes END
    sb.append(super.toString());
    return sb.toString();
}
/**
 * Prints the sub-nodes of this object. See QueryTreeNode.java for
 * how tree printing is supposed to work.
 *
 * @param depth The depth of this node in the tree
 */
public void printSubNodes(int depth)
{
    if (!SanityManager.DEBUG)
    {
        return;
    }
    super.printSubNodes(depth);
    if (expression != null)
    {
        printLabel(depth, "expression: ");
        expression.treePrint(depth + 1);
    }
    if (reference != null)
    {
        printLabel(depth, "reference: ");
        reference.treePrint(depth + 1);
    }
}
/**
 * Bind this expression, i.e. bind the sub-expressions. The result type of
 * this result column is figured out by one of the bindResultColumn*()
 * methods instead, because binding differs per statement type and this is
 * a standard interface that does not take the statement type.
 *
 * @param fromList        The FROM list for the query this
 *                        expression is in, for binding columns.
 * @param subqueryList    The subquery list being built as we find SubqueryNodes
 * @param aggregateVector The aggregate vector being built as we find AggregateNodes
 *
 * @return The new top of the expression tree.
 *
 * @exception StandardException Thrown on error
 */
public ValueNode bindExpression(FromList fromList, SubqueryList subqueryList,
        Vector aggregateVector)
        throws StandardException
{
    // Give a parameter the type of this result column — unless this
    // column has no type yet, which can happen when the parameter is
    // part of a table constructor.
    if (expression.requiresTypeFromContext() && getTypeServices() != null)
    {
        expression.setType(getTypeServices());
    }
    expression = expression.bindExpression(fromList, subqueryList,
            aggregateVector);
    if (expression instanceof ColumnReference)
    {
        autoincrement =
                ((ColumnReference) expression).getSource().isAutoincrement();
    }
    return this;
}
/**
 * Bind this result column by ordinal position and set the VirtualColumnId.
 * Used for INSERT statements like "insert into t values (1, 2, 3)" with no
 * column list. If no columnDescriptor exists for the given position, the
 * user specified more values than the table has columns and an exception
 * is thrown.
 *
 * NOTE: We must set the VirtualColumnId here because INSERT does not
 * construct the ResultColumnList in the usual way.
 *
 * @param tableDescriptor The descriptor for the table being inserted into
 * @param columnId        The ordinal position of the column in the table,
 *                        starting at 1.
 *
 * @exception StandardException Thrown on error
 */
void bindResultColumnByPosition(TableDescriptor tableDescriptor,
        int columnId)
        throws StandardException
{
    ColumnDescriptor colDesc =
            tableDescriptor.getColumnDescriptor(columnId);
    if (colDesc == null)
    {
        // Build "schema.table" (schema omitted when unknown) for the error.
        String schemaName = tableDescriptor.getSchemaName();
        String errorString =
                (schemaName != null) ? (schemaName + ".") : "";
        errorString += tableDescriptor.getName();
        throw StandardException.newException(
                SQLState.LANG_TOO_MANY_RESULT_COLUMNS, errorString);
    }
    setColumnDescriptor(tableDescriptor, colDesc);
    setVirtualColumnId(columnId);
}
/**
 * Bind this result column by its name and set the VirtualColumnId.
 * Used for UPDATE statements and for INSERT statements with an explicit
 * column list, e.g. "insert into t (a, b, c) values (1, 2, 3)". Throws
 * when no columnDescriptor matches the name (no such column).
 *
 * NOTE: We must set the VirtualColumnId here because INSERT does not
 * construct the ResultColumnList in the usual way.
 *
 * @param tableDescriptor The descriptor for the table being updated or
 *                        inserted into
 * @param columnId        The ordinal position of the column in the table,
 *                        starting at 1. (Used to set the VirtualColumnId.)
 *
 * @exception StandardException Thrown on error
 */
public void bindResultColumnByName(TableDescriptor tableDescriptor,
        int columnId)
        throws StandardException
{
    ColumnDescriptor colDesc =
            tableDescriptor.getColumnDescriptor(exposedName);
    if (colDesc == null)
    {
        // Build "schema.table" (schema omitted when unknown) for the error.
        String schemaName = tableDescriptor.getSchemaName();
        String errorString =
                (schemaName != null) ? (schemaName + ".") : "";
        errorString += tableDescriptor.getName();
        throw StandardException.newException(
                SQLState.LANG_COLUMN_NOT_FOUND_IN_TABLE,
                exposedName, errorString);
    }
    setColumnDescriptor(tableDescriptor, colDesc);
    setVirtualColumnId(columnId);
    if (isPrivilegeCollectionRequired())
        getCompilerContext().addRequiredColumnPriv(colDesc);
}
/**
 * Change an untyped null to a typed null, taking the type from the given
 * binding ResultColumn.
 *
 * @exception StandardException Thrown on error
 */
public void typeUntypedNullExpression( ResultColumn bindingRC)
        throws StandardException
{
    TypeId typeId = bindingRC.getTypeId();
    // Catches a null in a VALUES clause outside of INSERT VALUES().
    if (typeId == null)
    {
        throw StandardException.newException(SQLState.LANG_NULL_IN_VALUES_CLAUSE);
    }
    if (expression instanceof UntypedNullConstantNode)
    {
        // The type of such a constant (e.g. "insert into table1
        // values(1,null)") is unknown at this point, so build a typed
        // null with the default collation type and derivation.
        expression = getNullNode(bindingRC.getTypeServices());
    }
    else if ((expression instanceof ColumnReference)
            && expression.getTypeServices() == null)
    {
        // Must be a reference to a null column in a values table.
        expression.setType(bindingRC.getType());
    }
}
/**
 * Set the column descriptor for this result column. Also copies the data
 * type services from the descriptor into this result column: redundant,
 * but the result type must be available here for SELECT statements, and
 * it is more orthogonal if the type can always be found here regardless
 * of statement type.
 *
 * @param tableDescriptor  The TableDescriptor for the table being updated
 *                         or inserted into. Used only for error reporting.
 * @param columnDescriptor The ColumnDescriptor to set in this ResultColumn.
 *
 * @exception StandardException tableNameMismatch
 */
void setColumnDescriptor(TableDescriptor tableDescriptor,
        ColumnDescriptor columnDescriptor) throws StandardException
{
    /* Callers are responsible for verifying that the column exists */
    if (SanityManager.DEBUG)
        SanityManager.ASSERT(columnDescriptor != null,
                "Caller is responsible for verifying that column exists");
    setType(columnDescriptor.getType());
    this.columnDescriptor = columnDescriptor;
    // If this node was created using a reference, the reference's table
    // name must agree with the table descriptor's.
    if (reference == null || reference.getTableName() == null)
        return;
    /* REMIND: need to have schema name comparison someday as well...
    ** left out for now, lots of null checking needed...
    ** || ! tableDescriptor.getSchemaName().equals(
    ** reference.getTableNameNode().getSchemaName())) {
    */
    String realName = tableDescriptor.getName();
    String refName = reference.getTableName();
    if (!realName.equals(refName))
    {
        throw StandardException.newException(SQLState.LANG_TABLE_NAME_MISMATCH,
                realName, refName);
    }
}
/**
 * Bind the result column to the expression that lives under it: copy the
 * datatype information (and, for column references, the table/schema
 * names) up to this node. Used for SELECT statements, where each column's
 * result type is the type of its expression.
 *
 * @exception StandardException Thrown on error
 */
public void bindResultColumnToExpression()
        throws StandardException
{
    // Shares the expression's DataTypeServices object; cloning is
    // probably unnecessary here.
    setType(expression.getTypeServices());
    if (expression instanceof ColumnReference)
    {
        ColumnReference colRef = (ColumnReference) expression;
        this.tableName = colRef.getTableName();
        this.sourceTableName = colRef.getSourceTableName();
        this.sourceSchemaName = colRef.getSourceSchemaName();
    }
}
/**
 * Preprocess an expression tree: a number of transformations (subqueries,
 * IN lists, LIKE and BETWEEN) plus subquery flattening.
 * NOTE: This is done before the outer ResultSetNode is preprocessed.
 *
 * @param numTables          Number of tables in the DML Statement
 * @param outerFromList      FromList from outer query block
 * @param outerSubqueryList  SubqueryList from outer query block
 * @param outerPredicateList PredicateList from outer query block
 *
 * @return The modified expression
 *
 * @exception StandardException Thrown on error
 */
public ValueNode preprocess(int numTables,
        FromList outerFromList,
        SubqueryList outerSubqueryList,
        PredicateList outerPredicateList)
        throws StandardException
{
    if (expression != null)
    {
        expression = expression.preprocess(numTables, outerFromList,
                outerSubqueryList, outerPredicateList);
    }
    return this;
}
/**
 * Verify that the given ResultColumn's expression is storable into this
 * result column. Must not be called until both sides have a valid type
 * (i.e. after binding). Used by statements such as INSERT that need to
 * verify whether a value can be stored into a column.
 *
 * @exception StandardException thrown if types not suitable.
 */
public void checkStorableExpression(ResultColumn toStore)
        throws StandardException
{
    // A ResultColumn is itself a ValueNode; delegate to the common check.
    checkStorableExpression((ValueNode) toStore);
}
// Common storability check: throws LANG_NOT_STORABLE (with column/table/
// schema context in the message) when the source value's type cannot be
// stored into this column's type.
private void checkStorableExpression(ValueNode source)
throws StandardException
{
TypeId toStoreTypeId = source.getTypeId();
if (!getTypeCompiler().storable(toStoreTypeId, getClassFactory()))
{
// Prefer the exposed name / explicit table name, falling back to the
// underlying source column / derived table name.
throw StandardException.newException(SQLState.LANG_NOT_STORABLE,
getTypeId().getSQLTypeName(),
toStoreTypeId.getSQLTypeName() ,
"column: " + (this.exposedName != null ? this.exposedName
: getSourceColumnName()) + ", table: " + (this.tableName
!= null ? this.tableName : getTableName())
+ ", schema: " + getSchemaName() /* GemStoneAddition */);
}
}
/**
 * Verify that the expression under this ResultColumn is storable into it.
 * Must not be called until both the column and the expression have valid
 * types (i.e. after binding). Used by statements such as UPDATE that need
 * to verify whether a value can be stored into a column.
 *
 * @exception StandardException thrown if types not suitable.
 */
public void checkStorableExpression()
        throws StandardException
{
    checkStorableExpression(getExpression());
}
/**
 * Do code generation for a result column. This consists of doing the code
 * generation for the underlying expression.
 *
 * @param ecb The ExpressionClassBuilder for the class we're generating
 * @param mb The method the expression will go into
 *
 *
 * @exception StandardException Thrown on error
 */
public void generateExpression(ExpressionClassBuilder ecb,
MethodBuilder mb)
throws StandardException
{
// GemStone changes BEGIN
// set the current column name in activation for exception
// messages if required
if (!(this.expression instanceof ColumnReference)) {
// Emit: this.currentColumnName = <best available column name or null>
mb.pushThis();
if (this.exposedName != null) {
mb.push(this.exposedName); // arg one
}
else {
String colName = getSourceColumnName();
if (colName != null) {
mb.push(colName);
}
else {
// No name available — push a typed null String.
mb.pushNull("java.lang.String");
}
}
mb.putField(ClassName.BaseActivation,
"currentColumnName", "java.lang.String");
mb.endStatement();
}
// GemStone changes END
// Generate the code for the underlying expression itself.
expression.generateExpression(ecb, mb);
}
/**
* Do code generation to return a Null of the appropriate type
* for the result column.
Requires the getCOlumnExpress value pushed onto the stack
*
* @param acb The ActivationClassBuilder for the class we're generating
* @param eb The ExpressionBlock that the generate code is to go into
* @param getColumnExpression "fieldx.getColumn(y)"
*
* @exception StandardException Thrown on error
*/
/*PUSHCOMPILE
public void generateNulls(ExpressionClassBuilder acb,
MethodBuilder mb,
Expression getColumnExpress)
throws StandardException
{
acb.pushDataValueFactory(mb);
getTypeCompiler().generateNull(mb, acb.getBaseClassName());
mb.cast(ClassName.DataValueDescriptor);
return eb.newCastExpression(
ClassName.DataValueDescriptor,
getTypeCompiler().
generateNull(
eb,
acb.getBaseClassName(),
acb.getDataValueFactory(eb),
getColumnExpress));
}
*/
/**
 * Generate the code to create a column of the same shape and size as
 * this ResultColumn. Used in ResultColumnList.generateHolder().
 *
 * @exception StandardException thrown on failure
 */
public void generateHolder(ExpressionClassBuilder acb,
        MethodBuilder mb)
        throws StandardException
{
    // Emits an expression of the form "(DataValueDescriptor) columnSpace".
    acb.generateNull(mb, getTypeCompiler(), getTypeServices().getCollationType());
    mb.upCast(ClassName.DataValueDescriptor);
}
/**
 * Check whether the column length and type of this result column match
 * the expression under it. Useful for INSERT and UPDATE statements; for
 * SELECT statements this should always return true, and there is no need
 * to call it for DELETE.
 *
 * @return true means the column matches its expression,
 *         false means it doesn't match.
 */
boolean columnTypeAndLengthMatch()
        throws StandardException
{
    // We can never make any assumptions about parameters, so don't
    // even bother in that case.
    if (getExpression().requiresTypeFromContext()
            && !getExpression().isParameterizedConstantNode())
    {
        return false;
    }
    // An XML column always needs normalization: there are different
    // "kinds" of XML values and the kind is only known at execution time.
    // See the "normalize" method in
    // com.pivotal.gemfirexd.internal.iapi.types.XML for more.
    if (getTypeId().isXMLTypeId())
    {
        return false;
    }
    DataTypeDescriptor expressionType = getExpression().getTypeServices();
    if (!getTypeServices().isExactTypeAndLengthMatch(expressionType))
    {
        return false;
    }
    // A nullable source cannot match a non-nullable target.
    return getTypeServices().isNullable() || !expressionType.isNullable();
}
// Check whether this result column's type exactly matches another result
// column's (type id, precision, scale, width and nullability). As a side
// effect, a constant on the other side may be converted in place to this
// column's type (see the comment inside). Returns false whenever
// normalization between the two columns is required.
boolean columnTypeAndLengthMatch(ResultColumn otherColumn)
throws StandardException
{
ValueNode otherExpression = otherColumn.getExpression();
DataTypeDescriptor resultColumnType = getTypeServices();
DataTypeDescriptor otherResultColumnType = otherColumn.getTypeServices();
if (SanityManager.DEBUG)
{
// GemStone changes BEGIN
if (resultColumnType == null)
// GemStone changes END
SanityManager.ASSERT(resultColumnType != null,
"Type is null for column " + this);
// GemStone changes BEGIN
if (otherResultColumnType == null)
// GemStone changes END
SanityManager.ASSERT(otherResultColumnType != null,
"Type is null for column " + otherColumn);
}
/*
** We can never make any assumptions about
** parameters. So don't even bother in this
** case.
*/
if ((otherExpression != null) && (otherExpression.requiresTypeFromContext() ||
expression.requiresTypeFromContext() ))
{
return false;
}
// Are we inserting/updating an XML column? If so, we always
// return false so that normalization will occur. We have to
// do this because there are different "kinds" of XML values
// and we need to make sure they match--but we don't know
// the "kind" until execution time. See the "normalize"
// method in com.pivotal.gemfirexd.internal.iapi.types.XML for more.
if (resultColumnType.getTypeId().isXMLTypeId())
return false;
/* Are they the same type? */
if ( ! resultColumnType.getTypeId().equals(
otherResultColumnType.getTypeId()
)
)
{
/* If the source is a constant of a different type then
* we try to convert that constant to a constant of our
* type. (The initial implementation only does the conversion
* to string types because the most common problem is a char
* constant with a varchar column.)
* NOTE: We do not attempt any conversion here if the source
* is a string type and the target is not or vice versa in
* order to avoid problems with implicit varchar conversions.
* Anyway, we will check if the "converted" constant has the
* same type as the original constant. If not, then the conversion
* happened. In that case, we will reuse the ConstantNode, for simplicity,
* and reset the type to match the desired type.
*/
if (otherExpression instanceof ConstantNode)
{
ConstantNode constant = (ConstantNode)otherColumn.getExpression();
DataValueDescriptor oldValue = constant.getValue();
DataValueDescriptor newValue = convertConstant(
resultColumnType.getTypeId(),
resultColumnType.getMaximumWidth(),
oldValue);
// Only adopt the converted value when conversion actually
// happened AND it stayed within the string/non-string family.
if ((oldValue != newValue) &&
(oldValue instanceof StringDataValue ==
newValue instanceof StringDataValue))
{
constant.setValue(newValue);
constant.setType(getTypeServices());
otherColumn.bindResultColumnToExpression();
otherResultColumnType = otherColumn.getType();
}
//If we are dealing with StringDataValue, then make sure we
//have correct collation type and derivaiton set and the value
//represented by collation is either SQLxxx or CollatorSQLxxx
//depending on the collation type.
if (newValue instanceof StringDataValue)
{
constant.setCollationInfo(resultColumnType);
DataValueFactory dvf = getDataValueFactory();
newValue = ((StringDataValue)newValue).getValue(dvf.getCharacterCollator(
constant.getTypeServices().getCollationType()));
constant.setValue(newValue);
}
}
// Re-check: the constant conversion above may have aligned the types.
if ( ! resultColumnType.getTypeId().equals(
otherResultColumnType.getTypeId()
)
)
{
return false;
}
}
/* Are they the same precision? */
if (resultColumnType.getPrecision() !=
otherResultColumnType.getPrecision())
{
return false;
}
/* Are they the same scale? */
if (resultColumnType.getScale() != otherResultColumnType.getScale())
{
return false;
}
/* Are they the same width? */
if (resultColumnType.getMaximumWidth() !=
otherResultColumnType.getMaximumWidth())
{
return false;
}
/* Is the source nullable and the target non-nullable?
* The source is nullable if it is nullable or if the target is generated
* for an unmatched column in an insert with a column list.
* This additional check is needed because when we generate any additional
* source RCs for an insert with a column list the generated RCs for any
* non-specified columns get the type info from the column. Thus,
* for t1(non_nullable, nullable)
* insert into t2 (nullable) values 1;
* RCType.isNullable() returns false for the generated source RC for
* non_nullable. In this case, we want to see it as
*/
if ((! resultColumnType.isNullable()) &&
(otherResultColumnType.isNullable() ||
otherColumn.isGeneratedForUnmatchedColumnInInsert()))
{
return false;
}
return true;
}
/**
 * Is this a generated column?
 *
 * @return whether or not this column is a generated column.
 */
public boolean isGenerated()
{
    // Return the flag directly rather than the redundant "== true" test.
    return isGenerated;
}
/**
 * Is this column generated for an unmatched column in an insert?
 *
 * @return whether or not this column was generated for an unmatched
 *         column in an insert.
 */
public boolean isGeneratedForUnmatchedColumnInInsert()
{
    // Return the flag directly rather than the redundant "== true" test.
    return isGeneratedForUnmatchedColumnInInsert;
}
/**
 * Mark this column as a generated column.
 * A generated column is always a referenced column.
 */
public void markGenerated()
{
    this.isGenerated = true;
    this.isReferenced = true;
}
/**
 * Mark this column as generated for an unmatched column in an insert.
 * A generated column is always a referenced column.
 */
public void markGeneratedForUnmatchedColumnInInsert()
{
    this.isGeneratedForUnmatchedColumnInInsert = true;
    this.isReferenced = true;
}
/**
 * @return whether or not this column is a referenced column.
 */
public boolean isReferenced()
{
    return this.isReferenced;
}
/**
 * Mark this column as a referenced column.
 */
public void setReferenced()
{
    this.isReferenced = true;
}
/**
 * Mark this column as referenced if it already is, or if any result
 * column in its chain of virtual columns is marked as referenced.
 */
void pullVirtualIsReferenced()
{
    if (isReferenced())
        return;
    // Walk down the VirtualColumnNode chain looking for a referenced RC.
    ValueNode expr = expression;
    while (expr instanceof VirtualColumnNode)
    {
        ResultColumn sourceCol = ((VirtualColumnNode) expr).getSourceColumn();
        if (sourceCol.isReferenced())
        {
            setReferenced();
            return;
        }
        expr = sourceCol.getExpression();
    }
}
/**
 * Mark this column as an unreferenced column.
 */
public void setUnreferenced()
{
    this.isReferenced = false;
}
/**
 * Mark this RC and all RCs in the underlying RC/VCN chain as referenced.
 */
void markAllRCsInChainReferenced()
{
    setReferenced();
    for (ValueNode vn = expression; vn instanceof VirtualColumnNode; )
    {
        ResultColumn sourceCol = ((VirtualColumnNode) vn).getSourceColumn();
        sourceCol.setReferenced();
        vn = sourceCol.getExpression();
    }
}
/**
 * @return whether or not this RC is redundant.
 */
public boolean isRedundant()
{
    return this.isRedundant;
}
/**
 * Mark this ResultColumn as redundant.
 */
public void setRedundant()
{
    this.isRedundant = true;
}
/**
 * Mark this ResultColumn as a grouping column in the SELECT list.
 */
public void markAsGroupingColumn()
{
    this.isGroupingColumn = true;
}
/**
 * Look for and reject a ?/-?/+? parameter directly under this
 * ResultColumn. Called for SELECT statements.
 *
 * @exception StandardException Thrown if a ?/-?/+? parameter was found
 *            directly under this ResultColumn.
 */
void rejectParameter() throws StandardException
{
    if (expression == null)
        return;
    if (expression.isParameterNode())
        throw StandardException.newException(SQLState.LANG_PARAM_IN_SELECT_LIST);
}
/*
** The following methods implement the Comparable interface.
*/
/**
 * Order ResultColumns by 1-based column position.
 * Uses Integer.compare instead of subtraction: subtracting two ints can
 * overflow and flip the sign (e.g. a large positive minus a large
 * negative position), violating the Comparable contract.
 *
 * @param other the ResultColumn to compare against
 * @return negative, zero or positive per the Comparable contract
 */
public int compareTo(Object other)
{
    ResultColumn otherResultColumn = (ResultColumn) other;
    return Integer.compare(this.getColumnPosition(),
            otherResultColumn.getColumnPosition());
}
/** Marks this column as being updated by an UPDATE statement. */
void markUpdated() {
    updated = true;
}
/**
 * Marks this column as updatable, so it can be verified against the
 * "for update" list of a positioned update.
 */
void markUpdatableByCursor() {
    updatableByCursor = true;
}
// GemStone changes BEGIN
/** Returns the raw column name field without any fallback logic. */
public final String getActualName() {
    return this.name;
}
/**
 * Reports whether this column is being updated.
 *
 * @return {@code true} if this column is being updated
 */
public boolean updated() {
    return updated;
}
/**
 * Returns the number of primary key columns if this column is part of the
 * primary key, else returns a value <= zero. The result is computed lazily
 * on first call and cached in the {@code primaryKey} field (-1 = unknown).
 *
 * Used by the ADO.NET driver.
 *
 * @param conn the engine connection (currently unused in the body)
 * @return the PK column count, or a value <= 0 when not part of the PK
 * @throws StandardException on lookup failure
 */
public final short primaryKey(final EngineConnection conn)
    throws StandardException {
  // Only compute once; -1 is the "not yet determined" sentinel.
  if (this.primaryKey == -1) {
    if (this.isGenerated) {
      // Generated columns are never part of a user-defined primary key.
      this.primaryKey = 0;
    }
    else if (!com.pivotal.gemfirexd.internal.impl.sql.catalog
        .GfxdDataDictionary.isSystemSchema(sourceSchemaName)) {
      // User schema: resolve via the GemFire container's table info.
      // NOTE(review): assumes sourceSchemaName/sourceTableName/lcc are
      // populated at this point — confirm against callers.
      final GemFireContainer container = GemFireXDUtils
          .getGemFireContainer(this.sourceSchemaName,
              this.sourceTableName, this.lcc);
      this.primaryKey = checkPrimaryKey(container.getExtraTableInfo(),
          this.name, getSourceColumnName());
    }
    else if (this.columnDescriptor != null) {
      // System schema: fall back to the catalog's table descriptor.
      TableDescriptor td = this.columnDescriptor.getTableDescriptor();
      if (td != null) {
        this.primaryKey = checkPrimaryKey(td,
            this.columnDescriptor.getPosition());
      }
      // else: primaryKey stays -1 and will be recomputed on the next call.
    }
  }
  return this.primaryKey;
}
/**
 * Returns the number of primary-key columns of {@code td} if the column at
 * {@code columnPosition} participates in the primary key, else zero.
 *
 * @param td the table descriptor to inspect
 * @param columnPosition 1-based column position to look for
 * @return the PK column count, or 0 when not part of the PK
 * @throws StandardException on catalog access failure
 */
public static short checkPrimaryKey(TableDescriptor td,
        int columnPosition) throws StandardException {
    ReferencedKeyConstraintDescriptor pk = td.getPrimaryKey();
    if (pk == null) {
        return 0;
    }
    final int[] pkColumns = pk.getReferencedColumns();
    if (pkColumns == null) {
        return 0;
    }
    for (int pkColumn : pkColumns) {
        if (columnPosition == pkColumn) {
            // we don't expect the number of PK columns to ever exceed short
            return (short) pkColumns.length;
        }
    }
    return 0;
}
/**
 * Returns the number of primary-key columns if either {@code columnName} or
 * {@code sourceColumnName} matches a PK column of the table, else zero.
 *
 * @param tableInfo extra table metadata (may be null)
 * @param columnName the column's own name
 * @param sourceColumnName the underlying source column name (may be null)
 * @return the PK column count, or 0 when not part of the PK
 */
public static short checkPrimaryKey(ExtraTableInfo tableInfo,
        String columnName, String sourceColumnName) {
    if (tableInfo == null) {
        return 0;
    }
    final String[] colNames = tableInfo.getPrimaryKeyColumnNames();
    if (colNames == null) {
        return 0;
    }
    for (String colName : colNames) {
        boolean matches = colName.equals(columnName)
                || (sourceColumnName != null && colName.equals(sourceColumnName));
        if (matches) {
            // we don't expect the number of PK columns to ever exceed short
            return (short) colNames.length;
        }
    }
    return 0;
}
// Lazily computed count of primary-key columns (see primaryKey(EngineConnection));
// -1 means "not yet determined".
private short primaryKey = -1;
// GemStone changes END
/**
 * Reports whether this column is updatable by a positioned update.
 *
 * @return {@code true} if this column is updatable
 */
public boolean updatableByCursor() {
    return updatableByCursor;
}
/**
 * Disables privilege collection for this node and, when present, for its
 * underlying expression.
 *
 * @see QueryTreeNode#disablePrivilegeCollection
 */
public void disablePrivilegeCollection() {
    super.disablePrivilegeCollection();
    if (expression != null) {
        expression.disablePrivilegeCollection();
    }
}
/**
 * Make a copy of this ResultColumn in a new ResultColumn.
 *
 * The clone shares this RC's expression unless that expression is a
 * ColumnReference, in which case the reference itself is cloned (needed when
 * the parser copies the SELECT list for a HAVING clause). All boolean status
 * flags and identifying metadata are propagated to the copy.
 *
 * @return A new ResultColumn with the same contents as this one
 *
 * @exception StandardException Thrown on error
 */
public ResultColumn cloneMe() throws StandardException
{
  ResultColumn newResultColumn;
  ValueNode cloneExpr;
  /* If expression is a ColumnReference, then we want to
   * have the RC's clone have a clone of the ColumnReference
   * for it's expression. This is for the special case of
   * cloning the SELECT list for the HAVING clause in the parser.
   * The SELECT generated for the HAVING needs its own copy
   * of the ColumnReferences.
   */
  if (expression instanceof ColumnReference)
  {
    cloneExpr = ((ColumnReference) expression).getClone();
  }
  else
  {
    cloneExpr = expression;
  }
  /* If a columnDescriptor exists, then we must propagate it */
  if (columnDescriptor != null)
  {
    // Build from the descriptor first (the factory derives metadata from
    // it), then swap in the possibly-cloned expression afterwards.
    newResultColumn = (ResultColumn) getNodeFactory().getNode(
        C_NodeTypes.RESULT_COLUMN,
        columnDescriptor,
        expression,
        getContextManager());
    newResultColumn.setExpression(cloneExpr);
  }
  else
  {
    newResultColumn = (ResultColumn) getNodeFactory().getNode(
        C_NodeTypes.RESULT_COLUMN,
        getName(),
        cloneExpr,
        getContextManager());
  }
  /* Set the VirtualColumnId and name in the new node */
  newResultColumn.setVirtualColumnId(getVirtualColumnId());
  /* Set the type and name information in the new node */
  newResultColumn.setName(getName());
  newResultColumn.setType(getTypeServices());
  newResultColumn.setNameGenerated(isNameGenerated());
  /* Set the "is generated for unmatched column in insert" status in the new node
     This if for bug 4194 */
  if (isGeneratedForUnmatchedColumnInInsert())
    newResultColumn.markGeneratedForUnmatchedColumnInInsert();
  /* Set the "is referenced" status in the new node */
  if (isReferenced())
    newResultColumn.setReferenced();
  /* Set the "updated" status in the new node */
  if (updated())
    newResultColumn.markUpdated();
  /* Set the "updatable by cursor" status in the new node */
  if (updatableByCursor())
    newResultColumn.markUpdatableByCursor();
  if (isAutoincrementGenerated())
    newResultColumn.setAutoincrementGenerated();
  if (isAutoincrement())
    newResultColumn.setAutoincrement();
  if (isGroupingColumn())
    newResultColumn.markAsGroupingColumn();
  if (isGenerated()) {
    newResultColumn.markGenerated();
  }
  return newResultColumn;
}
/**
 * Returns the maximum size of the column, approximated in bytes from its
 * type descriptor.
 *
 * @return the maximum size in bytes
 */
public int getMaximumColumnSize() {
    DataTypeDescriptor type = getTypeServices();
    return type.getTypeId().getApproximateLengthInBytes(type);
}
/**
 * Returns the declared type descriptor for this column, falling back to the
 * underlying expression's type when none is set directly.
 *
 * @return the type descriptor, or {@code null} when neither is available
 */
public DataTypeDescriptor getTypeServices() {
    DataTypeDescriptor declared = super.getTypeServices();
    if (declared != null) {
        return declared;
    }
    ValueNode expr = getExpression();
    return (expr == null) ? null : expr.getTypeServices();
}
/**
 * Return the variant type for the underlying expression.
 * The variant type can be:
 *   VARIANT        - variant within a scan
 *                    (method calls and non-static field access)
 *   SCAN_INVARIANT - invariant within a scan
 *                    (column references from outer tables)
 *   QUERY_INVARIANT- invariant within the life of a query
 *   CONSTANT       - constant
 *
 * Autoincrement-generating columns are always VARIANT; QUERY_INVARIANT is
 * deliberately collapsed to SCAN_INVARIANT.
 *
 * @return The variant type for the underlying expression.
 * @exception StandardException thrown on error
 */
protected int getOrderableVariantType() throws StandardException {
    final int expType;
    if (isAutoincrementGenerated()) {
        expType = Qualifier.VARIANT;
    } else if (expression == null) {
        expType = Qualifier.CONSTANT;
    } else {
        expType = expression.getOrderableVariantType();
    }

    if (expType == Qualifier.VARIANT) {
        return Qualifier.VARIANT;
    }
    if (expType == Qualifier.SCAN_INVARIANT
            || expType == Qualifier.QUERY_INVARIANT) {
        return Qualifier.SCAN_INVARIANT;
    }
    return Qualifier.CONSTANT;
}
/**
 * Accept a visitor, and call v.visit() on child nodes as necessary.
 *
 * Visits this node first; the child expression is only visited when the
 * visitor neither skips children nor has stopped traversal, and the visited
 * expression replaces the current one in place.
 *
 * @param v the visitor
 * @return the (possibly replaced) node returned by the visitor for this RC
 *
 * @exception StandardException on error
 */
public Visitable accept(Visitor v)
    throws StandardException
{
  Visitable returnNode = v.visit(this);
  if (v.skipChildren(this))
  {
    return returnNode;
  }
  // Visiting may rewrite the expression, so store the result back.
  if (expression != null && !v.stopTraversal())
  {
    expression = (ValueNode)expression.accept(v);
  }
  return returnNode;
}
/**
 * Reports whether this column's name appears in the given array.
 *
 * @param list the array of column names to compare
 * @return {@code true} if the name is found, {@code false} otherwise
 */
public boolean foundInList(String[] list) {
    return foundString(list, name);
}
/**
 * Verifies that this ResultColumn's type is orderable.
 *
 * We deliberately do not check whether user types can be mapped to built-in
 * types: being mappable does not mean ordering works (as of version 2.0,
 * ordering does not work on user types).
 *
 * @exception StandardException if the type is not orderable
 */
void verifyOrderable() throws StandardException {
    if (getTypeId().orderable(getClassFactory())) {
        return;
    }
    throw StandardException.newException(
            SQLState.LANG_COLUMN_NOT_ORDERABLE_DURING_EXECUTION,
            getTypeId().getSQLTypeName());
}
// Gemtone changes BEGIN
//Asif: Increase the visibility
/**
 * Returns the column descriptor of the table column this ResultColumn is
 * bound to, or {@code null} when it is not bound to a table column.
 */
public final ColumnDescriptor getTableColumnDescriptor() {
    return columnDescriptor;
}
// Gemtone changes END
/**
 * Reports whether this result column is a placeholder for a generated
 * autoincrement value.
 */
public boolean isAutoincrementGenerated() {
    return autoincrementGenerated;
}
/** Flags this column as holding a generated autoincrement value. */
public void setAutoincrementGenerated() {
    autoincrementGenerated = true;
}
/** Clears the generated-autoincrement flag. */
public void resetAutoincrementGenerated() {
    autoincrementGenerated = false;
}
/** Reports whether this column is an autoincrement column. */
public boolean isAutoincrement() {
    return autoincrement;
}
/** Flags this column as an autoincrement column. */
public void setAutoincrement() {
    autoincrement = true;
}
/** Reports whether this column is a grouping column of the SELECT list. */
public boolean isGroupingColumn() {
    return isGroupingColumn;
}
/**
 * Converts a constant value to the given target type, enforcing the target's
 * maximum width for VARCHAR (truncating only trailing blanks; any non-blank
 * truncation raises a string-truncation error).
 *
 * NOTE(review): the switch relies on deliberate structure — the {@code
 * default} label shares the CHAR arm (unknown types pass through unchanged),
 * and the VARCHAR arm returns on every path, so it never actually falls
 * through into LONGVARCHAR despite the missing {@code break}.
 *
 * @param toTypeId      the target type
 * @param maxWidth      maximum width for width-limited target types
 * @param constantValue the value to convert
 * @return the converted (or pass-through) value
 * @exception StandardException Thrown on error
 */
private DataValueDescriptor convertConstant(TypeId toTypeId, int maxWidth,
    DataValueDescriptor constantValue)
    throws StandardException
{
  int formatId = toTypeId.getTypeFormatId();
  DataValueFactory dvf = getDataValueFactory();
  switch (formatId)
  {
    default:
    case StoredFormatIds.CHAR_TYPE_ID:
      // CHAR (and any unrecognized type) needs no conversion.
      return constantValue;
    case StoredFormatIds.VARCHAR_TYPE_ID:
      String sourceValue = constantValue.getString();
      int sourceWidth = sourceValue.length();
      int posn;
      /*
      ** If the input is already the right length, no normalization is
      ** necessary - just return the source.
      **
      */
      if (sourceWidth <= maxWidth)
      {
        // formatId is always VARCHAR_TYPE_ID here; the check is redundant
        // but kept for symmetry with the truncation path below.
        if (formatId == StoredFormatIds.VARCHAR_TYPE_ID)
          return dvf.getVarcharDataValue(sourceValue);
      }
      /*
      ** Check whether any non-blank characters will be truncated.
      */
      for (posn = maxWidth; posn < sourceWidth; posn++)
      {
        if (sourceValue.charAt(posn) != ' ')
        {
          String typeName = null;
          if (formatId == StoredFormatIds.VARCHAR_TYPE_ID)
            typeName = TypeId.VARCHAR_NAME;
          throw StandardException.newException(SQLState.LANG_STRING_TRUNCATION,
              typeName,
              StringUtil.formatForPrint(sourceValue),
              String.valueOf(maxWidth));
        }
      }
      // Only trailing blanks are being dropped, so truncate silently.
      if (formatId == StoredFormatIds.VARCHAR_TYPE_ID)
        return dvf.getVarcharDataValue(sourceValue.substring(0, maxWidth));
    case StoredFormatIds.LONGVARCHAR_TYPE_ID:
      //No need to check widths here (unlike varchar), since no max width
      return dvf.getLongvarcharDataValue(constantValue.getString());
  }
}
/** A ResultColumn carries no table name node; always returns {@code null}. */
public TableName getTableNameObject() {
    return null;
}
/** Returns the wrapped ColumnReference, if any (may be {@code null}). */
public ColumnReference getReference() {
    return reference;
}
/**
 * Returns the source BaseColumnNode for this result column by walking the
 * ResultColumn / ColumnReference / VirtualColumnNode chain. Only resolvable
 * once the ResultColumn is bound and refers to a column in a BaseFromTable.
 *
 * @return the BaseColumnNode, or {@code null} if none can be found
 */
public BaseColumnNode getBaseColumnNode() {
    ValueNode node = expression;
    for (;;) {
        if (node instanceof ResultColumn) {
            node = ((ResultColumn) node).expression;
        } else if (node instanceof ColumnReference) {
            node = ((ColumnReference) node).getSource();
        } else if (node instanceof VirtualColumnNode) {
            node = ((VirtualColumnNode) node).getSourceColumn();
        } else if (node instanceof BaseColumnNode) {
            return (BaseColumnNode) node;
        } else {
            return null;
        }
    }
}
/**
 * Walks the tree beneath this ResultColumn to find the number of the table
 * this RC points to.
 *
 * Two sources are consulted: (1) a ColumnReference expression yields its
 * target table number directly; (2) a VirtualColumnNode yields the table
 * number of the FromBaseTable it points to, or else recurses down the
 * VirtualColumnNode/ResultColumn chain.
 *
 * @return the table number, or -1 when it cannot be determined from here
 *         (e.g. the expression is a BaseColumnNode)
 */
public int getTableNumber()
        throws StandardException {
    if (expression instanceof ColumnReference) {
        return ((ColumnReference) expression).getTableNumber();
    }
    if (expression instanceof VirtualColumnNode) {
        VirtualColumnNode vcn = (VirtualColumnNode) expression;
        // A VCN over a FromBaseTable gives us the answer directly.
        if (vcn.getSourceResultSet() instanceof FromBaseTable) {
            return ((FromBaseTable) vcn.getSourceResultSet()).getTableNumber();
        }
        // Otherwise recurse down the chain.
        return vcn.getSourceColumn().getTableNumber();
    }
    // Neither a column reference nor a FromBaseTable beneath the expression.
    return -1;
}
/**
 * Reports whether this ResultColumn is equivalent to another ValueNode:
 * same node type and equivalent underlying expressions. A ResultColumn
 * without an expression is never considered equivalent.
 */
protected boolean isEquivalent(ValueNode o) throws StandardException {
    if (o.getNodeType() != getNodeType()) {
        return false;
    }
    if (expression == null) {
        return false;
    }
    return expression.isEquivalent(((ResultColumn) o).expression);
}
/**
 * Checks whether this ResultColumn's immediate expression is a window
 * function column.
 *
 * @return {@code true} if the RC's expression is a window function column
 */
public boolean expressionIsWindowFunction()
{
    // Direct boolean result instead of the verbose if/return-true idiom.
    return getExpression() instanceof WindowFunctionColumnNode;
}
/**
 * Checks whether this ResultColumn is a window function column, either
 * directly or one VirtualColumnNode hop away; does not traverse the
 * complete chain of references.
 *
 * @return {@code true} if the RC is a window function column
 */
public boolean isWindowFunction()
{
    ValueNode expr = getExpression();
    if (expr instanceof WindowFunctionColumnNode) {
        return true;
    }
    // One level of indirection through a virtual column is also accepted.
    return expr instanceof VirtualColumnNode
        && expr.getSourceResultColumn().getExpression()
            instanceof WindowFunctionColumnNode;
}
// GemStone changes BEGIN
/**
 * Clears the generated-column flag on this column while retaining the
 * reference part.
 */
public void unmarkGenerated() {
    isGenerated = false;
}
/** Records that this RC's expression has been expanded to its operands. */
public void markExpanded() {
    isExpanded = true;
}
/**
 * Indicates whether the underlying expression of this RC was expanded
 * during some phase of RC processing.
 *
 * @see CollectExpressionOperandsVisitor
 * @return {@code true} if the expression was expanded
 */
public boolean isExpanded() {
    return isExpanded;
}
/**
 * Rewrites this RC's expression into its operand form and returns this node.
 * The rewritten expression replaces the current one in place.
 */
@Override
public ValueNode genExpressionOperands(ResultColumnList outerResultColumns,
        ResultColumn parentRC,
        boolean remapToNew)
        throws StandardException {
    expression = expression.genExpressionOperands(
            outerResultColumns, parentRC, remapToNew);
    return this;
}
/**
 * Walks the ColumnReference/VirtualColumnNode chain below this RC and
 * returns the first ResultColumn found, or {@code null} when the chain
 * ends on some other node type.
 */
public ResultColumn getChildResultColumn() {
    ValueNode node = expression;
    for (;;) {
        if (node instanceof ColumnReference) {
            node = ((ColumnReference) node).getSource();
        } else if (node instanceof VirtualColumnNode) {
            node = ((VirtualColumnNode) node).getSourceColumn();
        } else if (node instanceof ResultColumn) {
            return (ResultColumn) node;
        } else {
            return null;
        }
    }
}
/**
 * Reports whether this column carries an expression (delegating to the
 * underlying expression node).
 *
 * @return {@code true} if an expression is present and reports one
 */
public boolean hasExpression() {
    // Guard against a null expression: sibling methods (findSourceSchemaName,
    // findSourceColumnName, ...) treat expression as nullable, and an
    // unguarded dereference here would throw NullPointerException.
    return expression != null && expression.hasExpression();
}
/**
 * {@inheritDoc}
 *
 * Resolution order: explicit source schema name, then the bound column
 * descriptor's table schema, then the underlying expression; {@code null}
 * when none apply.
 */
@Override
public String findSourceSchemaName() {
    if (sourceSchemaName != null) {
        return sourceSchemaName;
    }
    if (columnDescriptor != null) {
        TableDescriptor td = columnDescriptor.getTableDescriptor();
        if (td != null) {
            return td.getSchemaName();
        }
    }
    if (expression != null) {
        return expression.findSourceSchemaName();
    }
    return null;
}
/**
 * {@inheritDoc}
 *
 * Resolution order: explicit source table name, then the bound column
 * descriptor's table name, then the underlying expression, falling back to
 * the plain {@code tableName} field.
 */
@Override
public String findSourceTableName() {
    if (sourceTableName != null) {
        return sourceTableName;
    }
    if (columnDescriptor != null) {
        TableDescriptor td = columnDescriptor.getTableDescriptor();
        if (td != null) {
            return td.getName();
        }
    }
    if (expression != null) {
        return expression.findSourceTableName();
    }
    return tableName;
}
/**
 * {@inheritDoc}
 *
 * Prefers the underlying expression's source column name, then this RC's
 * own name field, then {@link #getName()}.
 */
@Override
public String findSourceColumnName() {
    if (expression != null) {
        return expression.findSourceColumnName();
    }
    return (this.name != null) ? this.name : getName();
}
// GemStone changes END
}
| apache-2.0 |
11xor6/presto | plugin/trino-raptor-legacy/src/main/java/io/trino/plugin/raptor/legacy/metadata/H2ShardDao.java | 1840 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.raptor.legacy.metadata;
import io.trino.plugin.raptor.legacy.util.UuidUtil.UuidArgumentFactory;
import io.trino.plugin.raptor.legacy.util.UuidUtil.UuidMapperFactory;
import org.skife.jdbi.v2.sqlobject.Bind;
import org.skife.jdbi.v2.sqlobject.SqlBatch;
import org.skife.jdbi.v2.sqlobject.SqlUpdate;
import org.skife.jdbi.v2.sqlobject.customizers.RegisterArgumentFactory;
import org.skife.jdbi.v2.sqlobject.customizers.RegisterMapperFactory;
import java.sql.Timestamp;
import java.util.UUID;
/**
 * H2-specific variant of {@code ShardDao}: overrides the statements whose
 * SQL differs between MySQL and H2 (MERGE upsert syntax, LIMIT on DELETE).
 * Implementations are generated by JDBI from the annotations.
 */
@RegisterArgumentFactory(UuidArgumentFactory.class)
@RegisterMapperFactory(UuidMapperFactory.class)
public interface H2ShardDao
    extends ShardDao
{
  // H2 uses MERGE for insert-or-update; each UUID becomes one batch row.
  @Override
  @SqlBatch("MERGE INTO deleted_shards (shard_uuid, delete_time)\n" +
      "VALUES (:shardUuid, CURRENT_TIMESTAMP)")
  void insertDeletedShards(@Bind("shardUuid") Iterable<UUID> shardUuids);

  // Purges finished transactions older than maxEndTime that created no
  // shards, at most CLEANUP_TRANSACTIONS_BATCH_SIZE per call.
  @Override
  @SqlUpdate("DELETE FROM transactions\n" +
      "WHERE end_time < :maxEndTime\n" +
      " AND successful IN (TRUE, FALSE)\n" +
      " AND transaction_id NOT IN (SELECT transaction_id FROM created_shards)\n" +
      "LIMIT " + CLEANUP_TRANSACTIONS_BATCH_SIZE)
  int deleteOldCompletedTransactions(@Bind("maxEndTime") Timestamp maxEndTime);
}
| apache-2.0 |
osct/IDPPublic | src/java/it/infn/ct/security/entities/UserConfirmUpdate.java | 2152 | /***********************************************************************
* Copyright (c) 2011:
* Istituto Nazionale di Fisica Nucleare (INFN), Italy
* Consorzio COMETA (COMETA), Italy
*
* See http://www.infn.it and and http://www.consorzio-cometa.it for details on
* the copyright holders.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***********************************************************************/
package it.infn.ct.security.entities;
import java.io.Serializable;
import java.util.Date;
/**
*
* @author Marco Fargetta <marco.fargetta@ct.infn.it>
*/
/**
 * Serializable bean tracking a pending request for a user to confirm or
 * update their account data: who must confirm, the deadline, and whether
 * the update has already happened.
 */
public class UserConfirmUpdate implements Serializable {

    private static final long serialVersionUID = 1L;

    // Surrogate identifier assigned by the persistence layer.
    private long id;
    // Login name of the user the request refers to.
    private String username;
    // Deadline by which the confirmation must occur.
    private Date timelimit;
    // Whether the user has already confirmed/updated.
    private boolean updated;

    /** No-argument constructor (required by persistence frameworks). */
    public UserConfirmUpdate() {
    }

    /**
     * Creates a request for the given user with a deadline and initial
     * updated state.
     */
    public UserConfirmUpdate(String username, Date timelimit, boolean updated) {
        this.username = username;
        this.timelimit = timelimit;
        this.updated = updated;
    }

    public long getId() {
        return id;
    }

    public void setId(long id) {
        this.id = id;
    }

    public String getUsername() {
        return username;
    }

    public void setUsername(String username) {
        this.username = username;
    }

    public Date getTimelimit() {
        return timelimit;
    }

    public void setTimelimit(Date timelimit) {
        this.timelimit = timelimit;
    }

    public boolean isUpdated() {
        return updated;
    }

    public void setUpdated(boolean updated) {
        this.updated = updated;
    }
}
| apache-2.0 |
jamiepg1/jetty.project | jetty-spdy/spdy-core/src/test/java/org/eclipse/jetty/spdy/frames/GoAwayGenerateParseTest.java | 3373 | /*
* Copyright (c) 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eclipse.jetty.spdy.frames;
import java.nio.ByteBuffer;
import org.eclipse.jetty.spdy.StandardByteBufferPool;
import org.eclipse.jetty.spdy.StandardCompressionFactory;
import org.eclipse.jetty.spdy.api.SPDY;
import org.eclipse.jetty.spdy.generator.Generator;
import org.eclipse.jetty.spdy.parser.Parser;
import org.junit.Assert;
import org.junit.Test;
/**
 * Round-trip tests for GOAWAY control frames: generate a frame, parse the
 * resulting bytes (either in one shot or one byte at a time), and verify
 * the parsed frame matches the original. The two public tests previously
 * duplicated the entire body; the shared logic now lives in one helper.
 */
public class GoAwayGenerateParseTest
{
    private static final int LAST_STREAM_ID = 13;
    private static final int STATUS_CODE = 1;

    @Test
    public void testGenerateParse() throws Exception
    {
        assertGenerateParse(false);
    }

    @Test
    public void testGenerateParseOneByteAtATime() throws Exception
    {
        assertGenerateParse(true);
    }

    /**
     * Generates a GOAWAY frame, feeds the serialized bytes to the parser
     * (whole buffer, or byte-by-byte to exercise incremental parsing), and
     * asserts the parsed frame's version, stream id, flags and status code.
     */
    private void assertGenerateParse(boolean oneByteAtATime) throws Exception
    {
        GoAwayFrame frame1 = new GoAwayFrame(SPDY.V3, LAST_STREAM_ID, STATUS_CODE);
        Generator generator = new Generator(new StandardByteBufferPool(), new StandardCompressionFactory().newCompressor());
        ByteBuffer buffer = generator.control(frame1);

        Assert.assertNotNull(buffer);

        TestSPDYParserListener listener = new TestSPDYParserListener();
        Parser parser = new Parser(new StandardCompressionFactory().newDecompressor());
        parser.addListener(listener);
        if (oneByteAtATime)
        {
            while (buffer.hasRemaining())
                parser.parse(ByteBuffer.wrap(new byte[]{buffer.get()}));
        }
        else
        {
            parser.parse(buffer);
        }
        ControlFrame frame2 = listener.getControlFrame();

        Assert.assertNotNull(frame2);
        Assert.assertEquals(ControlFrameType.GO_AWAY, frame2.getType());
        GoAwayFrame goAway = (GoAwayFrame)frame2;
        Assert.assertEquals(SPDY.V3, goAway.getVersion());
        Assert.assertEquals(LAST_STREAM_ID, goAway.getLastStreamId());
        Assert.assertEquals(0, goAway.getFlags());
        Assert.assertEquals(STATUS_CODE, goAway.getStatusCode());
    }
}
| apache-2.0 |
smarthi/DataflowJavaSDK | sdk/src/main/java/com/google/cloud/dataflow/sdk/runners/worker/GroupingShuffleReader.java | 15532 | /*******************************************************************************
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
******************************************************************************/
package com.google.cloud.dataflow.sdk.runners.worker;
import static com.google.api.client.util.Preconditions.checkNotNull;
import static com.google.cloud.dataflow.sdk.runners.worker.SourceTranslationUtils.cloudPositionToReaderPosition;
import static com.google.cloud.dataflow.sdk.runners.worker.SourceTranslationUtils.cloudProgressToReaderProgress;
import static com.google.cloud.dataflow.sdk.runners.worker.SourceTranslationUtils.splitRequestToApproximateProgress;
import static com.google.cloud.dataflow.sdk.util.common.Counter.AggregationKind.SUM;
import com.google.api.client.util.Preconditions;
import com.google.api.services.dataflow.model.ApproximateProgress;
import com.google.cloud.dataflow.sdk.coders.Coder;
import com.google.cloud.dataflow.sdk.coders.IterableCoder;
import com.google.cloud.dataflow.sdk.coders.KvCoder;
import com.google.cloud.dataflow.sdk.options.PipelineOptions;
import com.google.cloud.dataflow.sdk.util.BatchModeExecutionContext;
import com.google.cloud.dataflow.sdk.util.CoderUtils;
import com.google.cloud.dataflow.sdk.util.WindowedValue;
import com.google.cloud.dataflow.sdk.util.WindowedValue.WindowedValueCoder;
import com.google.cloud.dataflow.sdk.util.common.Counter;
import com.google.cloud.dataflow.sdk.util.common.CounterSet;
import com.google.cloud.dataflow.sdk.util.common.Reiterable;
import com.google.cloud.dataflow.sdk.util.common.Reiterator;
import com.google.cloud.dataflow.sdk.util.common.worker.AbstractBoundedReaderIterator;
import com.google.cloud.dataflow.sdk.util.common.worker.BatchingShuffleEntryReader;
import com.google.cloud.dataflow.sdk.util.common.worker.GroupingShuffleEntryIterator;
import com.google.cloud.dataflow.sdk.util.common.worker.KeyGroupedShuffleEntries;
import com.google.cloud.dataflow.sdk.util.common.worker.Reader;
import com.google.cloud.dataflow.sdk.util.common.worker.ShuffleEntry;
import com.google.cloud.dataflow.sdk.util.common.worker.ShuffleEntryReader;
import com.google.cloud.dataflow.sdk.util.common.worker.StateSampler;
import com.google.cloud.dataflow.sdk.values.KV;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.Iterator;
import javax.annotation.Nullable;
/**
* A source that reads from a shuffled dataset and yields key-grouped data.
*
* @param <K> the type of the keys read from the shuffle
* @param <V> the type of the values read from the shuffle
*/
public class GroupingShuffleReader<K, V> extends Reader<WindowedValue<KV<K, Reiterable<V>>>> {
private static final Logger LOG = LoggerFactory.getLogger(GroupingShuffleReader.class);
public static final String SOURCE_NAME = "GroupingShuffleSource";
final byte[] shuffleReaderConfig;
@Nullable final String startShufflePosition;
@Nullable final String stopShufflePosition;
final BatchModeExecutionContext executionContext;
@Nullable final CounterSet.AddCounterMutator addCounterMutator;
@Nullable final String operationName;
// Counts how many bytes were from by a given operation from a given shuffle session.
@Nullable Counter<Long> perOperationPerDatasetBytesCounter;
Coder<K> keyCoder;
Coder<V> valueCoder;
/**
 * Creates a reader over a shuffled dataset that yields key-grouped values.
 *
 * @param options pipeline options (unused here beyond construction)
 * @param shuffleReaderConfig opaque config identifying the shuffle session;
 *        must be non-null by the time {@link #iterator()} is called
 * @param startShufflePosition base64 start position, or null for the beginning
 * @param stopShufflePosition base64 stop position, or null for the end
 * @param coder windowed KV coder; its key/value component coders are extracted
 * @param executionContext batch execution context (receives the current key)
 * @param addCounterMutator sink for the per-operation byte counter (nullable)
 * @param operationName name used to label the byte counter (nullable)
 * @throws Exception if the coder is not a windowed KV-of-iterable coder
 */
public GroupingShuffleReader(
    PipelineOptions options,
    byte[] shuffleReaderConfig,
    @Nullable String startShufflePosition,
    @Nullable String stopShufflePosition,
    Coder<WindowedValue<KV<K, Iterable<V>>>> coder,
    BatchModeExecutionContext executionContext,
    CounterSet.AddCounterMutator addCounterMutator,
    String operationName)
    throws Exception {
  this.shuffleReaderConfig = shuffleReaderConfig;
  this.startShufflePosition = startShufflePosition;
  this.stopShufflePosition = stopShufflePosition;
  this.executionContext = executionContext;
  this.addCounterMutator = addCounterMutator;
  this.operationName = operationName;
  initCoder(coder);
  // We cannot initialize perOperationPerDatasetBytesCounter here, as it
  // depends on shuffleReaderConfig, which isn't populated yet.
}
/**
 * Lazily creates the per-operation, per-dataset read-bytes counter.
 * Synchronized so concurrent iterator creation installs it only once; a
 * no-op when already created or when no counter mutator/operation name
 * was supplied.
 */
private synchronized void initCounter(String datasetId) {
    if (perOperationPerDatasetBytesCounter != null
            || addCounterMutator == null
            || operationName == null) {
        return;
    }
    String counterName =
        "dax-shuffle-" + datasetId + "-wf-" + operationName + "-read-bytes";
    perOperationPerDatasetBytesCounter =
        addCounterMutator.addCounter(Counter.longs(counterName, SUM));
}
/**
 * Opens the shuffle session described by {@code shuffleReaderConfig} and
 * returns an iterator of key-grouped windowed values. Also initializes the
 * per-dataset byte counter, which needs the dataset id from the config.
 */
@Override
public ReaderIterator<WindowedValue<KV<K, Reiterable<V>>>> iterator() throws IOException {
  Preconditions.checkArgument(shuffleReaderConfig != null);
  ApplianceShuffleReader asr = new ApplianceShuffleReader(shuffleReaderConfig);
  String datasetId = asr.getDatasetId();
  initCounter(datasetId);
  // Batch reads through a chunking reader to amortize per-entry overhead.
  return iterator(new BatchingShuffleEntryReader(
      new ChunkingShuffleBatchReader(asr)));
}
/**
 * Validates the supplied coder and extracts the key and value component
 * coders. The coder must be WindowedValueCoder(KvCoder(keyCoder,
 * IterableCoder(valueCoder))); each layer is checked and peeled in turn.
 *
 * @throws Exception if any layer has an unexpected coder kind
 */
private void initCoder(Coder<WindowedValue<KV<K, Iterable<V>>>> coder) throws Exception {
  if (!(coder instanceof WindowedValueCoder)) {
    throw new Exception("unexpected kind of coder for WindowedValue: " + coder);
  }
  Coder<KV<K, Iterable<V>>> elemCoder =
      ((WindowedValueCoder<KV<K, Iterable<V>>>) coder).getValueCoder();
  if (!(elemCoder instanceof KvCoder)) {
    throw new Exception("unexpected kind of coder for elements read from "
        + "a key-grouping shuffle: " + elemCoder);
  }
  // Safe: guarded by the instanceof check above.
  @SuppressWarnings("unchecked")
  KvCoder<K, Iterable<V>> kvCoder = (KvCoder<K, Iterable<V>>) elemCoder;
  this.keyCoder = kvCoder.getKeyCoder();
  Coder<Iterable<V>> kvValueCoder = kvCoder.getValueCoder();
  if (!(kvValueCoder instanceof IterableCoder)) {
    throw new Exception("unexpected kind of coder for values of KVs read from "
        + "a key-grouping shuffle");
  }
  IterableCoder<V> iterCoder = (IterableCoder<V>) kvValueCoder;
  this.valueCoder = iterCoder.getElemCoder();
}
/**
 * Wraps the given shuffle entry reader in a grouping iterator. Package
 * visible so tests can inject a custom reader.
 */
final ReaderIterator<WindowedValue<KV<K, Reiterable<V>>>> iterator(ShuffleEntryReader reader) {
    return new GroupingShuffleReaderIterator(reader);
}
/**
* A ReaderIterator that reads from a ShuffleEntryReader and groups
* all the values with the same key.
*
* <p>A key limitation of this implementation is that all iterator accesses
* must by externally synchronized (the iterator objects are not individually
* thread-safe, and the iterators derived from a single original iterator
* access shared state that is not thread-safe).
*
* <p>To access the current position, the iterator must advance
* on-demand and cache the next batch of key grouped shuffle
* entries. The iterator does not advance a second time in @next()
* to avoid asking the underlying iterator to advance to the next
* key before the caller/user iterates over the values corresponding
* to the current key, which would introduce a performance
* penalty.
*/
private final class GroupingShuffleReaderIterator
extends AbstractBoundedReaderIterator<WindowedValue<KV<K, Reiterable<V>>>> {
// N.B. This class is *not* static; it uses the keyCoder, valueCoder, and
// executionContext from its enclosing GroupingShuffleReader.
/** The iterator over shuffle entries, grouped by common key. */
private final Iterator<KeyGroupedShuffleEntries> groups;
private final GroupingShuffleRangeTracker rangeTracker;
private ByteArrayShufflePosition lastGroupStart;
/** The next group to be consumed, if available. */
private KeyGroupedShuffleEntries currentGroup = null;
protected StateSampler stateSampler = null;
protected int readState;
/**
 * Builds the grouped iterator: sets up state sampling, the position range
 * tracker, and the underlying key-grouped entry iterator whose byte counts
 * feed the per-operation counter.
 *
 * NOTE(review): references GroupingShuffleReader.this.stateSampler and
 * stateSamplerOperationName, which are not declared in the visible part of
 * the outer class — presumably fields declared elsewhere; confirm.
 */
public GroupingShuffleReaderIterator(ShuffleEntryReader reader) {
  if (GroupingShuffleReader.this.stateSampler == null) {
    // No sampler supplied: create a local one with its own counters.
    CounterSet counterSet = new CounterSet();
    this.stateSampler = new StateSampler("local", counterSet.getAddCounterMutator());
    this.readState = stateSampler.stateForName("shuffle");
  } else {
    checkNotNull(GroupingShuffleReader.this.stateSamplerOperationName);
    this.stateSampler = GroupingShuffleReader.this.stateSampler;
    this.readState = stateSampler.stateForName(
        GroupingShuffleReader.this.stateSamplerOperationName + "-process");
  }
  this.rangeTracker =
      new GroupingShuffleRangeTracker(
          ByteArrayShufflePosition.fromBase64(startShufflePosition),
          ByteArrayShufflePosition.fromBase64(stopShufflePosition));
  // Time spent opening/reading the shuffle is attributed to the read state.
  try (StateSampler.ScopedState read = stateSampler.scopedState(readState)) {
    this.groups =
        new GroupingShuffleEntryIterator(
            reader.read(rangeTracker.getStartPosition(), rangeTracker.getStopPosition())) {
          @Override
          protected void notifyElementRead(long byteSize) {
            // Feed both the per-operation/dataset counter (if present)
            // and the reader-level byte notification.
            if (GroupingShuffleReader.this.perOperationPerDatasetBytesCounter != null) {
              GroupingShuffleReader.this.perOperationPerDatasetBytesCounter.addValue(byteSize);
            }
            GroupingShuffleReader.this.notifyElementRead(byteSize);
          }
        };
  }
}
/**
 * Advances to the next key group, caching it in {@code currentGroup} for
 * {@link #nextImpl}. Consults the range tracker so a group outside the
 * (possibly dynamically split) range ends iteration; a group is a split
 * point only when its start position differs from the previous group's.
 */
@Override
protected boolean hasNextImpl() throws IOException {
  try (StateSampler.ScopedState read = stateSampler.scopedState(readState)) {
    if (!groups.hasNext()) {
      return false;
    }
    // Cache the group now; nextImpl must not advance again, otherwise we
    // would skip past values the caller has not consumed yet.
    currentGroup = groups.next();
  }
  ByteArrayShufflePosition groupStart = ByteArrayShufflePosition.of(currentGroup.position);
  boolean isAtSplitPoint = (lastGroupStart == null) || (!groupStart.equals(lastGroupStart));
  lastGroupStart = groupStart;
  return rangeTracker.tryReturnRecordAt(isAtSplitPoint, groupStart);
}
@Override
protected WindowedValue<KV<K, Reiterable<V>>> nextImpl() throws IOException {
K key = CoderUtils.decodeFromByteArray(keyCoder, currentGroup.key);
if (executionContext != null) {
executionContext.setKey(key);
}
KeyGroupedShuffleEntries group = currentGroup;
currentGroup = null;
return WindowedValue.valueInEmptyWindows(
KV.<K, Reiterable<V>>of(key, new ValuesIterable(group.values)));
}
/**
* Returns the position before the next {@code KV<K, Reiterable<V>>} to be returned by the
* {@link GroupingShuffleReaderIterator}. Returns null if the
* {@link GroupingShuffleReaderIterator} is finished.
*/
@Override
public Progress getProgress() {
com.google.api.services.dataflow.model.Position position =
new com.google.api.services.dataflow.model.Position();
ApproximateProgress progress = new ApproximateProgress();
ByteArrayShufflePosition groupStart = rangeTracker.getLastGroupStart();
if (groupStart != null) {
position.setShufflePosition(groupStart.encodeBase64());
progress.setPosition(position);
}
return cloudProgressToReaderProgress(progress);
}
/**
* Updates the stop position of the shuffle source to the position proposed. Ignores the
* proposed stop position if it is smaller than or equal to the position before the next
* {@code KV<K, Reiterable<V>>} to be returned by the {@link GroupingShuffleReaderIterator}.
*/
@Override
public DynamicSplitResult requestDynamicSplit(DynamicSplitRequest splitRequest) {
checkNotNull(splitRequest);
ApproximateProgress splitProgress = splitRequestToApproximateProgress(
splitRequest);
com.google.api.services.dataflow.model.Position splitPosition = splitProgress.getPosition();
if (splitPosition == null) {
LOG.warn("GroupingShuffleReader only supports split at a Position. Requested: {}",
splitRequest);
return null;
}
String splitShufflePosition = splitPosition.getShufflePosition();
if (splitShufflePosition == null) {
LOG.warn("GroupingShuffleReader only supports split at a shuffle position. Requested: {}",
splitPosition);
return null;
}
ByteArrayShufflePosition newStopPosition =
ByteArrayShufflePosition.fromBase64(splitShufflePosition);
if (rangeTracker.trySplitAtPosition(newStopPosition)) {
LOG.info(
"Split GroupingShuffleReader at {}, now {}",
newStopPosition.encodeBase64(),
rangeTracker);
return new DynamicSplitResultWithPosition(cloudPositionToReaderPosition(splitPosition));
} else {
LOG.info(
"Refused to split GroupingShuffleReader {} at {}",
rangeTracker,
newStopPosition.encodeBase64());
return null;
}
}
/**
* Provides the {@link Reiterable} used to iterate through the values part
* of a {@code KV<K, Reiterable<V>>} entry produced by a
* {@link GroupingShuffleReader}.
*/
private final class ValuesIterable implements Reiterable<V> {
// N.B. This class is *not* static; it uses the valueCoder from
// its enclosing GroupingShuffleReader.
private final Reiterable<ShuffleEntry> base;
public ValuesIterable(Reiterable<ShuffleEntry> base) {
this.base = checkNotNull(base);
}
@Override
public ValuesIterator iterator() {
return new ValuesIterator(base.iterator());
}
}
/**
* Provides the {@link Reiterator} used to iterate through the values part
* of a {@code KV<K, Reiterable<V>>} entry produced by a
* {@link GroupingShuffleReader}.
*/
private final class ValuesIterator implements Reiterator<V> {
// N.B. This class is *not* static; it uses the valueCoder from
// its enclosing GroupingShuffleReader.
private final Reiterator<ShuffleEntry> base;
public ValuesIterator(Reiterator<ShuffleEntry> base) {
this.base = checkNotNull(base);
}
@Override
public boolean hasNext() {
try (StateSampler.ScopedState read =
GroupingShuffleReaderIterator.this.stateSampler.scopedState(
GroupingShuffleReaderIterator.this.readState)) {
return base.hasNext();
}
}
@Override
public V next() {
try (StateSampler.ScopedState read =
GroupingShuffleReaderIterator.this.stateSampler.scopedState(
GroupingShuffleReaderIterator.this.readState)) {
ShuffleEntry entry = base.next();
try {
return CoderUtils.decodeFromByteArray(valueCoder, entry.getValue());
} catch (IOException exn) {
throw new RuntimeException(exn);
}
}
}
@Override
public void remove() {
base.remove();
}
@Override
public ValuesIterator copy() {
return new ValuesIterator(base.copy());
}
}
}
}
| apache-2.0 |
pcmoritz/arrow | java/flight/src/main/java/org/apache/arrow/flight/FlightEndpoint.java | 1935 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.flight;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.arrow.flight.impl.Flight;
import com.google.common.collect.ImmutableList;
/**
 * POJO wrapper around the Flight protocol {@code FlightEndpoint} message: an
 * opaque {@link Ticket} identifying a data stream together with the list of
 * {@link Location}s from which that stream can be retrieved.
 *
 * <p>Instances are immutable: both fields are final and the location list is
 * copied into an immutable list at construction time.
 */
public class FlightEndpoint {
  private final List<Location> locations;
  private final Ticket ticket;

  /**
   * Constructs a new endpoint.
   *
   * @param ticket ticket to present when retrieving the stream
   * @param locations locations where the stream may be retrieved
   */
  public FlightEndpoint(Ticket ticket, Location... locations) {
    this.locations = ImmutableList.copyOf(locations);
    this.ticket = ticket;
  }

  /** Constructs an endpoint from its protocol-buffer representation. */
  public FlightEndpoint(Flight.FlightEndpoint flt) {
    // Collect into an immutable list for consistency with the varargs
    // constructor (which already returned an ImmutableList).
    this.locations = ImmutableList.copyOf(
        flt.getLocationList().stream()
            .map(Location::new)
            .collect(Collectors.toList()));
    this.ticket = new Ticket(flt.getTicket());
  }

  /** Returns the (immutable) list of locations serving this endpoint. */
  public List<Location> getLocations() {
    return locations;
  }

  public Ticket getTicket() {
    return ticket;
  }

  /** Converts this endpoint back to its protocol-buffer representation. */
  Flight.FlightEndpoint toProtocol() {
    Flight.FlightEndpoint.Builder b = Flight.FlightEndpoint.newBuilder()
        .setTicket(ticket.toProtocol());
    for (Location l : locations) {
      b.addLocation(Flight.Location.newBuilder()
          .setHost(l.getHost())
          .setPort(l.getPort())
          .build());
    }
    return b.build();
  }
}
| apache-2.0 |
jinghaomiao/apollo | modules/planning/open_space/trajectory_smoother/dual_variable_warm_start_ipopt_interface_test.cc | 3994 | /******************************************************************************
* Copyright 2018 The Apollo Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*****************************************************************************/
/**
* @file
**/
#include "modules/planning/open_space/trajectory_smoother/dual_variable_warm_start_ipopt_interface.h"
#include "cyber/common/file.h"
#include "gtest/gtest.h"
#include "modules/planning/common/planning_gflags.h"
namespace apollo {
namespace planning {
// Test fixture for DualVariableWarmStartIPOPTInterface: loads the open-space
// planner configuration from test data and builds a small synthetic warm-start
// problem (5-step horizon, 10 obstacles) shared by all test cases below.
class DualVariableWarmStartIPOPTInterfaceTest : public ::testing::Test {
 public:
  virtual void SetUp() {
    FLAGS_planner_open_space_config_filename =
        "/apollo/modules/planning/testdata/conf/"
        "open_space_standard_parking_lot.pb.txt";
    ACHECK(apollo::cyber::common::GetProtoFromFile(
        FLAGS_planner_open_space_config_filename, &planner_open_space_config_))
        << "Failed to load open space config file "
        << FLAGS_planner_open_space_config_filename;
    ProblemSetup();
  }
 protected:
  // Builds obstacles_edges_num_ and constructs ptop_ from the members below.
  void ProblemSetup();
 protected:
  size_t horizon_ = 5;         // number of discretized time steps
  size_t obstacles_num_ = 10;  // number of obstacles in the synthetic scene
  double ts_ = 0.01;           // time-step length in seconds
  // Ego vehicle description, all ones (4 x 1); exact semantics of the four
  // entries are defined by the interface under test.
  Eigen::MatrixXd ego_ = Eigen::MatrixXd::Ones(4, 1);
  Eigen::MatrixXd last_time_u_ = Eigen::MatrixXd::Zero(2, 1);
  // Edge count per obstacle; filled by ProblemSetup() (4 edges each).
  Eigen::MatrixXi obstacles_edges_num_;
  // Half-plane obstacle representation A*x <= b, stacked over all obstacles.
  Eigen::MatrixXd obstacles_A_ = Eigen::MatrixXd::Ones(10, 2);
  Eigen::MatrixXd obstacles_b_ = Eigen::MatrixXd::Ones(10, 1);
  int num_of_variables_ = 0;
  double rx_ = 0.0;
  double ry_ = 0.0;
  double r_yaw_ = 0.0;
  int num_of_constraints_ = 0;
  // Object under test; (re)constructed in ProblemSetup().
  std::unique_ptr<DualVariableWarmStartIPOPTInterface> ptop_ = nullptr;
  PlannerOpenSpaceConfig planner_open_space_config_;
};
// Builds the synthetic warm-start problem used by every test: each of the
// obstacles_num_ obstacles is given 4 edges, and the coarse trajectory xWS is
// an all-ones 4 x (horizon_ + 1) state matrix.
void DualVariableWarmStartIPOPTInterfaceTest::ProblemSetup() {
  obstacles_edges_num_ = 4 * Eigen::MatrixXi::Ones(obstacles_num_, 1);
  Eigen::MatrixXd xWS = Eigen::MatrixXd::Ones(4, horizon_ + 1);
  ptop_.reset(new DualVariableWarmStartIPOPTInterface(
      horizon_, ts_, ego_, obstacles_edges_num_, obstacles_num_, obstacles_A_,
      obstacles_b_, xWS, planner_open_space_config_));
}
// Verifies that the fixture successfully constructed the interface.
// NOTE(review): the test name "initilization" is misspelled; renaming would
// change the registered gtest id (and any CI filters on it), so it is kept.
TEST_F(DualVariableWarmStartIPOPTInterfaceTest, initilization) {
  EXPECT_NE(ptop_, nullptr);
}
// Checks that variable and constraint bounds can be produced for the fixture
// problem (540 variables, 240 constraints).
TEST_F(DualVariableWarmStartIPOPTInterfaceTest, get_bounds_info) {
  // constexpr so the stack arrays below have constant bounds; variable-length
  // arrays are a GCC extension, not standard C++.
  constexpr int kNumOfVariables = 540;
  constexpr int kNumOfConstraints = 240;
  double x_l[kNumOfVariables];
  double x_u[kNumOfVariables];
  double g_l[kNumOfConstraints];
  double g_u[kNumOfConstraints];
  bool res = ptop_->get_bounds_info(kNumOfVariables, x_l, x_u,
                                    kNumOfConstraints, g_l, g_u);
  EXPECT_TRUE(res);
}
// Checks that an initial primal point can be produced (dual/bound multipliers
// are not requested: init_z and init_lambda are false).
TEST_F(DualVariableWarmStartIPOPTInterfaceTest, get_starting_point) {
  // constexpr so the stack arrays below have constant bounds; variable-length
  // arrays are a GCC extension, not standard C++.
  constexpr int kNumOfVariables = 540;
  constexpr int kNumOfConstraints = 240;
  bool init_x = true;
  bool init_z = false;
  bool init_lambda = false;
  double x[kNumOfVariables];
  double z_L[kNumOfVariables];
  double z_U[kNumOfVariables];
  // In the IPOPT TNLP API the constraint multipliers lambda have dimension m
  // (number of constraints), not n; the original over-allocated this array
  // with kNumOfVariables.
  double lambda[kNumOfConstraints];
  bool res =
      ptop_->get_starting_point(kNumOfVariables, init_x, x, init_z, z_L, z_U,
                                kNumOfConstraints, init_lambda, lambda);
  EXPECT_TRUE(res);
}
// Evaluates the objective at the all-1.2 point and pins its expected value
// under the test configuration.
TEST_F(DualVariableWarmStartIPOPTInterfaceTest, eval_f) {
  // constexpr so the stack array has a constant bound (no VLA extension).
  constexpr int kNumOfVariables = 540;
  // Initialize so a failed eval_f cannot leave obj_value indeterminate for
  // the EXPECT below.
  double obj_value = 0.0;
  double x[kNumOfVariables];
  std::fill_n(x, kNumOfVariables, 1.2);
  bool res = ptop_->eval_f(kNumOfVariables, x, true, obj_value);
  EXPECT_DOUBLE_EQ(obj_value, 72.000000000000085);
  EXPECT_TRUE(res);
}
} // namespace planning
} // namespace apollo
| apache-2.0 |
biospi/seamass-windeps | src/boost_1_57_0/libs/units/test/test_output.cpp | 27493 | // Boost.Units - A C++ library for zero-overhead dimensional analysis and
// unit/quantity manipulation and conversion
//
// Copyright (C) 2009 Steven Watanabe
// Copyright Paul A. Bristow 2010
//
// Distributed under the Boost Software License, Version 1.0. (See
// accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
/**
\file test_output.cpp
\brief
Test unit and quantity printing
\details
Tests for output from various units, name, symbol and raw formats, and automatic prefixing in engineering and binary units.
**/
#include <boost/units/quantity.hpp>
#include <boost/units/io.hpp>
#include <boost/units/unit.hpp>
#include <boost/units/scale.hpp>
#include <boost/units/scaled_base_unit.hpp>
#include <boost/units/make_scaled_unit.hpp>
#include <boost/units/base_unit.hpp>
#include <boost/units/make_system.hpp>
#include <boost/units/absolute.hpp>
#include <boost/units/physical_dimensions/length.hpp>
#include <boost/units/physical_dimensions/time.hpp>
#include <boost/units/physical_dimensions/velocity.hpp>
#include <boost/units/physical_dimensions/volume.hpp>
#include <boost/units/physical_dimensions/acceleration.hpp>
#include <boost/units/physical_dimensions/area.hpp>
#include <boost/regex.hpp>
#include <iostream>
#include <sstream>
#include <boost/config.hpp>
#include <limits>
#define BOOST_TEST_MAIN
#include <boost/test/unit_test.hpp>
// Minimal base units for a private test unit system: meter (length, tag 1)
// and second (time, tag 2), plus a dimensionless "byte" unit (tag 3) used by
// the binary-prefix tests.
struct meter_base_unit : boost::units::base_unit<meter_base_unit, boost::units::length_dimension, 1> {
    static const char* name() { return("meter"); }
    static const char* symbol() { return("m"); }
};
struct second_base_unit : boost::units::base_unit<second_base_unit, boost::units::time_dimension, 2> {
    static const char* name() { return("second"); }
    static const char* symbol() { return("s"); }
};
struct byte_base_unit : boost::units::base_unit<byte_base_unit, boost::units::dimensionless_type, 3> {
    static const char* name() { return("byte"); }
    static const char* symbol() { return("b"); }
};
// Derived units over the test system: plain and kilo-scaled (10^3) length,
// velocity and area, plus a milli-scaled (10^-3) second system.
typedef boost::units::make_system<meter_base_unit, second_base_unit>::type my_system;
typedef boost::units::unit<boost::units::length_dimension, my_system> length;
typedef boost::units::unit<boost::units::velocity_dimension, my_system> velocity;
typedef boost::units::make_scaled_unit<length, boost::units::scale<10, boost::units::static_rational<3> > >::type scaled_length;
typedef boost::units::make_scaled_unit<velocity, boost::units::scale<10, boost::units::static_rational<3> > >::type scaled_velocity1;
typedef boost::units::scaled_base_unit<second_base_unit, boost::units::scale<10, boost::units::static_rational<-3> > > millisecond_base_unit;
typedef boost::units::make_system<meter_base_unit, millisecond_base_unit>::type scaled_system;
typedef boost::units::unit<boost::units::time_dimension, scaled_system> scaled_time;
typedef boost::units::unit<boost::units::velocity_dimension, scaled_system> scaled_velocity2;
typedef boost::units::unit<boost::units::area_dimension, my_system> area;
typedef boost::units::make_scaled_unit<area, boost::units::scale<10, boost::units::static_rational<3> > >::type scaled_area;
// Doubly-scaled units: a kibi (2^10) scaling of the kilometer, and a base
// unit that is 100^1 meters (given its own symbol/name below).
typedef boost::units::make_scaled_unit<scaled_length, boost::units::scale<2, boost::units::static_rational<10> > >::type double_scaled_length;
typedef boost::units::scaled_base_unit<meter_base_unit, boost::units::scale<100, boost::units::static_rational<1> > > scaled_length_base_unit;
// Unit-info specialization: gives the 100-meter-scaled base unit its own
// symbol/name ("scm"/"scaled_meter") instead of an auto-generated prefixed
// form; the tests below check that prefixes compose on top of these.
namespace boost {
namespace units {
template<>
struct base_unit_info<scaled_length_base_unit> {
    static const char* symbol() { return("scm"); }
    static const char* name() { return("scaled_meter"); }
};
}
}
// Kilo-scaling of the custom-named base unit above.
typedef boost::units::scaled_base_unit<scaled_length_base_unit, boost::units::scale<10, boost::units::static_rational<3> > > double_scaled_length_base_unit;
typedef double_scaled_length_base_unit::unit_type double_scaled_length2;
// "custom" units: volume (custom1) and acceleration (custom2) whose printed
// name/symbol come from these ADL-found overloads rather than the default
// composed form; raw_format is expected to ignore these overloads.
typedef boost::units::reduce_unit<boost::units::unit<boost::units::volume_dimension, my_system> >::type custom1;
std::string name_string(const custom1&) { return("custom1"); }
std::string symbol_string(const custom1&) { return("c1"); }
typedef boost::units::reduce_unit<boost::units::unit<boost::units::acceleration_dimension, my_system> >::type custom2;
const char* name_string(const custom2&) { return("custom2"); }
const char* symbol_string(const custom2&) { return("c2"); }
typedef boost::units::make_scaled_unit<custom1, boost::units::scale<10, boost::units::static_rational<3> > >::type scaled_custom1;
typedef boost::units::make_scaled_unit<custom2, boost::units::scale<10, boost::units::static_rational<3> > >::type scaled_custom2;
// Test helper macros. Each streams `v` through the formatters spelled out in
// the (per-test-case) FORMATTERS macro and checks the result. When wide
// characters are available, every check runs on both a narrow and a wide
// stream.
#ifndef BOOST_NO_CWCHAR
// Exact-match check against `expected`.
#define BOOST_UNITS_TEST_OUTPUT(v, expected)    \
{                                               \
    std::ostringstream ss;                      \
    ss FORMATTERS << v;                         \
    BOOST_CHECK_EQUAL(ss.str(), expected);      \
}                                               \
{                                               \
    std::wostringstream ss;                     \
    ss FORMATTERS << v;                         \
    BOOST_CHECK(ss.str() == BOOST_PP_CAT(L, expected)); \
}
// Regex-match check, for output whose exact text is platform-dependent
// (exponent digit counts, inf/nan spellings).
#define BOOST_UNITS_TEST_OUTPUT_REGEX(v, expected)  \
{                                                   \
    std::ostringstream ss;                          \
    ss FORMATTERS << v;                             \
    boost::regex r(expected);                       \
    BOOST_CHECK_MESSAGE(boost::regex_match(ss.str(), r), \
        ss.str() + " does not match " + expected);  \
}                                                   \
{                                                   \
    std::wostringstream ss;                         \
    ss FORMATTERS << v;                             \
    boost::wregex r(BOOST_PP_CAT(L, expected));     \
    BOOST_CHECK(boost::regex_match(ss.str(), r));   \
}
// No check at all: just prints the formatted value (manual inspection aid).
#define BOOST_UNITS_TEST_OUTPUT_DISPLAY(v)  \
{                                           \
    std::ostringstream ss;                  \
    ss FORMATTERS << v;                     \
    std::cout << #v << ": " << ss.str() << std::endl;   \
}                                           \
{                                           \
    std::wostringstream ss;                 \
    ss FORMATTERS << v;                     \
    std::wcout << #v << ": " << ss.str() << std::endl;  \
}
#else
// Narrow-character-only fallbacks of the three macros above.
#define BOOST_UNITS_TEST_OUTPUT(v, expected)    \
{                                               \
    std::ostringstream ss;                      \
    ss FORMATTERS << v;                         \
    BOOST_CHECK_EQUAL(ss.str(), expected);      \
}
#define BOOST_UNITS_TEST_OUTPUT_REGEX(v, expected)  \
{                                                   \
    std::ostringstream ss;                          \
    ss FORMATTERS << v;                             \
    boost::regex r(expected);                       \
    BOOST_CHECK_MESSAGE(boost::regex_match(ss.str(), r), \
        ss.str() + " does not match " + expected);  \
}
#define BOOST_UNITS_TEST_OUTPUT_DISPLAY(v)  \
{                                           \
    std::ostringstream ss;                  \
    ss FORMATTERS << v;                     \
    std::cout << #v << ": " << ss.str() << std::endl;   \
}
#endif
// Unit (not quantity) output in the default symbol format, no auto-prefix.
BOOST_AUTO_TEST_CASE(test_output_unit_symbol)
{ // base units using default symbol_format (no format specified) and no auto prefixing.
#define FORMATTERS
    BOOST_UNITS_TEST_OUTPUT(meter_base_unit::unit_type(), "m");
    BOOST_UNITS_TEST_OUTPUT(velocity(), "m s^-1");
    BOOST_UNITS_TEST_OUTPUT(scaled_length(), "km");
    BOOST_UNITS_TEST_OUTPUT(scaled_velocity1(), "k(m s^-1)");
    BOOST_UNITS_TEST_OUTPUT(millisecond_base_unit::unit_type(), "ms");
    BOOST_UNITS_TEST_OUTPUT(scaled_time(), "ms");
    BOOST_UNITS_TEST_OUTPUT(scaled_velocity2(), "m ms^-1");
    BOOST_UNITS_TEST_OUTPUT(area(), "m^2");
    BOOST_UNITS_TEST_OUTPUT(scaled_area(), "k(m^2)");
    BOOST_UNITS_TEST_OUTPUT(double_scaled_length(), "Kikm");
    BOOST_UNITS_TEST_OUTPUT(double_scaled_length2(), "kscm");
    BOOST_UNITS_TEST_OUTPUT(custom1(), "c1");
    BOOST_UNITS_TEST_OUTPUT(custom2(), "c2");
    BOOST_UNITS_TEST_OUTPUT(scaled_custom1(), "kc1");
    BOOST_UNITS_TEST_OUTPUT(scaled_custom2(), "kc2");
    BOOST_UNITS_TEST_OUTPUT(boost::units::absolute<meter_base_unit::unit_type>(), "absolute m");
#undef FORMATTERS
}
// Unit output in raw format: like symbol format, but user-provided
// name_string/symbol_string overloads for the custom units are ignored.
BOOST_AUTO_TEST_CASE(test_output_unit_raw)
{ // raw format specified
#define FORMATTERS << boost::units::raw_format
    BOOST_UNITS_TEST_OUTPUT(meter_base_unit::unit_type(), "m");
    BOOST_UNITS_TEST_OUTPUT(velocity(), "m s^-1");
    BOOST_UNITS_TEST_OUTPUT(scaled_length(), "km");
    BOOST_UNITS_TEST_OUTPUT(scaled_velocity1(), "k(m s^-1)");
    BOOST_UNITS_TEST_OUTPUT(millisecond_base_unit::unit_type(), "ms");
    BOOST_UNITS_TEST_OUTPUT(scaled_time(), "ms");
    BOOST_UNITS_TEST_OUTPUT(scaled_velocity2(), "m ms^-1");
    BOOST_UNITS_TEST_OUTPUT(area(), "m^2");
    BOOST_UNITS_TEST_OUTPUT(scaled_area(), "k(m^2)");
    BOOST_UNITS_TEST_OUTPUT(double_scaled_length(), "Kikm");
    BOOST_UNITS_TEST_OUTPUT(double_scaled_length2(), "kscm");
    // when using raw format, we ignore the user defined overloads
    BOOST_UNITS_TEST_OUTPUT(custom1(), "m^3");
    BOOST_UNITS_TEST_OUTPUT(custom2(), "m s^-2");
    BOOST_UNITS_TEST_OUTPUT(scaled_custom1(), "k(m^3)");
    BOOST_UNITS_TEST_OUTPUT(scaled_custom2(), "k(m s^-2)");
    BOOST_UNITS_TEST_OUTPUT(boost::units::absolute<meter_base_unit::unit_type>(), "absolute m");
#undef FORMATTERS
}
// Unit output in name format: full unit names ("meter") and spelled-out
// prefixes ("kilo", "kibi").
BOOST_AUTO_TEST_CASE(test_output_unit_name)
{ // name format specified.
#define FORMATTERS << boost::units::name_format
    BOOST_UNITS_TEST_OUTPUT(meter_base_unit::unit_type(), "meter");
    BOOST_UNITS_TEST_OUTPUT(velocity(), "meter second^-1");
    BOOST_UNITS_TEST_OUTPUT(scaled_length(), "kilometer");
    BOOST_UNITS_TEST_OUTPUT(scaled_velocity1(), "kilo(meter second^-1)");
    BOOST_UNITS_TEST_OUTPUT(millisecond_base_unit::unit_type(), "millisecond");
    BOOST_UNITS_TEST_OUTPUT(scaled_time(), "millisecond");
    BOOST_UNITS_TEST_OUTPUT(scaled_velocity2(), "meter millisecond^-1");
    BOOST_UNITS_TEST_OUTPUT(area(), "meter^2");
    BOOST_UNITS_TEST_OUTPUT(scaled_area(), "kilo(meter^2)");
    BOOST_UNITS_TEST_OUTPUT(double_scaled_length(), "kibikilometer");
    BOOST_UNITS_TEST_OUTPUT(double_scaled_length2(), "kiloscaled_meter");
    BOOST_UNITS_TEST_OUTPUT(custom1(), "custom1");
    BOOST_UNITS_TEST_OUTPUT(custom2(), "custom2");
    BOOST_UNITS_TEST_OUTPUT(scaled_custom1(), "kilocustom1");
    BOOST_UNITS_TEST_OUTPUT(scaled_custom2(), "kilocustom2");
    BOOST_UNITS_TEST_OUTPUT(boost::units::absolute<meter_base_unit::unit_type>(), "absolute meter");
#undef FORMATTERS
}
// Quantity (value + unit) output in the default symbol format.
BOOST_AUTO_TEST_CASE(test_output_quantity_symbol)
{ // quantity symbols using default format.
#define FORMATTERS
    BOOST_UNITS_TEST_OUTPUT(1.5*meter_base_unit::unit_type(), "1.5 m");
    BOOST_UNITS_TEST_OUTPUT(1.5*velocity(), "1.5 m s^-1");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_length(), "1.5 km");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_velocity1(), "1.5 k(m s^-1)");
    BOOST_UNITS_TEST_OUTPUT(1.5*millisecond_base_unit::unit_type(), "1.5 ms");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_time(), "1.5 ms");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_velocity2(), "1.5 m ms^-1");
    BOOST_UNITS_TEST_OUTPUT(1.5*area(), "1.5 m^2");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_area(), "1.5 k(m^2)");
    BOOST_UNITS_TEST_OUTPUT(1.5*double_scaled_length(), "1.5 Kikm");
    BOOST_UNITS_TEST_OUTPUT(1.5*double_scaled_length2(), "1.5 kscm");
    BOOST_UNITS_TEST_OUTPUT(1.5*custom1(), "1.5 c1");
    BOOST_UNITS_TEST_OUTPUT(1.5*custom2(), "1.5 c2");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_custom1(), "1.5 kc1");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_custom2(), "1.5 kc2");
    BOOST_UNITS_TEST_OUTPUT(1.5*boost::units::absolute<meter_base_unit::unit_type>(), "1.5 absolute m");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 10) * byte_base_unit::unit_type(), "1024 b");
#undef FORMATTERS
}
// Quantity output in raw format: custom name/symbol overloads are ignored.
BOOST_AUTO_TEST_CASE(test_output_quantity_raw)
{ // quantity symbols using raw format.
#define FORMATTERS << boost::units::raw_format
    BOOST_UNITS_TEST_OUTPUT(1.5*meter_base_unit::unit_type(), "1.5 m");
    BOOST_UNITS_TEST_OUTPUT(1.5*velocity(), "1.5 m s^-1");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_length(), "1.5 km");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_velocity1(), "1.5 k(m s^-1)");
    BOOST_UNITS_TEST_OUTPUT(1.5*millisecond_base_unit::unit_type(), "1.5 ms");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_time(), "1.5 ms");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_velocity2(), "1.5 m ms^-1");
    BOOST_UNITS_TEST_OUTPUT(1.5*area(), "1.5 m^2");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_area(), "1.5 k(m^2)");
    BOOST_UNITS_TEST_OUTPUT(1.5*double_scaled_length(), "1.5 Kikm");
    BOOST_UNITS_TEST_OUTPUT(1.5*double_scaled_length2(), "1.5 kscm");
    // when using raw format, we ignore the user defined overloads
    BOOST_UNITS_TEST_OUTPUT(1.5*custom1(), "1.5 m^3");
    BOOST_UNITS_TEST_OUTPUT(1.5*custom2(), "1.5 m s^-2");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_custom1(), "1.5 k(m^3)");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_custom2(), "1.5 k(m s^-2)");
    BOOST_UNITS_TEST_OUTPUT(1.5*boost::units::absolute<meter_base_unit::unit_type>(), "1.5 absolute m");
#undef FORMATTERS
}
// Quantity output in name format.
BOOST_AUTO_TEST_CASE(test_output_quantity_name)
{ // quantity names using name format.
#define FORMATTERS << boost::units::name_format
    BOOST_UNITS_TEST_OUTPUT(1.5*meter_base_unit::unit_type(), "1.5 meter");
    BOOST_UNITS_TEST_OUTPUT(1.5*velocity(), "1.5 meter second^-1");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_length(), "1.5 kilometer");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_velocity1(), "1.5 kilo(meter second^-1)");
    BOOST_UNITS_TEST_OUTPUT(1.5*millisecond_base_unit::unit_type(), "1.5 millisecond");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_time(), "1.5 millisecond");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_velocity2(), "1.5 meter millisecond^-1");
    BOOST_UNITS_TEST_OUTPUT(1.5*area(), "1.5 meter^2");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_area(), "1.5 kilo(meter^2)");
    BOOST_UNITS_TEST_OUTPUT(1.5*double_scaled_length(), "1.5 kibikilometer");
    BOOST_UNITS_TEST_OUTPUT(1.5*double_scaled_length2(), "1.5 kiloscaled_meter");
    BOOST_UNITS_TEST_OUTPUT(1.5*custom1(), "1.5 custom1");
    BOOST_UNITS_TEST_OUTPUT(1.5*custom2(), "1.5 custom2");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_custom1(), "1.5 kilocustom1");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_custom2(), "1.5 kilocustom2");
    BOOST_UNITS_TEST_OUTPUT(1.5*boost::units::absolute<meter_base_unit::unit_type>(), "1.5 absolute meter");
#undef FORMATTERS
}
// Engineering auto-prefix (powers of 10^3) combined with name format,
// including out-of-range values, numeric limits, inf/nan, and scalars.
BOOST_AUTO_TEST_CASE(test_output_autoprefixed_quantity_name)
{ // Engineering autoprefix, with name format.
#define FORMATTERS << boost::units::name_format << boost::units::engineering_prefix
    // Single base unit like meter.
    BOOST_UNITS_TEST_OUTPUT(1.5*meter_base_unit::unit_type(), "1.5 meter");
    BOOST_UNITS_TEST_OUTPUT(1500.0*meter_base_unit::unit_type(), "1.5 kilometer");
    BOOST_UNITS_TEST_OUTPUT(1.5e7*meter_base_unit::unit_type(), "15 megameter");
    BOOST_UNITS_TEST_OUTPUT(1.5e-3*meter_base_unit::unit_type(), "1.5 millimeter");
    BOOST_UNITS_TEST_OUTPUT(1.5e-9*meter_base_unit::unit_type(), "1.5 nanometer");
    BOOST_UNITS_TEST_OUTPUT(1.5e-8*meter_base_unit::unit_type(), "15 nanometer");
    BOOST_UNITS_TEST_OUTPUT(1.5e-10*meter_base_unit::unit_type(), "150 picometer");
    BOOST_UNITS_TEST_OUTPUT(0.0000000012345 * meter_base_unit::unit_type(), "1.2345 nanometer");
    // Too small or large for a multiple name.
    BOOST_UNITS_TEST_OUTPUT_REGEX(9.99999e-25 * meter_base_unit::unit_type(), "9\\.99999e-0?25 meter"); // Just too small for multiple.
    BOOST_UNITS_TEST_OUTPUT_REGEX(1e+28 * meter_base_unit::unit_type(), "1e\\+0?28 meter"); // Just too large for multiple.
    BOOST_UNITS_TEST_OUTPUT_REGEX(1.5e-25 * meter_base_unit::unit_type(), "1\\.5e-0?25 meter"); // Too small for multiple.
    BOOST_UNITS_TEST_OUTPUT_REGEX(1.5e+28 * meter_base_unit::unit_type(), "1\\.5e\\+0?28 meter"); // Too large for multiple.
    // Extremes of the float and double representable ranges.
    BOOST_UNITS_TEST_OUTPUT_REGEX(std::numeric_limits<float>::max()*meter_base_unit::unit_type(), "3\\.40282e\\+0?38 meter");
    BOOST_UNITS_TEST_OUTPUT_REGEX(std::numeric_limits<float>::min()*meter_base_unit::unit_type(), "1\\.17549e-0?38 meter");
    BOOST_UNITS_TEST_OUTPUT(std::numeric_limits<double>::max()*meter_base_unit::unit_type(), "1.79769e+308 meter");
    BOOST_UNITS_TEST_OUTPUT(std::numeric_limits<double>::min()*meter_base_unit::unit_type(), "2.22507e-308 meter");
    // Infinity and NaN (spelling is platform-dependent, hence the regexes).
    BOOST_UNITS_TEST_OUTPUT_REGEX(std::numeric_limits<float>::infinity()*meter_base_unit::unit_type(), "(1\\.#INF|inf|INF) meter");
    BOOST_UNITS_TEST_OUTPUT_REGEX(-std::numeric_limits<float>::infinity()*meter_base_unit::unit_type(), "-(1\\.#INF|inf|INF) meter");
    BOOST_UNITS_TEST_OUTPUT_REGEX(std::numeric_limits<double>::quiet_NaN()*meter_base_unit::unit_type(), "(1\\.#QNAN|nan|NaNQ) meter");
    BOOST_UNITS_TEST_OUTPUT_REGEX(-std::numeric_limits<double>::quiet_NaN()*meter_base_unit::unit_type(), "-?(1\\.#IND|nan|nan\\(ind\\)|NaNQ) meter");
    BOOST_UNITS_TEST_OUTPUT(1.5*velocity(), "1.5 meter second^-1");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_length(), "1.5 kilometer");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_velocity1(), "1.5 kilo(meter second^-1)");
    BOOST_UNITS_TEST_OUTPUT(1.5*millisecond_base_unit::unit_type(), "1.5 millisecond");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_time(), "1.5 millisecond");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_velocity2(), "1.5 meter millisecond^-1");
    BOOST_UNITS_TEST_OUTPUT(1.5*area(), "1.5 meter^2");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_area(), "1.5 kilo(meter^2)");
    BOOST_UNITS_TEST_OUTPUT(1.5*double_scaled_length(), "1.536 megameter"); // 1.5 * 2^10 = 1.5 * 1024 = 1.536
    BOOST_UNITS_TEST_OUTPUT(1.5*double_scaled_length2(), "1.5 kiloscaled_meter");
    BOOST_UNITS_TEST_OUTPUT(1.5*custom1(), "1.5 custom1");
    BOOST_UNITS_TEST_OUTPUT(1.5*custom2(), "1.5 custom2");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_custom1(), "1.5 kilocustom1");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_custom2(), "1.5 kilocustom2");
    BOOST_UNITS_TEST_OUTPUT(1.5*boost::units::absolute<meter_base_unit::unit_type>(), "1.5 absolute meter");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 10) * byte_base_unit::unit_type(), "1.024 kilobyte");
    BOOST_UNITS_TEST_OUTPUT(1.5, "1.5"); // scalar.
    BOOST_UNITS_TEST_OUTPUT(1567., "1567"); // scalars are *not* autoprefixed.
    BOOST_UNITS_TEST_OUTPUT(0.00015, "0.00015"); // scalars are *not* autoprefixed.
    BOOST_UNITS_TEST_OUTPUT(-1.5, "-1.5"); // scalar.
    BOOST_UNITS_TEST_OUTPUT(-1567., "-1567"); // scalars are *not* autoprefixed.
    BOOST_UNITS_TEST_OUTPUT(-0.00015, "-0.00015"); // scalars are *not* autoprefixed.
#undef FORMATTERS
}
// Engineering auto-prefix combined with symbol format.
BOOST_AUTO_TEST_CASE(test_output_autoprefixed_quantity_symbol)
{ // Engineering autoprefix, with symbol format.
#define FORMATTERS << boost::units::symbol_format << boost::units::engineering_prefix
    // Single base unit like m.
    BOOST_UNITS_TEST_OUTPUT(1.5*meter_base_unit::unit_type(), "1.5 m");
    BOOST_UNITS_TEST_OUTPUT(1500.0*meter_base_unit::unit_type(), "1.5 km");
    BOOST_UNITS_TEST_OUTPUT(1.5e7*meter_base_unit::unit_type(), "15 Mm");
    BOOST_UNITS_TEST_OUTPUT(1.5e-3*meter_base_unit::unit_type(), "1.5 mm");
    BOOST_UNITS_TEST_OUTPUT(1.5e-9*meter_base_unit::unit_type(), "1.5 nm");
    BOOST_UNITS_TEST_OUTPUT(1.5e-8*meter_base_unit::unit_type(), "15 nm");
    BOOST_UNITS_TEST_OUTPUT(1.5e-10*meter_base_unit::unit_type(), "150 pm");
    // Too small or large for a multiple name.
    BOOST_UNITS_TEST_OUTPUT_REGEX(9.99999e-25 * meter_base_unit::unit_type(), "9\\.99999e-0?25 m"); // Just too small for multiple.
    BOOST_UNITS_TEST_OUTPUT_REGEX(1e+28 * meter_base_unit::unit_type(), "1e\\+0?28 m"); // Just too large for multiple.
    BOOST_UNITS_TEST_OUTPUT_REGEX(1.5e-25 * meter_base_unit::unit_type(), "1\\.5e-0?25 m"); // Too small for multiple.
    BOOST_UNITS_TEST_OUTPUT_REGEX(1.5e+28 * meter_base_unit::unit_type(), "1\\.5e\\+0?28 m"); // Too large for multiple.
    // Extremes of the float and double representable ranges.
    BOOST_UNITS_TEST_OUTPUT_REGEX(std::numeric_limits<float>::max()*meter_base_unit::unit_type(), "3\\.40282e\\+0?38 m");
    BOOST_UNITS_TEST_OUTPUT_REGEX(std::numeric_limits<float>::min()*meter_base_unit::unit_type(), "1\\.17549e-0?38 m");
    BOOST_UNITS_TEST_OUTPUT(std::numeric_limits<double>::max()*meter_base_unit::unit_type(), "1.79769e+308 m");
    BOOST_UNITS_TEST_OUTPUT(std::numeric_limits<double>::min()*meter_base_unit::unit_type(), "2.22507e-308 m");
    BOOST_UNITS_TEST_OUTPUT(1.5*velocity(), "1.5 m s^-1");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_length(), "1.5 km");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_velocity1(), "1.5 k(m s^-1)");
    BOOST_UNITS_TEST_OUTPUT(1.5*millisecond_base_unit::unit_type(), "1.5 ms");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_time(), "1.5 ms");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_velocity2(), "1.5 m ms^-1");
    BOOST_UNITS_TEST_OUTPUT(1.5*area(), "1.5 m^2");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_area(), "1.5 k(m^2)");
    BOOST_UNITS_TEST_OUTPUT(1.5*double_scaled_length(), "1.536 Mm"); // 1.5 * 2^10 = 1.5 * 1024 = 1.536
    BOOST_UNITS_TEST_OUTPUT(1.5*double_scaled_length2(), "1.5 kscm");
    BOOST_UNITS_TEST_OUTPUT(1.5*custom1(), "1.5 c1");
    BOOST_UNITS_TEST_OUTPUT(1.5*custom2(), "1.5 c2");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_custom1(), "1.5 kc1");
    BOOST_UNITS_TEST_OUTPUT(1.5*scaled_custom2(), "1.5 kc2");
    BOOST_UNITS_TEST_OUTPUT(1.5*boost::units::absolute<meter_base_unit::unit_type>(), "1.5 absolute m");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 10) * byte_base_unit::unit_type(), "1.024 kb");
#undef FORMATTERS
}
// Binary auto-prefix (powers of 2^10: Ki, Mi, ...) with symbol format.
BOOST_AUTO_TEST_CASE(test_output_auto_binary_prefixed_quantity_symbol)
{ // Binary prefix with symbol format.
#define FORMATTERS << boost::units::symbol_format << boost::units::binary_prefix
    BOOST_UNITS_TEST_OUTPUT(1024 * byte_base_unit::unit_type(), "1 Kib");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 20) * byte_base_unit::unit_type(), "1 Mib");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 30) * byte_base_unit::unit_type(), "1 Gib");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 40) * byte_base_unit::unit_type(), "1 Tib");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 50) * byte_base_unit::unit_type(), "1 Pib");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 60) * byte_base_unit::unit_type(), "1 Eib");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 70) * byte_base_unit::unit_type(), "1 Zib");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 80) * byte_base_unit::unit_type(), "1 Yib");
    BOOST_UNITS_TEST_OUTPUT(42, "42"); // integer scalar.
    BOOST_UNITS_TEST_OUTPUT(-42, "-42"); // integer scalar.
    BOOST_UNITS_TEST_OUTPUT(1567, "1567"); // scalars are *not* autoprefixed.
    BOOST_UNITS_TEST_OUTPUT(-1567, "-1567"); // scalars are *not* autoprefixed.
#undef FORMATTERS
}
// Binary auto-prefix with name format (kibi, mebi, ... per IEC 60027-2 /
// IEC 80000-13).
BOOST_AUTO_TEST_CASE(test_output_auto_binary_prefixed_quantity_name)
{ // Binary prefix with name format.
    // http://physics.nist.gov/cuu/Units/binary.html
    // 1998 the International Electrotechnical Commission (IEC) approved
    // IEC 60027-2, Second edition, 2000-11, Letter symbols to be used in electrical technology
    // - Part 2: Telecommunications and electronics.
    // IEC 80000-13:2008, Quantities and units
    // – Part 13: Information science and technology
#define FORMATTERS << boost::units::name_format << boost::units::binary_prefix
    BOOST_UNITS_TEST_OUTPUT(2048 * byte_base_unit::unit_type(), "2 kibibyte");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 32) *byte_base_unit::unit_type(), "4 gibibyte");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 41) *byte_base_unit::unit_type(), "2 tebibyte"); // http://en.wikipedia.org/wiki/Tebibyte
    BOOST_UNITS_TEST_OUTPUT(pow(2., 50) *byte_base_unit::unit_type(), "1 pebibyte");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 60) *byte_base_unit::unit_type(), "1 exbibyte");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 70) *byte_base_unit::unit_type(), "1 zebibyte");
    BOOST_UNITS_TEST_OUTPUT(pow(2., 80) *byte_base_unit::unit_type(), "1 yobibyte");
    BOOST_UNITS_TEST_OUTPUT(2048, "2048"); // scalars are *not* autoprefixed.
    BOOST_UNITS_TEST_OUTPUT(-4096, "-4096"); // scalars are *not* autoprefixed.
#undef FORMATTERS
}
// Tests on using more than one format or prefix - only the last specified should be used.
// (This may indicate a programming mistake, but it is ignored).
// Duplicate format manipulators: the last one specified (name_format) wins.
BOOST_AUTO_TEST_CASE(test_output_quantity_name_duplicate)
{ // Ensure that if more than one format specified, only the last is used.
#define FORMATTERS << boost::units::symbol_format << boost::units::name_format
    BOOST_UNITS_TEST_OUTPUT(1.5*meter_base_unit::unit_type(), "1.5 meter");
#undef FORMATTERS
}
// Duplicate format manipulators: the last one specified (symbol_format) wins.
BOOST_AUTO_TEST_CASE(test_output_quantity_symbol_duplicate)
{ // Ensure that if more than one format specified, only the last is used.
#define FORMATTERS << boost::units::name_format << boost::units::symbol_format
    BOOST_UNITS_TEST_OUTPUT(1.5*meter_base_unit::unit_type(), "1.5 m");
#undef FORMATTERS
}
// Duplicate autoprefix manipulators: the last one (engineering_prefix, i.e.
// powers of 1000) wins, so 2048 byte renders as 2.048 kilobyte.
BOOST_AUTO_TEST_CASE(test_output_auto_binary_prefixed_quantity_name_duplicate)
{ // Ensure that if more than one auto prefix specified, only the last is used.
#define FORMATTERS << boost::units::name_format << boost::units::binary_prefix << boost::units::engineering_prefix
    BOOST_UNITS_TEST_OUTPUT(2048 * byte_base_unit::unit_type(), "2.048 kilobyte");
#undef FORMATTERS
}
// Duplicate autoprefix manipulators: the last one (binary_prefix, i.e.
// powers of 1024) wins, so 2048 byte renders as 2 Kib.
BOOST_AUTO_TEST_CASE(test_output_auto_binary_prefixed_quantity_symbol_duplicate)
{ // Ensure that if more than one auto prefix specified, only the last is used.
#define FORMATTERS << boost::units::symbol_format << boost::units::engineering_prefix << boost::units::binary_prefix
    BOOST_UNITS_TEST_OUTPUT(2048 * byte_base_unit::unit_type(), "2 Kib");
#undef FORMATTERS
}
// Smoke-tests typename_format across every unit/quantity flavor used above.
// Output is only displayed, never asserted, because the demangled type names
// differ between compilers/platforms.
BOOST_AUTO_TEST_CASE(test_output_typename_format)
{ // Displays typename formatting result. The test doesn't check the formatting result
  // and thus doesn't fail because the formatting result is platform-dependent.
#define FORMATTERS << boost::units::typename_format
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(meter_base_unit::unit_type());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(velocity());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(scaled_length());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(scaled_velocity1());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(millisecond_base_unit::unit_type());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(scaled_time());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(scaled_velocity2());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(area());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(scaled_area());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(double_scaled_length());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(double_scaled_length2());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(custom1());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(custom2());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(scaled_custom1());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(scaled_custom2());
    BOOST_UNITS_TEST_OUTPUT_DISPLAY(boost::units::absolute<meter_base_unit::unit_type>());
#undef FORMATTERS
}
| apache-2.0 |
CaoLP/tranhviet | Code/tranhviet/gocart/language/english/filter_lang.php | 882 | <?php
/******************************************
US English
Admin filter Language
******************************************/
// Page titles and form labels.
$lang['filters'] = 'Filters';
$lang['error_not_found'] = 'The requested filter could not be found.';
$lang['filter_id'] = 'ID';
$lang['filter_form'] = 'Filter Form';
$lang['desc_name'] = 'Descriptive Name';
$lang['url_handle'] = 'URL Handle (Must be Unique)';
// Flash / confirmation messages shown after admin actions.
$lang['message_filter_saved'] = 'The filter has been saved!';
$lang['message_delete_filter'] = 'The filter has been deleted.';
$lang['confirm_delete_filter'] = 'Are you sure you want to delete this filter?';
// NOTE(review): capitalisation of "filter" is inconsistent in the strings
// below ('Add New filter'); these are user-visible, so confirm before changing.
$lang['add_new_filter'] = 'Add New filter';
$lang['no_filters'] = 'There are currently no filters';
$lang['slug_exists'] = 'The filter slug entered already exists';
// Miscellaneous column/field labels.
$lang['attributes'] = 'Attributes';
$lang['slug'] = 'URL Handle';
$lang['parent'] = 'Parent';
$lang['name'] = 'Name';
| apache-2.0 |
aws/aws-sdk-cpp | aws-cpp-sdk-sagemaker/source/model/Endpoint.cpp | 6313 | /**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/sagemaker/model/Endpoint.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <utility>
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
namespace Aws
{
namespace SageMaker
{
namespace Model
{
// Default constructor (generated code): every field starts "unset" so that
// Jsonize() emits nothing until a value is explicitly assigned; the status
// enum additionally starts at its NOT_SET sentinel.
Endpoint::Endpoint() : 
    m_endpointNameHasBeenSet(false),
    m_endpointArnHasBeenSet(false),
    m_endpointConfigNameHasBeenSet(false),
    m_productionVariantsHasBeenSet(false),
    m_dataCaptureConfigHasBeenSet(false),
    m_endpointStatus(EndpointStatus::NOT_SET),
    m_endpointStatusHasBeenSet(false),
    m_failureReasonHasBeenSet(false),
    m_creationTimeHasBeenSet(false),
    m_lastModifiedTimeHasBeenSet(false),
    m_monitoringSchedulesHasBeenSet(false),
    m_tagsHasBeenSet(false)
{
}
// JSON constructor (generated code): initializes all flags to unset, then
// delegates the actual parsing to operator=(JsonView) via `*this = jsonValue`.
Endpoint::Endpoint(JsonView jsonValue) : 
    m_endpointNameHasBeenSet(false),
    m_endpointArnHasBeenSet(false),
    m_endpointConfigNameHasBeenSet(false),
    m_productionVariantsHasBeenSet(false),
    m_dataCaptureConfigHasBeenSet(false),
    m_endpointStatus(EndpointStatus::NOT_SET),
    m_endpointStatusHasBeenSet(false),
    m_failureReasonHasBeenSet(false),
    m_creationTimeHasBeenSet(false),
    m_lastModifiedTimeHasBeenSet(false),
    m_monitoringSchedulesHasBeenSet(false),
    m_tagsHasBeenSet(false)
{
  *this = jsonValue;
}
// Deserializes an Endpoint from JSON (generated code). Only keys present in
// the payload are read; for each one the member is assigned and the matching
// "HasBeenSet" flag is raised so Jsonize() will round-trip it. Absent keys
// leave the member and flag untouched.
Endpoint& Endpoint::operator =(JsonView jsonValue)
{
  if(jsonValue.ValueExists("EndpointName"))
  {
    m_endpointName = jsonValue.GetString("EndpointName");

    m_endpointNameHasBeenSet = true;
  }

  if(jsonValue.ValueExists("EndpointArn"))
  {
    m_endpointArn = jsonValue.GetString("EndpointArn");

    m_endpointArnHasBeenSet = true;
  }

  if(jsonValue.ValueExists("EndpointConfigName"))
  {
    m_endpointConfigName = jsonValue.GetString("EndpointConfigName");

    m_endpointConfigNameHasBeenSet = true;
  }

  // Array members are appended element by element into the member vector.
  if(jsonValue.ValueExists("ProductionVariants"))
  {
    Array<JsonView> productionVariantsJsonList = jsonValue.GetArray("ProductionVariants");
    for(unsigned productionVariantsIndex = 0; productionVariantsIndex < productionVariantsJsonList.GetLength(); ++productionVariantsIndex)
    {
      m_productionVariants.push_back(productionVariantsJsonList[productionVariantsIndex].AsObject());
    }
    m_productionVariantsHasBeenSet = true;
  }

  if(jsonValue.ValueExists("DataCaptureConfig"))
  {
    m_dataCaptureConfig = jsonValue.GetObject("DataCaptureConfig");

    m_dataCaptureConfigHasBeenSet = true;
  }

  // The status string is mapped onto the EndpointStatus enum.
  if(jsonValue.ValueExists("EndpointStatus"))
  {
    m_endpointStatus = EndpointStatusMapper::GetEndpointStatusForName(jsonValue.GetString("EndpointStatus"));

    m_endpointStatusHasBeenSet = true;
  }

  if(jsonValue.ValueExists("FailureReason"))
  {
    m_failureReason = jsonValue.GetString("FailureReason");

    m_failureReasonHasBeenSet = true;
  }

  // Timestamps arrive as JSON doubles (epoch seconds with ms precision).
  if(jsonValue.ValueExists("CreationTime"))
  {
    m_creationTime = jsonValue.GetDouble("CreationTime");

    m_creationTimeHasBeenSet = true;
  }

  if(jsonValue.ValueExists("LastModifiedTime"))
  {
    m_lastModifiedTime = jsonValue.GetDouble("LastModifiedTime");

    m_lastModifiedTimeHasBeenSet = true;
  }

  if(jsonValue.ValueExists("MonitoringSchedules"))
  {
    Array<JsonView> monitoringSchedulesJsonList = jsonValue.GetArray("MonitoringSchedules");
    for(unsigned monitoringSchedulesIndex = 0; monitoringSchedulesIndex < monitoringSchedulesJsonList.GetLength(); ++monitoringSchedulesIndex)
    {
      m_monitoringSchedules.push_back(monitoringSchedulesJsonList[monitoringSchedulesIndex].AsObject());
    }
    m_monitoringSchedulesHasBeenSet = true;
  }

  if(jsonValue.ValueExists("Tags"))
  {
    Array<JsonView> tagsJsonList = jsonValue.GetArray("Tags");
    for(unsigned tagsIndex = 0; tagsIndex < tagsJsonList.GetLength(); ++tagsIndex)
    {
      m_tags.push_back(tagsJsonList[tagsIndex].AsObject());
    }
    m_tagsHasBeenSet = true;
  }

  return *this;
}
// Serializes this Endpoint to JSON (generated code). Mirrors operator=:
// a field is written only when its "HasBeenSet" flag is true, so a
// round-tripped payload contains exactly the keys that were originally set.
JsonValue Endpoint::Jsonize() const
{
  JsonValue payload;

  if(m_endpointNameHasBeenSet)
  {
   payload.WithString("EndpointName", m_endpointName);

  }

  if(m_endpointArnHasBeenSet)
  {
   payload.WithString("EndpointArn", m_endpointArn);

  }

  if(m_endpointConfigNameHasBeenSet)
  {
   payload.WithString("EndpointConfigName", m_endpointConfigName);

  }

  // Vector members are serialized element-wise into a JSON array.
  if(m_productionVariantsHasBeenSet)
  {
   Array<JsonValue> productionVariantsJsonList(m_productionVariants.size());
   for(unsigned productionVariantsIndex = 0; productionVariantsIndex < productionVariantsJsonList.GetLength(); ++productionVariantsIndex)
   {
     productionVariantsJsonList[productionVariantsIndex].AsObject(m_productionVariants[productionVariantsIndex].Jsonize());
   }
   payload.WithArray("ProductionVariants", std::move(productionVariantsJsonList));

  }

  if(m_dataCaptureConfigHasBeenSet)
  {
   payload.WithObject("DataCaptureConfig", m_dataCaptureConfig.Jsonize());

  }

  // The status enum is written back as its mapped name string.
  if(m_endpointStatusHasBeenSet)
  {
   payload.WithString("EndpointStatus", EndpointStatusMapper::GetNameForEndpointStatus(m_endpointStatus));
  }

  if(m_failureReasonHasBeenSet)
  {
   payload.WithString("FailureReason", m_failureReason);

  }

  // Timestamps are written as epoch seconds with millisecond precision.
  if(m_creationTimeHasBeenSet)
  {
   payload.WithDouble("CreationTime", m_creationTime.SecondsWithMSPrecision());
  }

  if(m_lastModifiedTimeHasBeenSet)
  {
   payload.WithDouble("LastModifiedTime", m_lastModifiedTime.SecondsWithMSPrecision());
  }

  if(m_monitoringSchedulesHasBeenSet)
  {
   Array<JsonValue> monitoringSchedulesJsonList(m_monitoringSchedules.size());
   for(unsigned monitoringSchedulesIndex = 0; monitoringSchedulesIndex < monitoringSchedulesJsonList.GetLength(); ++monitoringSchedulesIndex)
   {
     monitoringSchedulesJsonList[monitoringSchedulesIndex].AsObject(m_monitoringSchedules[monitoringSchedulesIndex].Jsonize());
   }
   payload.WithArray("MonitoringSchedules", std::move(monitoringSchedulesJsonList));

  }

  if(m_tagsHasBeenSet)
  {
   Array<JsonValue> tagsJsonList(m_tags.size());
   for(unsigned tagsIndex = 0; tagsIndex < tagsJsonList.GetLength(); ++tagsIndex)
   {
     tagsJsonList[tagsIndex].AsObject(m_tags[tagsIndex].Jsonize());
   }
   payload.WithArray("Tags", std::move(tagsJsonList));

  }

  return payload;
}
} // namespace Model
} // namespace SageMaker
} // namespace Aws
| apache-2.0 |
dzimine/mistral | mistral/openstack/common/exception.py | 3315 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exceptions common to OpenStack projects
"""
import functools
import logging

from mistral.openstack.common.gettextutils import _  # noqa
_FATAL_EXCEPTION_FORMAT_ERRORS = False
class Error(Exception):
    """Root of this module's exception hierarchy.

    ``message`` may be ``None``; it is forwarded verbatim to ``Exception``.
    """

    def __init__(self, message=None):
        super(Error, self).__init__(message)
class ApiError(Error):
    """Error carrying an API status ``code`` alongside its message.

    The original message is kept on ``api_message`` (to avoid clashing with
    ``Exception``'s own message) and the combined ``"code: message"`` text
    becomes the exception string.
    """

    def __init__(self, message='Unknown', code='Unknown'):
        text = '%s: %s' % (code, message)
        self.api_message = message
        self.code = code
        super(ApiError, self).__init__(text)
class NotFound(Error):
    """The requested entity could not be found."""
    pass
class UnknownScheme(Error):
    """Raised for a URI whose scheme is not recognized."""

    msg_fmt = "Unknown scheme '%s' found in URI"

    def __init__(self, scheme):
        # Interpolate the offending scheme directly into the class template.
        super(UnknownScheme, self).__init__(self.msg_fmt % scheme)
class BadStoreUri(Error):
    """Raised when a store URI is malformed; carries the URI and the reason."""

    msg_fmt = "The Store URI %s was malformed. Reason: %s"

    def __init__(self, uri, reason):
        # Interpolate uri and reason directly into the class template.
        super(BadStoreUri, self).__init__(self.msg_fmt % (uri, reason))
class Duplicate(Error):
    """An entity that should be unique already exists."""
    pass


class NotAuthorized(Error):
    """The caller lacks permission for the requested operation."""
    pass


class NotEmpty(Error):
    """The target must be empty but still has contents."""
    pass


class Invalid(Error):
    """Generic validation failure."""
    pass


class BadInputError(Exception):
    """Error resulting from a client sending bad input to a server"""
    # NOTE(review): subclasses Exception rather than Error, so wrap_exception
    # re-wraps it as a plain Error; confirm that is intentional.
    pass


class MissingArgumentError(Error):
    """A required argument was not supplied."""
    pass


class DatabaseMigrationError(Error):
    """A database schema migration failed."""
    pass


class ClientConnectionError(Exception):
    """Error resulting from a client connecting to a server"""
    # NOTE(review): also subclasses Exception, not Error (see BadInputError).
    pass
def wrap_exception(f):
    """Decorator converting unexpected exceptions from ``f`` into ``Error``.

    Exceptions that are already ``Error`` instances propagate unchanged;
    anything else is logged with its traceback and re-raised as
    ``Error(str(e))``.
    """
    # functools.wraps preserves f's name and docstring on the wrapper. The
    # previous ``_wrap.func_name = f.func_name`` assignment was Python-2-only
    # (``func_name`` does not exist on Python 3 functions and raised
    # AttributeError at decoration time); ``wraps`` covers both versions.
    @functools.wraps(f)
    def _wrap(*args, **kw):
        try:
            return f(*args, **kw)
        except Exception as e:
            if not isinstance(e, Error):
                logging.exception(_('Uncaught exception'))
                raise Error(str(e))
            raise
    return _wrap
class OpenstackException(Exception):
    """Base Exception class.

    To correctly use this class, inherit from it and define
    a 'msg_fmt' property. That message will get printf'd
    with the keyword arguments provided to the constructor.
    """
    msg_fmt = "An unknown exception occurred"

    def __init__(self, **kwargs):
        try:
            self._error_string = self.msg_fmt % kwargs
        except Exception:
            if _FATAL_EXCEPTION_FORMAT_ERRORS:
                raise
            # Fall back to the raw template so at least the core message
            # survives a bad or missing format argument.
            self._error_string = self.msg_fmt

    def __str__(self):
        return self._error_string
class MalformedRequestBody(OpenstackException):
    """The request body could not be parsed."""
    msg_fmt = "Malformed message body: %(reason)s"


class InvalidContentType(OpenstackException):
    """The request carried an unsupported Content-Type."""
    msg_fmt = "Invalid content type %(content_type)s"
| apache-2.0 |
arvindsv/gocd | common/src/main/java/com/thoughtworks/go/remote/work/artifact/ArtifactPlanFilter.java | 2157 | /*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.remote.work.artifact;
import com.thoughtworks.go.domain.ArtifactPlan;
import com.thoughtworks.go.domain.ArtifactPlanType;
import com.thoughtworks.go.domain.MergedTestArtifactPlan;
import java.util.ArrayList;
import java.util.List;
public class ArtifactPlanFilter {

    /**
     * Returns the built-in artifact plans: all {@code file}-type plans plus, if any
     * test-type plans exist, a single {@link MergedTestArtifactPlan} that aggregates
     * every test plan. Plans of any other type are dropped. Input order is preserved;
     * the merged test plan sits at the position of the first test plan encountered.
     */
    public List<ArtifactPlan> getBuiltInMergedArtifactPlans(List<ArtifactPlan> artifactPlans) {
        final List<ArtifactPlan> builtInPlans = new ArrayList<>();
        MergedTestArtifactPlan mergedTestPlan = null;
        for (ArtifactPlan plan : artifactPlans) {
            if (plan.getArtifactPlanType().isTest()) {
                if (mergedTestPlan != null) {
                    // Fold subsequent test plans into the existing aggregate.
                    mergedTestPlan.add(plan);
                } else {
                    mergedTestPlan = new MergedTestArtifactPlan(plan);
                    builtInPlans.add(mergedTestPlan);
                }
            } else if (plan.getArtifactPlanType() == ArtifactPlanType.file) {
                builtInPlans.add(plan);
            }
        }
        return builtInPlans;
    }

    /**
     * Returns only the {@code external} (plugin-provided) artifact plans, in input order.
     */
    public List<ArtifactPlan> getPluggableArtifactPlans(List<ArtifactPlan> artifactPlans) {
        final List<ArtifactPlan> externalPlans = new ArrayList<>();
        for (ArtifactPlan plan : artifactPlans) {
            if (plan.getArtifactPlanType() == ArtifactPlanType.external) {
                externalPlans.add(plan);
            }
        }
        return externalPlans;
    }
}
| apache-2.0 |
Bogatinov/angular | modules/angular2/test/render/dom/view/view_spec.js | 2570 | import {
AsyncTestCompleter,
beforeEach,
ddescribe,
xdescribe,
describe,
el,
dispatchEvent,
expect,
iit,
inject,
beforeEachBindings,
it,
xit,
SpyObject, proxy
} from 'angular2/test_lib';
import {IMPLEMENTS, isBlank} from 'angular2/src/facade/lang';
import {ListWrapper} from 'angular2/src/facade/collection';
import {DomProtoView} from 'angular2/src/render/dom/view/proto_view';
import {ElementBinder} from 'angular2/src/render/dom/view/element_binder';
import {DomView} from 'angular2/src/render/dom/view/view';
import {LightDom} from 'angular2/src/render/dom/shadow_dom/light_dom';
import {DOM} from 'angular2/src/dom/dom_adapter';
export function main() {
  describe('DomView', () => {

    // Builds a DomProtoView rooted at a detached <div>, with the given
    // element binders (defaults to an empty binder list).
    function createProtoView(binders = null) {
      if (isBlank(binders)) {
        binders = [];
      }
      var rootEl = el('<div></div>');
      return new DomProtoView({
        element: rootEl,
        elementBinders: binders
      });
    }

    // Builds a DomView over `pv` (a fresh proto view if omitted) whose
    // bound-elements array holds `boundElementCount` dummy <span> elements.
    function createView(pv=null, boundElementCount=0) {
      if (isBlank(pv)) {
        pv = createProtoView();
      }
      var root = el('<div><div></div></div>');
      var boundElements = [];
      for (var i=0; i<boundElementCount; i++) {
        // NOTE(review): '<span></span' is missing the closing '>'; the DOM
        // parser tolerates it, but confirm whether this is a typo.
        ListWrapper.push(boundElements, el('<span></span'));
      }
      return new DomView(pv, [DOM.childNodes(root)[0]],
        [], boundElements, []);
    }

    describe('getDirectParentLightDom', () => {

      // Binder 1 sits at distanceToParent 1 from binder 0, so the parent's
      // LightDom must be returned.
      it('should return the LightDom of the direct parent', () => {
        var pv = createProtoView(
          [new ElementBinder(), new ElementBinder({
            parentIndex: 0,
            distanceToParent: 1
          })]
        );
        var view = createView(pv, 2);
        view.lightDoms[0] = new SpyLightDom();
        view.lightDoms[1] = new SpyLightDom();
        expect(view.getDirectParentLightDom(1)).toBe(view.lightDoms[0]);
      });

      // Binder 2's nearest bound ancestor is 2 levels away, so there is no
      // *direct* bound parent and null is expected.
      it('should return null if the direct parent is not bound', () => {
        var pv = createProtoView(
          [new ElementBinder(), new ElementBinder(), new ElementBinder({
            parentIndex: 0,
            distanceToParent: 2
          })]
        );
        var view = createView(pv, 3);
        view.lightDoms[0] = new SpyLightDom();
        view.lightDoms[1] = new SpyLightDom();
        view.lightDoms[2] = new SpyLightDom();
        expect(view.getDirectParentLightDom(2)).toBe(null);
      });
    });
  });
}
// Test double standing in for LightDom: @proxy/@IMPLEMENTS make the spy
// satisfy the LightDom interface while SpyObject records all calls.
@proxy
@IMPLEMENTS(LightDom)
class SpyLightDom extends SpyObject {
  constructor(){super(LightDom);}
  noSuchMethod(m){return super.noSuchMethod(m)}
}
| apache-2.0 |
aws/aws-sdk-cpp | aws-cpp-sdk-greengrass/source/model/GetSubscriptionDefinitionVersionResult.cpp | 1579 | /**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/greengrass/model/GetSubscriptionDefinitionVersionResult.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/UnreferencedParam.h>
#include <utility>
using namespace Aws::Greengrass::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
using namespace Aws;
// Default constructor (generated code): members are default-initialized.
GetSubscriptionDefinitionVersionResult::GetSubscriptionDefinitionVersionResult()
{
}

// Service-response constructor: delegates parsing of the JSON payload to
// operator=(const AmazonWebServiceResult<JsonValue>&).
GetSubscriptionDefinitionVersionResult::GetSubscriptionDefinitionVersionResult(const Aws::AmazonWebServiceResult<JsonValue>& result)
{
  *this = result;
}
// Deserializes the response payload (generated code): each top-level key is
// copied into its member only when present; absent keys leave the member at
// its default-constructed value.
GetSubscriptionDefinitionVersionResult& GetSubscriptionDefinitionVersionResult::operator =(const Aws::AmazonWebServiceResult<JsonValue>& result)
{
  JsonView jsonValue = result.GetPayload().View();
  if(jsonValue.ValueExists("Arn"))
  {
    m_arn = jsonValue.GetString("Arn");

  }

  if(jsonValue.ValueExists("CreationTimestamp"))
  {
    m_creationTimestamp = jsonValue.GetString("CreationTimestamp");

  }

  if(jsonValue.ValueExists("Definition"))
  {
    m_definition = jsonValue.GetObject("Definition");

  }

  if(jsonValue.ValueExists("Id"))
  {
    m_id = jsonValue.GetString("Id");

  }

  if(jsonValue.ValueExists("NextToken"))
  {
    m_nextToken = jsonValue.GetString("NextToken");

  }

  if(jsonValue.ValueExists("Version"))
  {
    m_version = jsonValue.GetString("Version");

  }



  return *this;
}
| apache-2.0 |
ingorichtsmeier/camunda-bpm-platform | engine-rest/engine-rest/src/test/java/org/camunda/bpm/engine/rest/AuthorizationRestServiceQueryTest.java | 10055 | /*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.rest;
import static io.restassured.RestAssured.expect;
import static io.restassured.RestAssured.given;
import static io.restassured.path.json.JsonPath.from;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import javax.ws.rs.core.Response.Status;
import org.camunda.bpm.engine.AuthorizationService;
import org.camunda.bpm.engine.IdentityService;
import org.camunda.bpm.engine.authorization.Authorization;
import org.camunda.bpm.engine.authorization.AuthorizationQuery;
import org.camunda.bpm.engine.authorization.Permissions;
import org.camunda.bpm.engine.impl.AuthorizationServiceImpl;
import org.camunda.bpm.engine.impl.IdentityServiceImpl;
import org.camunda.bpm.engine.impl.calendar.DateTimeUtil;
import org.camunda.bpm.engine.impl.cfg.ProcessEngineConfigurationImpl;
import org.camunda.bpm.engine.impl.cfg.auth.DefaultPermissionProvider;
import org.camunda.bpm.engine.rest.exception.InvalidRequestException;
import org.camunda.bpm.engine.rest.helper.MockProvider;
import org.camunda.bpm.engine.rest.util.container.TestContainerRule;
import org.junit.Assert;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.mockito.InOrder;
import io.restassured.http.ContentType;
import io.restassured.response.Response;
import io.restassured.specification.RequestSpecification;
/**
* @author Daniel Meyer
*
*/
public class AuthorizationRestServiceQueryTest extends AbstractRestServiceTest {

  // REST paths of the authorization query resource and its /count sub-resource.
  protected static final String SERVICE_PATH = TEST_RESOURCE_ROOT_PATH + AuthorizationRestService.PATH;
  protected static final String SERVICE_COUNT_PATH = TEST_RESOURCE_ROOT_PATH + AuthorizationRestService.PATH+"/count";

  protected AuthorizationService authorizationServiceMock;
  protected IdentityService identityServiceMock;
  protected ProcessEngineConfigurationImpl processEngineConfigurationMock;

  @ClassRule
  public static TestContainerRule rule = new TestContainerRule();

  // Swaps the engine's services for Mockito mocks before each test; the
  // permission provider is a real DefaultPermissionProvider so permission
  // names resolve normally.
  @Before
  public void setUpRuntimeData() {
    authorizationServiceMock = mock(AuthorizationServiceImpl.class);
    identityServiceMock = mock(IdentityServiceImpl.class);
    processEngineConfigurationMock = mock(ProcessEngineConfigurationImpl.class);

    when(processEngine.getAuthorizationService()).thenReturn(authorizationServiceMock);
    when(processEngine.getIdentityService()).thenReturn(identityServiceMock);
    when(processEngine.getProcessEngineConfiguration()).thenReturn(processEngineConfigurationMock);
    when(processEngineConfigurationMock.getPermissionProvider()).thenReturn(new DefaultPermissionProvider());
  }

  // Stubs a query whose list() and count() return the given authorizations.
  private AuthorizationQuery setUpMockQuery(List<Authorization> list) {
    AuthorizationQuery query = mock(AuthorizationQuery.class);
    when(query.list()).thenReturn(list);
    when(query.count()).thenReturn((long) list.size());

    when(processEngine.getAuthorizationService().createAuthorizationQuery()).thenReturn(query);

    return query;
  }

  // An empty filter value must still yield a 200 response.
  @Test
  public void testEmptyQuery() {
    setUpMockQuery(MockProvider.createMockAuthorizations());

    String queryKey = "";
    given().queryParam("name", queryKey)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(SERVICE_PATH);
  }

  // sortBy without sortOrder is rejected with a 400 and a descriptive message.
  @Test
  public void testSortByParameterOnly() {
    given().queryParam("sortBy", "resourceType")
      .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode()).contentType(ContentType.JSON)
      .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
      .body("message", equalTo("Only a single sorting parameter specified. sortBy and sortOrder required"))
      .when().get(SERVICE_PATH);
  }

  // sortOrder without sortBy is rejected the same way.
  @Test
  public void testSortOrderParameterOnly() {
    given().queryParam("sortOrder", "asc")
      .then().expect().statusCode(Status.BAD_REQUEST.getStatusCode()).contentType(ContentType.JSON)
      .body("type", equalTo(InvalidRequestException.class.getSimpleName()))
      .body("message", equalTo("Only a single sorting parameter specified. sortBy and sortOrder required"))
      .when().get(SERVICE_PATH);
  }

  // Without parameters the query is executed as-is: list() only, no filters.
  @Test
  public void testNoParametersQuery() {
    AuthorizationQuery mockQuery = setUpMockQuery(MockProvider.createMockAuthorizations());

    expect().statusCode(Status.OK.getStatusCode()).when().get(SERVICE_PATH);

    verify(mockQuery).list();
    verifyNoMoreInteractions(mockQuery);
  }

  // Filtering by type must apply the filter before list() and the JSON body
  // must mirror every field of the mocked authorization.
  @Test
  public void testSimpleAuthorizationQuery() {
    List<Authorization> mockAuthorizations = MockProvider.createMockGlobalAuthorizations();
    AuthorizationQuery mockQuery = setUpMockQuery(mockAuthorizations);

    Response response = given().queryParam("type", Authorization.AUTH_TYPE_GLOBAL)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(SERVICE_PATH);

    InOrder inOrder = inOrder(mockQuery);
    inOrder.verify(mockQuery).authorizationType(Authorization.AUTH_TYPE_GLOBAL);
    inOrder.verify(mockQuery).list();

    String content = response.asString();
    List<String> instances = from(content).getList("");
    Assert.assertEquals("There should be one authorization returned.", 1, instances.size());
    Assert.assertNotNull("The returned authorization should not be null.", instances.get(0));

    Authorization mockAuthorization = mockAuthorizations.get(0);

    Assert.assertEquals(mockAuthorization.getId(), from(content).getString("[0].id"));
    Assert.assertEquals(mockAuthorization.getAuthorizationType(), from(content).getInt("[0].type"));
    Assert.assertEquals(Permissions.READ.getName(), from(content).getString("[0].permissions[0]"));
    Assert.assertEquals(Permissions.UPDATE.getName(), from(content).getString("[0].permissions[1]"));
    Assert.assertEquals(mockAuthorization.getUserId(), from(content).getString("[0].userId"));
    Assert.assertEquals(mockAuthorization.getGroupId(), from(content).getString("[0].groupId"));
    Assert.assertEquals(mockAuthorization.getResourceType(), from(content).getInt("[0].resourceType"));
    Assert.assertEquals(mockAuthorization.getResourceId(), from(content).getString("[0].resourceId"));
    Assert.assertEquals(mockAuthorization.getRemovalTime(),
        DateTimeUtil.parseDate(from(content).getString("[0].removalTime")));
    Assert.assertEquals(mockAuthorization.getRootProcessInstanceId(),
        from(content).getString("[0].rootProcessInstanceId"));
  }

  // Every supported GET filter parameter must be forwarded to the query.
  @Test
  public void testCompleteGetParameters() {
    List<Authorization> mockAuthorizations = MockProvider.createMockGlobalAuthorizations();
    AuthorizationQuery mockQuery = setUpMockQuery(mockAuthorizations);

    Map<String, String> queryParameters = getCompleteStringQueryParameters();

    RequestSpecification requestSpecification = given().contentType(POST_JSON_CONTENT_TYPE);
    for (Entry<String, String> paramEntry : queryParameters.entrySet()) {
      requestSpecification.parameter(paramEntry.getKey(), paramEntry.getValue());
    }

    requestSpecification.expect().statusCode(Status.OK.getStatusCode())
      .when().get(SERVICE_PATH);

    verify(mockQuery).authorizationId(MockProvider.EXAMPLE_AUTHORIZATION_ID);
    verify(mockQuery).authorizationType(MockProvider.EXAMPLE_AUTHORIZATION_TYPE);
    verify(mockQuery).userIdIn(new String[]{MockProvider.EXAMPLE_USER_ID, MockProvider.EXAMPLE_USER_ID2});
    verify(mockQuery).groupIdIn(new String[]{MockProvider.EXAMPLE_GROUP_ID, MockProvider.EXAMPLE_GROUP_ID2});
    verify(mockQuery).resourceType(MockProvider.EXAMPLE_RESOURCE_TYPE_ID);
    verify(mockQuery).resourceId(MockProvider.EXAMPLE_RESOURCE_ID);
    verify(mockQuery).list();
  }

  // One value for each supported filter parameter, as request-string values.
  private Map<String, String> getCompleteStringQueryParameters() {
    Map<String, String> parameters = new HashMap<String, String>();

    parameters.put("id", MockProvider.EXAMPLE_AUTHORIZATION_ID);
    parameters.put("type", MockProvider.EXAMPLE_AUTHORIZATION_TYPE_STRING);
    parameters.put("userIdIn", MockProvider.EXAMPLE_USER_ID + ","+MockProvider.EXAMPLE_USER_ID2);
    parameters.put("groupIdIn", MockProvider.EXAMPLE_GROUP_ID+","+MockProvider.EXAMPLE_GROUP_ID2);
    parameters.put("resourceType", MockProvider.EXAMPLE_RESOURCE_TYPE_ID_STRING);
    parameters.put("resourceId", MockProvider.EXAMPLE_RESOURCE_ID);

    return parameters;
  }

  // The /count sub-resource returns count() instead of the list.
  @Test
  public void testQueryCount() {
    AuthorizationQuery mockQuery = setUpMockQuery(MockProvider.createMockAuthorizations());

    expect().statusCode(Status.OK.getStatusCode())
      .body("count", equalTo(3))
      .when().get(SERVICE_COUNT_PATH);

    verify(mockQuery).count();
  }

  // firstResult/maxResults are translated into a listPage() call.
  @Test
  public void testSuccessfulPagination() {
    AuthorizationQuery mockQuery = setUpMockQuery(MockProvider.createMockAuthorizations());

    int firstResult = 0;
    int maxResults = 10;
    given().queryParam("firstResult", firstResult).queryParam("maxResults", maxResults)
      .then().expect().statusCode(Status.OK.getStatusCode())
      .when().get(SERVICE_PATH);

    verify(mockQuery).listPage(firstResult, maxResults);
  }
}
| apache-2.0 |
bestlingna/test | 极客学院任务学习/任务八/server/update.php | 941 | <?php
require_once 'dbconfig.php';

// $link is the mysqli connection handle created in dbconfig.php.
if ($link) {
    // Raw request values; bound as parameters below, never interpolated
    // into the SQL string (the previous version concatenated them directly,
    // which allowed SQL injection).
    $newstitle = $_POST['newstitle'];
    $newstype  = $_POST['newstype'];
    $newsimg   = $_POST['newsimg'];
    $newstime  = $_POST['newstime'];
    $newssrc   = $_POST['newssrc'];
    $newsid    = $_POST['id'];

    mysqli_query($link, "SET NAMES utf8");

    // Parameterized UPDATE: five string columns plus the integer primary key.
    $sql = "UPDATE `news` SET `newstitle`=?,`newstype`=?,
        `newsimg`=?,`newssrc`=?,`newstime`=? WHERE `id`=?";

    $result = false;
    $stmt = mysqli_prepare($link, $sql);
    if ($stmt) {
        mysqli_stmt_bind_param($stmt, 'sssssi',
            $newstitle, $newstype, $newsimg, $newssrc, $newstime, $newsid);
        $result = mysqli_stmt_execute($stmt);
        mysqli_stmt_close($stmt);
    }

    if ($result) {
        echo json_encode(array('success' => '插入数据成功'));
    } else {
        // Previously the raw SQL text was echoed back to the client on
        // failure; that leaked schema details and is replaced by a plain
        // failure message.
        echo json_encode(array('success' => '更新失败'));
    }
} else {
    echo json_encode(array('success' => '未连接数据库'));
}
?> | apache-2.0 |
simondi88/schoolbus | Server/src/SchoolBusAPI/Models/City.cs | 6613 | /*
* REST API Documentation for the MOTI School Bus Application
*
* The School Bus application tracks that inspections are performed in a timely fashion. For each school bus the application tracks information about the bus (including data from ICBC, NSC, etc.), it's past and next inspection dates and results, contacts, and the inspector responsible for next inspecting the bus.
*
* OpenAPI spec version: v1
*
*
*/
using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using System.ComponentModel.DataAnnotations.Schema;
using System.ComponentModel.DataAnnotations;
using SchoolBusAPI.Models;
namespace SchoolBusAPI.Models
{
/// <summary>
/// A list of cities in BC. Authoritative source to be determined.
/// </summary>
[MetaDataExtension (Description = "A list of cities in BC. Authoritative source to be determined.")]
public partial class City : AuditableEntity, IEquatable<City>
{
/// <summary>
/// Default constructor, required by entity framework.
/// Initializes the identifier to 0 (unsaved entity).
/// </summary>
public City()
{
    this.Id = 0;
}
/// <summary>
/// Initializes a new instance of the <see cref="City" /> class.
/// </summary>
/// <param name="Id">A system-generated unique identifier for a City (required).</param>
/// <param name="Name">The name of the City (optional, max 255 characters).</param>
/// <param name="Province">The name of the province of the city (optional, max 255 characters).</param>
public City(int Id, string Name = null, string Province = null)
{
    this.Id = Id;
    this.Name = Name;
    this.Province = Province;
}
/// <summary>
/// A system-generated unique identifier for a City
/// </summary>
/// <value>A system-generated unique identifier for a City</value>
[MetaDataExtension (Description = "A system-generated unique identifier for a City")]
public int Id { get; set; }

/// <summary>
/// The name of the City
/// </summary>
/// <value>The name of the City (database column limited to 255 characters)</value>
[MetaDataExtension (Description = "The name of the City")]
[MaxLength(255)]

public string Name { get; set; }

/// <summary>
/// The name of the province of the city
/// </summary>
/// <value>The name of the province of the city (database column limited to 255 characters)</value>
[MetaDataExtension (Description = "The name of the province of the city")]
[MaxLength(255)]

public string Province { get; set; }
/// <summary>
/// Returns the string presentation of the object
/// </summary>
/// <returns>String presentation of the object</returns>
public override string ToString()
{
var sb = new StringBuilder();
sb.Append("class City {\n");
sb.Append(" Id: ").Append(Id).Append("\n");
sb.Append(" Name: ").Append(Name).Append("\n");
sb.Append(" Province: ").Append(Province).Append("\n");
sb.Append("}\n");
return sb.ToString();
}
/// <summary>
/// Returns the JSON string presentation of the object
/// </summary>
/// <returns>JSON string presentation of the object</returns>
public string ToJson()
{
return JsonConvert.SerializeObject(this, Formatting.Indented);
}
/// <summary>
/// Returns true if objects are equal
/// </summary>
/// <param name="obj">Object to be compared</param>
/// <returns>Boolean</returns>
public override bool Equals(object obj)
{
if (ReferenceEquals(null, obj)) { return false; }
if (ReferenceEquals(this, obj)) { return true; }
if (obj.GetType() != GetType()) { return false; }
return Equals((City)obj);
}
/// <summary>
/// Returns true if City instances are equal
/// </summary>
/// <param name="other">Instance of City to be compared</param>
/// <returns>Boolean</returns>
public bool Equals(City other)
{
if (ReferenceEquals(null, other)) { return false; }
if (ReferenceEquals(this, other)) { return true; }
return
(
this.Id == other.Id ||
this.Id.Equals(other.Id)
) &&
(
this.Name == other.Name ||
this.Name != null &&
this.Name.Equals(other.Name)
) &&
(
this.Province == other.Province ||
this.Province != null &&
this.Province.Equals(other.Province)
);
}
/// <summary>
/// Gets the hash code
/// </summary>
/// <returns>Hash code</returns>
public override int GetHashCode()
{
// credit: http://stackoverflow.com/a/263416/677735
unchecked // Overflow is fine, just wrap
{
int hash = 41;
// Suitable nullity checks
hash = hash * 59 + this.Id.GetHashCode(); if (this.Name != null)
{
hash = hash * 59 + this.Name.GetHashCode();
}
if (this.Province != null)
{
hash = hash * 59 + this.Province.GetHashCode();
}
return hash;
}
}
#region Operators
/// <summary>
/// Equals
/// </summary>
/// <param name="left"></param>
/// <param name="right"></param>
/// <returns></returns>
public static bool operator ==(City left, City right)
{
return Equals(left, right);
}
/// <summary>
/// Not Equals
/// </summary>
/// <param name="left"></param>
/// <param name="right"></param>
/// <returns></returns>
public static bool operator !=(City left, City right)
{
return !Equals(left, right);
}
#endregion Operators
}
}
| apache-2.0 |
googlearchive/android-AutofillFramework | Application/src/main/java/com/example/android/autofill/app/commoncases/StandardAutoCompleteSignInActivity.java | 5355 | /*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.autofill.app.commoncases;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.view.View;
import android.view.autofill.AutofillManager;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.TextView;
import android.widget.Toast;
import com.example.android.autofill.app.R;
import com.example.android.autofill.app.WelcomeActivity;
import static com.example.android.autofill.app.Util.TAG;
/**
 * Login activity whose username field is a standard {@link AutoCompleteTextView}.
 * Because the platform autofill UI and the autocomplete drop-down would compete
 * for the same field, an {@link AutofillManager.AutofillCallback} (see
 * {@link MyAutofillCallback}) decides which of the two is shown.
 */
public class StandardAutoCompleteSignInActivity extends AppCompatActivity {
    private AutoCompleteTextView mUsernameAutoCompleteField;
    private TextView mPasswordField;
    private TextView mLoginButton;
    private TextView mClearButton;
    // Flips to true once the autofill UI has been shown; from then on the
    // autocomplete drop-down is suppressed (see MyAutofillCallback).
    private boolean mAutofillReceived = false;
    private AutofillManager.AutofillCallback mAutofillCallback;
    private AutofillManager mAutofillManager;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.login_with_autocomplete_activity);
        mLoginButton = findViewById(R.id.login);
        mClearButton = findViewById(R.id.clear);
        mUsernameAutoCompleteField = findViewById(R.id.usernameField);
        mPasswordField = findViewById(R.id.passwordField);
        mLoginButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                login();
            }
        });
        mClearButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // Cancel any in-flight autofill session before clearing,
                // so stale suggestions are not applied afterwards.
                AutofillManager afm = getSystemService(AutofillManager.class);
                if (afm != null) {
                    afm.cancel();
                }
                resetFields();
            }
        });
        mAutofillCallback = new MyAutofillCallback();
        mAutofillManager = getSystemService(AutofillManager.class);
        // Static suggestion list backing the AutoCompleteTextView drop-down.
        ArrayAdapter<CharSequence> mockAutocompleteAdapter = ArrayAdapter.createFromResource
                (this, R.array.mock_autocomplete_sign_in_suggestions,
                        android.R.layout.simple_dropdown_item_1line);
        mUsernameAutoCompleteField.setAdapter(mockAutocompleteAdapter);
    }

    @Override
    protected void onResume() {
        super.onResume();
        // Only listen for autofill events while the activity is in the foreground.
        mAutofillManager.registerCallback(mAutofillCallback);
    }

    @Override
    protected void onPause() {
        super.onPause();
        mAutofillManager.unregisterCallback(mAutofillCallback);
    }

    /** Clears both credential fields. */
    private void resetFields() {
        mUsernameAutoCompleteField.setText("");
        mPasswordField.setText("");
    }

    /**
     * Emulates a login action.
     */
    private void login() {
        String username = mUsernameAutoCompleteField.getText().toString();
        String password = mPasswordField.getText().toString();
        boolean valid = isValidCredentials(username, password);
        if (valid) {
            Intent intent = WelcomeActivity.getStartActivityIntent(StandardAutoCompleteSignInActivity.this);
            startActivity(intent);
            finish();
        } else {
            Toast.makeText(this, "Authentication failed.", Toast.LENGTH_SHORT).show();
        }
    }

    /**
     * Dummy implementation for demo purposes. A real service should use secure mechanisms to
     * authenticate users.
     */
    public boolean isValidCredentials(String username, String password) {
        return username != null && password != null && username.equals(password);
    }

    /**
     * Shows the autocomplete drop-down only when autofill is unavailable or has
     * been dismissed, and disables autocomplete entirely once autofill kicks in.
     */
    private class MyAutofillCallback extends AutofillManager.AutofillCallback {
        @Override
        public void onAutofillEvent(@NonNull View view, int event) {
            if (view instanceof AutoCompleteTextView) {
                switch (event) {
                    case AutofillManager.AutofillCallback.EVENT_INPUT_UNAVAILABLE:
                        // no break on purpose
                    case AutofillManager.AutofillCallback.EVENT_INPUT_HIDDEN:
                        // Fall back to autocomplete unless autofill already ran.
                        if (!mAutofillReceived) {
                            ((AutoCompleteTextView) view).showDropDown();
                        }
                        break;
                    case AutofillManager.AutofillCallback.EVENT_INPUT_SHOWN:
                        // Autofill won: remember it and drop the autocomplete adapter.
                        mAutofillReceived = true;
                        ((AutoCompleteTextView) view).setAdapter(null);
                        break;
                    default:
                        Log.d(TAG, "Unexpected callback: " + event);
                }
            }
        }
    }
}
mdunker/usergrid | stack/core/src/main/java/org/apache/usergrid/batch/service/JobRuntimeService.java | 1614 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.usergrid.batch.service;
import org.apache.usergrid.batch.JobRuntime;
/**
 * Methods to allow job executions to interact with the distributed runtime,
 * e.g. to signal liveness (heartbeat) or to postpone themselves (delay).
 */
public interface JobRuntimeService {

    /**
     * Perform any heartbeat operations required, updating the given
     * {@link JobRuntime} with the appropriate data.
     *
     * @param execution The job execution to update
     * @param delay The delay
     */
    void heartbeat( JobRuntime execution, long delay );

    /**
     * Heartbeat with the system defaults. Update jobExecution with the
     * appropriate data.
     *
     * @param execution The execution
     */
    void heartbeat( JobRuntime execution );

    /**
     * Delay this execution.
     *
     * @param execution the execution to delay
     */
    void delay( JobRuntime execution );
}
| apache-2.0 |
myfreecomm/fixofx | lib/ofx/validators.py | 3726 | #coding: utf-8
# Copyright 2005-2010 Wesabe, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ofx.validators - Classes to validate certain financial data types.
#
class RoutingNumber:
    """Validates and classifies a US ABA routing transit number.

    A routing number is a 9-digit code identifying a financial institution.
    The first two digits (the "region code") encode the institution type and
    Federal Reserve district; the ninth digit is a weighted checksum over
    the whole number.
    """

    # Per-digit weights of the ABA checksum; a valid number's weighted sum
    # is divisible by 10.
    _CHECKSUM_WEIGHTS = (3, 7, 1, 3, 7, 1, 3, 7, 1)

    # Federal Reserve district cities, indexed by district number - 1.
    # Primary (01-12), thrift (+20) and electronic (+60) region codes all
    # map onto the same city list.
    _DISTRICT_CITIES = ("Boston", "New York", "Philadelphia", "Cleveland",
                        "Richmond", "Atlanta", "Chicago", "St. Louis",
                        "Minneapolis", "Kansas City", "Dallas",
                        "San Francisco")

    def __init__(self, number):
        """Accepts a string or int; non-numeric or too-short input marks the
        instance as unconverted instead of raising."""
        self.number = number
        try:
            digits = [int(digit) for digit in str(self.number).strip()]
            # Fewer than two digits cannot yield a region code; the original
            # code crashed with IndexError here -- treat it as unconvertible.
            if len(digits) < 2:
                raise ValueError("routing number too short")
            self.digits = digits
            self.region_code = digits[0] * 10 + digits[1]
            self.converted = True
        except ValueError:
            # Not a number (or too short), failed to convert.
            self.digits = None
            self.region_code = None
            self.converted = False

    def is_valid(self):
        """Return True when the number has 9 digits and passes the ABA checksum."""
        if self.converted is False or len(self.digits) != 9:
            return False
        checksum = sum(w * d for w, d in zip(self._CHECKSUM_WEIGHTS, self.digits))
        return (checksum % 10 == 0)

    def get_type(self):
        """Return the institution type implied by the region code, or None."""
        if self.region_code is None:
            return None
        # Remember that range() stops one short of the second argument.
        # In other words, "x in range(1, 13)" means "x >= 1 and x < 13".
        if self.region_code == 0:
            return "United States Government"
        elif self.region_code in range(1, 13):
            return "Primary"
        elif self.region_code in range(21, 33):
            return "Thrift"
        elif self.region_code in range(61, 73):
            return "Electronic"
        elif self.region_code == 80:
            return "Traveller's Cheque"
        else:
            return None

    def get_region(self):
        """Return the Federal Reserve city (or special region), or None."""
        if self.region_code is None:
            return None
        if self.region_code == 0:
            return "United States Government"
        if self.region_code == 80:
            return "Traveller's Cheque"
        # Primary, thrift and electronic prefixes differ by a fixed offset.
        for base in (0, 20, 60):
            district = self.region_code - base
            if 1 <= district <= 12:
                return self._DISTRICT_CITIES[district - 1]
        return None

    def to_s(self):
        return str(self.number) + " (valid: %s; type: %s; region: %s)" % \
            (self.is_valid(), self.get_type(), self.get_region())

    def __repr__(self):
        return self.to_s()
| apache-2.0 |
wapalxj/Android_C2_UI | C2_UI_2/chart/src/main/java/com/github/mikephil/charting/formatter/PercentFormatter.java | 1228 |
package com.github.mikephil.charting.formatter;
import com.github.mikephil.charting.components.AxisBase;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.utils.ViewPortHandler;
import java.text.DecimalFormat;
/**
 * An {@link IValueFormatter} / {@link IAxisValueFormatter} that renders a value
 * with a trailing " %" suffix. (Recommended for PieChart.)
 *
 * @author Philipp Jahoda
 */
public class PercentFormatter implements IValueFormatter, IAxisValueFormatter
{

    protected DecimalFormat mFormat;

    /**
     * Creates a formatter using the default pattern "###,###,##0.0".
     */
    public PercentFormatter() {
        this(new DecimalFormat("###,###,##0.0"));
    }

    /**
     * Allow a custom DecimalFormat.
     *
     * @param format the format used for the numeric part of the output
     */
    public PercentFormatter(DecimalFormat format) {
        this.mFormat = format;
    }

    /** Formats the numeric part and appends the percent suffix. */
    private String withPercentSuffix(float value) {
        return mFormat.format(value) + " %";
    }

    // IValueFormatter
    @Override
    public String getFormattedValue(float value, Entry entry, int dataSetIndex, ViewPortHandler viewPortHandler) {
        return withPercentSuffix(value);
    }

    // IAxisValueFormatter
    @Override
    public String getFormattedValue(float value, AxisBase axis) {
        return withPercentSuffix(value);
    }

    @Override
    public int getDecimalDigits() {
        return 1;
    }
}
| apache-2.0 |
bryanriddle/ha-jobs | ha-jobs-core/src/main/scala/de/kaufhof/hajobs/package.scala | 886 | package de.kaufhof
import org.slf4j.LoggerFactory._
import scala.concurrent.{ExecutionContext, Future}
package object hajobs {

  private val logger = getLogger("de.kaufhof.hajobs")

  /** Registry of all known jobs, keyed by their JobType. */
  type Jobs = Map[JobType, Job]

  // scalastyle:off method.name We allow an upper case method name to mimic a Jobs apply method
  def Jobs(jobs: Seq[Job]): Jobs = jobs.map(s => s.jobType -> s).toMap
  // scalastyle:on

  /**
   * Tries function max n times.
   *
   * Each failure except the last is logged as a warning and `fn` is
   * re-evaluated; once no attempts remain, the last error is logged and
   * returned as a failed Future.
   */
  final def retry[T](n: Int, description: String)(fn: => Future[T])(implicit ec: ExecutionContext): Future[T] = {
    fn.recoverWith {
      case e if n > 1 =>
        logger.warn(s"$description failed, trying again (${n - 1} attempts left)", e)
        retry(n - 1, description)(fn)
      case e: Throwable =>
        logger.warn(s"All retries for $description failed, returning error.", e)
        Future.failed[T](e)
    }
  }
}
| apache-2.0 |
scala-js/scala-js | junit-test/shared/src/test/scala/org/scalajs/junit/AsyncTest.scala | 987 | /*
* Scala.js (https://www.scala-js.org/)
*
* Copyright EPFL.
*
* Licensed under Apache License 2.0
* (https://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package org.scalajs.junit
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
import org.junit.Assert._
import org.junit.Test
import org.scalajs.junit.utils._
import org.scalajs.junit.async._
class AsyncTest {

  /** Succeeds once the asynchronous computation filters to the expected value. */
  @Test
  def success(): AsyncResult = await {
    Future(1 + 1).filter(_ == 2)
  }

  /** The declared exception arrives asynchronously, not at call time. */
  @Test(expected = classOf[IllegalArgumentException])
  def expectedException(): AsyncResult = await {
    // Do not throw synchronously.
    Future.failed(new IllegalArgumentException)
  }

  // NOTE(review): unlike expectedException, no exception is declared here, so
  // this test fails when run directly -- presumably the JUnitTest harness
  // checks that this failure is reported correctly; confirm against JUnitTest.
  @Test
  def asyncFailure(): AsyncResult = await {
    // Do not throw synchronously.
    Future.failed(new IllegalArgumentException)
  }
}
// Harness from org.scalajs.junit.utils -- presumably verifies the recorded
// outcome of running AsyncTest above (TODO confirm against JUnitTest).
class AsyncTestAssertions extends JUnitTest
| apache-2.0 |
plxaye/chromium | src/chrome/browser/policy/async_policy_provider.cc | 4970 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/policy/async_policy_provider.h"
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/message_loop.h"
#include "base/message_loop_proxy.h"
#include "chrome/browser/policy/async_policy_loader.h"
#include "chrome/browser/policy/policy_bundle.h"
#include "content/public/browser/browser_thread.h"
using content::BrowserThread;
namespace policy {

// Takes ownership of |loader|, which will subsequently live on (and be
// deleted from) the FILE thread; see the lifetime note in Shutdown().
AsyncPolicyProvider::AsyncPolicyProvider(scoped_ptr<AsyncPolicyLoader> loader)
    : loader_(loader.release()),
      ALLOW_THIS_IN_INITIALIZER_LIST(weak_factory_(this)) {
  // Make an immediate synchronous load on startup.
  OnLoaderReloaded(loader_->InitialLoad());
}

AsyncPolicyProvider::~AsyncPolicyProvider() {
  DCHECK(CalledOnValidThread());
  // Shutdown() must have been called before.
  DCHECK(!loader_);
}

// Starts the asynchronous side: initializes the loader on the FILE thread and
// routes its updates back to this thread via LoaderUpdateCallback.
void AsyncPolicyProvider::Init() {
  DCHECK(CalledOnValidThread());
  ConfigurationPolicyProvider::Init();
  if (!loader_)
    return;
  // Bound to the current loop's proxy so updates from the FILE thread are
  // marshalled back here; the WeakPtr drops updates after destruction.
  AsyncPolicyLoader::UpdateCallback callback =
      base::Bind(&AsyncPolicyProvider::LoaderUpdateCallback,
                 base::MessageLoopProxy::current(),
                 weak_factory_.GetWeakPtr());
  bool post = BrowserThread::PostTask(
      BrowserThread::FILE, FROM_HERE,
      base::Bind(&AsyncPolicyLoader::Init,
                 base::Unretained(loader_),
                 callback));
  DCHECK(post) << "AsyncPolicyProvider::Init() called with threads not running";
}

void AsyncPolicyProvider::Shutdown() {
  DCHECK(CalledOnValidThread());
  // Note on the lifetime of |loader_|:
  // The |loader_| lives on the FILE thread, and is deleted from here. This
  // means that posting tasks on the |loader_| to FILE from the
  // AsyncPolicyProvider is always safe, since a potential DeleteSoon() is only
  // posted from here. The |loader_| posts back to the AsyncPolicyProvider
  // through the |update_callback_|, which has a WeakPtr to |this|.
  if (!BrowserThread::DeleteSoon(BrowserThread::FILE, FROM_HERE, loader_)) {
    // The FILE thread doesn't exist; this only happens on unit tests.
    delete loader_;
  }
  loader_ = NULL;
  ConfigurationPolicyProvider::Shutdown();
}

void AsyncPolicyProvider::RefreshPolicies() {
  DCHECK(CalledOnValidThread());
  // Subtle: RefreshPolicies() has a contract that requires the next policy
  // update notification (triggered from UpdatePolicy()) to reflect any changes
  // made before this call. So if a caller has modified the policy settings and
  // invoked RefreshPolicies(), then by the next notification these policies
  // should already be provided.
  // However, it's also possible that an asynchronous Reload() is in progress
  // and just posted OnLoaderReloaded(). Therefore a task is posted to the
  // FILE thread before posting the next Reload, to prevent a potential
  // concurrent Reload() from triggering a notification too early. If another
  // refresh task has been posted, it is invalidated now.
  refresh_callback_.Reset(
      base::Bind(&AsyncPolicyProvider::ReloadAfterRefreshSync,
                 base::Unretained(this)));
  BrowserThread::PostTaskAndReply(
      BrowserThread::FILE, FROM_HERE,
      base::Bind(base::DoNothing),
      refresh_callback_.callback());
}

// Runs on this thread once the FILE thread has drained any in-flight Reload;
// then posts the forced Reload that satisfies the RefreshPolicies() contract.
void AsyncPolicyProvider::ReloadAfterRefreshSync() {
  DCHECK(CalledOnValidThread());
  // This task can only enter if it was posted from RefreshPolicies(), and it
  // hasn't been cancelled meanwhile by another call to RefreshPolicies().
  DCHECK(!refresh_callback_.IsCancelled());
  // There can't be another refresh callback pending now, since its creation
  // in RefreshPolicies() would have cancelled the current execution. So it's
  // safe to cancel the |refresh_callback_| now, so that OnLoaderReloaded()
  // sees that there is no refresh pending.
  refresh_callback_.Cancel();
  if (!loader_)
    return;
  BrowserThread::PostTask(
      BrowserThread::FILE, FROM_HERE,
      base::Bind(&AsyncPolicyLoader::Reload,
                 base::Unretained(loader_),
                 true  /* force */));
}

void AsyncPolicyProvider::OnLoaderReloaded(scoped_ptr<PolicyBundle> bundle) {
  DCHECK(CalledOnValidThread());
  // Only propagate policy updates if there are no pending refreshes, and if
  // Shutdown() hasn't been called yet.
  if (refresh_callback_.IsCancelled() && loader_)
    UpdatePolicy(bundle.Pass());
}

// static
// Trampoline invoked on the FILE thread: hops back to the provider's thread
// (via |loop|) before touching |weak_this|.
void AsyncPolicyProvider::LoaderUpdateCallback(
    scoped_refptr<base::MessageLoopProxy> loop,
    base::WeakPtr<AsyncPolicyProvider> weak_this,
    scoped_ptr<PolicyBundle> bundle) {
  DCHECK(BrowserThread::CurrentlyOn(BrowserThread::FILE));
  loop->PostTask(FROM_HERE,
                 base::Bind(&AsyncPolicyProvider::OnLoaderReloaded,
                            weak_this,
                            base::Passed(&bundle)));
}

}  // namespace policy
| apache-2.0 |
samrobin/database-enterprise | pages/js/easyui/jquery.edatagrid.js | 11998 | /**
* edatagrid - jQuery EasyUI
*
* Licensed under the GPL:
* http://www.gnu.org/licenses/gpl.txt
*
* Copyright 2011 stworthy [ stworthy@gmail.com ]
*
* Dependencies:
* datagrid
* messager
*
*/
(function($){
    // Kept commented out by the original author: an override of datagrid's
    // loadData that cleared the filter source before delegating.
    // var oldLoadDataMethod = $.fn.datagrid.methods.loadData;
    // $.fn.datagrid.methods.loadData = function(jq, data){
    //     jq.each(function(){
    //         $.data(this, 'datagrid').filterSource = null;
    //     });
    //     return oldLoadDataMethod.call($.fn.datagrid.methods, jq, data);
    // };

    // The <table> element of the grid that currently owns an auto-saving
    // editor, or undefined when no grid is being edited with autoSave on.
    var currTarget;

    $(function(){
        // Document-level mousedown: when the user clicks outside the grid
        // being edited (and outside any combo drop-down panel), commit the
        // pending edit row of that grid.
        $(document).unbind('.edatagrid').bind('mousedown.edatagrid', function(e){
            var p = $(e.target).closest('div.datagrid-view,div.combo-panel');
            if (p.length){
                if (p.hasClass('datagrid-view')){
                    // Click landed inside some datagrid; save only if it is a
                    // different grid than the one currently being edited.
                    var dg = p.children('table');
                    if (dg.length && currTarget != dg[0]){
                        _save();
                    }
                }
                return;
            }
            _save();

            function _save(){
                var dg = $(currTarget);
                if (dg.length){
                    dg.edatagrid('saveRow');
                    currTarget = undefined;
                }
            }
        });
    });
function buildGrid(target){
var opts = $.data(target, 'edatagrid').options;
$(target).datagrid($.extend({}, opts, {
onDblClickCell:function(index,field,value){
if (opts.editing){
$(this).edatagrid('editRow', index);
focusEditor(field);
}
if (opts.onDblClickCell){
opts.onDblClickCell.call(target, index, field, value);
}
},
onClickCell:function(index,field,value){
if (opts.editing && opts.editIndex >= 0){
$(this).edatagrid('editRow', index);
focusEditor(field);
}
if (opts.onClickCell){
opts.onClickCell.call(target, index, field, value);
}
},
onAfterEdit: function(index, row){
opts.editIndex = -1;
var url = row.isNewRecord ? opts.saveUrl : opts.updateUrl;
if (url){
$.post(url, row, function(data){
if (data.isError){
$(target).edatagrid('cancelRow',index);
$(target).edatagrid('selectRow',index);
$(target).edatagrid('editRow',index);
opts.onError.call(target, index, data);
return;
}
data.isNewRecord = null;
$(target).datagrid('updateRow', {
index: index,
row: data
});
if (opts.tree){
var idValue = row[opts.idField||'id'];
var t = $(opts.tree);
var node = t.tree('find', idValue);
if (node){
node.text = row[opts.treeTextField];
t.tree('update', node);
} else {
var pnode = t.tree('find', row[opts.treeParentField]);
t.tree('append', {
parent: (pnode ? pnode.target : null),
data: [{id:idValue,text:row[opts.treeTextField]}]
});
}
}
opts.onSave.call(target, index, row);
},'json');
} else {
opts.onSave.call(target, index, row);
}
if (opts.onAfterEdit) opts.onAfterEdit.call(target, index, row);
},
onCancelEdit: function(index, row){
opts.editIndex = -1;
if (row.isNewRecord) {
$(this).datagrid('deleteRow', index);
}
if (opts.onCancelEdit) opts.onCancelEdit.call(target, index, row);
},
onBeforeLoad: function(param){
if (opts.onBeforeLoad.call(target, param) == false){return false}
$(this).edatagrid('cancelRow');
if (opts.tree){
var node = $(opts.tree).tree('getSelected');
param[opts.treeParentField] = node ? node.id : undefined;
}
}
}));
function focusEditor(field){
var editor = $(target).datagrid('getEditor', {index:opts.editIndex,field:field});
if (editor){
editor.target.focus();
} else {
var editors = $(target).datagrid('getEditors', opts.editIndex);
if (editors.length){
editors[0].target.focus();
}
}
}
if (opts.tree){
$(opts.tree).tree({
url: opts.treeUrl,
onClick: function(node){
$(target).datagrid('load');
},
onDrop: function(dest,source,point){
var targetId = $(this).tree('getNode', dest).id;
$.ajax({
url: opts.treeDndUrl,
type:'post',
data:{
id:source.id,
targetId:targetId,
point:point
},
dataType:'json',
success:function(){
$(target).datagrid('load');
}
});
}
});
}
}
$.fn.edatagrid = function(options, param){
if (typeof options == 'string'){
var method = $.fn.edatagrid.methods[options];
if (method){
return method(this, param);
} else {
return this.datagrid(options, param);
}
}
options = options || {};
return this.each(function(){
var state = $.data(this, 'edatagrid');
if (state){
$.extend(state.options, options);
} else {
$.data(this, 'edatagrid', {
options: $.extend({}, $.fn.edatagrid.defaults, $.fn.edatagrid.parseOptions(this), options)
});
}
buildGrid(this);
});
};
$.fn.edatagrid.parseOptions = function(target){
return $.extend({}, $.fn.datagrid.parseOptions(target), {
});
};
$.fn.edatagrid.methods = {
options: function(jq){
var opts = $.data(jq[0], 'edatagrid').options;
return opts;
},
loadData: function(jq, data){
return jq.each(function(){
$(this).edatagrid('cancelRow');
$(this).datagrid('loadData', data);
});
},
enableEditing: function(jq){
return jq.each(function(){
var opts = $.data(this, 'edatagrid').options;
opts.editing = true;
});
},
disableEditing: function(jq){
return jq.each(function(){
var opts = $.data(this, 'edatagrid').options;
opts.editing = false;
});
},
editRow: function(jq, index){
return jq.each(function(){
var dg = $(this);
var opts = $.data(this, 'edatagrid').options;
var editIndex = opts.editIndex;
if (editIndex != index){
if (dg.datagrid('validateRow', editIndex)){
if (editIndex>=0){
if (opts.onBeforeSave.call(this, editIndex) == false) {
setTimeout(function(){
dg.datagrid('selectRow', editIndex);
},0);
return;
}
}
dg.datagrid('endEdit', editIndex);
dg.datagrid('beginEdit', index);
opts.editIndex = index;
if (currTarget != this && $(currTarget).length){
$(currTarget).edatagrid('saveRow');
currTarget = undefined;
}
if (opts.autoSave){
currTarget = this;
}
var rows = dg.datagrid('getRows');
opts.onEdit.call(this, index, rows[index]);
} else {
setTimeout(function(){
dg.datagrid('selectRow', editIndex);
}, 0);
}
}
});
},
addRow: function(jq, index){
return jq.each(function(){
var dg = $(this);
var opts = $.data(this, 'edatagrid').options;
if (opts.editIndex >= 0){
if (!dg.datagrid('validateRow', opts.editIndex)){
dg.datagrid('selectRow', opts.editIndex);
return;
}
if (opts.onBeforeSave.call(this, opts.editIndex) == false){
setTimeout(function(){
dg.datagrid('selectRow', opts.editIndex);
},0);
return;
}
dg.datagrid('endEdit', opts.editIndex);
}
var rows = dg.datagrid('getRows');
function _add(index, row){
if (index == undefined){
dg.datagrid('appendRow', row);
opts.editIndex = rows.length - 1;
} else {
dg.datagrid('insertRow', {index:index,row:row});
opts.editIndex = index;
}
}
if (typeof index == 'object'){
_add(index.index, $.extend(index.row, {isNewRecord:true}))
} else {
_add(index, {isNewRecord:true});
}
// if (index == undefined){
// dg.datagrid('appendRow', {isNewRecord:true});
// opts.editIndex = rows.length - 1;
// } else {
// dg.datagrid('insertRow', {
// index: index,
// row: {isNewRecord:true}
// });
// opts.editIndex = index;
// }
dg.datagrid('beginEdit', opts.editIndex);
dg.datagrid('selectRow', opts.editIndex);
if (opts.tree){
var node = $(opts.tree).tree('getSelected');
rows[opts.editIndex][opts.treeParentField] = (node ? node.id : 0);
}
opts.onAdd.call(this, opts.editIndex, rows[opts.editIndex]);
});
},
saveRow: function(jq){
return jq.each(function(){
var dg = $(this);
var opts = $.data(this, 'edatagrid').options;
if (opts.editIndex >= 0){
if (opts.onBeforeSave.call(this, opts.editIndex) == false) {
setTimeout(function(){
dg.datagrid('selectRow', opts.editIndex);
},0);
return;
}
$(this).datagrid('endEdit', opts.editIndex);
}
});
},
cancelRow: function(jq){
return jq.each(function(){
var opts = $.data(this, 'edatagrid').options;
if (opts.editIndex >= 0){
$(this).datagrid('cancelEdit', opts.editIndex);
}
});
},
destroyRow: function(jq, index){
return jq.each(function(){
var dg = $(this);
var opts = $.data(this, 'edatagrid').options;
var rows = [];
if (index == undefined){
rows = dg.datagrid('getSelections');
} else {
var rowIndexes = $.isArray(index) ? index : [index];
for(var i=0; i<rowIndexes.length; i++){
var row = opts.finder.getRow(this, rowIndexes[i]);
if (row){
rows.push(row);
}
}
}
if (!rows.length){
$.messager.show({
title: opts.destroyMsg.norecord.title,
msg: opts.destroyMsg.norecord.msg
});
return;
}
$.messager.confirm(opts.destroyMsg.confirm.title,opts.destroyMsg.confirm.msg,function(r){
if (r){
for(var i=0; i<rows.length; i++){
_del(rows[i]);
}
dg.datagrid('clearSelections');
}
});
function _del(row){
var index = dg.datagrid('getRowIndex', row);
if (index == -1){return}
if (row.isNewRecord){
dg.datagrid('cancelEdit', index);
} else {
if (opts.destroyUrl){
var idValue = row[opts.idField||'id'];
$.post(opts.destroyUrl, {id:idValue}, function(data){
var index = dg.datagrid('getRowIndex', idValue);
if (data.isError){
dg.datagrid('selectRow', index);
opts.onError.call(dg[0], index, data);
return;
}
if (opts.tree){
dg.datagrid('reload');
var t = $(opts.tree);
var node = t.tree('find', idValue);
if (node){
t.tree('remove', node.target);
}
} else {
dg.datagrid('cancelEdit', index);
dg.datagrid('deleteRow', index);
}
opts.onDestroy.call(dg[0], index, row);
var pager = dg.datagrid('getPager');
if (pager.length && !dg.datagrid('getRows').length){
dg.datagrid('options').pageNumber = pager.pagination('options').pageNumber;
dg.datagrid('reload');
}
}, 'json');
} else {
dg.datagrid('cancelEdit', index);
dg.datagrid('deleteRow', index);
opts.onDestroy.call(dg[0], index, row);
}
}
}
});
}
};
$.fn.edatagrid.defaults = $.extend({}, $.fn.datagrid.defaults, {
singleSelect: true,
editing: true,
editIndex: -1,
destroyMsg:{
norecord:{
title:'Warning',
msg:'No record is selected.'
},
confirm:{
title:'Confirm',
msg:'Are you sure you want to delete?'
}
},
// destroyConfirmTitle: 'Confirm',
// destroyConfirmMsg: 'Are you sure you want to delete?',
autoSave: false, // auto save the editing row when click out of datagrid
url: null, // return the datagrid data
saveUrl: null, // return the added row
updateUrl: null, // return the updated row
destroyUrl: null, // return {success:true}
tree: null, // the tree selector
treeUrl: null, // return tree data
treeDndUrl: null, // to process the drag and drop operation, return {success:true}
treeTextField: 'name',
treeParentField: 'parentId',
onAdd: function(index, row){},
onEdit: function(index, row){},
onBeforeSave: function(index){},
onSave: function(index, row){},
onDestroy: function(index, row){},
onError: function(index, row){}
});
    ////////////////////////////////
    // Register with easyui's auto-parser so declarative markup (e.g.
    // class="easyui-edatagrid") is initialized automatically on page load.
    $.parser.plugins.push('edatagrid');
})(jQuery);
sshrdp/mclab | lib/antlr-3.0.1/src/org/antlr/tool/RecursionOverflowMessage.java | 2991 | /*
[The "BSD licence"]
Copyright (c) 2005-2006 Terence Parr
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.antlr.tool;
import org.antlr.stringtemplate.StringTemplate;
import org.antlr.analysis.*;
import antlr.Token;
import java.util.*;
/** Indicates recursion overflow: a DFA state tried to add an NFA configuration
 *  whose NFA state p was mentioned in its stack context too many times.
 */
public class RecursionOverflowMessage extends Message {
    public DecisionProbe probe;
    public DFAState sampleBadState;
    public int alt;
    public Collection targetRules;
    public Collection callSiteStates;

    public RecursionOverflowMessage(DecisionProbe probe,
                                    DFAState sampleBadState,
                                    int alt,
                                    Collection targetRules,
                                    Collection callSiteStates)
    {
        super(ErrorManager.MSG_RECURSION_OVERLOW);
        this.probe = probe;
        this.sampleBadState = sampleBadState;
        this.alt = alt;
        this.targetRules = targetRules;
        this.callSiteStates = callSiteStates;
    }

    public String toString() {
        // Anchor the message at the decision's location in the grammar file.
        GrammarAST decisionNode = probe.dfa.getDecisionASTNode();
        line = decisionNode.getLine();
        column = decisionNode.getColumn();
        String grammarFile = probe.dfa.nfa.grammar.getFileName();
        if (grammarFile != null) {
            file = grammarFile;
        }

        // Render a sample input sequence that exhibits the overflow.
        List badLabels =
            probe.getSampleNonDeterministicInputSequence(sampleBadState);
        String sampleInput = probe.getInputSequenceDisplay(badLabels);

        // Fill the message template and let the base class format it.
        StringTemplate template = getMessageTemplate();
        template.setAttribute("targetRules", targetRules);
        template.setAttribute("alt", alt);
        template.setAttribute("callSiteStates", callSiteStates);
        template.setAttribute("input", sampleInput);
        return super.toString(template);
    }
}
| apache-2.0 |
LaphiLee/bi-platform | queryrouter/src/main/java/com/baidu/rigel/biplatform/queryrouter/queryplugin/plugins/model/ColumnCondition.java | 2388 | /**
* Copyright (c) 2014 Baidu, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
*
*/
package com.baidu.rigel.biplatform.queryrouter.queryplugin.plugins.model;
import com.baidu.rigel.biplatform.ac.query.model.MetaCondition;
import com.baidu.rigel.biplatform.ac.query.model.SQLCondition;
/**
* 指标条件
*
* @author luowenlei
*
*/
/**
 * Condition applied to a single metric/column when building a query.
 *
 * @author luowenlei
 *
 */
public class ColumnCondition implements MetaCondition {

    /**
     * serialVersionUID
     */
    private static final long serialVersionUID = 5059328459757316603L;

    /**
     * metaName - name of the metadata element this condition applies to
     */
    private String metaName;

    /**
     * columnConditions - the SQL condition restricting this column
     */
    private SQLCondition columnConditions;

    /**
     * metaType
     * NOTE(review): never assigned in this class; presumably populated by
     * deserialization or a subclass — TODO confirm with callers.
     */
    private MetaType metaType;

    /**
     * construct with metaUniqueName
     *
     * @param metaName meta unique name
     */
    public ColumnCondition(String metaName) {
        this.metaName = metaName;
    }

    // No-arg constructor, e.g. for bean-style instantiation/deserialization.
    public ColumnCondition() {
    }

    /*
     * (non-Javadoc)
     *
     * @see com.baidu.rigel.biplatform.ac.query.model.MetaCondition#getMetaUniqueName ()
     */
    @Override
    public String getMetaName() {
        return metaName;
    }

    /**
     * getter method for property columnConditions
     *
     * @return the columnConditions
     */
    public SQLCondition getColumnConditions() {
        return columnConditions;
    }

    /**
     * setter method for property columnConditions
     *
     * @param columnConditions the columnConditions to set
     */
    public void setColumnConditions(SQLCondition columnConditions) {
        this.columnConditions = columnConditions;
    }

    @Override
    public MetaType getMetaType() {
        return metaType;
    }
}
| apache-2.0 |
raehalme/keycloak | examples/providers/federation-provider/src/main/java/org/keycloak/examples/federation/properties/ReadonlyUserModelProxy.java | 1290 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.examples.federation.properties;
import org.keycloak.models.UserModel;
import org.keycloak.models.utils.UserModelDelegate;
/**
* Readonly proxy for a UserModel that prevents passwords from being updated.
*
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @version $Revision: 1 $
*/
/**
 * Readonly proxy for a UserModel that rejects username updates.
 * (Only {@link #setUsername(String)} is overridden here; all other
 * operations are delegated unchanged.)
 */
public class ReadonlyUserModelProxy extends UserModelDelegate {
    public ReadonlyUserModelProxy(UserModel delegate) {
        super(delegate);
    }

    /**
     * Always fails: the username of a proxied user is immutable.
     *
     * @throws IllegalStateException on every invocation
     */
    @Override
    public void setUsername(String username) {
        throw new IllegalStateException("Username is readonly");
    }
}
| apache-2.0 |
datanucleus/tests | jdo/json/src/test/org/json/JSONArray.java | 29103 | package org.json;
/*
Copyright (c) 2002 JSON.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
The Software shall be used for Good, not Evil.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
import java.io.IOException;
import java.io.Writer;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
/**
* A JSONArray is an ordered sequence of values. Its external text form is a
* string wrapped in square brackets with commas separating the values. The
* internal form is an object having <code>get</code> and <code>opt</code>
* methods for accessing the values by index, and <code>put</code> methods for
* adding or replacing values. The values can be any of these types:
* <code>Boolean</code>, <code>JSONArray</code>, <code>JSONObject</code>,
* <code>Number</code>, <code>String</code>, or the
* <code>JSONObject.NULL object</code>.
* <p>
* The constructor can convert a JSON text into a Java object. The
* <code>toString</code> method converts to JSON text.
* <p>
* A <code>get</code> method returns a value if one can be found, and throws an
* exception if one cannot be found. An <code>opt</code> method returns a
* default value instead of throwing an exception, and so is useful for
* obtaining optional values.
* <p>
* The generic <code>get()</code> and <code>opt()</code> methods return an
* object which you can cast or query for type. There are also typed
* <code>get</code> and <code>opt</code> methods that do type checking and type
* coersion for you.
* <p>
* The texts produced by the <code>toString</code> methods strictly conform to
* JSON syntax rules. The constructors are more forgiving in the texts they will
* accept:
* <ul>
* <li>An extra <code>,</code> <small>(comma)</small> may appear just
* before the closing bracket.</li>
* <li>The <code>null</code> value will be inserted when there
* is <code>,</code> <small>(comma)</small> elision.</li>
* <li>Strings may be quoted with <code>'</code> <small>(single
* quote)</small>.</li>
* <li>Strings do not need to be quoted at all if they do not begin with a quote
* or single quote, and if they do not contain leading or trailing spaces,
* and if they do not contain any of these characters:
* <code>{ } [ ] / \ : , = ; #</code> and if they do not look like numbers
* and if they are not the reserved words <code>true</code>,
* <code>false</code>, or <code>null</code>.</li>
* <li>Values can be separated by <code>;</code> <small>(semicolon)</small> as
* well as by <code>,</code> <small>(comma)</small>.</li>
* <li>Numbers may have the <code>0-</code> <small>(octal)</small> or
* <code>0x-</code> <small>(hex)</small> prefix.</li>
* <li>Comments written in the slashshlash, slashstar, and hash conventions
* will be ignored.</li>
* </ul>
* @author JSON.org
* @version 2
*/
public class JSONArray {
/**
* The arrayList where the JSONArray's properties are kept.
*/
private ArrayList myArrayList;
/**
* Construct an empty JSONArray.
*/
public JSONArray() {
this.myArrayList = new ArrayList();
}
/**
* Construct a JSONArray from a JSONTokener.
* @param x A JSONTokener
* @throws JSONException If there is a syntax error.
*/
public JSONArray(JSONTokener x) throws JSONException {
this();
char c = x.nextClean();
char q;
if (c == '[') {
q = ']';
} else if (c == '(') {
q = ')';
} else {
throw x.syntaxError("A JSONArray text must start with '['");
}
if (x.nextClean() == ']') {
return;
}
x.back();
for (;;) {
if (x.nextClean() == ',') {
x.back();
this.myArrayList.add(null);
} else {
x.back();
this.myArrayList.add(x.nextValue());
}
c = x.nextClean();
switch (c) {
case ';':
case ',':
if (x.nextClean() == ']') {
return;
}
x.back();
break;
case ']':
case ')':
if (q != c) {
throw x.syntaxError("Expected a '" + new Character(q) + "'");
}
return;
default:
throw x.syntaxError("Expected a ',' or ']'");
}
}
}
/**
* Construct a JSONArray from a source JSON text.
* @param source A string that begins with
* <code>[</code> <small>(left bracket)</small>
* and ends with <code>]</code> <small>(right bracket)</small>.
* @throws JSONException If there is a syntax error.
*/
public JSONArray(String source) throws JSONException {
this(new JSONTokener(source));
}
/**
* Construct a JSONArray from a Collection.
* @param collection A Collection.
*/
public JSONArray(Collection collection) {
this.myArrayList = (collection == null) ?
new ArrayList() :
new ArrayList(collection);
}
/**
* Construct a JSONArray from an array
* @throws JSONException If not an array.
*/
public JSONArray(Object array) throws JSONException {
this();
if (array.getClass().isArray()) {
int length = Array.getLength(array);
for (int i = 0; i < length; i += 1) {
this.put(Array.get(array, i));
}
} else {
throw new JSONException("JSONArray initial value should be a string or collection or array.");
}
}
/**
* Get the object value associated with an index.
* @param index
* The index must be between 0 and length() - 1.
* @return An object value.
* @throws JSONException If there is no value for the index.
*/
public Object get(int index) throws JSONException {
Object o = opt(index);
if (o == null) {
throw new JSONException("JSONArray[" + index + "] not found.");
}
return o;
}
/**
* Get the boolean value associated with an index.
* The string values "true" and "false" are converted to boolean.
*
* @param index The index must be between 0 and length() - 1.
* @return The truth.
* @throws JSONException If there is no value for the index or if the
* value is not convertable to boolean.
*/
public boolean getBoolean(int index) throws JSONException {
Object o = get(index);
if (o.equals(Boolean.FALSE) ||
(o instanceof String &&
((String)o).equalsIgnoreCase("false"))) {
return false;
} else if (o.equals(Boolean.TRUE) ||
(o instanceof String &&
((String)o).equalsIgnoreCase("true"))) {
return true;
}
throw new JSONException("JSONArray[" + index + "] is not a Boolean.");
}
/**
* Get the double value associated with an index.
*
* @param index The index must be between 0 and length() - 1.
* @return The value.
* @throws JSONException If the key is not found or if the value cannot
* be converted to a number.
*/
public double getDouble(int index) throws JSONException {
Object o = get(index);
try {
return o instanceof Number ?
((Number)o).doubleValue() :
Double.valueOf((String)o).doubleValue();
} catch (Exception e) {
throw new JSONException("JSONArray[" + index +
"] is not a number.");
}
}
/**
* Get the int value associated with an index.
*
* @param index The index must be between 0 and length() - 1.
* @return The value.
* @throws JSONException If the key is not found or if the value cannot
* be converted to a number.
* if the value cannot be converted to a number.
*/
public int getInt(int index) throws JSONException {
Object o = get(index);
return o instanceof Number ?
((Number)o).intValue() : (int)getDouble(index);
}
/**
* Get the JSONArray associated with an index.
* @param index The index must be between 0 and length() - 1.
* @return A JSONArray value.
* @throws JSONException If there is no value for the index. or if the
* value is not a JSONArray
*/
public JSONArray getJSONArray(int index) throws JSONException {
Object o = get(index);
if (o instanceof JSONArray) {
return (JSONArray)o;
}
throw new JSONException("JSONArray[" + index +
"] is not a JSONArray.");
}
/**
* Get the JSONObject associated with an index.
* @param index subscript
* @return A JSONObject value.
* @throws JSONException If there is no value for the index or if the
* value is not a JSONObject
*/
public JSONObject getJSONObject(int index) throws JSONException {
Object o = get(index);
if (o instanceof JSONObject) {
return (JSONObject)o;
}
throw new JSONException("JSONArray[" + index +
"] is not a JSONObject.");
}
/**
* Get the long value associated with an index.
*
* @param index The index must be between 0 and length() - 1.
* @return The value.
* @throws JSONException If the key is not found or if the value cannot
* be converted to a number.
*/
public long getLong(int index) throws JSONException {
Object o = get(index);
return o instanceof Number ?
((Number)o).longValue() : (long)getDouble(index);
}
/**
* Get the string associated with an index.
* @param index The index must be between 0 and length() - 1.
* @return A string value.
* @throws JSONException If there is no value for the index.
*/
public String getString(int index) throws JSONException {
return get(index).toString();
}
/**
* Determine if the value is null.
* @param index The index must be between 0 and length() - 1.
* @return true if the value at the index is null, or if there is no value.
*/
public boolean isNull(int index) {
return JSONObject.NULL.equals(opt(index));
}
/**
* Make a string from the contents of this JSONArray. The
* <code>separator</code> string is inserted between each element.
* Warning: This method assumes that the data structure is acyclical.
* @param separator A string that will be inserted between the elements.
* @return a string.
* @throws JSONException If the array contains an invalid number.
*/
public String join(String separator) throws JSONException {
int len = length();
StringBuffer sb = new StringBuffer();
for (int i = 0; i < len; i += 1) {
if (i > 0) {
sb.append(separator);
}
sb.append(JSONObject.valueToString(this.myArrayList.get(i)));
}
return sb.toString();
}
/**
* Get the number of elements in the JSONArray, included nulls.
*
* @return The length (or size).
*/
public int length() {
return this.myArrayList.size();
}
/**
* Get the optional object value associated with an index.
* @param index The index must be between 0 and length() - 1.
* @return An object value, or null if there is no
* object at that index.
*/
public Object opt(int index) {
return (index < 0 || index >= length()) ?
null : this.myArrayList.get(index);
}
/**
* Get the optional boolean value associated with an index.
* It returns false if there is no value at that index,
* or if the value is not Boolean.TRUE or the String "true".
*
* @param index The index must be between 0 and length() - 1.
* @return The truth.
*/
public boolean optBoolean(int index) {
return optBoolean(index, false);
}
/**
* Get the optional boolean value associated with an index.
* It returns the defaultValue if there is no value at that index or if
* it is not a Boolean or the String "true" or "false" (case insensitive).
*
* @param index The index must be between 0 and length() - 1.
* @param defaultValue A boolean default.
* @return The truth.
*/
public boolean optBoolean(int index, boolean defaultValue) {
try {
return getBoolean(index);
} catch (Exception e) {
return defaultValue;
}
}
/**
* Get the optional double value associated with an index.
* NaN is returned if there is no value for the index,
* or if the value is not a number and cannot be converted to a number.
*
* @param index The index must be between 0 and length() - 1.
* @return The value.
*/
public double optDouble(int index) {
return optDouble(index, Double.NaN);
}
/**
* Get the optional double value associated with an index.
* The defaultValue is returned if there is no value for the index,
* or if the value is not a number and cannot be converted to a number.
*
* @param index subscript
* @param defaultValue The default value.
* @return The value.
*/
public double optDouble(int index, double defaultValue) {
try {
return getDouble(index);
} catch (Exception e) {
return defaultValue;
}
}
/**
* Get the optional int value associated with an index.
* Zero is returned if there is no value for the index,
* or if the value is not a number and cannot be converted to a number.
*
* @param index The index must be between 0 and length() - 1.
* @return The value.
*/
public int optInt(int index) {
return optInt(index, 0);
}
/**
* Get the optional int value associated with an index.
* The defaultValue is returned if there is no value for the index,
* or if the value is not a number and cannot be converted to a number.
* @param index The index must be between 0 and length() - 1.
* @param defaultValue The default value.
* @return The value.
*/
public int optInt(int index, int defaultValue) {
try {
return getInt(index);
} catch (Exception e) {
return defaultValue;
}
}
/**
* Get the optional JSONArray associated with an index.
* @param index subscript
* @return A JSONArray value, or null if the index has no value,
* or if the value is not a JSONArray.
*/
public JSONArray optJSONArray(int index) {
Object o = opt(index);
return o instanceof JSONArray ? (JSONArray)o : null;
}
/**
* Get the optional JSONObject associated with an index.
* Null is returned if the key is not found, or null if the index has
* no value, or if the value is not a JSONObject.
*
* @param index The index must be between 0 and length() - 1.
* @return A JSONObject value.
*/
public JSONObject optJSONObject(int index) {
Object o = opt(index);
return o instanceof JSONObject ? (JSONObject)o : null;
}
/**
* Get the optional long value associated with an index.
* Zero is returned if there is no value for the index,
* or if the value is not a number and cannot be converted to a number.
*
* @param index The index must be between 0 and length() - 1.
* @return The value.
*/
public long optLong(int index) {
return optLong(index, 0);
}
/**
* Get the optional long value associated with an index.
* The defaultValue is returned if there is no value for the index,
* or if the value is not a number and cannot be converted to a number.
* @param index The index must be between 0 and length() - 1.
* @param defaultValue The default value.
* @return The value.
*/
public long optLong(int index, long defaultValue) {
try {
return getLong(index);
} catch (Exception e) {
return defaultValue;
}
}
/**
* Get the optional string value associated with an index. It returns an
* empty string if there is no value at that index. If the value
* is not a string and is not null, then it is coverted to a string.
*
* @param index The index must be between 0 and length() - 1.
* @return A String value.
*/
public String optString(int index) {
return optString(index, "");
}
/**
* Get the optional string associated with an index.
* The defaultValue is returned if the key is not found.
*
* @param index The index must be between 0 and length() - 1.
* @param defaultValue The default value.
* @return A String value.
*/
public String optString(int index, String defaultValue) {
Object o = opt(index);
return o != null ? o.toString() : defaultValue;
}
/**
* Append a boolean value. This increases the array's length by one.
*
* @param value A boolean value.
* @return this.
*/
public JSONArray put(boolean value) {
put(value ? Boolean.TRUE : Boolean.FALSE);
return this;
}
/**
* Put a value in the JSONArray, where the value will be a
* JSONArray which is produced from a Collection.
* @param value A Collection value.
* @return this.
*/
public JSONArray put(Collection value) {
put(new JSONArray(value));
return this;
}
/**
* Append a double value. This increases the array's length by one.
*
* @param value A double value.
* @throws JSONException if the value is not finite.
* @return this.
*/
public JSONArray put(double value) throws JSONException {
Double d = new Double(value);
JSONObject.testValidity(d);
put(d);
return this;
}
/**
* Append an int value. This increases the array's length by one.
*
* @param value An int value.
* @return this.
*/
public JSONArray put(int value) {
put(new Integer(value));
return this;
}
/**
* Append an long value. This increases the array's length by one.
*
* @param value A long value.
* @return this.
*/
public JSONArray put(long value) {
put(new Long(value));
return this;
}
/**
* Put a value in the JSONArray, where the value will be a
* JSONObject which is produced from a Map.
* @param value A Map value.
* @return this.
*/
public JSONArray put(Map value) {
put(new JSONObject(value));
return this;
}
/**
* Append an object value. This increases the array's length by one.
* @param value An object value. The value should be a
* Boolean, Double, Integer, JSONArray, JSONObject, Long, or String, or the
* JSONObject.NULL object.
* @return this.
*/
public JSONArray put(Object value) {
this.myArrayList.add(value);
return this;
}
/**
* Put or replace a boolean value in the JSONArray. If the index is greater
* than the length of the JSONArray, then null elements will be added as
* necessary to pad it out.
* @param index The subscript.
* @param value A boolean value.
* @return this.
* @throws JSONException If the index is negative.
*/
public JSONArray put(int index, boolean value) throws JSONException {
put(index, value ? Boolean.TRUE : Boolean.FALSE);
return this;
}
/**
* Put a value in the JSONArray, where the value will be a
* JSONArray which is produced from a Collection.
* @param index The subscript.
* @param value A Collection value.
* @return this.
* @throws JSONException If the index is negative or if the value is
* not finite.
*/
public JSONArray put(int index, Collection value) throws JSONException {
put(index, new JSONArray(value));
return this;
}
/**
* Put or replace a double value. If the index is greater than the length of
* the JSONArray, then null elements will be added as necessary to pad
* it out.
* @param index The subscript.
* @param value A double value.
* @return this.
* @throws JSONException If the index is negative or if the value is
* not finite.
*/
public JSONArray put(int index, double value) throws JSONException {
put(index, new Double(value));
return this;
}
/**
* Put or replace an int value. If the index is greater than the length of
* the JSONArray, then null elements will be added as necessary to pad
* it out.
* @param index The subscript.
* @param value An int value.
* @return this.
* @throws JSONException If the index is negative.
*/
public JSONArray put(int index, int value) throws JSONException {
put(index, new Integer(value));
return this;
}
/**
* Put or replace a long value. If the index is greater than the length of
* the JSONArray, then null elements will be added as necessary to pad
* it out.
* @param index The subscript.
* @param value A long value.
* @return this.
* @throws JSONException If the index is negative.
*/
public JSONArray put(int index, long value) throws JSONException {
put(index, new Long(value));
return this;
}
/**
* Put a value in the JSONArray, where the value will be a
* JSONObject which is produced from a Map.
* @param index The subscript.
* @param value The Map value.
* @return this.
* @throws JSONException If the index is negative or if the the value is
* an invalid number.
*/
public JSONArray put(int index, Map value) throws JSONException {
put(index, new JSONObject(value));
return this;
}
/**
* Put or replace an object value in the JSONArray. If the index is greater
* than the length of the JSONArray, then null elements will be added as
* necessary to pad it out.
* @param index The subscript.
* @param value The value to put into the array. The value should be a
* Boolean, Double, Integer, JSONArray, JSONObject, Long, or String, or the
* JSONObject.NULL object.
* @return this.
* @throws JSONException If the index is negative or if the the value is
* an invalid number.
*/
public JSONArray put(int index, Object value) throws JSONException {
JSONObject.testValidity(value);
if (index < 0) {
throw new JSONException("JSONArray[" + index + "] not found.");
}
if (index < length()) {
this.myArrayList.set(index, value);
} else {
while (index != length()) {
put(JSONObject.NULL);
}
put(value);
}
return this;
}
/**
* Produce a JSONObject by combining a JSONArray of names with the values
* of this JSONArray.
* @param names A JSONArray containing a list of key strings. These will be
* paired with the values.
* @return A JSONObject, or null if there are no names or if this JSONArray
* has no values.
* @throws JSONException If any of the names are null.
*/
public JSONObject toJSONObject(JSONArray names) throws JSONException {
if (names == null || names.length() == 0 || length() == 0) {
return null;
}
JSONObject jo = new JSONObject();
for (int i = 0; i < names.length(); i += 1) {
jo.put(names.getString(i), this.opt(i));
}
return jo;
}
/**
* Make a JSON text of this JSONArray. For compactness, no
* unnecessary whitespace is added. If it is not possible to produce a
* syntactically correct JSON text then null will be returned instead. This
* could occur if the array contains an invalid number.
* <p>
* Warning: This method assumes that the data structure is acyclical.
*
* @return a printable, displayable, transmittable
* representation of the array.
*/
public String toString() {
try {
return '[' + join(",") + ']';
} catch (Exception e) {
return null;
}
}
/**
* Make a prettyprinted JSON text of this JSONArray.
* Warning: This method assumes that the data structure is acyclical.
* @param indentFactor The number of spaces to add to each level of
* indentation.
* @return a printable, displayable, transmittable
* representation of the object, beginning
* with <code>[</code> <small>(left bracket)</small> and ending
* with <code>]</code> <small>(right bracket)</small>.
* @throws JSONException
*/
public String toString(int indentFactor) throws JSONException {
return toString(indentFactor, 0);
}
/**
* Make a prettyprinted JSON text of this JSONArray.
* Warning: This method assumes that the data structure is acyclical.
* @param indentFactor The number of spaces to add to each level of
* indentation.
* @param indent The indention of the top level.
* @return a printable, displayable, transmittable
* representation of the array.
* @throws JSONException
*/
String toString(int indentFactor, int indent) throws JSONException {
int len = length();
if (len == 0) {
return "[]";
}
int i;
StringBuffer sb = new StringBuffer("[");
if (len == 1) {
sb.append(JSONObject.valueToString(this.myArrayList.get(0),
indentFactor, indent));
} else {
int newindent = indent + indentFactor;
sb.append('\n');
for (i = 0; i < len; i += 1) {
if (i > 0) {
sb.append(",\n");
}
for (int j = 0; j < newindent; j += 1) {
sb.append(' ');
}
sb.append(JSONObject.valueToString(this.myArrayList.get(i),
indentFactor, newindent));
}
sb.append('\n');
for (i = 0; i < indent; i += 1) {
sb.append(' ');
}
}
sb.append(']');
return sb.toString();
}
/**
* Write the contents of the JSONArray as JSON text to a writer.
* For compactness, no whitespace is added.
* <p>
* Warning: This method assumes that the data structure is acyclical.
*
* @return The writer.
* @throws JSONException
*/
public Writer write(Writer writer) throws JSONException {
try {
boolean b = false;
int len = length();
writer.write('[');
for (int i = 0; i < len; i += 1) {
if (b) {
writer.write(',');
}
Object v = this.myArrayList.get(i);
if (v instanceof JSONObject) {
((JSONObject)v).write(writer);
} else if (v instanceof JSONArray) {
((JSONArray)v).write(writer);
} else {
writer.write(JSONObject.valueToString(v));
}
b = true;
}
writer.write(']');
return writer;
} catch (IOException e) {
throw new JSONException(e);
}
}
} | apache-2.0 |
rancher/rancher | pkg/controllers/management/auth/register.go | 4115 | package auth
import (
"context"
"github.com/rancher/rancher/pkg/clustermanager"
"github.com/rancher/rancher/pkg/types/config"
"github.com/rancher/rancher/pkg/wrangler"
v1 "k8s.io/api/rbac/v1"
"k8s.io/client-go/tools/cache"
)
// RegisterWranglerIndexers adds the cache indexers that the auth controllers
// rely on for role-binding lookups.  It must run before the wrangler caches
// are started so the indexers take effect.
func RegisterWranglerIndexers(config *wrangler.Context) {
	// ClusterRoleBindings: look up by (cluster role, subject) and by
	// membership-binding owner.
	config.RBAC.ClusterRoleBinding().Cache().AddIndexer(rbByRoleAndSubjectIndex, rbByClusterRoleAndSubject)
	config.RBAC.ClusterRoleBinding().Cache().AddIndexer(membershipBindingOwnerIndex, func(obj *v1.ClusterRoleBinding) ([]string, error) {
		return indexByMembershipBindingOwner(obj)
	})
	// RoleBindings: look up by owner, by (role, subject), and by
	// membership-binding owner.
	config.RBAC.RoleBinding().Cache().AddIndexer(rbByOwnerIndex, rbByOwner)
	config.RBAC.RoleBinding().Cache().AddIndexer(rbByRoleAndSubjectIndex, rbByRoleAndSubject)
	config.RBAC.RoleBinding().Cache().AddIndexer(membershipBindingOwnerIndex, func(obj *v1.RoleBinding) ([]string, error) {
		return indexByMembershipBindingOwner(obj)
	})
}
// RegisterIndexers registers the informer indexers used by the management
// auth controllers: PRTBs/CRTBs by role template and by user, tokens by
// user, and global role bindings by user.  It returns the first
// registration error encountered.
func RegisterIndexers(ctx context.Context, scaledContext *config.ScaledContext) error {
	// ProjectRoleTemplateBindings: by role template and by user reference.
	prtbInformer := scaledContext.Management.ProjectRoleTemplateBindings("").Controller().Informer()
	prtbIndexers := map[string]cache.IndexFunc{
		prtbByRoleTemplateIndex: prtbByRoleTemplate,
		prtbByUserRefKey:        prtbByUserRefFunc,
	}
	if err := prtbInformer.AddIndexers(prtbIndexers); err != nil {
		return err
	}

	// ClusterRoleTemplateBindings: by role template and by user reference.
	crtbInformer := scaledContext.Management.ClusterRoleTemplateBindings("").Controller().Informer()
	crtbIndexers := map[string]cache.IndexFunc{
		crtbByRoleTemplateIndex: crtbByRoleTemplate,
		crtbByUserRefKey:        crtbByUserRefFunc,
	}
	if err := crtbInformer.AddIndexers(crtbIndexers); err != nil {
		return err
	}

	// Tokens: by user reference.
	tokenInformer := scaledContext.Management.Tokens("").Controller().Informer()
	if err := tokenInformer.AddIndexers(map[string]cache.IndexFunc{
		tokenByUserRefKey: tokenByUserRefFunc,
	}); err != nil {
		return err
	}

	// GlobalRoleBindings: by user reference.
	grbInformer := scaledContext.Management.GlobalRoleBindings("").Controller().Informer()
	return grbInformer.AddIndexers(map[string]cache.IndexFunc{
		grbByUserRefKey: grbByUserRefFunc,
	})
}
// RegisterEarly wires up the lifecycle and sync handlers for the auth-related
// management resources (role template bindings, global roles, users, tokens,
// settings, and the legacy cleaners).  Registration order mirrors the
// construction order of the handlers above it.
func RegisterEarly(ctx context.Context, management *config.ManagementContext, clusterManager *clustermanager.Manager) {
	// Build all handlers first, then attach them to their controllers.
	prtb, crtb := newRTBLifecycles(management)
	gr := newGlobalRoleLifecycle(management)
	grb := newGlobalRoleBindingLifecycle(management, clusterManager)
	p, c := newPandCLifecycles(management)
	u := newUserLifecycle(management, clusterManager)
	n := newTokenController(management)
	ua := newUserAttributeController(management)
	s := newAuthSettingController(management)
	rt := newRoleTemplateLifecycle(management, clusterManager)
	grbLegacy := newLegacyGRBCleaner(management)
	rtLegacy := newLegacyRTCleaner(management)

	// Full lifecycle (create/update/remove) handlers.
	management.Management.ClusterRoleTemplateBindings("").AddLifecycle(ctx, ctrbMGMTController, crtb)
	management.Management.ProjectRoleTemplateBindings("").AddLifecycle(ctx, ptrbMGMTController, prtb)
	management.Management.GlobalRoles("").AddLifecycle(ctx, grController, gr)
	management.Management.GlobalRoleBindings("").AddLifecycle(ctx, grbController, grb)
	management.Management.Users("").AddLifecycle(ctx, userController, u)
	management.Management.RoleTemplates("").AddLifecycle(ctx, roleTemplateLifecycleName, rt)

	// Plain sync handlers; the project/cluster remove side is registered
	// separately in RegisterLate.
	management.Management.Clusters("").AddHandler(ctx, clusterCreateController, c.sync)
	management.Management.Projects("").AddHandler(ctx, projectCreateController, p.sync)
	management.Management.Tokens("").AddHandler(ctx, tokenController, n.sync)
	management.Management.UserAttributes("").AddHandler(ctx, userAttributeController, ua.sync)
	management.Management.Settings("").AddHandler(ctx, authSettingController, s.sync)
	management.Management.GlobalRoleBindings("").AddHandler(ctx, "legacy-grb-cleaner", grbLegacy.sync)
	management.Management.RoleTemplates("").AddHandler(ctx, "legacy-rt-cleaner", rtLegacy.sync)
}
// RegisterLate registers the project/cluster removal lifecycles. It is split
// from RegisterEarly — presumably so removal handling starts only after the
// rest of the system is up (TODO confirm against the caller).
func RegisterLate(ctx context.Context, management *config.ManagementContext) {
	projectLifecycle, clusterLifecycle := newPandCLifecycles(management)
	management.Management.Projects("").AddLifecycle(ctx, projectRemoveController, projectLifecycle)
	management.Management.Clusters("").AddLifecycle(ctx, clusterRemoveController, clusterLifecycle)
}
| apache-2.0 |
# NOTE(review): this file is fixture input for the PTVS formatting tests
# (path: Python/Tests/TestData/FormattingTests/async.py); the unusual
# spacing below is presumably deliberate test data — do not "clean" it.
async def f(x):
    async for i in await x:
        pass
    # comment before
    async with x:
        pass

# NOTE(review): an async comprehension at module level is not valid runtime
# Python; assumed intentional for exercising the formatter — confirm.
[ x async for x in await x]
dremio/arrow | java/vector/src/main/java/org/apache/arrow/vector/UInt4Vector.java | 9017 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.arrow.vector;
import org.apache.arrow.memory.BufferAllocator;
import org.apache.arrow.vector.complex.impl.UInt4ReaderImpl;
import org.apache.arrow.vector.complex.reader.FieldReader;
import org.apache.arrow.vector.holders.UInt4Holder;
import org.apache.arrow.vector.holders.NullableUInt4Holder;
import org.apache.arrow.vector.types.Types;
import org.apache.arrow.vector.types.pojo.FieldType;
import org.apache.arrow.vector.util.TransferPair;
import org.slf4j.Logger;
/**
* UInt4Vector implements a fixed width (4 bytes) vector of
* integer values which could be null. A validity buffer (bit vector) is
* maintained to track which elements in the vector are null.
*/
public class UInt4Vector extends BaseFixedWidthVector {
  // Each element occupies exactly 4 bytes in valueBuffer.
  private static final byte TYPE_WIDTH = 4;
  // Reader exposing this vector through the generic FieldReader API.
  private final FieldReader reader;

  /**
   * Creates a nullable UINT4 vector with the given name, using the default
   * nullable field type for the UINT4 minor type.
   *
   * @param name name of the vector
   * @param allocator allocator for the underlying validity/value buffers
   */
  public UInt4Vector(String name, BufferAllocator allocator) {
    this(name, FieldType.nullable(org.apache.arrow.vector.types.Types.MinorType.UINT4.getType()),
        allocator);
  }

  /**
   * Creates a UINT4 vector with an explicit field type (allows non-nullable
   * fields or field-level metadata).
   */
  public UInt4Vector(String name, FieldType fieldType, BufferAllocator allocator) {
    super(name, allocator, fieldType, TYPE_WIDTH);
    reader = new UInt4ReaderImpl(UInt4Vector.this);
  }

  /** Returns the reader for this vector. */
  @Override
  public FieldReader getReader() {
    return reader;
  }

  /** Returns the minor type of this vector (always UINT4). */
  @Override
  public Types.MinorType getMinorType() {
    return Types.MinorType.UINT4;
  }

  /******************************************************************
   *                                                                *
   *          vector value retrieval methods                        *
   *                                                                *
   ******************************************************************/

  /**
   * Get the element at the given index from the vector.
   *
   * <p>NOTE(review): the 4 stored bytes are returned as a signed {@code int};
   * unsigned interpretation is presumably left to callers — confirm.
   *
   * @param index position of element
   * @return element at given index
   * @throws IllegalStateException if the slot at {@code index} is null
   */
  public int get(int index) throws IllegalStateException {
    if (isSet(index) == 0) {
      throw new IllegalStateException("Value at index is null");
    }
    return valueBuffer.getInt(index * TYPE_WIDTH);
  }

  /**
   * Get the element at the given index from the vector and
   * sets the state in holder. If element at given index
   * is null, holder.isSet will be zero.
   *
   * @param index position of element
   */
  public void get(int index, NullableUInt4Holder holder) {
    if (isSet(index) == 0) {
      holder.isSet = 0;
      return;
    }
    holder.isSet = 1;
    holder.value = valueBuffer.getInt(index * TYPE_WIDTH);
  }

  /**
   * Same as {@link #get(int)}, but returns {@code null} (instead of throwing)
   * for a null slot, boxing the value otherwise.
   *
   * @param index position of element
   * @return element at given index
   */
  public Integer getObject(int index) {
    if (isSet(index) == 0) {
      return null;
    } else {
      return valueBuffer.getInt(index * TYPE_WIDTH);
    }
  }

  /**
   * Copies the validity bit and value at {@code fromIndex} of {@code from}
   * into {@code thisIndex} of this vector.
   *
   * <p>NOTE(review): the value bytes are read even when the source slot is
   * null; the copied bytes are then arbitrary but masked by the validity bit.
   */
  public void copyFrom(int fromIndex, int thisIndex, UInt4Vector from) {
    BitVectorHelper.setValidityBit(validityBuffer, thisIndex, from.isSet(fromIndex));
    final int value = from.valueBuffer.getInt(fromIndex * TYPE_WIDTH);
    valueBuffer.setInt(thisIndex * TYPE_WIDTH, value);
  }

  /**
   * Same as {@link #copyFrom(int, int, UInt4Vector)} except that it grows this
   * vector first if {@code thisIndex} is beyond the current value capacity.
   */
  public void copyFromSafe(int fromIndex, int thisIndex, UInt4Vector from) {
    handleSafe(thisIndex);
    copyFrom(fromIndex, thisIndex, from);
  }

  /******************************************************************
   *                                                                *
   *          vector value setter methods                           *
   *                                                                *
   ******************************************************************/

  /** Writes the raw 4-byte value; does not touch the validity bit. */
  private void setValue(int index, int value) {
    valueBuffer.setInt(index * TYPE_WIDTH, value);
  }

  /**
   * Set the element at the given index to the given value.
   *
   * @param index position of element
   * @param value value of element
   */
  public void set(int index, int value) {
    BitVectorHelper.setValidityBitToOne(validityBuffer, index);
    setValue(index, value);
  }

  /**
   * Set the element at the given index to the value set in data holder.
   * If the value in holder is not indicated as set, element in the
   * at the given index will be null.
   *
   * @param index position of element
   * @param holder nullable data holder for value of element
   * @throws IllegalArgumentException if {@code holder.isSet} is negative
   */
  public void set(int index, NullableUInt4Holder holder) throws IllegalArgumentException {
    if (holder.isSet < 0) {
      throw new IllegalArgumentException();
    } else if (holder.isSet > 0) {
      BitVectorHelper.setValidityBitToOne(validityBuffer, index);
      setValue(index, holder.value);
    } else {
      BitVectorHelper.setValidityBit(validityBuffer, index, 0);
    }
  }

  /**
   * Set the element at the given index to the value set in data holder.
   *
   * @param index position of element
   * @param holder data holder for value of element
   */
  public void set(int index, UInt4Holder holder) {
    BitVectorHelper.setValidityBitToOne(validityBuffer, index);
    setValue(index, holder.value);
  }

  /**
   * Same as {@link #set(int, int)} except that it handles the
   * case when index is greater than or equal to existing
   * value capacity {@link #getValueCapacity()}.
   *
   * @param index position of element
   * @param value value of element
   */
  public void setSafe(int index, int value) {
    handleSafe(index);
    set(index, value);
  }

  /**
   * Same as {@link #set(int, NullableUInt4Holder)} except that it handles the
   * case when index is greater than or equal to existing
   * value capacity {@link #getValueCapacity()}.
   *
   * @param index position of element
   * @param holder nullable data holder for value of element
   */
  public void setSafe(int index, NullableUInt4Holder holder) throws IllegalArgumentException {
    handleSafe(index);
    set(index, holder);
  }

  /**
   * Same as {@link #set(int, UInt4Holder)} except that it handles the
   * case when index is greater than or equal to existing
   * value capacity {@link #getValueCapacity()}.
   *
   * @param index position of element
   * @param holder data holder for value of element
   */
  public void setSafe(int index, UInt4Holder holder) {
    handleSafe(index);
    set(index, holder);
  }

  /**
   * Set the element at the given index to null.
   *
   * @param index position of element
   */
  public void setNull(int index) {
    handleSafe(index);
    /* not really needed to set the bit to 0 as long as
     * the buffer always starts from 0.
     */
    BitVectorHelper.setValidityBit(validityBuffer, index, 0);
  }

  /**
   * Sets the element at {@code index}, treating {@code isSet > 0} as non-null;
   * otherwise only the validity bit is cleared (value bytes untouched).
   */
  public void set(int index, int isSet, int value) {
    if (isSet > 0) {
      set(index, value);
    } else {
      BitVectorHelper.setValidityBit(validityBuffer, index, 0);
    }
  }

  /** Same as {@link #set(int, int, int)} but grows the vector if needed. */
  public void setSafe(int index, int isSet, int value) {
    handleSafe(index);
    set(index, isSet, value);
  }

  /******************************************************************
   *                                                                *
   *                      vector transfer                           *
   *                                                                *
   ******************************************************************/

  /** Returns a pair that transfers data into a freshly created vector named {@code ref}. */
  @Override
  public TransferPair getTransferPair(String ref, BufferAllocator allocator) {
    return new TransferImpl(ref, allocator);
  }

  /** Returns a pair that transfers data into the given existing UInt4Vector. */
  @Override
  public TransferPair makeTransferPair(ValueVector to) {
    return new TransferImpl((UInt4Vector) to);
  }

  /** TransferPair implementation moving/copying this vector's data into another UInt4Vector. */
  private class TransferImpl implements TransferPair {
    UInt4Vector to;

    /** Target is a new empty vector with the same field type. */
    public TransferImpl(String ref, BufferAllocator allocator) {
      to = new UInt4Vector(ref, field.getFieldType(), allocator);
    }

    /** Target is a caller-supplied vector. */
    public TransferImpl(UInt4Vector to) {
      this.to = to;
    }

    @Override
    public UInt4Vector getTo() {
      return to;
    }

    @Override
    public void transfer() {
      transferTo(to);
    }

    @Override
    public void splitAndTransfer(int startIndex, int length) {
      splitAndTransferTo(startIndex, length, to);
    }

    @Override
    public void copyValueSafe(int fromIndex, int toIndex) {
      to.copyFromSafe(fromIndex, toIndex, UInt4Vector.this);
    }
  }
}
openstack/magnum | magnum/tests/functional/api/v1/models/bay_model.py | 1014 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from magnum.tests.functional.common import models
class BayData(models.BaseModel):
    """Data that encapsulates bay attributes"""
    # No fields declared here: attributes are presumably populated dynamically
    # by models.BaseModel from API responses — TODO confirm against BaseModel.
    pass
class BayEntity(models.EntityModel):
    """Entity Model that represents a single instance of BayData"""
    # Key under which a single bay appears in API payloads.
    ENTITY_NAME = 'bay'
    # Model class used to deserialize the entity payload.
    MODEL_TYPE = BayData
class BayCollection(models.CollectionModel):
    """Collection Model that represents a list of BayData objects"""
    # Key under which the list of bays appears in API payloads.
    COLLECTION_NAME = 'baylists'
    # Model class used to deserialize each element of the collection.
    MODEL_TYPE = BayData
| apache-2.0 |
xuliugen/mina-project | mina-core/src/test/java/org/apache/mina/transport/vmpipe/VmPipeTrafficControlTest.java | 1942 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.mina.transport.vmpipe;
import java.net.SocketAddress;
import org.apache.mina.core.future.ConnectFuture;
import org.apache.mina.core.service.IoConnector;
import org.apache.mina.core.service.IoHandler;
import org.apache.mina.transport.AbstractTrafficControlTest;
/**
* Tests suspending and resuming reads and writes for the VM pipe transport
* type.
*
* @author <a href="http://mina.apache.org">Apache MINA Project</a>
*/
public class VmPipeTrafficControlTest extends AbstractTrafficControlTest {

    /** Runs the shared suspend/resume traffic-control suite against an in-VM pipe acceptor. */
    public VmPipeTrafficControlTest() {
        super(new VmPipeAcceptor());
    }

    /** Opens a client-side VM pipe connection to the given port with the supplied handler. */
    @Override
    protected ConnectFuture connect(int port, IoHandler handler) throws Exception {
        final IoConnector vmPipeConnector = new VmPipeConnector();
        vmPipeConnector.setHandler(handler);
        return vmPipeConnector.connect(new VmPipeAddress(port));
    }

    /** Builds the server-side bind address for the given port. */
    @Override
    protected SocketAddress createServerSocketAddress(int port) {
        return new VmPipeAddress(port);
    }

    /** Extracts the port number back out of a VM pipe address. */
    @Override
    protected int getPort(SocketAddress address) {
        final VmPipeAddress vmPipeAddress = (VmPipeAddress) address;
        return vmPipeAddress.getPort();
    }
}
| apache-2.0 |
hferentschik/hibernate-validator | engine/src/test/java/org/hibernate/validator/test/internal/engine/DefaultParameterNameProviderTest.java | 3468 | /*
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat, Inc. and/or its affiliates, and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hibernate.validator.test.internal.engine;
import java.util.Collections;
import java.util.List;
import javax.validation.ParameterNameProvider;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.hibernate.validator.internal.engine.DefaultParameterNameProvider;
import static org.hibernate.validator.internal.util.CollectionHelper.newArrayList;
import static org.testng.Assert.assertEquals;
/**
* Unit test for {@link DefaultParameterNameProvider}.
*
* @author Gunnar Morling
*/
public class DefaultParameterNameProviderTest {

    // Provider under test; the default implementation synthesizes names of the
    // form arg0, arg1, ... for every executable.
    private ParameterNameProvider provider;

    @BeforeClass
    public void setupParameterNameProvider() {
        provider = new DefaultParameterNameProvider();
    }

    @Test
    public void getParametersForParameterlessConstructor() throws Exception {
        assertEquals(
                provider.getParameterNames( Foo.class.getConstructor() ),
                expectedNames()
        );
    }

    @Test
    public void getParametersForConstructorWithOneParameter() throws Exception {
        assertEquals(
                provider.getParameterNames( Foo.class.getConstructor( String.class ) ),
                expectedNames( "arg0" )
        );
    }

    @Test
    public void getParametersForConstructorWithSeveralParameters() throws Exception {
        assertEquals(
                provider.getParameterNames( Foo.class.getConstructor( String.class, String.class ) ),
                expectedNames( "arg0", "arg1" )
        );
    }

    @Test
    public void getParametersForParameterlessMethod() throws Exception {
        assertEquals(
                provider.getParameterNames( Foo.class.getMethod( "foo" ) ),
                expectedNames()
        );
    }

    @Test
    public void getParametersForMethodWithOneParameter() throws Exception {
        assertEquals(
                provider.getParameterNames( Foo.class.getMethod( "foo", String.class ) ),
                expectedNames( "arg0" )
        );
    }

    @Test
    public void getParametersForMethodWithSeveralParameters() throws Exception {
        assertEquals(
                provider.getParameterNames( Foo.class.getMethod( "foo", String.class, String.class ) ),
                expectedNames( "arg0", "arg1" )
        );
    }

    // Builds a mutable list containing the given names, in order.
    private List<String> expectedNames(String... names) {
        List<String> parameterNames = newArrayList();
        for ( String name : names ) {
            parameterNames.add( name );
        }
        return parameterNames;
    }

    // Reflection target; member signatures must not change since the tests
    // look them up via getConstructor/getMethod.
    @SuppressWarnings("unused")
    private static class Foo {

        public Foo() {
        }

        public Foo(String bar) {
        }

        public Foo(String bar, String baz) {
        }

        public void foo() {
        }

        public void foo(String bar) {
        }

        public void foo(String bar, String baz) {
        }
    }
}
| apache-2.0 |
chef-partners/knife-vsphere | spec/vsphere_connection_handling_spec.rb | 2431 | require "spec_helper"
require "chef/knife/helpers/base_vsphere_command"
describe Chef::Knife::BaseVsphereCommand do
  describe "#password" do
    # Every example stubs the :vsphere_pass knife config lookup.
    before do
      expect(subject).to receive(:get_config).with(:vsphere_pass).at_least(:once).and_return(password)
    end

    context "password is in config file" do
      let(:password) { "ossifrage" }

      it "returns the password" do
        expect(subject.password).to eq "ossifrage"
      end
    end

    context "password is in config file but encoded" do
      # "base64:" prefix marks a base64-encoded value that must be decoded.
      let(:password) { "base64:c3F1ZWVtaXNo" }

      it "decodes the password" do
        expect(subject.password).to eq "squeemish"
      end
    end

    context "password is not in config file" do
      let(:password) { nil }
      let(:ui) { double("Ui", ask: "passwurd") }

      it "asks for a password" do
        expect(subject).to receive(:ui).and_return ui
        expect(subject.password).to eq "passwurd"
      end
    end
  end

  describe "#conn_opts" do
    let(:ui) { double("Ui", ask: "passwurd") }
    # Fake knife configuration backing the stubbed get_config below.
    let(:config) do
      { vsphere_host: "hostname",
        vsphere_path: "path",
        vsphere_port: "port",
        vsphere_nossl: true,
        vsphere_user: "user",
        vsphere_pass: "password",
        vsphere_insecure: false,
        proxy_host: "proxyhost",
        proxy_port: "proxyport" }
    end

    before do
      allow(subject).to receive(:get_config) do |option|
        config[option]
      end
    end

    it "includes the host" do
      expect(subject.conn_opts).to include(host: "hostname")
    end

    it "includes the path" do
      expect(subject.conn_opts).to include(path: "path")
    end

    # Fixed copy-paste bug: this example was previously also described as
    # "includes the path" although it asserts the port.
    it "includes the port" do
      expect(subject.conn_opts).to include(port: "port")
    end

    it "includes whether or not to use ssl" do
      expect(subject.conn_opts).to include(use_ssl: false)
    end

    it "includes the user" do
      expect(subject.conn_opts).to include(user: "user")
    end

    it "includes the password" do
      expect(subject.conn_opts).to include(password: "password")
    end

    it "includes whether or not to ignore certificates" do
      expect(subject.conn_opts).to include(insecure: false)
    end

    it "includes the proxy host" do
      expect(subject.conn_opts).to include(proxyHost: "proxyhost")
    end

    it "includes the proxy port" do
      expect(subject.conn_opts).to include(proxyPort: "proxyport")
    end
  end
end
| apache-2.0 |
yoinx/kernel_adiutor | app/src/main/java/com/grarak/kerneladiutor/fragments/tools/BuildpropFragment.java | 11600 | /*
* Copyright (C) 2015 Willi Ye
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.grarak.kerneladiutor.fragments.tools;
import android.content.DialogInterface;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.support.v4.view.MenuItemCompat;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AlertDialog;
import android.support.v7.widget.AppCompatEditText;
import android.support.v7.widget.RecyclerView;
import android.support.v7.widget.SearchView;
import android.text.Html;
import android.view.Gravity;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.grarak.kerneladiutor.R;
import com.grarak.kerneladiutor.elements.cards.PopupCardView;
import com.grarak.kerneladiutor.fragments.RecyclerViewFragment;
import com.grarak.kerneladiutor.utils.Constants;
import com.grarak.kerneladiutor.utils.Utils;
import com.grarak.kerneladiutor.utils.tools.Buildprop;
import com.kerneladiutor.library.root.RootUtils;
import java.util.LinkedHashMap;
/**
* Created by willi on 31.12.14.
*/
/**
 * Fragment that lists the device's build.prop entries in cards and lets the
 * user add, edit, delete and back up properties (root required for writes).
 */
public class BuildpropFragment extends RecyclerViewFragment implements View.OnClickListener {

    // Handler used to schedule the (slow, root-shelling) refresh off the click path.
    private Handler hand;
    // Header showing "N items found".
    private TextView title;
    private SwipeRefreshLayout refreshLayout;
    // key -> value snapshot of build.prop, in file order.
    private LinkedHashMap<String, String> buildpropItem;
    // Search menu item; collapsed on refresh and back press.
    private MenuItem searchItem;

    /** Inflates the swipe-to-refresh layout and wires the add/backup buttons. */
    @Override
    public RecyclerView getRecyclerView() {
        View view = getParentView(R.layout.swiperefresh_fragment);
        title = (TextView) view.findViewById(R.id.title_view);
        refreshLayout = (SwipeRefreshLayout) view.findViewById(R.id.refresh_layout);
        refreshLayout.setColorSchemeResources(R.color.color_primary);
        refreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
            @Override
            public void onRefresh() {
                // Small delay so the spinner is visible before the reload work runs.
                hand.postDelayed(refresh, 500);
            }
        });
        view.findViewById(R.id.add_button).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                // null key/value + modify=false -> empty "add property" dialog.
                addKeyDialog(null, null, false);
            }
        });
        view.findViewById(R.id.backup_button).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                backup();
            }
        });
        return (RecyclerView) view.findViewById(R.id.recycler_view);
    }

    /** Shows the floating action button only pre-Lollipop (see layout behavior). */
    @Override
    public void preInit(Bundle savedInstanceState) {
        super.preInit(savedInstanceState);
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP)
            fabView.setVisibility(View.VISIBLE);
    }

    /** Reads build.prop and creates one clickable card per property. */
    @Override
    public void init(Bundle savedInstanceState) {
        super.init(savedInstanceState);
        hand = getHandler();
        buildpropItem = Buildprop.getProps();
        for (int i = 0; i < buildpropItem.size(); i++) {
            PopupCardView.DPopupCard mPropCard = new PopupCardView.DPopupCard(null);
            mPropCard.setDescription((String) buildpropItem.keySet().toArray()[i]);
            mPropCard.setItem((String) buildpropItem.values().toArray()[i]);
            mPropCard.setOnClickListener(this);
            addView(mPropCard);
        }
    }

    @Override
    public void postInit(Bundle savedInstanceState) {
        super.postInit(savedInstanceState);
        title.setText(getString(R.string.items_found, buildpropItem.size()));
    }

    /** Card click: offer "modify" (index 0) or "delete" (index 1) for that property. */
    @Override
    public void onClick(final View v) {
        AlertDialog.Builder dialog = new AlertDialog.Builder(getActivity());
        dialog.setItems(getResources().getStringArray(R.array.build_prop_menu),
                new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                        PopupCardView popupCardView = (PopupCardView) v;
                        switch (which) {
                            case 0:
                                addKeyDialog(popupCardView.getDescription().toString(),
                                        popupCardView.getItem(), true);
                                break;
                            case 1:
                                deleteDialog(popupCardView.getDescription().toString(),
                                        popupCardView.getItem());
                                break;
                        }
                    }
                }).show();
    }

    // Rebuilds all cards from a fresh build.prop read; runs on the handler
    // thread and posts UI updates back to the main thread.
    private final Runnable refresh = new Runnable() {
        @Override
        public void run() {
            if (searchItem != null) MenuItemCompat.collapseActionView(searchItem);
            removeAllViews();
            buildpropItem = Buildprop.getProps();
            for (int i = 0; i < buildpropItem.size(); i++) {
                PopupCardView.DPopupCard mPropCard = new PopupCardView.DPopupCard(null);
                mPropCard.setDescription((String) buildpropItem.keySet().toArray()[i]);
                mPropCard.setItem((String) buildpropItem.values().toArray()[i]);
                mPropCard.setOnClickListener(BuildpropFragment.this);
                addView(mPropCard);
            }
            try {
                getActivity().runOnUiThread(new Runnable() {
                    @Override
                    public void run() {
                        title.setText(getString(R.string.items_found, buildpropItem.size()));
                        refreshLayout.setRefreshing(false);
                    }
                });
            } catch (NullPointerException ignored) {
                // Fragment may be detached (getActivity() == null); refresh is best-effort.
            }
        }
    };

    /**
     * Shows the add/edit dialog.
     *
     * @param key    existing key, or null when adding
     * @param value  existing value, or null when adding
     * @param modify true to overwrite the given key/value, false to add a new one
     */
    private void addKeyDialog(final String key, final String value, final boolean modify) {
        LinearLayout dialogLayout = new LinearLayout(getActivity());
        dialogLayout.setOrientation(LinearLayout.VERTICAL);
        dialogLayout.setGravity(Gravity.CENTER);
        dialogLayout.setPadding(30, 20, 30, 20);
        final AppCompatEditText keyEdit = new AppCompatEditText(getActivity());
        keyEdit.setTextColor(getResources().getColor(Utils.DARKTHEME ? R.color.white : R.color.black));
        if (modify) keyEdit.setText(key.trim());
        else keyEdit.setHint(getString(R.string.key));
        final AppCompatEditText valueEdit = new AppCompatEditText(getActivity());
        valueEdit.setTextColor(getResources().getColor(Utils.DARKTHEME ? R.color.white : R.color.black));
        if (modify) valueEdit.setText(value.trim());
        else valueEdit.setHint(getString(R.string.value));
        dialogLayout.addView(keyEdit);
        dialogLayout.addView(valueEdit);
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        builder.setView(dialogLayout).setNegativeButton(getString(android.R.string.cancel),
                new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog, int which) {
                    }
                })
                .setPositiveButton(getString(android.R.string.ok), new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface dialog,
                                        int which) {
                        if (modify)
                            overwrite(key.trim(), value.trim(), keyEdit.getText().toString().trim(),
                                    valueEdit.getText().toString().trim());
                        else
                            add(keyEdit.getText().toString().trim(), valueEdit.getText().toString().trim());
                    }
                }).show();
    }

    // NOTE(review): the 'value' parameter is unused; only the key is needed to delete.
    private void deleteDialog(final String key, final String value) {
        Utils.confirmDialog(null, getString(R.string.delete_question, key), new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                delete(key.trim());
            }
        }, getActivity());
    }

    /** Remounts /system read-write and copies build.prop to build.prop.bak. */
    private void backup() {
        RootUtils.mount(true, "/system");
        RootUtils.runCommand("cp -f " + Constants.BUILD_PROP + " " + Constants.BUILD_PROP + ".bak");
        Utils.toast(getString(R.string.backup_created, Constants.BUILD_PROP + ".bak"), getActivity());
    }

    /** Appends a new key=value pair, then schedules a list refresh. */
    private void add(String key, String value) {
        Buildprop.addKey(key, value);
        hand.postDelayed(refresh, 500);
    }

    /** Replaces an existing key=value pair, then schedules a list refresh. */
    private void overwrite(String oldKey, String oldValue, String newKey, String newValue) {
        Buildprop.overwrite(oldKey, oldValue, newKey, newValue);
        hand.postDelayed(refresh, 500);
    }

    /** Deletes a key, then schedules a list refresh. */
    private void delete(String Key) {
        Buildprop.delete(Key);
        hand.postDelayed(refresh, 500);
    }

    /** Adds the search action; filtering rebuilds the cards with the match highlighted. */
    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        super.onCreateOptionsMenu(menu, inflater);
        inflater.inflate(R.menu.build_prop_menu, menu);
        searchItem = menu.findItem(R.id.search);
        SearchView searchView = new SearchView(getActionBar().getThemedContext());
        searchView.setQueryHint(getString(R.string.search));
        MenuItemCompat.setActionView(searchItem, searchView);
        searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
            @Override
            public boolean onQueryTextSubmit(String query) {
                return false;
            }

            @Override
            public boolean onQueryTextChange(String newText) {
                removeAllViews();
                Object[] keys = buildpropItem.keySet().toArray();
                Object[] values = buildpropItem.values().toArray();
                for (int i = 0; i < keys.length; i++)
                    if (((String) keys[i]).contains(newText)) {
                        PopupCardView.DPopupCard mPopupCard = new PopupCardView.DPopupCard(null);
                        // Highlight the matched substring via inline HTML.
                        mPopupCard.setDescription(newText.isEmpty() ?
                                (String) keys[i] : Html.fromHtml(((String) keys[i]).replace(newText, "" +
                                "<b><font color=\"#2A7289\">" + newText + "</font></b>")));
                        mPopupCard.setItem((String) values[i]);
                        mPopupCard.setOnClickListener(BuildpropFragment.this);
                        addView(mPopupCard);
                    }
                return true;
            }
        });
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case R.id.search:
                MenuItemCompat.expandActionView(searchItem);
                break;
        }
        return true;
    }

    /** Back press first collapses an open search view instead of leaving the fragment. */
    @Override
    public boolean onBackPressed() {
        if (searchItem != null && MenuItemCompat.isActionViewExpanded(searchItem)) {
            MenuItemCompat.collapseActionView(searchItem);
            return true;
        }
        return false;
    }
}
| apache-2.0 |
mlperf/training_results_v0.7 | Google/benchmarks/transformer/implementations/transformer-research-TF-tpu-v3-8192/lingvo/core/batch_major_attention.py | 98986 | # Lint as: python2, python3
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Multi-headed attention layers for Transformer machine translation.
[1] Attention is all you need.
https://arxiv.org/pdf/1706.03762.pdf Section 3.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import range
import REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.compat as tf
from REDACTED.tensorflow.compiler.xla.experimental.xla_sharding import xla_sharding
from REDACTED.tensorflow.python.ops import inplace_ops # pylint: disable=g-direct-tensorflow-import
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.core import base_layer
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.core import builder
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.core import conv_layers_builder as conv_layers
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.core import gpipe
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.core import hyperparams
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.core import layers
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.core import layers_with_attention
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.core import py_utils
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.core import relative_atten_util
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.core import symbolic
from REDACTED.tensorflow_models.mlperf.models.rough.transformer_lingvo.lingvo.core import tshape
def CausalPadding(slen, dtype=tf.float32):
  """Returns a [slen, slen] mask: 1 strictly above the diagonal, 0 elsewhere."""
  ones = tf.ones([slen, slen], dtype=dtype)
  # band_part(ones, -1, 0) keeps the lower triangle (incl. diagonal).
  lower_triangular = tf.linalg.band_part(ones, -1, 0)
  return 1 - lower_triangular
def SegmentMask(segment_id, source_segment_id, dtype=tf.float32):
  """Builds an additive attention mask that blocks cross-segment positions.

  Args:
    segment_id: [B, T] segment ids of the target sequence.
    source_segment_id: [B, S] segment ids of the source sequence.
    dtype: data type of the generated mask.

  Returns:
    segment_mask: [B, 1, T, S] tensor, ready to be added to [B, N, T, S]
    attention logits; None if either id tensor is None.
  """
  if segment_id is None or source_segment_id is None:
    return None
  # [B, T, 1] vs [B, 1, S] broadcasts to [B, T, S]; 1.0 where ids differ.
  target_ids = tf.expand_dims(segment_id, 2)
  source_ids = tf.expand_dims(source_segment_id, 1)
  mask = tf.cast(tf.not_equal(target_ids, source_ids), dtype=dtype)
  # Scale mismatches to a very large negative value so softmax zeroes them out.
  mask *= mask.dtype.max * -0.7
  # [B, T, S] -> [B, 1, T, S] so the mask broadcasts over the head dimension.
  return tf.expand_dims(mask, axis=1)
class PerDimScaleLayer(base_layer.BaseLayer):
  """A layer to scale individual dims of the input."""

  @classmethod
  def Params(cls):
    """Params for `PerDimScaleLayer`."""
    p = super(PerDimScaleLayer, cls).Params()
    p.Define('dim', 0, 'Number of individual dims .')
    return p

  @base_layer.initializer
  def __init__(self, params):
    """Constructs a PerDimScaleLayer object."""
    super(PerDimScaleLayer, self).__init__(params)
    p = self.params
    assert p.name
    with tf.variable_scope(p.name):
      # One learnable scalar per input dim, initialized to 0 so the layer
      # starts out as a uniform 1/sqrt(dim) scaling (see FProp).
      pc = py_utils.WeightParams(
          shape=[p.dim],
          init=py_utils.WeightInit.Constant(0.0),
          dtype=p.dtype,
          collections=[self.__class__.__name__ + '_vars'])
      self.CreateVariable('per_dim_scale', pc)

  def FProp(self, theta, inputs):
    """Return theta.scale * inputs.

    Args:
      theta: weights defined in this layer.
      inputs: 4D tensor with shape [..., p.dim]

    Returns:
      outpus: 4D tensor with shape [..., p.dim]
    """
    p = self.params
    dim = symbolic.ToStatic(p.dim)
    inputs = py_utils.HasShape(inputs, [-1, -1, -1, dim])
    # Base scale is 1/sqrt(dim) (standard attention logit scaling).
    scale = tf.math.rsqrt(tf.cast(dim, inputs.dtype))
    # Per-dim factor softplus(w)/softplus(0): equals 1 at init (w == 0), so
    # the initial behavior is exactly rsqrt(dim) scaling.
    scale *= tf.nn.softplus(theta.per_dim_scale) / tf.nn.softplus(
        tf.constant(0.0, dtype=inputs.dtype))
    return inputs * scale

  @classmethod
  def FPropMeta(cls, p, inputs):
    """Cost estimate for FProp: ~5 flops/element, output shape == input shape."""
    py_utils.CheckShapes((inputs,))
    return py_utils.NestedMap(
        flops=inputs.num_elements() * 5, out_shapes=(inputs,))
class MultiHeadedProjectionLayer(base_layer.BaseLayer):
  """Layer that computes multi heads projection.

  This layer is expected to be used within MultiHeadedAttention below.
  """

  @classmethod
  def Params(cls):
    """Params for MultiHeadedProjectionLayer."""
    p = super(MultiHeadedProjectionLayer, cls).Params()
    p.Define('input_dim', 0, 'Input dimension.')
    p.Define('num_heads', 0, 'Number of heads.')
    p.Define('dim_per_head', 0, 'Size of each head.')
    p.Define(
        'is_output_projection', False,
        'Whether it is out projection or not. If False, we use '
        '"BTD,DNH->BTNH" for query,key,value projection. Otherwise we use '
        '"BTNH,DNH->BTD" for output projection.')
    p.Define('use_bias', True, 'If to add bias in projection.')
    return p

  @base_layer.initializer
  def __init__(self, params):
    """Creates the projection weight [D, N, H] and optional bias."""
    super(MultiHeadedProjectionLayer, self).__init__(params)
    p = self.params
    assert p.name
    # Weight is shared between both einsum directions; XLA-partitioned over
    # the per-head dimension (xla_partition_dim=2).
    pc = py_utils.WeightParams(
        shape=[p.input_dim, p.num_heads, p.dim_per_head],
        init=p.params_init,
        dtype=p.dtype,
        xla_num_partitions=p.xla_num_partitions,
        xla_partition_dim=2,
        collections=[self.__class__.__name__ + '_vars'])
    if p.use_bias:
      if p.is_output_projection:
        # Output projection adds a [D] bias after 'BTNH,DNH->BTD'.
        pc_bias = py_utils.WeightParams(
            shape=[p.input_dim],
            init=py_utils.WeightInit.Constant(0.0),
            dtype=p.dtype,
            collections=[self.__class__.__name__ + '_vars'])
      else:
        # Input projection adds a [N, H] bias after 'BTD,DNH->BTNH'.
        pc_bias = py_utils.WeightParams(
            shape=[p.num_heads, p.dim_per_head],
            init=py_utils.WeightInit.Constant(0.0),
            dtype=p.dtype,
            collections=[self.__class__.__name__ + '_vars'])
    with tf.variable_scope(p.name):
      self.CreateVariable('w', pc)
      if p.use_bias:
        self.CreateVariable('b', pc_bias)

  def FProp(self, theta, inputs):
    """Computes the multi headed projection for inputs.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      inputs: A tensor of shape [batch_size, time_steps, num_heads,
        dim_per_head] or [batch_size, time_steps, hidden_size].

    Returns:
      The projected tensor with shape [[batch_size, time_steps, hidden_size] or
      [batch_size, time_steps, num_heads, dim_per_head].
    """
    p = self.params
    if p.is_output_projection:
      # [B, T, N, H] -> [B, T, D]
      inputs = py_utils.HasShape(
          inputs, [-1, -1, p.num_heads,
                   symbolic.ToStatic(p.dim_per_head)])
      ret = tf.einsum('BTNH,DNH->BTD', inputs, theta.w)
    else:
      # [B, T, D] -> [B, T, N, H]
      inputs = py_utils.HasShape(
          inputs, [-1, -1, symbolic.ToStatic(p.input_dim)])
      ret = tf.einsum('BTD,DNH->BTNH', inputs, theta.w)
    if p.use_bias:
      ret += theta.b
    return ret
class MultiHeadedAttention(base_layer.BaseLayer):
  """Dot-product attention with multiple attention heads.

  This implementation heavily uses einsum (wrapped in py_utils.Einsum) to be
  efficient on TPUs. We use the following capital letters to denote certain
  tensor parameters.

    B = batch size
    S = length of the key/value (source)
    T = length of the query (target)
    D = model dimension
    N = number of attention heads
    H = dimensions of each attention head.

  The algorithm is sketched as follows. Each intermediate tensor or weight
  tensor is annotated with its shape. E.g., Wq, the weight tensor for query's
  projection, its shape is [D, N, H].

  Trainable weights:
    Wq, Wk, Wv: [D, N, H]
    Wout: [D, N, H]

  Input q:[B, T, D]; k:[B, S, D]; v:[B, S, D]
  q_proj:[B, T, N, H] = einsum('BTD,DNH->BTNH', x, Wq)
  k_proj:[B, S, N, H] = einsum('BSD,DNH->BSNH', x, Wk)
  v_proj:[B, S, N, H] = einsum('BSD,DNH->BSNH', x, Wv)
  logits:[B, N, T, S] = einsum('BTNH,BSNH->BNTS', q_proj, k_proj) / sqrt(H)
  probs:[B, N, T, S] = softmax(logits)
  context:[B, T, N, H] = einsum('BNTS,BSNH->BTNH', probs, v_proj)
  Output y:[B, T, D] = einsum('BTNH,DNH->BTD', context, Wout)
  """

  @classmethod
  def Params(cls):
    """Params for _MultiHeadedAttention."""
    p = super(MultiHeadedAttention, cls).Params()
    p.Define('input_dim', 0, 'Number of key nodes.')
    p.Define('hidden_dim', 0, 'Number of hidden nodes.')
    p.Define('num_heads', 1, 'Num of attention heads.')
    p.Define('dropout_tpl', layers.DropoutLayer.Params(),
             'Params for dropout layer.')
    p.Define(
        'enable_value_proj', True, 'Whether value v is pre-projected '
        ' before self attention or not.')
    p.Define('enable_per_dim_scale', True,
             'Whether using per_dim_scale or scaling by a constant factor.')
    p.Define('atten_dropout_prob', 0.0,
             'Probability at which we apply dropout to the attention weights.')
    p.Define('proj_tpl', MultiHeadedProjectionLayer.Params(), 'Params for '
             'projection layer.')
    p.Define('packed_input', False, 'Whether there is packed input.')
    p.Define('use_bias', True, 'Whether to use bias for projection layers.')
    return p
  @base_layer.initializer
  def __init__(self, params):
    """Constructs a _MultiHeadedAttention object."""
    super(MultiHeadedAttention, self).__init__(params)
    p = self.params
    assert p.input_dim, 'input_dim is {}'.format(p.input_dim)
    assert p.hidden_dim, 'hidden_dim is {}'.format(p.hidden_dim)
    # hidden_dim may be a symbolic expression; only check head divisibility
    # when it is a concrete integer.
    assert symbolic.IsExpr(
        p.hidden_dim
    ) or p.hidden_dim % p.num_heads == 0, 'hidden_dim: %s, num_heads: %s' % (
        p.hidden_dim, p.num_heads)
    dim_per_head = p.hidden_dim // p.num_heads

    with tf.variable_scope(p.name):

      def ProjectInput():
        # Shared template for the query/key/value input projections.
        return p.proj_tpl.Copy().Set(
            input_dim=p.input_dim,
            num_heads=p.num_heads,
            dim_per_head=dim_per_head,
            use_bias=p.use_bias)

      self.CreateChild('key', ProjectInput())
      self.CreateChild('query', ProjectInput())
      if p.enable_value_proj:
        self.CreateChild('value', ProjectInput())
      if p.enable_per_dim_scale:
        self.CreateChild('per_dim_scale',
                         PerDimScaleLayer.Params().Set(dim=dim_per_head))
      self.CreateChild('atten_dropout',
                       p.dropout_tpl.Set(keep_prob=1.0 - p.atten_dropout_prob))
      # Setting is_output_projection=True to set the projection direction
      # from hidden dim to input dim.
      self.CreateChild(
          'post',
          p.proj_tpl.Copy().Set(
              input_dim=p.input_dim,
              num_heads=p.num_heads,
              dim_per_head=dim_per_head,
              is_output_projection=True,
              use_bias=p.use_bias))
  def _AttenLogits(self, theta, query, key, per_step_padding):
    """Computes attention logits.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query: A Tensor of shape [B, T, N, H]
      key: A Tensor of shape [B, T, N, H]
      per_step_padding: A Tensor of shape [B, N, T, S] or None. Unused by this
        base implementation; accepted so subclasses can override.

    Returns:
      A Tensor of shape [B, N, T, S]
    """
    return tf.einsum('BTNH,BSNH->BNTS', query, key)
  def _AttenLogitsOneStep(self, theta, query, key, time_step):
    """Attention logits for one single target (query) step.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query: [B, N, H].
      key: [S, B, N, H] or [S, B, N*H/128, 128].
      time_step: Current time step.

    Returns:
      A Tensor of shape [S, B, N]
    """
    p = self.params
    s, b, _, _ = py_utils.GetShape(key, 4)
    n = p.num_heads
    h = p.hidden_dim // n

    # [s, b, n]
    # The reshape restores the [S, B, N, H] layout in case the key is cached
    # in the packed [S, B, N*H/128, 128] layout.
    return tf.einsum('BNH,SBNH->SBN', query, tf.reshape(key, [s, b, n, h]))
  def AttenProbs(self,
                 theta,
                 query,
                 key,
                 paddings,
                 segment_mask,
                 per_step_padding=None):
    """Compute attention probability.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query: [B, T, N, H].
      key: [B, S, N, H].
      paddings: [B, S].
      segment_mask: [B, 1, T, S]: A mask that is applied to prevent attention
        between different segments. This has already been converted into large
        negative logits. Only applied if packed_input = True.
      per_step_padding: A mask used by decoder self-attention to prevent
        information flow from future (causal padding). It has shape [B, T, S]
        if not None.

    Returns:
      unscaled_probs: [B, N, T, S].
      probs_sum: [B, N, T, 1].
    """
    key = py_utils.HasRank(key, 4)
    b, s, n, h = py_utils.GetShape(key, 4)
    query = py_utils.HasShape(query, [b, -1, n, h])
    t = py_utils.GetShape(query)[1]
    if segment_mask is not None and self.params.packed_input:
      segment_mask = py_utils.HasShape(segment_mask, [b, 1, t, s])

    logits = self._AttenLogits(theta, query, key, per_step_padding)

    # Apply segment mask.
    if self.params.packed_input and segment_mask is not None:
      # Paddings have been included in segment_mask.
      padded_logits = logits + segment_mask
    else:
      # Exclude padding frames.
      paddings = py_utils.HasShape(paddings, [b, s])
      paddings = tf.tile(tf.reshape(paddings, [b, 1, 1, s]), [1, n, t, 1])
      if per_step_padding is not None:
        per_step_padding = tf.tile(
            tf.expand_dims(per_step_padding, 1), [1, n, 1, 1])
        paddings += per_step_padding

      # -0.7 * dtype.max is a large finite negative value; padded positions
      # effectively vanish after the exp() below without producing inf/NaN.
      very_negative_logits = (
          tf.ones_like(logits) * logits.dtype.max *
          tf.constant(-0.7, dtype=logits.dtype))
      padded_logits = tf.where(paddings > 0.0, very_negative_logits, logits)

    # Split the softmax into two parts. Do the 1st part here; the 2nd part
    # (scaling) is moved after _AttenContext for better performance.
    unscaled_probs = padded_logits - tf.stop_gradient(
        tf.reduce_max(padded_logits, -1, True))
    unscaled_probs = tf.exp(unscaled_probs)
    probs_sum = tf.reduce_sum(unscaled_probs, -1, True)
    unscaled_probs = py_utils.HasShape(unscaled_probs, [b, n, t, s])
    return unscaled_probs, probs_sum
  def _AttenContext(self, theta, probs, value):
    """Weighted sum of values: probs [B, N, T, S] x value [B, S, N, H] -> [B, T, N, H]."""
    return tf.einsum('BNTS,BSNH->BTNH', probs, value)
  def _AttenContextOneStep(self, theta, probs, value, time_step):
    """Single-step context; probs: [S, B, N], value: [S, B, N, H] (possibly packed)."""
    s, b, _, _ = py_utils.GetShape(value, 4)
    n = self.params.num_heads
    h = self.params.hidden_dim // n
    # The reshape undoes the packed [S, B, N*H/128, 128] layout if present.
    return tf.einsum('SBN,SBNH->BNH', probs, tf.reshape(value, [s, b, n, h]))
  def _DotAtten(self,
                theta,
                query,
                key,
                value,
                paddings,
                segment_mask,
                per_step_padding=None):
    """Main attention function.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query: [B, T, N, H].
      key: [B, S, N, H].
      value: [B, S, N, H].
      paddings: [B, S].
      segment_mask: [B, 1, T, S]: A mask that is applied to prevent attention
        between different segments. This has already been converted into large
        negative logits. Only applied if packed_input = True.
      per_step_padding: A mask used by decoder self-attention to prevent
        information flow from future (causal padding). It has shape [B, T, S]
        if not None.

    Returns:
      encoded: [B, T, N, H].
      atten_probs: [B, N, T, S]. NOTE(review): these are the *unnormalized*
        probabilities (not divided by probs_sum).
    """
    p = self.params
    # Scale the query projection.
    if p.enable_per_dim_scale:
      query = self.per_dim_scale.FProp(theta.per_dim_scale, query)
    else:
      query *= (p.hidden_dim // p.num_heads)**-0.5

    # Shard q/k/v along the heads dimension (axis 2) when partitioned on XLA.
    if p.xla_num_partitions:
      query = xla_sharding.split(
          query, 2, p.xla_num_partitions, use_sharding_op=True)
      key = xla_sharding.split(
          key, 2, p.xla_num_partitions, use_sharding_op=True)
      value = xla_sharding.split(
          value, 2, p.xla_num_partitions, use_sharding_op=True)

    # Compute prob with shape [batch, heads, target_time, source_time].
    unscaled_probs, probs_sum = self.AttenProbs(theta, query, key, paddings,
                                                segment_mask, per_step_padding)

    # Apply dropout to probs. Split the dropout into two parts: 1) masking; 2)
    # scaling. Here does the first part.
    if not self.do_eval:
      rand = tf.random.uniform(unscaled_probs.shape, dtype=unscaled_probs.dtype)
      keep_mask = rand >= p.atten_dropout_prob
      unscaled_probs *= tf.cast(keep_mask, unscaled_probs.dtype)

    # Compute the attention context vector.
    unscaled_encoded = self._AttenContext(theta, unscaled_probs, value)
    # The 2nd part of the softmax --- scaling.
    # probs_sum is [B, N, T, 1]; transpose to [B, T, N, 1] to match encoded.
    encoded = unscaled_encoded / tf.transpose(probs_sum, [0, 2, 1, 3])
    # The 2nd part of the dropout --- scaling.
    if not self.do_eval:
      encoded = encoded / (1 - p.atten_dropout_prob)
    if p.xla_num_partitions:
      encoded = xla_sharding.split(
          encoded, 2, p.xla_num_partitions, use_sharding_op=True)
    return encoded, unscaled_probs
  def _DotAttenOneStep(self,
                       theta,
                       query,
                       key,
                       value,
                       paddings,
                       segment_mask,
                       per_step_padding=None,
                       time_step=None,
                       use_short_seq_opt=False):
    """Dot attention function for queries with 1 time step.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query: [B, 1, N, H].
      key: [S, B, N, H] or [S, B, N*H/128, 128].
      value: [S, B, N, H] or [S, B, N*H/128, 128].
      paddings: [B, S].
      segment_mask: [B, 1, T, S]: A mask that is applied to prevent attention
        between different segments. This has already been converted into large
        negative logits. Only applied if packed_input = True.
      per_step_padding: A mask used by decoder self-attention to prevent
        information flow from future (causal padding). It has shape [B, 1, S]
        if not None.
      time_step: Current time step.
      use_short_seq_opt: A bool, whether using short sequence optimization.

    Returns:
      encoded: [B, 1, N, H].
    """
    p = self.params
    # Scale the query projection.
    if p.enable_per_dim_scale:
      query = self.per_dim_scale.FProp(theta.per_dim_scale, query)
    else:
      query *= (p.hidden_dim // p.num_heads)**-0.5

    key = py_utils.HasRank(key, 4)
    b, t, n, h = py_utils.GetShape(query, 4)
    s, b, _, _ = py_utils.GetShape(key, 4)
    paddings = py_utils.HasShape(paddings, [b, s])
    # This path handles exactly one query step.
    assert t == 1

    if per_step_padding is not None:
      paddings += tf.squeeze(per_step_padding, 1)

    query = tf.reshape(query, [b, n, h])
    # pad: [S, B*N], per-source-step padding broadcast across all heads.
    pad = tf.reshape(
        tf.tile(tf.expand_dims(tf.transpose(paddings), 2), [1, 1, n]), [s, -1])
    # Large finite negative value; keeps softmax numerically well-behaved.
    very_negative_logits = (
        tf.ones_like(pad) * query.dtype.max *
        tf.constant(-0.7, dtype=query.dtype))

    def _LongSeq():
      """For long sequence, directly apply to the entire tensor with padding."""
      logits = self._AttenLogitsOneStep(theta, query, key, time_step)

      logits = tf.reshape(logits, [s, -1])
      padded_logits = tf.where(pad > 0.0, very_negative_logits, logits)
      probs = tf.nn.softmax(padded_logits, axis=0)
      probs = tf.reshape(probs, [s, b, n])

      encoded = self._AttenContextOneStep(theta, probs, value, time_step)
      return tf.expand_dims(encoded, 1)

    def _ShortSeq():
      """For short sequence, using while loop for early exit."""

      def _AttenStep(o, k, q, ts):
        """Computes logits for attention prob for one step.

        Args:
          o: the output logits of shape [S, B*N]
          k: cached key of shape [S, B, N*H/128, 128]
          q: query of shape [B, N, H]
          ts: a scalar tensor to represent time_step

        Returns:
          Updated logits and time steps.
        """
        ot = tf.reshape(
            tf.reduce_sum(tf.reshape(tf.gather(k, ts), [-1, n, h]) * q, -1),
            [-1])
        return inplace_ops.alias_inplace_update(o, ts, ot), k, q, ts + 1

      # Only iterates up to time_step, skipping the not-yet-filled cache tail.
      logits, _, _, _ = tf.while_loop(
          lambda _o, _k, _q, ts: ts <= time_step,
          _AttenStep,
          loop_vars=(inplace_ops.empty([s, b * n], query.dtype,
                                       init=True), key, query,
                     tf.zeros([], tf.int32)))
      padded_logits = tf.where(pad > 0.0, very_negative_logits, logits)
      probs = tf.nn.softmax(padded_logits, axis=0)

      def _DotStep(o, p, v, ts):
        """Computes encoded activation.

        Args:
          o: the output activation of shape [B, N, H]
          p: probability of shape [S, B*N]
          v: cached value of shape [S, B, N*H/128, 128]
          ts: a scalar tensor to represent time_step

        Returns:
          Updated output and time steps.
        """
        return o + tf.reshape(tf.gather(p, ts), [-1, n, 1]) * tf.reshape(
            tf.gather(v, ts), [-1, n, h]), p, v, ts + 1

      encoded, _, _, _ = tf.while_loop(
          lambda o, p, v, ts: ts <= time_step,
          _DotStep,
          loop_vars=(tf.zeros([b, n, h],
                              probs.dtype), probs, value, tf.zeros([],
                                                                   tf.int32)))
      return tf.expand_dims(encoded, 1)

    return _ShortSeq() if use_short_seq_opt else _LongSeq()
  def FProp(self,
            theta,
            query_vec,
            key_vec,
            value_vec,
            paddings,
            segment_mask=None,
            per_step_padding=None):
    """Computes the value vector given the current query output.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query_vec: [B, T, D].
      key_vec: [B, S, D].
      value_vec: [B, S, D].
      paddings: [B, S].
      segment_mask: [B, 1, T, S]. A mask only applied if packed_input=True.
      per_step_padding: A mask used by decoder self-attention to prevent
        information flow from future (causal padding). It has shape [B, T, T]
        if not None.

    Returns:
      encoded: [B, T, D].
      atten_probs: [B, N, T, S].
    """
    p = self.params
    # Project inputs to key, value and query, respectively has shape
    # [B, S, N, H], [B, S, N, H], and [B, T, N, H].
    query_proj = self.query.FProp(theta.query, query_vec)
    key_proj = self.key.FProp(theta.key, key_vec)
    if p.enable_value_proj:
      value_proj = self.value.FProp(theta.value, value_vec)
    else:
      # No value projection: split D into [N, H] so that the one-hot matmul
      # below is equivalent to a reshape of value_vec.
      h = p.num_heads
      _, _, d = py_utils.GetShape(value_vec, 3)
      dh = d // h
      # TODO(b/119531146): Reshape is inefficient here. Use one-hot matmul
      # avoids the data formatting. Change this back to reshape once XLA
      # has optimized reshape performance.
      rhs = tf.reshape(
          tf.one_hot(tf.range(d) // dh, h, dtype=value_vec.dtype),
          [d, h, 1]) * tf.reshape(
              tf.one_hot(tf.range(d) % dh, dh, dtype=value_vec.dtype),
              [d, 1, dh])
      value_proj = tf.einsum('BTD,DNH->BTNH', value_vec, rhs)

    if p.packed_input and not self.do_eval:
      assert segment_mask is not None
    encoded, atten_probs = self._DotAtten(theta, query_proj, key_proj,
                                          value_proj, paddings, segment_mask,
                                          per_step_padding)
    # Post projection
    encoded = self.post.FProp(theta.post, encoded)
    return encoded, atten_probs
  def ExtendStep(self,
                 theta,
                 query_vec,
                 cached_key_vec,
                 cached_value_vec,
                 paddings,
                 segment_mask,
                 per_step_padding,
                 time_step,
                 use_short_seq_opt=False):
    """Computes the value vector given the query of the current step.

    This function is used by autoregressive decoding.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query_vec: [B, 1, D].
      cached_key_vec: [T, B, N, H].
      cached_value_vec: [T, B, N, H].
      paddings: [B, T], or None if there is no padding.
      segment_mask: [B, 1, T, S] or None.
      per_step_padding: A mask used by decoder self-attention to prevent
        information flow from future (causal padding). It has shape [B, 1, T]
        if not None.
      time_step: A scalar, the current decode step, 0-based.
      use_short_seq_opt: A bool, whether using short sequence optimization.

    Returns:
      encoded: [B, 1, D].
      updated_key_vec: [T, B, N, H].
      updated_value_vec: [T, B, N, H].

    Raises:
      ValueError: If value projection is disabled.
    """
    p = self.params
    if not p.enable_value_proj:
      raise ValueError('Value projection must be enabled for Transformer '
                       'machine translation.')

    # Self-attention: key and value are derived from the same query input.
    new_key_vec = query_vec
    new_value_vec = query_vec
    t, b, n, h = py_utils.GetShape(cached_key_vec, 4)

    # Project inputs to key, value and query. Each has shape [B, 1, N, H].
    new_key_proj = self.key.FProp(theta.key, new_key_vec)
    new_value_proj = self.value.FProp(theta.value, new_value_vec)
    query_proj = self.query.FProp(theta.query, query_vec)

    # The cached_key and cached_value have shape [T, B, N, H].
    # Scatter the new step into row `time_step` via a one-hot add.
    # NOTE(review): this assumes that row of the cache is currently zero.
    indices = tf.reshape(
        tf.one_hot(time_step, t, dtype=cached_key_vec.dtype), [t, 1, 1, 1])
    cached_key_vec += tf.reshape(new_key_proj, [1, b, n, h]) * indices
    cached_value_vec += tf.reshape(new_value_proj, [1, b, n, h]) * indices

    if paddings is None:
      paddings = tf.zeros([b, t], dtype=new_key_vec.dtype)

    encoded = self._DotAttenOneStep(
        theta,
        query_proj,
        cached_key_vec,
        cached_value_vec,
        paddings,
        segment_mask,
        per_step_padding,
        time_step=time_step,
        use_short_seq_opt=use_short_seq_opt)

    # Post projection.
    encoded = self.post.FProp(theta.post, encoded)
    return encoded, cached_key_vec, cached_value_vec
@classmethod
def FPropMeta(cls, p, *args):
# args[0]: [b, t, d], args[1]: [b, s, d], args[2]: [b, s, d],
# args[3]: [b, s], args[4]: [b, t, s] if not None
args = tuple(py_utils.Flatten(args))
py_utils.CheckShapes(args)
b, t, d = args[0]
s = args[3][1]
n = p.num_heads
# O(b * t * s * d) computation for self-attention and there are four
# projection layers, two of which has O(b * t * d^2), the other two has
# O(b * s * d^2). Each multiple-sum took 2 flops. Approximately
# self_attention took 15 flops per element since softmax is expensive.
flops = 15 * b * t * s * d + 2 * 2 * (b * t * d * d + b * s * d * d)
return py_utils.NestedMap(flops=flops, out_shapes=(args[0], (b, n, t, s)))
class MultiHeadedAttentionXL(MultiHeadedAttention):
  """Transformer-XL multiheaded attention with relative positional embedding.

  https://arxiv.org/pdf/1901.02860.pdf section 3.3.

  Notice this is only intended for self attention.
  """

  @classmethod
  def Params(cls):
    """Params for MultiHeadedAttentionXL."""
    p = super(MultiHeadedAttentionXL, cls).Params()
    p.Define('rel_pos_emb_dim', None,
             'Dimension of relative positional embedding.')
    return p
@base_layer.initializer
def __init__(self, params):
"""Constructs a MultiHeadedAttentionXL object."""
super(MultiHeadedAttentionXL, self).__init__(params)
params = self.params
assert not params.packed_input, 'Packed input not implemented yet.'
if params.rel_pos_emb_dim is None or params.rel_pos_emb_dim <= 0:
raise ValueError('Invalide rel_pos_emb_dim: %s' % params.rel_pos_emb_dim)
with tf.variable_scope(params.name):
emb_params = layers.PositionalEmbeddingLayer.Params().Set(
embedding_dim=params.rel_pos_emb_dim)
self.CreateChild('pos_emb', emb_params)
# Projection layer for relative position encoding
dim_per_head = params.hidden_dim // params.num_heads
pos_proj_tpl = params.proj_tpl.Copy().Set(
input_dim=params.rel_pos_emb_dim,
num_heads=params.num_heads,
dim_per_head=dim_per_head,
use_bias=False)
self.CreateChild('pos_proj', pos_proj_tpl)
u_pc = py_utils.WeightParams(
shape=[params.num_heads, dim_per_head],
init=py_utils.WeightInit.Constant(0.0),
dtype=params.dtype,
collections=[self.__class__.__name__ + '_vars'])
v_pc = py_utils.WeightParams(
shape=[params.num_heads, dim_per_head],
init=py_utils.WeightInit.Constant(0.0),
dtype=params.dtype,
collections=[self.__class__.__name__ + '_vars'])
self.CreateVariable('u', u_pc)
self.CreateVariable('v', v_pc)
  def _AttenLogits(self, theta, query, key, per_step_padding):
    """Computes Transformer-XL relative-position attention logits.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query: A Tensor of shape [B, T, N, H].
      key: A Tensor of shape [B, T, N, H].
      per_step_padding: A Tensor of shape [B, T, T] or None.

    Returns:
      A Tensor of shape [B, N, T, T].
    """
    b, _, n, h = py_utils.GetShape(key, 4)
    t = py_utils.GetShape(query)[1]

    # This layer only supports self attention.
    key = py_utils.HasShape(key, [b, t, n, h])

    # Detect whether per_step_padding equals the causal mask, so the helper
    # below can exploit causality.
    if per_step_padding is None:
      is_causal_padding = False
    else:
      causal_padding = tf.tile(
          tf.reshape(CausalPadding(t), [1, t, t]), [b, 1, 1])
      is_causal_padding = tf.reduce_all(
          tf.equal(
              tf.cast(per_step_padding, dtype=tf.int32),
              tf.cast(causal_padding, dtype=tf.int32)))

    # [1, 2T - 1]
    pos = tf.expand_dims(tf.range(-(t - 1), t, name='relative_pos'), 0)
    sin_emb = self.pos_emb.FPropWithPosition(theta.pos_emb, pos)
    # [1, 2T - 1, N, H]
    sin_emb = self.pos_proj.FProp(theta.pos_proj, sin_emb)
    # [2T - 1, N, H]
    sin_emb = tf.squeeze(sin_emb, 0)

    logits = relative_atten_util.AttenLogitsTransformerXL(
        query, key, sin_emb, theta.u, theta.v, is_causal_padding)
    return logits
  def _AttenLogitsOneStep(self, theta, query, key, time_step):
    """Attention logits for one single target (query) step.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query: [B, N, H].
      key: [S, B, N, H] or [S, B, N*H/128, 128].
      time_step: Current time step.

    Returns:
      A Tensor of shape [S, B, N]

    Raises:
      ValueError: If `time_step` is None.
    """
    p = self.params
    s, b, _, _ = py_utils.GetShape(key, 4)
    n = p.num_heads
    h = p.hidden_dim // n

    # Transformer_XL relative attention.
    if time_step is None:
      raise ValueError('`time_step` can not be None when using relative '
                       'position encoding in attention.')
    # Terms (a) and (c) of the Transformer-XL decomposition: content-based
    # scores with the global content bias u.
    logits = tf.einsum('BNH,SBNH->SBN', query + theta.u,
                       tf.reshape(key, [s, b, n, h]))
    position = tf.expand_dims(time_step - tf.range(s), 0)
    # [1, s, emb_dim]
    sin_emb = self.pos_emb.FPropWithPosition(theta.pos_emb, position)
    sin_emb = self.pos_proj.FProp(theta.pos_proj, sin_emb)
    # [s, n, h]
    sin_emb = tf.squeeze(sin_emb, 0)
    # Terms (b) and (d): position-based scores with the positional bias v.
    logits += tf.einsum('BNH,SNH->SBN', query + theta.v, sin_emb)
    return logits
  def ExtendStep(self,
                 theta,
                 query_vec,
                 cached_key_vec,
                 cached_value_vec,
                 paddings,
                 segment_mask,
                 per_step_padding,
                 time_step,
                 use_short_seq_opt=False):
    """Single autoregressive decode step; delegates to the base class.

    `use_short_seq_opt` is not supported for Transformer-XL attention.
    """
    # TODO(jamesqin): support use_short_seq_opt for TransformerXL attention.
    assert not use_short_seq_opt
    return super(MultiHeadedAttentionXL,
                 self).ExtendStep(theta, query_vec, cached_key_vec,
                                  cached_value_vec, paddings, segment_mask,
                                  per_step_padding, time_step,
                                  use_short_seq_opt)
class MultiHeadedAttentionRPE(MultiHeadedAttention):
  """Multiheaded attention with relative positional embedding ...

  See https://arxiv.org/pdf/1803.02155.pdf.

  Notice this is only intended for self attention.
  """

  @classmethod
  def Params(cls):
    """Params for MultiHeadedAttentionRPE."""
    p = super(MultiHeadedAttentionRPE, cls).Params()
    p.Define('rel_pos_emb_dim', None,
             'Dimension of relative positional embedding.')
    p.Define('rel_pos_radius', None,
             'Relative distance is clipped to [-radius, radius].')
    p.Define('skip_value_emb', False, 'If skipping value positional embedding.')
    p.Define(
        'use_global_emb', True,
        'If using global relative positional embedding. Only effective if '
        '`rel_pos_emb_tpl` is not None.')
    return p
  @base_layer.initializer
  def __init__(self, params):
    """Constructs a MultiHeadedAttentionRPE object."""
    super(MultiHeadedAttentionRPE, self).__init__(params)
    params = self.params
    assert not params.packed_input, 'Packed input not implemented yet.'

    if not params.rel_pos_radius:
      raise ValueError('Invalid rel_pos_radius: %s' % params.rel_pos_radius)

    # Default the embedding dim to the model hidden dim.
    if params.rel_pos_emb_dim is None:
      rel_pos_emb_dim = params.hidden_dim
    else:
      rel_pos_emb_dim = params.rel_pos_emb_dim

    rel_pos_emb_tpl = layers.RelativePositionalEmbeddingLayer.Params().Set(
        radius=params.rel_pos_radius, dim=rel_pos_emb_dim)
    if rel_pos_emb_dim != params.hidden_dim:
      # Projection layer for relative position encoding
      dim_per_head = params.hidden_dim // params.num_heads
      pos_proj_tpl = params.proj_tpl.Copy().Set(
          input_dim=rel_pos_emb_dim,
          num_heads=params.num_heads,
          dim_per_head=dim_per_head,
          use_bias=False)
    else:
      pos_proj_tpl = None

    # AUTO_REUSE lets the positional embedding variables be shared across
    # layers when use_global_emb is True.
    with tf.variable_scope(
        params.name, reuse=tf.AUTO_REUSE if params.use_global_emb else False):
      self.CreateChild('key_emb', rel_pos_emb_tpl)
      # Add projection layer if rel_pos_emb_dim is different from hidden_dim.
      if pos_proj_tpl is not None:
        self.CreateChild('key_pos_proj', pos_proj_tpl)
      if not params.skip_value_emb:
        self.CreateChild('value_emb', rel_pos_emb_tpl)
        if pos_proj_tpl is not None:
          self.CreateChild('value_pos_proj', pos_proj_tpl)
  def _RelativePositionValueEmb(self, theta, key):
    """Gets relative positional value embedding.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      key: The attention key, a tensor of shape [batch, seqlen, dim]

    Returns:
      Relative positional embedding, a Tensor of shape
      [tgt_time=seqlen, src_time=seqlen, num_heads, attention_dim]
    """
    emb_layer = self.value_emb
    emb_theta = theta.value_emb

    seqlen = py_utils.GetShape(key)[1]
    src_time_indices = tf.tile(tf.expand_dims(tf.range(seqlen), 0), [seqlen, 1])
    tgt_time_indices = tf.tile(
        tf.expand_dims(tf.range(seqlen), -1), [1, seqlen])

    # [tgt_time=T, src_time=T, num_heads x hidden_dim]
    pos_emb = emb_layer.FProp(emb_theta, src_time_indices - tgt_time_indices)

    params = self.params
    num_heads = self.params.num_heads
    tgt_time, src_time, _ = py_utils.GetShape(pos_emb)

    pos_proj_layer = 'value_pos_proj'
    if hasattr(self, pos_proj_layer):
      # Project the embedding into per-head space.
      return getattr(self, pos_proj_layer).FProp(
          getattr(theta, pos_proj_layer), pos_emb)
    else:
      # No projection child; emb dim equals hidden_dim, just split into heads.
      return tf.reshape(
          pos_emb,
          [tgt_time, src_time, num_heads, params.hidden_dim // num_heads])
  def _AttenLogits(self, theta, query, key, per_step_padding):
    """Computes RPE attention logits (key-side relative position terms).

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query: A Tensor of shape [B, T, N, H].
      key: A Tensor of shape [B, T, N, H].
      per_step_padding: A Tensor of shape [B, T, T] or None.

    Returns:
      A Tensor of shape [B, N, T, T].
    """
    # TODO(jamesqin): optimize it.
    b, _, n, h = py_utils.GetShape(key, 4)
    t = py_utils.GetShape(query)[1]

    # This layer only supports self attention.
    key = py_utils.HasShape(key, [b, t, n, h])

    # Detect whether per_step_padding equals the causal mask, so the helper
    # below can exploit causality.
    if per_step_padding is None:
      is_causal_padding = False
    else:
      causal_padding = tf.tile(
          tf.reshape(CausalPadding(t), [1, t, t]), [b, 1, 1])
      is_causal_padding = tf.reduce_all(
          tf.equal(
              tf.cast(per_step_padding, dtype=tf.int32),
              tf.cast(causal_padding, dtype=tf.int32)))

    # [1, 2T - 1]
    pos = tf.expand_dims(tf.range(-(t - 1), t), 0)
    # [1, 2T - 1, rel_pos_emb_dim]
    abs_emb = self.key_emb.FProp(theta.key_emb, pos)
    if hasattr(self, 'key_pos_proj'):
      # [1, 2T - 1, N, H]
      abs_emb = self.key_pos_proj.FProp(theta.key_pos_proj, abs_emb)
      # [2T - 1, N, H]
      abs_emb = tf.squeeze(abs_emb, 0)
    else:
      abs_emb = tf.reshape(abs_emb, [2 * t - 1, n, h])

    return relative_atten_util.AttenLogitsRPE(query, key, abs_emb,
                                              is_causal_padding)
  def _AttenLogitsOneStep(self, theta, query, key, time_step):
    """Attention logits for one single target (query) step.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query: [B, N, H].
      key: [S, B, N, H] or [S, B, N*H/128, 128].
      time_step: Current time step.

    Returns:
      A Tensor of shape [S, B, N]

    Raises:
      ValueError: If `time_step` is None.
    """
    p = self.params
    s, b, _, _ = py_utils.GetShape(key, 4)
    n = p.num_heads
    h = p.hidden_dim // n

    # Relative position encoding (https://arxiv.org/pdf/1803.02155.pdf).
    if time_step is None:
      raise ValueError('`time_step` can not be None when using relative '
                       'position encoding in attention.')

    # Gets positional embedding.
    # [1, S]
    rel_dists = tf.expand_dims(time_step - tf.range(s), 0)
    # [1, S, rel_pos_emb_dim]
    pos_emb = self.key_emb.FPropDefaultTheta(rel_dists)
    if hasattr(self, 'key_pos_proj'):
      # [1, S, N, H]
      pos_emb = self.key_pos_proj.FProp(theta.key_pos_proj, pos_emb)
      # [S, 1, N, H]
      pos_emb = tf.transpose(pos_emb, [1, 0, 2, 3])
    else:
      pos_emb = tf.reshape(pos_emb, [s, 1, n, h])
    # Key-side RPE: the relative embedding is added to the key before the
    # dot product with the query.
    return tf.einsum('BNH,SBNH->SBN', query,
                     tf.reshape(key, [s, b, n, h]) + pos_emb)
  def _AttenContext(self, theta, probs, value):
    """Attention context with relative-position value embeddings added."""
    # TODO(jamesqin): optimize it.
    encoded = tf.einsum('BNij,BjNH->BiNH', probs, value)

    if not self.params.skip_value_emb:
      # Value-side RPE term: probs weighted against the [T, T, N, H] relative
      # positional value embedding.
      encoded += tf.einsum('BNij,ijNH->BiNH', probs,
                           self._RelativePositionValueEmb(theta, value))
    return encoded
  def _AttenContextOneStep(self, theta, probs, value, time_step):
    """Single-step attention context with relative-position value embedding."""
    s, b, _, _ = py_utils.GetShape(value, 4)
    n = self.params.num_heads
    h = self.params.hidden_dim // n

    # NOTE(review): despite the name, `logits` accumulates the [B, N, H]
    # context vector here.
    logits = tf.einsum('SBN,SBNH->BNH', probs, tf.reshape(value, [s, b, n, h]))

    if not self.params.skip_value_emb:
      # [1, S]
      rel_dists = tf.expand_dims(time_step - tf.range(s), 0)
      # [1, S, rel_pos_emb_dim]
      pos_emb = self.value_emb.FProp(theta.value_emb, rel_dists)
      if hasattr(self, 'value_pos_proj'):
        # [1, S, N, H]
        pos_emb = self.value_pos_proj.FProp(theta.value_pos_proj, pos_emb)
        pos_emb = tf.squeeze(pos_emb, 0)
      else:
        pos_emb = tf.reshape(pos_emb, [s, n, h])
      logits += tf.einsum('SBN,SNH->BNH', probs, pos_emb)
    return logits
  def ExtendStep(self,
                 theta,
                 query_vec,
                 cached_key_vec,
                 cached_value_vec,
                 paddings,
                 segment_mask,
                 per_step_padding,
                 time_step,
                 use_short_seq_opt=False):
    """Single autoregressive decode step; delegates to the base class.

    `use_short_seq_opt` is not supported for RPE attention.
    """
    # TODO(jamesqin): support use_short_seq_opt.
    assert not use_short_seq_opt
    return super(MultiHeadedAttentionRPE,
                 self).ExtendStep(theta, query_vec, cached_key_vec,
                                  cached_value_vec, paddings, segment_mask,
                                  per_step_padding, time_step,
                                  use_short_seq_opt)
@classmethod
def FPropMeta(cls, p, *args):
return NotImplementedError()
class LocalCausalSelfAttention(MultiHeadedAttention):
  """Dot-product causal self attention using a sliding window.

  We use the following capital letters to denote certain
  tensor parameters.

    B = batch size
    S=T= length of the key/value (source) and query (target)
    D = model dimension
    N = number of attention heads
    H = dimensions of each attention head
    W = block size
    L = left context size, including left L-1 positions and self
    R = right context size
    F = L + R = context size of one position.
    C = L + R + W - 1 = context size of a block of W positions.
    U = ceiling(T/W).

  The key difference to base class is on calculating logits:
    Base class:
      1) Compute the full S x T attention.
      2) Apply a S x T mask to enforce local attention window.
    This implementation:
      1) Compute a W x C attention for each of the U blocks. Where the i-th
      block has query[W*i:W*(i+1)] and key[W*(i-1)-L-1:W*(i+1)+R].
      2) Apply a W x C mask for each block.

  Effectively, we reduce both time and space complexities for computing the
  sliding window attention from O(S * T) to O(S * C). In practice we observe
  reduced HBM usage on TPU but no speed gains.

  Note: Cross attention is not supported. As a result in speech models this
  class can only be used for encoder.

  TODO(weihan): add masking based local attention to the base class.
  """

  @classmethod
  def Params(cls):
    """Params for LocalCausalSelfAttention."""
    p = super(LocalCausalSelfAttention, cls).Params()
    p.Define(
        'block_size', None, 'Size of a processing block, if unset, default to '
        'max(1, left_context-1).')
    p.Define(
        'left_context', None, 'Number of left positions to attend '
        '(including current position).')
    p.Define('right_context', 0, 'Number of right positions to attend.')
    return p
@base_layer.initializer
def __init__(self, params):
"""Constructs a LocalCausalSelfAttention object."""
super(LocalCausalSelfAttention, self).__init__(params)
p = self.params
assert p.left_context >= 1, 'Left context should be at least one.'
assert not p.packed_input, 'Packed input not implemented yet.'
if p.block_size is None:
p.block_size = max(1, p.left_context - 1)
tf.logging.warning('block_size not set, use default value {}'.format(
p.block_size))
assert not p.packed_input, 'Packed input not implemented yet.'
  def _AttenLogits(self, theta, query, key):
    """Computes blocked attention logits.

    NOTE(review): unlike the base class, this override takes no
    `per_step_padding` argument; `AttenProbs` below calls it accordingly.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer.
      query: blocked query, [B, U, W, N, H].
      key: blocked key context, [B, U, C, N, H].

    Returns:
      A Tensor of shape [B, N, U, W, C].
    """
    return tf.einsum('BUTNH,BUSNH->BNUTS', query, key)
  def AttenProbs(self,
                 theta,
                 query,
                 key,
                 paddings,
                 segment_mask,
                 unused_per_step_padding=None):
    """Compute attention probability.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query: [B, T, N, H].
      key: [B, S=T, N, H].
      paddings: [B, T].
      segment_mask: [B, 1, T, S] not used right now.
      unused_per_step_padding: Not used.

    Returns:
      probs: [B, N, U, W, C], the per-block attention probabilities.
      (The original docstring said "logits: [B, U, N, W, 2 * W]", but the code
      applies softmax and returns probabilities of shape [B, N, U, W, C].)
    """
    p = self.params
    key = py_utils.HasRank(key, 4)
    b, t, n, h = py_utils.GetShape(key, 4)
    paddings = py_utils.HasShape(paddings, [b, t])
    query = py_utils.HasShape(query, [b, t, n, h])

    # -> [B, U, C, N, H]
    key_block_context = relative_atten_util.ExtractBlockContext(
        key,
        block_size=p.block_size,
        left_context=p.left_context,
        right_context=p.right_context)
    _, u, c, _, _ = py_utils.GetShape(key_block_context)

    # -> [B, U, W, N, H]
    query_blocks = relative_atten_util.ConvertToBlocks(
        query, block_size=p.block_size)
    _, _, w, _, _ = py_utils.GetShape(query_blocks)

    # -> [B, U, C]
    paddings_block_context = relative_atten_util.ExtractBlockContext(
        paddings,
        block_size=p.block_size,
        left_context=p.left_context,
        right_context=p.right_context,
        padding_val=1)

    # -> [B, N, U, W, C]
    paddings = tf.tile(
        tf.reshape(paddings_block_context, [b, 1, u, 1, c]), [1, n, 1, w, 1])

    # Make local causal paddings.
    # -> [U, W, C]
    local_causal_padding = relative_atten_util.MakeCausalPadding(
        seq_len=t,
        block_size=p.block_size,
        left_context=p.left_context,
        right_context=p.right_context)
    paddings += local_causal_padding

    # -> [B, N, U, W, C]
    logits = self._AttenLogits(theta, query_blocks, key_block_context)

    # Large finite negative value keeps the softmax numerically stable.
    very_negative_logits = (
        tf.ones_like(logits) * logits.dtype.max *
        tf.constant(-0.7, dtype=logits.dtype))
    padded_logits = tf.where(paddings > 0.0, very_negative_logits, logits)
    probs = tf.nn.softmax(padded_logits)
    return probs
  def _DotAtten(self,
                theta,
                query,
                key,
                value,
                paddings,
                segment_mask,
                per_step_padding=None):
    """Main attention function.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query: [B, T, N, H].
      key: [B, S=T, N, H].
      value: [B, S=T, N, H].
      paddings: [B, S=T].
      segment_mask: [B, 1, S=T, S=T].
      per_step_padding: A mask of shape [B, T, S=T] if not None.

    Returns:
      encoded: [B, T, N, H], truncated back to the original target length.
      atten_probs: [B, N, U, W, C] blocked local attention probabilities
        (not the global [B, N, T, S] layout of the parent class).
    """
    p = self.params
    # Scale the query projection.
    if p.enable_per_dim_scale:
      query = self.per_dim_scale.FProp(theta.per_dim_scale, query)
    else:
      query *= (p.hidden_dim // p.num_heads)**-0.5
    # Original (unblocked) target length; used below to strip the extra
    # padding introduced by blocking.
    t0 = py_utils.GetShape(query)[1]
    # -> [B, N, U, W, C]
    probs = self.AttenProbs(theta, query, key, paddings, segment_mask,
                            per_step_padding)
    # Apply dropout to probs.
    probs = self.atten_dropout.FProp(theta.atten_dropout, probs)
    # -> [B, U, C, N, H]
    value_block_context = relative_atten_util.ExtractBlockContext(
        value,
        block_size=p.block_size,
        left_context=p.left_context,
        right_context=p.right_context)
    # Compute the attention context vector.
    # -> [B, U, W, N, H]
    encoded = tf.einsum('BNUWC,BUCNH->BUWNH', probs, value_block_context)
    b, u, w, n, h = py_utils.GetShape(encoded)
    encoded = tf.reshape(encoded, [b, u * w, n, h])
    # Remove the extra time padding introduced by converting to blocks.
    encoded = encoded[:, :t0, ...]
    return encoded, probs
  def ExtendStep(self,
                 theta,
                 query_vec,
                 cached_key_vec,
                 cached_value_vec,
                 paddings,
                 segment_mask,
                 per_step_padding,
                 time_step,
                 use_short_seq_opt=False):
    """Single-step autoregressive decoding is not supported by this layer.

    Raises:
      NotImplementedError: always.
    """
    raise NotImplementedError()
  @classmethod
  def FPropMeta(cls, p, *args):
    """FLOPs/shape metadata is not implemented for this layer.

    Raises:
      NotImplementedError: always.
    """
    raise NotImplementedError()
class LocalCausalSelfAttentionXL(LocalCausalSelfAttention):
  """Local causal version of transformer-xl self attention."""

  @classmethod
  def Params(cls):
    p = super(LocalCausalSelfAttentionXL, cls).Params()
    p.Define('rel_pos_emb_dim', None,
             'Dimension of relative positional embedding.')
    return p

  @base_layer.initializer
  def __init__(self, params):
    """Constructs a LocalCausalSelfAttentionXL object."""
    super(LocalCausalSelfAttentionXL, self).__init__(params)
    params = self.params
    if params.rel_pos_emb_dim is None or params.rel_pos_emb_dim <= 0:
      # Fixed typo in the error message ('Invalide' -> 'Invalid').
      raise ValueError('Invalid rel_pos_emb_dim: %s' % params.rel_pos_emb_dim)

    with tf.variable_scope(params.name):
      # Sinusoidal embedding over relative position offsets.
      emb_params = layers.PositionalEmbeddingLayer.Params().Set(
          embedding_dim=params.rel_pos_emb_dim)
      self.CreateChild('pos_emb', emb_params)

      # Projection layer for relative position encoding
      dim_per_head = params.hidden_dim // params.num_heads
      pos_proj_tpl = params.proj_tpl.Copy().Set(
          input_dim=params.rel_pos_emb_dim,
          num_heads=params.num_heads,
          dim_per_head=dim_per_head,
          use_bias=False)
      self.CreateChild('pos_proj', pos_proj_tpl)

      # Learned per-head content bias 'u' and position bias 'v' as in
      # Transformer-XL (Dai et al., 2019), both zero-initialized.
      u_pc = py_utils.WeightParams(
          shape=[params.num_heads, dim_per_head],
          init=py_utils.WeightInit.Constant(0.0),
          dtype=params.dtype,
          collections=[self.__class__.__name__ + '_vars'])
      v_pc = py_utils.WeightParams(
          shape=[params.num_heads, dim_per_head],
          init=py_utils.WeightInit.Constant(0.0),
          dtype=params.dtype,
          collections=[self.__class__.__name__ + '_vars'])
      self.CreateVariable('u', u_pc)
      self.CreateVariable('v', v_pc)

  def _AttenLogits(self, theta, query, key):
    """Computes Transformer-XL style logits with relative position terms.

    Args:
      theta: A `.NestedMap` with this layer's weights, including `u`, `v`,
        `pos_emb` and `pos_proj`.
      query: [B, U, W, N, H] blocked queries.
      key: [B, U, C, N, H] blocked key context.

    Returns:
      Logits of shape [B, N, U, W, C], sum of the content terms (a+c) and
      the relative-position terms (b+d).
    """
    b, u, w, _, _ = py_utils.GetShape(query)
    _, _, c, _, _ = py_utils.GetShape(key)
    n = self.params.num_heads
    l = self.params.left_context
    r = self.params.right_context
    f = l + r
    # term a and c
    term_ac = tf.einsum('BUTNH,BUSNH->BNUTS', query + theta.u, key)
    # term b and d
    # [1, F]
    pos = tf.expand_dims(tf.range(l - 1, -r - 1, -1), 0)
    sin_emb = self.pos_emb.FPropWithPosition(theta.pos_emb, pos)
    # [1, F, N, H]
    sin_emb = self.pos_proj.FProp(theta.pos_proj, sin_emb)
    # [F, N, H]
    sin_emb = tf.squeeze(sin_emb, 0)
    # [B, N, U, W, F]
    term_bd = tf.einsum('BUWNH,FNH->BNUWF', query + theta.v, sin_emb)
    # Perform relative shift in order to get [B, N, U, W, C]
    # Pads the input to [B, N, U, C, C+1]
    term_bd = tf.pad(term_bd,
                     ((0, 0), (0, 0), (0, 0), (0, c - w), (0, c + 1 - f)))
    # Reshapes to [B, N, U, C+1, C]. Note the output last dim is 1-smaller
    # than the input, which "pushes" one element off to the next row for each
    # row. The accumulated effect is row_i is right-shifted i steps (i>=0).
    term_bd = tf.reshape(term_bd, [b, n, u, c + 1, c])
    # Keeps useful slices. [B, N, U, W, C]
    term_bd = tf.slice(term_bd, [0, 0, 0, 0, 0], [-1, -1, -1, w, -1])
    return term_ac + term_bd
class TransformerAttentionLayer(base_layer.BaseLayer):
  """Multiheaded attention sub-layer in Transformer layer.

  Input is first normalized using Layer Normalization. Output of layer
  normalization is processed using multi-headed attention. And finally, the
  output of the attention layer is combined with the residual connection.

  This layer will be used in the following three scenarios:

  1. Multi-Headed Self-Attention, where attention keys, values (source_vecs)
     and queries come from the same previous layer output.
  2. Masked Multi-Headed Self-Attention, where attention keys, values and
     queries all come from the same previous layer output, but rightward
     activations are masked to prevent information flow from future. This is
     the use case for Transformer decoder self-attention layers. Can be
     activated by setting is_masked flag of this layer.
  3. Multi-Headed Cross-Attention, where attention keys and values
     (source_vecs) are coming from a different source (output of the encoder),
     and queries coming from the previous layer outputs (decoder).

  We use the same capital letters to denote certain tensor parameters as
  MultiHeadedAttention class.

    B = batch size
    S = length of the key/value (source)
    T = length of the query (target)
    D = model dimension
    N = number of attention heads
    H = dimensions of each attention head.
  """

  @classmethod
  def Params(cls):
    p = super(TransformerAttentionLayer, cls).Params()
    p.Define('input_dim', 0, 'Dimension of the transformer block input.')
    p.Define('hidden_dim', 0, 'Dimension of the attention hidden dim.')
    p.Define('num_heads', 8, 'Number of attention heads.')
    # Fixed typo in the help string ('MultiHededAttention').
    p.Define('is_masked', False, 'If set, uses masked MultiHeadedAttention.')
    p.Define('ln_tpl', layers.LayerNorm.Params(), 'Layer norm default params')
    p.Define('atten_tpl',
             MultiHeadedAttention.Params().Set(),
             'Multi-Headed Dot-Product Attention default params')
    p.Define(
        'dropout_tpl', layers.DropoutLayer.Params(),
        'Residual dropout params template. keep_prop will be reset to '
        '(1.0 - residual_dropout_prob).')
    p.Define(
        'atten_dropout_prob', 0.0,
        'Probability at which we apply dropout to the attention probs. '
        'This practically drops memory values at random positions.')
    p.Define(
        'residual_dropout_prob', 0.0,
        'Probability at which we apply dropout to the residual layers, '
        'such that, residual(x, y) = (x + dropout(y)).')
    p.Define('add_unnormalized_input', True,
             'If set, uses unnormalized input in the residual add.')
    p.Define('add_skip_connection', True,
             'If True, add input (or normalized input) to the output.')
    return p

  @base_layer.initializer
  def __init__(self, params):
    super(TransformerAttentionLayer, self).__init__(params)
    p = self.params
    if not p.hidden_dim:
      p.hidden_dim = p.input_dim

    with tf.variable_scope(p.name):
      # Initialize multiheaded attention.
      params = p.atten_tpl.Copy()
      params.name = 'multihead_atten'
      params.input_dim = p.input_dim
      params.hidden_dim = p.hidden_dim
      params.num_heads = p.num_heads
      params.atten_dropout_prob = p.atten_dropout_prob
      self.CreateChild('atten', params)

      # Initialize attention layer normalization.
      params = p.ln_tpl.Copy()
      params.name = 'atten_ln'
      params.input_dim = p.input_dim
      self.CreateChild('layer_norm', params)

      # Initialize residual dropout.
      dropout_tpl = p.dropout_tpl.Copy()
      dropout_tpl.keep_prob = (1.0 - p.residual_dropout_prob)
      self.CreateChild('residual_dropout', dropout_tpl)

  def FProp(self,
            theta,
            query_vec,
            source_vecs,
            paddings,
            per_step_padding_override=None,
            segment_mask=None):
    """Compute the result of Transformer attention layer.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query_vec: [B, T, D].
      source_vecs: [B, S, D] (cross_attention) or None (self-attention).
      paddings: [B, S].
      per_step_padding_override: [B, T, T] for self attention or
        [B, T, S] for cross attention.
      segment_mask: [B, 1, T, S].

    Returns:
      output: [B, T, D].
      atten_probs: [B, N, T, S].
    """
    p = self.params
    b, t, _ = py_utils.GetShape(query_vec, 3)
    unnormalized_query_vec = query_vec
    # Layer normalization.
    query_vec = self.layer_norm.FProp(theta.layer_norm, query_vec)

    # For self-attention: keys = queries.
    if source_vecs is None:
      source_vecs = query_vec

    # Generates mask, with shape [b, t, t].
    if per_step_padding_override is None:
      if p.is_masked and segment_mask is None:
        # causal padding.
        per_step_padding = tf.tile(
            tf.expand_dims(CausalPadding(t, dtype=query_vec.dtype), 0),
            [b, 1, 1])
      else:
        per_step_padding = None
    else:
      per_step_padding = per_step_padding_override

    # Multiheaded attention.
    with tf.name_scope('atten'):
      ctx_vec, atten_probs = self.atten.FProp(
          theta.atten,
          query_vec,  # query
          source_vecs,  # key
          source_vecs,  # value
          paddings,
          segment_mask=segment_mask,
          per_step_padding=per_step_padding)

    # Residual connection.
    ctx_vec = self.residual_dropout.FProp(theta.residual_dropout, ctx_vec)
    input_to_add = (
        unnormalized_query_vec if p.add_unnormalized_input else query_vec)
    if p.add_skip_connection:
      ctx_vec += input_to_add
    return ctx_vec, atten_probs

  def ExtendStep(self,
                 theta,
                 query_vec,
                 cached_states,
                 time_step,
                 use_short_seq_opt=False):
    """Compute the result and update cached states for the current step.

    This function is used by autoregressive decoding.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query_vec: [B, 1, D]
      cached_states: A `.NestedMap` object containing tensors which are the
        results of previous attentions, used for fast decoding. key - [T, B,
        N, H]. value - [T, B, N, H].
      time_step: A scalar, the current decode step, 0-based.
      use_short_seq_opt: A bool, whether using short sequence optimization.

    Returns:
      cur_output: [B, 1, D]
      updated_states: A `.NestedMap` object containing the updated states.
      key - [T, B, N, H].
      value - [T, B, N, H].

    Raises:
      ValueError: If not used as masked/causal self-attention.
    """
    p = self.params
    if not p.is_masked:
      raise ValueError(
          'ExtendStep should be used only by masked/causal self-attention.')

    t, b, _, _ = py_utils.GetShape(cached_states.key, 4)
    unnormalized_query_vec = query_vec

    # Generates mask, with shape [b, 1, t]: positions <= time_step are
    # visible (0.0), future positions are padded out (1.0).
    zero_padding = tf.fill([t], tf.constant(0.0, dtype=query_vec.dtype))
    per_step_padding = tf.where(
        tf.less(tf.range(t), tf.fill([t], time_step + 1)), zero_padding,
        tf.ones_like(zero_padding, dtype=query_vec.dtype))
    per_step_padding = tf.tile(tf.expand_dims(per_step_padding, axis=0), [b, 1])
    per_step_padding = tf.expand_dims(per_step_padding, 1)

    # Layer normalization.
    query_vec = self.layer_norm.FProp(theta.layer_norm, query_vec)

    # Multiheaded masked/causal self-attention.
    ctx_vec, updated_key_vec, updated_value_vec = self.atten.ExtendStep(
        theta.atten, query_vec, cached_states.key, cached_states.value, None,
        None, per_step_padding, time_step, use_short_seq_opt)
    updated_states = py_utils.NestedMap(
        key=updated_key_vec, value=updated_value_vec)

    # Residual connection.
    ctx_vec = self.residual_dropout.FProp(theta.residual_dropout, ctx_vec)
    input_to_add = (
        unnormalized_query_vec if p.add_unnormalized_input else query_vec)
    if p.add_skip_connection:
      ctx_vec += input_to_add
    return ctx_vec, updated_states
class TransformerLayer(base_layer.BaseLayer):
  """Transformer layer with multiheaded attention.

  Applies self-attention followed by a cross-attention and feed forward layer.
  """

  @classmethod
  def Params(cls):
    p = super(TransformerLayer, cls).Params()
    p.Define('has_aux_atten', False,
             'If set, introduces a second attention layer')
    p.Define('mask_self_atten', False, 'If True, use masked self-attention.')
    p.Define('input_dim', 0, 'Dimension of the transformer block input.')
    p.Define('output_dim', 0, 'Dimension of the transformer block output.')
    p.Define('tr_atten_tpl',
             TransformerAttentionLayer.Params().Set(),
             'Transformer Attention Layer params.')
    p.Define(
        'tr_self_atten_tpl', None,
        'Attention template for self attention. If unset, use tr_atten_tpl.')
    p.Define(
        'tr_fflayer_tpl',
        layers_with_attention.TransformerFeedForwardLayer.Params().Set(
            hidden_dim=2048), 'Transformer Feed-Forward Layer params.')
    p.Define('packed_input', False,
             'If True, each training example may pack multiple sequences.')
    return p

  @classmethod
  def SetNumInputNodes(cls, p, num_input_nodes):
    """Sets the layer's input dimension on the given params."""
    p.input_dim = num_input_nodes

  @classmethod
  def NumOutputNodes(cls, p):
    """Returns the layer's output dimension from the given params."""
    return p.output_dim

  @base_layer.initializer
  def __init__(self, params):
    super(TransformerLayer, self).__init__(params)
    p = self.params
    with tf.variable_scope(p.name):
      # Initialize masked multi-headed self-attention
      if p.tr_self_atten_tpl is not None:
        self_atten_tpl = p.tr_self_atten_tpl
      else:
        self_atten_tpl = p.tr_atten_tpl
      params = self_atten_tpl.Copy()
      params.name = 'multihead_self_atten'
      params.input_dim = p.input_dim
      params.is_masked = p.mask_self_atten
      params.atten_tpl.packed_input = p.packed_input
      self.CreateChild('self_atten', params)

      if p.has_aux_atten:
        # Initialize multi-headed cross-attention
        params = p.tr_atten_tpl.Copy()
        params.name = 'multihead_cross_atten'
        params.input_dim = p.input_dim
        params.atten_tpl.packed_input = p.packed_input
        self.CreateChild('cross_atten', params)

      # Initialize feed-forward layer
      params = p.tr_fflayer_tpl.Copy()
      params.name = 'tr_fflayer'
      params.input_dim = p.input_dim
      params.output_dim = p.output_dim
      self.CreateChild('fflayer', params)

  def FProp(self,
            theta,
            query_vec,
            paddings,
            aux_vec=None,
            aux_paddings=None,
            per_step_padding_override=None,
            segment_mask=None,
            aux_segment_mask=None):
    """Transformer decoder layer.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query_vec: [target_batch, target_time, dim].
      paddings: [target_batch, target_time].
      aux_vec: [source_batch, source_time, dim].
      aux_paddings: [source_batch, source_time].
      per_step_padding_override: [target_batch, target_time, target_time].
      segment_mask: [target_batch, 1, target_time, target_time]
      aux_segment_mask: [source_batch, 1, target_time, source_time]

    target_batch can be a multiple of source_batch, where samples in
    target_batch are arranged in the order of [m, source_batch] where m =
    target_batch / source_batch.

    Returns:
      The fflayer output with shape [target_batch, target_time, dim].
      atten_probs: [B, N, T, S].
    """
    p = self.params
    # First the self-attention layer.
    if p.packed_input:
      assert aux_segment_mask is not None, ('Need to specify aux_segment_mask '
                                            'for packed input.')
    with tf.name_scope('self_atten'):
      atten_vec, atten_probs = self.self_atten.FProp(
          theta.self_atten,
          query_vec,
          None,
          paddings,
          segment_mask=segment_mask,
          per_step_padding_override=per_step_padding_override)
    if p.has_aux_atten:
      with tf.name_scope('aux_atten'):
        # Next the cross-attention layer.
        target_batch, target_time, dim = py_utils.GetShape(query_vec, 3)
        source_batch, source_time = py_utils.GetShape(aux_vec, 2)
        # Regroup the target batch as [m, source_batch] and fold the m
        # replicas into the time axis so cross-attention runs per source row.
        atten_vec = tf.reshape(atten_vec, [-1, source_batch, target_time, dim])
        atten_vec = tf.reshape(
            tf.transpose(atten_vec, [1, 0, 2, 3]), [source_batch, -1, dim])
        atten_vec, atten_probs = self.cross_atten.FProp(
            theta.cross_atten,
            atten_vec,
            aux_vec,
            aux_paddings,
            segment_mask=aux_segment_mask)
        # Undo the regrouping to restore the [target_batch, ...] layout for
        # both the probabilities and the context vectors.
        num_heads = py_utils.GetShape(atten_probs)[1]
        atten_probs = tf.reshape(
            atten_probs,
            [source_batch, -1, num_heads, target_time, source_time])
        atten_probs = tf.transpose(atten_probs, [1, 0, 2, 3, 4])
        atten_probs = tf.reshape(
            atten_probs, [target_batch, num_heads, target_time, source_time])
        atten_vec = tf.reshape(atten_vec, [source_batch, -1, target_time, dim])
        atten_vec = tf.transpose(atten_vec, [1, 0, 2, 3])
        atten_vec = tf.reshape(atten_vec, [target_batch, target_time, dim])

    # Finally the feed-forward layer.
    with tf.name_scope('fflayer'):
      return self.fflayer.FProp(theta.fflayer, atten_vec, paddings), atten_probs

  def InitStates(self, theta, target_batch_size, target_max_length):
    """Initializes zero-filled key/value caches for autoregressive decoding.

    Args:
      theta: A `.NestedMap` object; unused here.
      target_batch_size: Number of decoding sequences.
      target_max_length: Maximum number of decode steps to cache.

    Returns:
      A `.NestedMap` with `key` and `value`, each of shape
      [target_max_length, target_batch_size, num_heads, dim_per_head].
    """
    p = self.params
    num_heads = p.tr_atten_tpl.num_heads
    atten_dim = p.tr_self_atten_tpl.hidden_dim if p.tr_self_atten_tpl else p.tr_atten_tpl.hidden_dim
    if not atten_dim:  # Check for Pathways as atten_tpl.hidden_dim is not set.
      atten_dim = p.input_dim
    dim_per_head = atten_dim // num_heads
    # TODO(shafey): Determine if we want to make the cached shape 128 to
    # avoid padding and more efficient interpolation in beamsearch.
    return py_utils.NestedMap(
        key=tf.zeros(
            shape=(target_max_length, target_batch_size, num_heads,
                   dim_per_head),
            dtype=tf.float32),
        value=tf.zeros(
            shape=(target_max_length, target_batch_size, num_heads,
                   dim_per_head),
            dtype=tf.float32))

  def ExtendStep(self,
                 theta,
                 query_vec,
                 aux_vec,
                 aux_paddings,
                 cached_states,
                 time_step,
                 use_short_seq_opt=False):
    """Transformer decoder layer, extend one step in autoregressive decoding.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query_vec: [target_batch, 1, dim].
      aux_vec: [source_batch, source_time, dim]
      aux_paddings: [source_batch, source_time]
      cached_states: A `.NestedMap` object containing tensors which are the
        results of previous attentions, used for fast decoding. key -
        [target_time, target_batch, num_heads, dim_per_head]. value -
        [target_time, target_batch, num_heads, dim_per_head].
      time_step: A scalar, the current decode step, 0-based.
      use_short_seq_opt: A bool, whether using short sequence optimization.

    Returns:
      cur_output: [target_batch, 1, dim]
      updated_states: A `.NestedMap` object containing the updated states.
      key - [target_time, target_batch, num_heads, dim_per_head].
      value - [target_time, target_batch, num_heads, dim_per_head].
    """
    target_batch, _, dim = py_utils.GetShape(query_vec, 3)
    source_batch = py_utils.GetShape(aux_vec)[0]

    # First the self-attention layer.
    atten_vec, updated_states = self.self_atten.ExtendStep(
        theta.self_atten, query_vec, cached_states, time_step,
        use_short_seq_opt)

    # Next the cross-attention layer. Fold target_batch into
    # [source_batch, m] for cross-attention against the source.
    atten_vec = tf.reshape(atten_vec, [source_batch, -1, dim])
    atten_vec, _ = self.cross_atten.FProp(theta.cross_atten, atten_vec, aux_vec,
                                          aux_paddings)
    atten_vec = tf.reshape(atten_vec, [target_batch, 1, -1])

    # Finally the feed-forward layer.
    cur_output = self.fflayer.FProp(
        theta.fflayer, atten_vec,
        tf.zeros([target_batch, 1], dtype=atten_vec.dtype))
    return cur_output, updated_states
# Identifiers for relative-attention variants, used as the `atten_type`
# argument below.
# mt_attention_layer.MultiHeadedAttentionXL
ATTEN_TRANSFORMER_XL = 'transformer_xl'
# mt_attention_layer.MultiHeadedAttentionRPE
ATTEN_RPE = 'rpe'
def UseRelativeAttentionInTransformerLayer(transformer_params,
                                           rel_pos_emb_dim,
                                           atten_type=ATTEN_TRANSFORMER_XL):
  """Switches a transformer layer's self attention to a relative variant.

  Args:
    transformer_params: A mt_attention_layer.TransformerLayer.Params() object.
    rel_pos_emb_dim: (int) Relative positional embedding dim to be set.
    atten_type: (string) Attention type. Supported:
      - 'transformer_xl': mt_attention_layer.MultiHeadedAttentionXL
      - 'rpe': mt_attention_layer.MultiHeadedAttentionRPE

  Returns:
    A mt_attention_layer.TransformerLayer.Params() object with relative pos
    emb.

  Raises:
    ValueError: On an unsupported transformer layer, attention type, or
      attention class.
  """
  if not issubclass(transformer_params.cls, TransformerLayer):
    raise ValueError('Unsupported input transformer layer: %s' %
                     transformer_params.cls)
  if atten_type not in (ATTEN_TRANSFORMER_XL, ATTEN_RPE):
    raise ValueError('Relative attention type: %s unsupported' % atten_type)

  result = transformer_params.Copy()
  # Self attention falls back to tr_atten_tpl unless configured explicitly.
  if result.tr_self_atten_tpl is None:
    result.tr_self_atten_tpl = result.tr_atten_tpl.Copy()
  atten_tpl = result.tr_self_atten_tpl.atten_tpl

  # Already a relative-attention class: just update the embedding dim.
  if atten_tpl.cls in (MultiHeadedAttentionRPE, MultiHeadedAttentionXL,
                       LocalCausalSelfAttentionXL):
    atten_tpl.rel_pos_emb_dim = rel_pos_emb_dim
    return result

  if atten_type == ATTEN_RPE:
    rel_atten_tpl = MultiHeadedAttentionRPE.Params()
  else:  # atten_type == ATTEN_TRANSFORMER_XL
    xl_cls_map = {
        MultiHeadedAttention: MultiHeadedAttentionXL,
        LocalCausalSelfAttention: LocalCausalSelfAttentionXL,
    }
    if atten_tpl.cls not in xl_cls_map:
      raise ValueError('Unsupported attention: %s' % atten_tpl.cls)
    rel_atten_tpl = xl_cls_map[atten_tpl.cls].Params()

  # Carry over every existing setting, then enable relative positions.
  rel_atten_tpl = hyperparams.CopyParamsTo(
      atten_tpl, rel_atten_tpl, skip=['cls'])
  rel_atten_tpl.rel_pos_emb_dim = rel_pos_emb_dim
  result.tr_self_atten_tpl.atten_tpl = rel_atten_tpl
  return result
def ClearRelativeAttentionInTransformerLayer(transformer_params):
  """Replaces relative self attention with its plain counterpart.

  Args:
    transformer_params: A mt_attention_layer.TransformerLayer param.

  Returns:
    A mt_attention_layer.TransformerLayer param without relative attention.

  Raises:
    ValueError: On an unsupported transformer layer or attention class.
  """
  if not issubclass(transformer_params.cls, TransformerLayer):
    raise ValueError('Unsupported input transformer layer: %s' %
                     transformer_params.cls)
  result = transformer_params.Copy()
  if result.tr_self_atten_tpl is None:
    result.tr_self_atten_tpl = result.tr_atten_tpl.Copy()
  rel_tpl = result.tr_self_atten_tpl.atten_tpl

  # Map each relative-attention class back to its non-relative counterpart.
  if rel_tpl.cls == MultiHeadedAttentionXL:
    plain_tpl = MultiHeadedAttention.Params()
  elif rel_tpl.cls == LocalCausalSelfAttentionXL:
    plain_tpl = LocalCausalSelfAttention.Params()
  else:
    raise ValueError('Unsupported attention params: %s' % rel_tpl.cls)

  # Copy all settings except the class itself and the relative emb dim.
  plain_tpl = hyperparams.CopyParamsTo(
      rel_tpl, plain_tpl, skip=['cls', 'rel_pos_emb_dim'])
  result.tr_self_atten_tpl.atten_tpl = plain_tpl
  return result
class TransformerDecoderLayer(TransformerLayer):
  """Transformer decoder layer with multiheaded attention."""

  @classmethod
  def Params(cls):
    """Returns params with masked self-attention and cross-attention on."""
    p = super(TransformerDecoderLayer, cls).Params()
    p.mask_self_atten = True
    p.has_aux_atten = True
    return p
class StackedTransformerLayers(base_layer.BaseLayer):
  """A stack of Batch-Major Transformer layers."""

  @classmethod
  def Params(cls):
    p = super(StackedTransformerLayers, cls).Params()
    p.Define('has_aux_atten', False,
             'If set, introduces a second attention layer')
    p.Define('mask_self_atten', False, 'If True, use masked self-attention.')
    p.Define('num_layers', 0, 'Num of layers in this stack.')
    p.Define('mdl_dim', 0, 'Model dimension in Transformer layers.')
    p.Define('hidden_dim', 0,
             'The hidden layer dimension in Transformer layers.')
    p.Define('num_atten_heads', 0, 'Num of attention heads.')
    p.Define('dropout_prob', 0.0,
             'Apply dropout at this prob at various places.')
    p.Define('add_unnormalized_input', True,
             'If set, uses unnormalized input in the residual add.')
    p.Define('transformer_layer_params_tpl', TransformerLayer.Params(),
             'A template of TransformerLayer.params.')
    p.Define('final_layer_norm', False,
             'If true, apply layer normalization to the final output.')
    p.Define('packed_input', False,
             'If True, each training example may pack multiple sequences.')
    p.Define('use_fused_layernorm', False, 'Whether to use fused layernorm.')
    return p

  @base_layer.initializer
  def __init__(self, params):
    super(StackedTransformerLayers, self).__init__(params)
    p = self.params
    assert p.num_layers > 0
    assert p.mdl_dim > 0
    assert p.hidden_dim > 0
    assert p.num_atten_heads > 0
    assert 0.0 <= p.dropout_prob < 1.0

    def _LayerParams(ii):
      """Construct ii-th layer params."""
      p_ii = p.transformer_layer_params_tpl.Copy()
      # Bug fix: this previously assigned `p.name`, which clobbered the
      # stack's own name and left each child layer with the template's
      # default name. The per-layer copy `p_ii` must be named instead.
      p_ii.name = 'layer_%d' % ii
      p_ii.has_aux_atten = p.has_aux_atten
      p_ii.mask_self_atten = p.mask_self_atten
      p_ii.input_dim = p.mdl_dim
      p_ii.output_dim = p.mdl_dim
      p_ii.packed_input = p.packed_input
      p_ii.tr_atten_tpl.num_heads = p.num_atten_heads
      p_ii.tr_atten_tpl.atten_dropout_prob = p.dropout_prob
      p_ii.tr_atten_tpl.residual_dropout_prob = p.dropout_prob
      p_ii.tr_atten_tpl.add_unnormalized_input = p.add_unnormalized_input
      p_ii.tr_fflayer_tpl.hidden_dim = p.hidden_dim
      p_ii.tr_fflayer_tpl.residual_dropout_prob = p.dropout_prob
      p_ii.tr_fflayer_tpl.relu_dropout_prob = p.dropout_prob
      return p_ii

    layer_params = [_LayerParams(ii) for ii in range(p.num_layers)]

    with tf.variable_scope(p.name):
      self.CreateChildren('x_layers', layer_params)

    if p.final_layer_norm:
      final_ln_p = layers.LayerNorm.Params().Set(
          input_dim=p.mdl_dim, use_fused_layernorm=p.use_fused_layernorm)
      self.CreateChild('final_ln', final_ln_p)

  def FProp(self,
            theta,
            query_vec,
            paddings,
            aux_vec=None,
            aux_paddings=None,
            segment_mask=None,
            aux_segment_mask=None):
    """Stacked Transformer layer.

    Args:
      theta: A `NestedMap` object containing weights' values of this layer and
        its children layers.
      query_vec: [batch, target_time, dim].
      paddings: [batch, target_time].
      aux_vec: [batch, source_time, dim].
      aux_paddings: [batch, source_time].
      segment_mask: [batch, 1, target_time, target_time]
      aux_segment_mask: [batch, 1, target_time, source_time]

    Returns:
      A tuple (x_out, paddings):
      - x_out: the attention context vector, [batch, target_time, dim].
      - paddings: the input paddings, passed through unchanged.
    """
    p = self.params
    x_out = query_vec
    with tf.name_scope(p.name):
      for i in range(p.num_layers):
        x_in = x_out
        x_out, _ = self.x_layers[i].FProp(theta.x_layers[i], x_in, paddings,
                                          aux_vec, aux_paddings, segment_mask,
                                          aux_segment_mask)
    if p.final_layer_norm:
      x_out = self.final_ln.FProp(theta.final_ln, x_out)
    return x_out, paddings

  def InitStates(self, theta, *args, **kwargs):
    """Returns a `.NestedMap` of per-layer decoding states."""
    return py_utils.NestedMap(x_layers=[
        layer.InitStates(layer_theta, *args, **kwargs)
        for layer, layer_theta in zip(self.x_layers, theta.x_layers)
    ])

  def ExtendStep(self,
                 theta,
                 query_vec,
                 aux_vec,
                 aux_paddings,
                 cached_states,
                 time_step,
                 use_short_seq_opt=False):
    """Transformer decoder layer, extend one step in autoregressive decoding.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query_vec: [target_batch, 1, dim].
      aux_vec: [source_batch, source_time, dim]
      aux_paddings: [source_batch, source_time]
      cached_states: A `.NestedMap` object containing tensors which are the
        results of previous attentions, used for fast decoding.
        cached_states.x_layers is a list corresponding to self.x_layers, where
        each element is a NestedMap with attention keys and values:
        "key" - [target_time, target_batch, num_heads, dim_per_head].
        "value" - [target_time, target_batch, num_heads, dim_per_head].
      time_step: A scalar, the current decode step, 0-based.
      use_short_seq_opt: A bool, whether using short sequence optimization.

    Returns:
      cur_output: The last decoder layer output of shape [target_batch, 1, dim].
      updated_states: A `.NestedMap` object containing the updated states.
        updated_states.x_layers is a list corresponding to self.x_layers, where
        each element is a NestedMap with attention keys and values:
        "key" - [target_time, target_batch, num_heads, dim_per_head].
        "value" - [target_time, target_batch, num_heads, dim_per_head].
    """
    p = self.params
    with tf.name_scope(p.name):
      updated_states = py_utils.NestedMap(x_layers=[])
      decoder_input = query_vec
      # Thread the decoder input through each layer, collecting the updated
      # per-layer attention caches as we go.
      for layer, layer_theta, layer_states in zip(self.x_layers, theta.x_layers,
                                                  cached_states.x_layers):
        decoder_output, updated_layer_states = layer.ExtendStep(
            layer_theta, decoder_input, aux_vec, aux_paddings, layer_states,
            time_step, use_short_seq_opt)
        updated_states.x_layers.append(updated_layer_states)
        decoder_input = decoder_output
    return decoder_output, updated_states
class TransformerFeedForwardLayerWithTaskId(
    layers_with_attention.TransformerFeedForwardLayer):
  """TransformerFeedForwardLayer with optional task_id input args."""

  @classmethod
  def Params(cls):
    p = super(TransformerFeedForwardLayerWithTaskId, cls).Params()
    # Fixed help string: it was copy-pasted from an unrelated param
    # ('introduces a second attention layer') and did not describe this flag.
    p.Define('use_task_ids', False,
             'If set, FProp requires an additional task_id input argument.')
    return p

  def FProp(self, theta, inputs, paddings, task_id=None):
    """Feed-forward, residual and layer-norm.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      inputs: [batch, time, dim].
      paddings: [batch, time]
      task_id: optional task_id with shape [batch]

    Returns:
      tensor of the same shape with inputs

    Raises:
      ValueError: If p.use_task_ids is set but task_id is None.
    """
    p = self.params
    if p.use_task_ids:
      if task_id is None:
        raise ValueError('Must pass task_id if use_task_ids.')
    inputs_normalized = self.layer_norm.FProp(theta.layer_norm, inputs)
    if hasattr(self, 'res_proj_layer'):
      inputs = self.res_proj_layer.FProp(theta.res_proj_layer, inputs)
    expanded_paddings = tf.expand_dims(paddings, -1)
    # Only forward task_id to the feed-forward child when enabled.
    fflayer_args = [inputs_normalized, expanded_paddings]
    fflayer_args += [task_id] if p.use_task_ids else []
    h = inputs + self.residual_dropout.FProp(
        theta.residual_dropout, self.fflayer.FProp(theta.fflayer,
                                                   *fflayer_args))
    return h
class GPipeTransformerLayer(TransformerLayer):
  """GPipe compatible transformer layer."""

  @classmethod
  def Params(cls):
    p = super(GPipeTransformerLayer, cls).Params()
    # GPipe needs the task-id-aware feed-forward layer so task ids can flow
    # through the pipeline tuple.
    p.tr_fflayer_tpl = TransformerFeedForwardLayerWithTaskId.Params()
    return p

  def FProp(self,
            theta,
            source_vecs,
            source_paddings,
            target_vecs,
            target_paddings,
            source_segment_mask,
            target_segment_mask,
            transparent_acc,
            transparent_acc_helper,
            source_task_id=None,
            target_task_id=None):
    """Transformer layer FProp in GPipe micro-batch tuple form.

    Consumes and returns the full pipeline tuple so layers can be chained by
    GPipe. When p.has_aux_atten is set, runs as a decoder (masked self
    attention + cross attention over source_vecs) and updates the target
    stream; otherwise runs as an encoder and updates the source stream. All
    other tuple elements are passed through unchanged.
    """
    p = self.params
    with tf.name_scope(p.name):
      if p.has_aux_atten:  # Decoder FProp
        atten_vec, _ = self.self_atten.FProp(
            theta.self_atten,
            target_vecs,
            None,
            target_paddings,
            segment_mask=target_segment_mask)
        atten_vec, _ = self.cross_atten.FProp(
            theta.cross_atten,
            atten_vec,
            source_vecs,
            source_paddings,
            segment_mask=source_segment_mask)
        atten_vec = self.fflayer.FProp(theta.fflayer, atten_vec,
                                       target_paddings, target_task_id)
        atten_vec.set_shape(target_vecs.shape)
        return (source_vecs, source_paddings, atten_vec, target_paddings,
                source_segment_mask, target_segment_mask, transparent_acc,
                transparent_acc_helper, source_task_id, target_task_id)
      # Encoder FProp
      atten_vec, _ = self.self_atten.FProp(
          theta.self_atten,
          source_vecs,
          None,
          source_paddings,
          segment_mask=source_segment_mask)
      atten_vec = self.fflayer.FProp(theta.fflayer, atten_vec, source_paddings,
                                     source_task_id)
      atten_vec.set_shape(source_vecs.shape)
      return (atten_vec, source_paddings, target_vecs, target_paddings,
              source_segment_mask, target_segment_mask, transparent_acc,
              transparent_acc_helper, source_task_id, target_task_id)

  @classmethod
  def FPropMeta(cls, p, inputs, *args):
    """Returns a rough FLOPs estimate and pass-through output shapes."""
    py_utils.CheckShapes((inputs,))
    flops_per_element = 5
    source_batch, src_time, dim = inputs
    flops = flops_per_element * src_time * src_time * source_batch * dim
    args = args if isinstance(args, tuple) else (args,)
    return py_utils.NestedMap(flops=flops, out_shapes=(inputs,) + args)

  @classmethod
  def SetupDeterministicDropout(cls, params):
    """Replaced dropout layers in transformer with deterministic ones."""
    params.tr_atten_tpl.dropout_tpl = (
        layers.DeterministicDropoutLayer.Params())
    params.tr_atten_tpl.atten_tpl.dropout_tpl = (
        layers.DeterministicDropoutLayer.Params())
    params.tr_fflayer_tpl.residual_dropout_tpl = (
        layers.DeterministicDropoutLayer.Params())
    params.tr_fflayer_tpl.fflayer_tpl.dropout = (
        layers.DeterministicDropoutLayer.Params())
    return params

  def ExtendStep(self,
                 theta,
                 query_vec,
                 aux_vec,
                 aux_paddings,
                 cached_states,
                 time_step,
                 task_id=None,
                 use_short_seq_opt=False):
    """Transformer decoder layer, extend one step in autoregressive decoding.

    Args:
      theta: A `.NestedMap` object containing weights' values of this layer and
        its children layers.
      query_vec: [target_batch, 1, dim].
      aux_vec: [source_batch, source_time, dim]
      aux_paddings: [source_batch, source_time]
      cached_states: A `.NestedMap` object containing tensors which are the
        results of previous attentions, used for fast decoding. key -
        [target_time, target_batch, num_heads, dim_per_head]. value -
        [target_time, target_batch, num_heads, dim_per_head].
      time_step: A scalar, the current decode step, 0-based.
      task_id: [batch_size]: the input task_id meta information.
      use_short_seq_opt: A bool, whether using short sequence optimization.

    Returns:
      cur_output: [target_batch, 1, dim]
      updated_states: A `.NestedMap` object containing the updated states.
      key - [target_time, target_batch, num_heads, dim_per_head].
      value - [target_time, target_batch, num_heads, dim_per_head].
    """
    target_batch, _, dim = py_utils.GetShape(query_vec, 3)
    source_batch = py_utils.GetShape(aux_vec)[0]
    # First the self-attention layer.
    atten_vec, updated_states = self.self_atten.ExtendStep(
        theta.self_atten, query_vec, cached_states, time_step,
        use_short_seq_opt)
    # Next the cross-attention layer.
    atten_vec = tf.reshape(atten_vec, [source_batch, -1, dim])
    atten_vec, _ = self.cross_atten.FProp(theta.cross_atten, atten_vec, aux_vec,
                                          aux_paddings)
    atten_vec = tf.reshape(atten_vec, [target_batch, 1, -1])
    # Finally the feed-forward layer.
    cur_output = self.fflayer.FProp(
        theta.fflayer, atten_vec,
        tf.zeros([target_batch, 1], dtype=atten_vec.dtype), task_id)
    return cur_output, updated_states
# pyformat: disable
class Builder(builder.Base):
  """Builder for self-attention layers.

  Produces layer params (not layer instances) for attention, feed-forward,
  lightweight-convolution and full transformer-encoder stacks, optionally
  wrapped in a GPipe pipeline when model parallelism is configured.
  """
  @classmethod
  def Params(cls):
    """Defaults for the builder; see each Define() call for semantics."""
    p = super(Builder, cls).Params()
    # Core model dimensions.
    p.Define('model_dim', 4, 'Model dim of this layer.')
    p.Define('num_heads', 1, 'Number of heads in the atten layer.')
    p.Define('ff_hidden_dim', 4, 'Hidden dim of the feedforward layer')
    # Dropout / regularization knobs.
    p.Define('residual_dropout_prob', 0,
             'Dropout prob to the output of each sub-layer before it is added '
             'to the sub-layer input.')
    p.Define('ff_activation_fn', tf.nn.relu,
             'Activation function in Feedforward layer.')
    p.Define('ff_residual_weight', 1.0, 'Weight given to F(x) in the residual '
             'connection: y = x + ff_residual_weight * F(x), in Feedforward '
             'layer.')
    p.Define('relu_dropout_prob', 0,
             'Probability at which we apply dropout to the hidden layer of '
             'feed-forward network.')
    p.Define('atten_dropout_prob', 0,
             'Probability at which we apply dropout to the attention layer')
    # Attention behavior switches.
    p.Define('selfatten_add_unnormalized_input', True,
             'Whether to use unnormalized input in the residual add.')
    p.Define('selfatten_enable_value_proj', True,
             'Whether value v is pre-projected before self attention or not.')
    p.Define('conv_activation', 'RELU',
             'Activation function for convolution layer in Builder.')
    # Model-parallelism / pipelining configuration (consumed by _MaybeSplit).
    p.Define('num_splits', 1,
             'Number of model parallelism splits.')
    p.Define('num_micro_batches', 1,
             'Number of spatial partition along the batch dimension. '
             'When num_micro_batches > 1, the effective batch size of the '
             'intermediate activation is batch_size // num_micro_batches.'
             'This allows models to try larger batch size which might improve '
             'model quality')
    p.Define('glu_with_tanh', False,
             'If the Gated Linear Unit should apply tanh on the activation '
             'input.')
    p.Define('packed_input', False,
             'Whether to support packed input')
    p.Define('enable_per_dim_scale', True,
             'Whether using per_dim_scale or scaling by a constant factor.')
    p.Define('use_fused_layernorm', False, 'Whether to use fused layernorm.')
    p.Define('use_bias', True, 'Whether to use bias for projection layer.')
    p.Define('xla_num_partitions', None, '')
    return p
def __init__(self, params):
super(Builder, self).__init__(params)
p = self.params
if p.num_splits > 1 or p.num_micro_batches > 1:
assert p.deterministic_dropout
def _Dropout(self, name, drop_prob):
"""Returns a DropoutLayer Params."""
return super(Builder, self)._Dropout(name, keep_prob=1.0 - drop_prob)
def _Add(self, name, residual_weight=1.0):
return self._Fn(name, fn=lambda x, y: x + residual_weight * y,
fn_out=lambda x, y: x)
def _ExpandDims(self, name):
return self._Fn(name,
fn=lambda x: tf.expand_dims(x, 2),
fn_out=lambda x: tshape.Shape(x[0:2] + [1] + x[2:]),
fn_flops=lambda x: 1)
def _Squeeze(self, name):
return self._Fn(name,
fn=lambda x: tf.squeeze(x, 2),
fn_out=lambda x: tshape.Shape(x[0:2] + x[3:]),
fn_flops=lambda x: 1)
def _Glu(self, name):
def _GLUFn(inputs):
gated_inputs, act_inputs = tf.split(inputs, 2, axis=-1)
return act_inputs * tf.sigmoid(gated_inputs)
def _GatedTanhFn(inputs):
gated_inputs, act_inputs = tf.split(inputs, 2, axis=-1)
return tf.tanh(act_inputs) * tf.sigmoid(gated_inputs)
fn = _GatedTanhFn if self.params.glu_with_tanh else _GLUFn
return self._Fn(name,
fn=fn,
fn_out=lambda x: tshape.Shape(x[:-1] + [x[-1] / 2]),
fn_flops=lambda x: 15 * x.size)
def _Pad(self, name):
return self._Fn(
name,
fn=lambda x, p: py_utils.ApplyPadding(tf.expand_dims(p, -1), x),
fn_out=lambda x, p: x,
fn_flops=lambda x, p: 2 * max(x.size, p.size))
def _MultiHeadedAtten(self, name):
"""Returns a MultiHeadedAttention params."""
p = self.params
atten_p = MultiHeadedAttention.Params().Set(
name=name,
input_dim=p.model_dim,
hidden_dim=p.model_dim,
num_heads=p.num_heads,
atten_dropout_prob=p.atten_dropout_prob,
enable_value_proj=p.selfatten_enable_value_proj,
enable_per_dim_scale=p.enable_per_dim_scale,
packed_input=p.packed_input,
fprop_dtype=p.fprop_dtype,
use_bias=p.use_bias
)
if p.deterministic_dropout:
atten_p.dropout_tpl = layers.DeterministicDropoutLayer.Params()
return atten_p
  def Feedforward(self, name, is_causal=False):
    """Position-wise feed-forward sub-layer: LN -> FF -> residual add -> pad.

    Args:
      name: name of this layer.
      is_causal: unused; accepted for API symmetry with the conv blocks.

    Returns:
      A graph layer params mapping a NestedMap {vec, paddings[, segment_mask]}
      to a NestedMap of the same structure.
    """
    del is_causal  # A position-wise FF layer has no temporal dependency.
    p = self.params
    sub_list = [
        # Pre-layernorm feed-forward transform of the input vectors.
        ('i.vec->after_feedforward',
         self._Seq(
             'feedforward',
             self._LN('ln', p.model_dim,
                      use_fused_layernorm=p.use_fused_layernorm),
             self._Linear('linear01', p.model_dim, p.ff_hidden_dim),
             self._Bias('bias01', p.ff_hidden_dim),
             # TODO(blee): figure out why p.xla_num_partitions is not propagated
             # here.
             self._ActivationXlaShard('act', p.ff_activation_fn, p.xla_num_partitions),
             self._Dropout('relu_dropout', p.relu_dropout_prob),
             self._Linear('linear02', p.ff_hidden_dim, p.model_dim),
             self._Bias('bias02', p.model_dim),
             self._Dropout('dropout', p.residual_dropout_prob))),
        # Residual connection: y = x + ff_residual_weight * F(x).
        ('i.vec,after_feedforward->added', self._Add('add', p.ff_residual_weight)),
        # Zero out padded positions; paddings pass through unchanged.
        ('added,i.paddings->o.vec', self._Pad('pad')),
        ('i.paddings->o.paddings', self._Id('id')),
    ]
    if p.packed_input:
      # Packed sequences carry a segment mask that must be forwarded.
      sub_list.append(('i.segment_mask->o.segment_mask',
                       self._Id('segment_mask')))
    return self._Graph(
        name,
        ['i'],  # input NestedMap with {vec, paddings, segment_mask}
        ['o'],  # output NestedMap with {vec, paddings, segment_mask}
        *sub_list)
  def _MaybeSplit(self, name, blocks):
    """Wraps `blocks` into a GPipe pipeline when model parallelism is on.

    Args:
      name: name of the resulting layer.
      blocks: list of layer params to partition into consecutive cells.

    Returns:
      None when no splitting is configured (num_splits == 1 and
      num_micro_batches == 1); otherwise a gpipe.PipeliningLayer params whose
      cells each contain a consecutive chunk of `blocks`.
    """
    p = self.params
    if p.num_splits == 1 and p.num_micro_batches == 1:
      return None

    num_layers = len(blocks)
    assert num_layers >= p.num_splits
    # Ceiling division: each cell receives up to this many consecutive blocks.
    layers_per_split = (num_layers - 1) // p.num_splits + 1
    cells = []
    while blocks:
      head, blocks = blocks[:layers_per_split], blocks[layers_per_split:]
      cells.append(self._Seq('cell_{}'.format(len(cells)), *head))
    # NOTE(review): fixed-size chunking only yields exactly num_splits cells
    # for some (num_layers, num_splits) combinations — e.g. 4 layers over 3
    # splits produces 2 cells and trips this assert. The assert therefore
    # rejects incompatible configurations rather than guarding a code bug.
    assert len(cells) == p.num_splits
    return gpipe.PipeliningLayer.Params().Set(
        name=name,
        cell_tpl=cells,
        nested_map_fprop=True,
        num_micro_batches=p.num_micro_batches)
def _DepthwiseConv2D(self, name, filter_size, is_causal=False):
"""A depthwise convolution block for lightweight conv."""
p = self.params
conv_builder_params = conv_layers.Builder.Params()
conv_builder = conv_builder_params.Instantiate()
return conv_builder.DepthwiseConv2D(
name=name,
in_dim=p.model_dim,
depth_multiplier=1,
filter_shape=[filter_size, 1],
stride=(1, 1),
dilation=(1, 1),
activation=p.conv_activation,
is_causal=is_causal)
def _NormalizedDepthwiseConv2D(self, name, kernel_size, is_causal=False):
"""A depthwise convolution block for lightweight conv."""
p = self.params
conv_builder_params = conv_layers.Builder.Params()
conv_builder = conv_builder_params.Instantiate()
return conv_builder.NormalizedDepthwiseConv2D(
name=name,
kernel_size=kernel_size,
num_heads=p.num_heads,
in_dim=p.model_dim,
dropconnect_prob=p.atten_dropout_prob,
deterministic_dropout=p.deterministic_dropout,
is_causal=is_causal)
def LConv(self,
name,
kernel_size,
is_causal=False,
convolution_fn=None):
"""A lightweight convolution block as described in.
https://arxiv.org/abs/1901.10430
Corresponding PyTorch Implementation (L587):
https://github.com/pytorch/fairseq/blob/v0.6.2/fairseq/models/lightconv.py
This block can be used as an alternative to self-attention block.
Args:
name: name of the params
kernel_size: kernel size used in the conv layer.
is_causal: is causal padding or not.
convolution_fn: Convolution to apply, default _NormalizedDepthwiseConv2D.
Returns:
A LightWeightConvLayerBlock layer params.
"""
p = self.params
if convolution_fn is None:
convolution_fn = getattr(self, '_NormalizedDepthwiseConv2D')
sub_list = [
('i.vec->pre_conv',
self._Seq(
'pre_conv',
self._LN('ln', p.model_dim,
use_fused_layernorm=p.use_fused_layernorm),
self._Linear('linear', p.model_dim, p.model_dim * 2),
self._Bias('bias', p.model_dim * 2),
self._Glu('glu'),
self._ExpandDims('expand'))),
('pre_conv,i.paddings->post_conv,o.paddings',
convolution_fn('conv', kernel_size, is_causal)),
('post_conv->after_dropout',
self._Seq(
'post_conv',
self._Squeeze('squeeze'),
self._Linear('linear', p.model_dim, p.model_dim),
self._Bias('bias', p.model_dim),
self._Dropout('dropout', p.residual_dropout_prob))),
('i.vec,after_dropout->o.vec', self._Add('add')),
]
if p.packed_input:
sub_list.append(('i.segment_mask->o.segment_mask', self._Id('segment_mask')))
return self._Graph(
name,
['i'], # input NestedMap with {vec, paddings, segment_mask}
['o'], # output NestedMap with {vec, paddings, segment_mask}
*sub_list
)
def LconvBlock(self, name, kernel_size, is_causal,
convolution_fn):
"""A lightweight conv block followed by a feedforward one."""
return self._Seq(
name,
self.LConv(
name='lconv',
kernel_size=kernel_size,
is_causal=is_causal,
convolution_fn=convolution_fn),
self.Feedforward('ff', is_causal))
  def Seq(self, name, *subs):
    """Returns a stack of sequential layers.

    Public wrapper around the base builder's _Seq.
    """
    return self._Seq(name, *subs)
def LConvStack(self, name, kernel_sizes, is_causal=False):
"""Returns a stack of LConv layers with kernel size in kernel_sizes."""
blocks = []
for i, kernel_size in enumerate(kernel_sizes):
blocks.append(
self.LconvBlock(
name='block_{}'.format(i),
kernel_size=kernel_size,
is_causal=is_causal,
convolution_fn=None))
return self._MaybeSplit(name, blocks) or self._Seq(name, *blocks)
def _Stride(self, name, stride):
"""Strides the input sequence.
Args:
name: name of this layer.
stride: To use every k-th token, set the stride to k. When stride == 0,
only returns the first token of the input. When stride == 1, returns
every token in the input.
Returns:
A layer params that does stride.
"""
if stride == 1:
return self._Id(name)
if stride == 0:
return self._Fn(
name=name,
fn=lambda x: tf.expand_dims(x[:, 0], 1),
fn_out=lambda x: tshape.Shape(x[0:1] + [1] + x[2:]),
fn_flops=lambda x: 1)
return self._Fn(
name=name,
fn=lambda x: x[:, ::stride],
fn_out=lambda x: tshape.Shape(x[0:1] + x[1] // stride + x[2:]),
fn_flops=lambda x: 1)
  def _StridedAttention(self, name, stride=1):
    """Computes self attention with optional stride.

    Args:
      name: name of this layer.
      stride: If omitted, the default is 1: use every token in the query. To use
        every k-th token, set the stride to k. When set to 0, only use the first
        token of the query.

    Returns:
      A self attention layer params.
    """
    p = self.params
    # Choose what feeds the residual add: the raw input or the LN output.
    input_to_add = ('i.vec'
                    if p.selfatten_add_unnormalized_input else 'after_ln')
    # Query is the strided sequence; keys/values are the full LN'd sequence.
    attention_inputs = 'strided_query,after_ln,after_ln,i.paddings'
    if p.packed_input:
      attention_inputs += ',i.segment_mask'
    sub_list = [
        ('i.vec->after_ln',
         self._LN('LN', p.model_dim,
                  use_fused_layernorm=p.use_fused_layernorm)),
        ('after_ln->strided_query',
         self._Stride('query_after_stride', stride)),
        ('{}->after_att,prob'.format(attention_inputs),
         self._MultiHeadedAtten('atten')),
        ('after_att->after_dropout',
         self._Dropout('dropout', p.residual_dropout_prob)),
        # The residual branch must be strided the same way as the query.
        ('{}->strided_input'.format(input_to_add),
         self._Stride('before_add', stride)),
        ('strided_input,after_dropout->o.vec',
         self._Add('add')),
        ('i.paddings->o.paddings',
         self._Stride('padding_after_Stride', stride)),
    ]
    if p.packed_input:
      sub_list.append(('i.segment_mask->o.segment_mask', self._Id('segment_mask')))
    return self._Graph(
        name,
        ['i'],  # input NestedMap with {vec, paddings, segment_mask}
        ['o'],  # output NestedMap with {vec, paddings, segment_mask}
        *sub_list)
def TransformerEncoderLayer(self, name, stride=1):
"""(inputs, paddings) -> (encoded, paddings).
Args:
name: the string name of the encoder layer params.
stride: To use every k-th token, set the stride to k. When stride == 0,
only returns the first token of the input. When stride == 1, returns
every token in the input.
Returns:
A transformer encoder layer params that supports optional stride.
"""
# Hack to be compatible with ckpt generated by self._rep
return self._Seq(name, self._Seq(
'block',
self._StridedAttention('self_atten', stride=stride),
self.Feedforward('ff')))
def Stack(self, name, blocks):
"""Returns a stack of sequential layers."""
return self._MaybeSplit(name, blocks) or self._Seq(name, *blocks)
def TransformerEncoderStack(self, name, num_layers=1):
"""Returns a stack of num_layers self-attention layers."""
blocks = [
self.TransformerEncoderLayer(name='iter_{:0>3d}'.format(d))
for d in range(num_layers)
]
return self.Stack(name, blocks)
# pyformat: enable
| apache-2.0 |
radicalbit/ambari | ambari-agent/src/main/python/ambari_agent/FileCache.py | 10789 | #!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import StringIO
import logging
import os
import shutil
import zipfile
import urllib2
import urllib
from AmbariConfig import AmbariConfig
# Module-level logger; no name argument means the root logger is used.
logger = logging.getLogger()
class CachingException(Exception):
  """Raised when cache content cannot be downloaded, unpacked or written."""
class FileCache():
  """
  Provides caching and lookup for service metadata files.
  If service metadata is not available at cache,
  downloads relevant files from the server.
  """

  # Sub-directories of the agent cache, one per kind of downloadable content.
  CLUSTER_CONFIGURATION_CACHE_DIRECTORY="cluster_configuration"
  ALERTS_CACHE_DIRECTORY="alerts"
  RECOVERY_CACHE_DIRECTORY="recovery"
  STACKS_CACHE_DIRECTORY="stacks"
  COMMON_SERVICES_DIRECTORY="common-services"
  CUSTOM_ACTIONS_CACHE_DIRECTORY="custom_actions"
  EXTENSIONS_CACHE_DIRECTORY="extensions"
  HOST_SCRIPTS_CACHE_DIRECTORY="host_scripts"
  # Per-directory marker file holding the last downloaded content hash.
  HASH_SUM_FILE=".hash"
  # Name of the zip archive fetched from the server for each directory.
  ARCHIVE_NAME="archive.zip"
  # Config key that can disable automatic cache refresh entirely.
  ENABLE_AUTO_AGENT_CACHE_UPDATE_KEY = "agent.auto.cache.update"
  # Download chunk size (bytes) and HTTP socket timeout (seconds).
  BLOCK_SIZE=1024*16
  SOCKET_TIMEOUT=10
  def __init__(self, config):
    """Creates a FileCache backed by the agent configuration.

    Reads 'agent/cache_dir' and 'agent/tolerate_download_failures' from
    the supplied config object.
    """
    self.service_component_pool = {}
    self.config = config
    self.cache_dir = config.get('agent', 'cache_dir')
    # Defines whether command should fail when downloading scripts
    # from the server is not possible or agent should rollback to local copy
    self.tolerate_download_failures = \
      config.get('agent','tolerate_download_failures').lower() == 'true'
    self.reset()
def reset(self):
self.uptodate_paths = [] # Paths that already have been recently checked
def get_service_base_dir(self, command, server_url_prefix):
"""
Returns a base directory for service
"""
service_subpath = command['commandParams']['service_package_folder']
return self.provide_directory(self.cache_dir, service_subpath,
server_url_prefix)
def get_hook_base_dir(self, command, server_url_prefix):
"""
Returns a base directory for hooks
"""
try:
hooks_subpath = command['commandParams']['hooks_folder']
except KeyError:
return None
subpath = os.path.join(self.STACKS_CACHE_DIRECTORY, hooks_subpath)
return self.provide_directory(self.cache_dir, subpath,
server_url_prefix)
def get_custom_actions_base_dir(self, server_url_prefix):
"""
Returns a base directory for custom action scripts
"""
return self.provide_directory(self.cache_dir,
self.CUSTOM_ACTIONS_CACHE_DIRECTORY,
server_url_prefix)
def get_custom_resources_subdir(self, command, server_url_prefix):
"""
Returns a custom directory which must be a subdirectory of the resources dir
"""
try:
custom_dir = command['commandParams']['custom_folder']
except KeyError:
return None
return self.provide_directory(self.cache_dir,
custom_dir,
server_url_prefix)
def get_host_scripts_base_dir(self, server_url_prefix):
"""
Returns a base directory for host scripts (host alerts, etc) which
are scripts that are not part of the main agent code
"""
return self.provide_directory(self.cache_dir,
self.HOST_SCRIPTS_CACHE_DIRECTORY,
server_url_prefix)
def auto_cache_update_enabled(self):
if self.config and \
self.config.has_option(AmbariConfig.AMBARI_PROPERTIES_CATEGORY, FileCache.ENABLE_AUTO_AGENT_CACHE_UPDATE_KEY) and \
self.config.get(AmbariConfig.AMBARI_PROPERTIES_CATEGORY, FileCache.ENABLE_AUTO_AGENT_CACHE_UPDATE_KEY).lower() == "false":
return False
return True
  def provide_directory(self, cache_path, subdirectory, server_url_prefix):
    """
    Ensures that directory at cache is up-to-date. Throws a CachingException
    if any problems occur
    Parameters;
    cache_path: full path to cache directory
    subdirectory: subpath inside cache
    server_url_prefix: url of "resources" folder at the server
    """
    full_path = os.path.join(cache_path, subdirectory)
    logger.debug("Trying to provide directory {0}".format(subdirectory))
    if not self.auto_cache_update_enabled():
      # Auto-update disabled: trust whatever is on disk.
      logger.debug("Auto cache update is disabled.")
      return full_path
    try:
      # Each path is checked against the server at most once per reset().
      if full_path not in self.uptodate_paths:
        logger.debug("Checking if update is available for "
                     "directory {0}".format(full_path))
        # Need to check for updates at server: compare the server-side hash
        # with the locally stored one.
        remote_url = self.build_download_url(server_url_prefix,
                                             subdirectory, self.HASH_SUM_FILE)
        memory_buffer = self.fetch_url(remote_url)
        remote_hash = memory_buffer.getvalue().strip()
        local_hash = self.read_hash_sum(full_path)
        if not local_hash or local_hash != remote_hash:
          # Hashes differ (or no local hash): download and unpack the archive.
          logger.debug("Updating directory {0}".format(full_path))
          download_url = self.build_download_url(server_url_prefix,
                                                 subdirectory, self.ARCHIVE_NAME)
          membuffer = self.fetch_url(download_url)
          # extract only when the archive is not zero sized
          if (membuffer.getvalue().strip()):
            self.invalidate_directory(full_path)
            self.unpack_archive(membuffer, full_path)
            self.write_hash_sum(full_path, remote_hash)
            logger.info("Updated directory {0}".format(full_path))
          else:
            logger.warn("Skipping empty archive: {0}. "
                        "Expected archive was not found. Cached copy will be used.".format(download_url))
            pass
        # Finally consider cache directory up-to-date
        self.uptodate_paths.append(full_path)
    except CachingException, e:
      if self.tolerate_download_failures:
        # ignore: fall back to whatever copy is already cached locally.
        logger.warn("Error occurred during cache update. "
                    "Error tolerate setting is set to true, so"
                    " ignoring this error and continuing with current cache. "
                    "Error details: {0}".format(str(e)))
      else:
        raise # we are not tolerant to exceptions, command execution will fail
    return full_path
def build_download_url(self, server_url_prefix,
directory, filename):
"""
Builds up a proper download url for file. Used for downloading files
from the server.
directory - relative path
filename - file inside directory we are trying to fetch
"""
return "{0}/{1}/{2}".format(server_url_prefix,
urllib.pathname2url(directory), filename)
def fetch_url(self, url):
"""
Fetches content on url to in-memory buffer and returns the resulting buffer.
May throw exceptions because of various reasons
"""
logger.debug("Trying to download {0}".format(url))
try:
memory_buffer = StringIO.StringIO()
proxy_handler = urllib2.ProxyHandler({})
opener = urllib2.build_opener(proxy_handler)
u = opener.open(url, timeout=self.SOCKET_TIMEOUT)
logger.debug("Connected with {0} with code {1}".format(u.geturl(),
u.getcode()))
buff = u.read(self.BLOCK_SIZE)
while buff:
memory_buffer.write(buff)
buff = u.read(self.BLOCK_SIZE)
if not buff:
break
return memory_buffer
except Exception, err:
raise CachingException("Can not download file from"
" url {0} : {1}".format(url, str(err)))
def read_hash_sum(self, directory):
"""
Tries to read a hash sum from previously generated file. Returns string
containing hash or None
"""
hash_file = os.path.join(directory, self.HASH_SUM_FILE)
try:
with open(hash_file) as fh:
return fh.readline().strip()
except:
return None # We don't care
  def write_hash_sum(self, directory, new_hash):
    """
    Writes the given hash sum to the marker file inside directory.
    Raises CachingException when the file can not be written.
    """
    hash_file = os.path.join(directory, self.HASH_SUM_FILE)
    try:
      with open(hash_file, "w") as fh:
        fh.write(new_hash)
      # World-writable so other local processes/users can refresh the marker.
      os.chmod(hash_file, 0o666)
    except Exception, err:
      raise CachingException("Can not write to file {0} : {1}".format(hash_file,
                                                                      str(err)))
def invalidate_directory(self, directory):
"""
Recursively removes directory content (if any). Also, creates
directory and any parent directories if needed. May throw exceptions
on permission problems
"""
logger.debug("Invalidating directory {0}".format(directory))
try:
if os.path.exists(directory):
if os.path.isfile(directory): # It would be a strange situation
os.unlink(directory)
elif os.path.isdir(directory):
shutil.rmtree(directory)
# create directory itself and any parent directories
os.makedirs(directory)
except Exception, err:
raise CachingException("Can not invalidate cache directory {0}: {1}",
directory, str(err))
  def unpack_archive(self, mem_buffer, target_directory):
    """
    Unpacks contents of in-memory buffer to file system.
    In-memory buffer is expected to contain a valid zip archive
    """
    try:
      zfile = zipfile.ZipFile(mem_buffer)
      for name in zfile.namelist():
        (dirname, filename) = os.path.split(name)
        concrete_dir=os.path.abspath(os.path.join(target_directory, dirname))
        if not os.path.isdir(concrete_dir):
          os.makedirs(concrete_dir)
        logger.debug("Unpacking file {0} to {1}".format(name, concrete_dir))
        # Entries with an empty filename are directory entries; they were
        # already created above and must not be extracted as files.
        if filename!='':
          zfile.extract(name, target_directory)
      # NOTE(review): entry names are used as-is, so an archive containing
      # '..' components could write outside target_directory (zip-slip).
      # The server is presumably trusted here — confirm that assumption.
    except Exception, err:
      raise CachingException("Can not unpack zip file to "
                             "directory {0} : {1}".format(
        target_directory, str(err)))
| apache-2.0 |
allotria/intellij-community | java/java-impl/src/com/intellij/codeInspection/java18api/OptionalGetWithoutIsPresentInspection.java | 8013 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInspection.java18api;
import com.intellij.codeInsight.PsiEquivalenceUtil;
import com.intellij.codeInspection.*;
import com.intellij.codeInspection.dataFlow.CommonDataflow;
import com.intellij.codeInspection.dataFlow.DfaNullability;
import com.intellij.codeInspection.dataFlow.SpecialField;
import com.intellij.codeInspection.dataFlow.types.DfReferenceType;
import com.intellij.codeInspection.dataFlow.types.DfType;
import com.intellij.codeInspection.dataFlow.types.DfTypes;
import com.intellij.codeInspection.util.LambdaGenerationUtil;
import com.intellij.codeInspection.util.OptionalUtil;
import com.intellij.java.JavaBundle;
import com.intellij.openapi.project.Project;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.IncorrectOperationException;
import com.siyeh.ig.psiutils.CommentTracker;
import com.siyeh.ig.psiutils.ExpressionUtils;
import com.siyeh.ig.psiutils.VariableNameGenerator;
import org.jetbrains.annotations.Nls;
import org.jetbrains.annotations.NotNull;
import java.util.Objects;
/**
 * Reports {@code Optional.get()}-style calls whose qualifier may be an empty
 * Optional according to dataflow analysis, and offers a {@code flatMap} fix
 * when the call is immediately chained into another Optional-returning call.
 */
public class OptionalGetWithoutIsPresentInspection extends AbstractBaseJavaLocalInspectionTool {
  @NotNull
  @Override
  public PsiElementVisitor buildVisitor(@NotNull ProblemsHolder holder, boolean isOnTheFly) {
    return new JavaElementVisitor() {
      @Override
      public void visitMethodCallExpression(PsiMethodCallExpression call) {
        PsiElement nameElement = call.getMethodExpression().getReferenceNameElement();
        if (nameElement == null) return;
        // Only Optional.get()/orElseThrow()-like calls are of interest.
        if (!OptionalUtil.OPTIONAL_GET.test(call)) return;
        PsiExpression qualifier = PsiUtil.skipParenthesizedExprDown(call.getMethodExpression().getQualifierExpression());
        if (qualifier == null) return;
        PsiClass optionalClass = PsiUtil.resolveClassInClassTypeOnly(qualifier.getType());
        if (optionalClass == null) return;
        // Ask the shared dataflow engine what is known about the qualifier.
        CommonDataflow.DataflowResult result = CommonDataflow.getDataflowResult(qualifier);
        if (result == null || !result.expressionWasAnalyzed(qualifier)) return;
        // OPTIONAL_VALUE models the Optional's contained value; its
        // nullability encodes whether the Optional may be empty.
        DfType dfType = SpecialField.OPTIONAL_VALUE.getFromQualifier(result.getDfType(qualifier));
        if (dfType != DfTypes.TOP && !(dfType instanceof DfReferenceType)) return;
        DfaNullability nullability = DfaNullability.fromDfType(dfType);
        // Warn only when emptiness was not ruled out, and there is no
        // isPresent()/isEmpty() check on the same qualifier nearby.
        if ((nullability == DfaNullability.UNKNOWN || nullability == DfaNullability.NULLABLE) &&
            !isPresentCallWithSameQualifierExists(qualifier)) {
          holder.registerProblem(nameElement,
                                 JavaBundle.message("inspection.optional.get.without.is.present.message", optionalClass.getName()),
                                 tryCreateFix(call));
        }
      }

      public boolean isPresentCallWithSameQualifierExists(PsiExpression qualifier) {
        // Conservatively skip the results of method calls if there's an isPresent() call with the same qualifier in the method
        if (qualifier instanceof PsiMethodCallExpression) {
          PsiElement context = PsiTreeUtil.getParentOfType(qualifier, PsiMember.class, PsiLambdaExpression.class);
          if (context != null) {
            // processElements returns false on early abort, i.e. when a
            // matching isPresent()/isEmpty() call was found.
            return !PsiTreeUtil.processElements(context, e -> {
              if (e == qualifier || !(e instanceof PsiMethodCallExpression)) return true;
              PsiMethodCallExpression call = (PsiMethodCallExpression)e;
              String name = call.getMethodExpression().getReferenceName();
              if ((!"isPresent".equals(name) && !"isEmpty".equals(name)) || !call.getArgumentList().isEmpty()) return true;
              PsiExpression isPresentQualifier = call.getMethodExpression().getQualifierExpression();
              return isPresentQualifier == null || !PsiEquivalenceUtil.areElementsEquivalent(qualifier, isPresentQualifier);
            });
          }
        }
        return false;
      }
    };
  }

  /**
   * Returns a "use flatMap" fix when {@code opt.get().bar()} produces another
   * Optional (so it can become {@code opt.flatMap(v -> v.bar())}); null otherwise.
   */
  private static LocalQuickFix tryCreateFix(PsiMethodCallExpression call) {
    PsiExpression qualifier = call.getMethodExpression().getQualifierExpression();
    if (qualifier == null) return null;
    PsiClass optionalClass = PsiUtil.resolveClassInClassTypeOnly(qualifier.getType());
    if (optionalClass == null || !CommonClassNames.JAVA_UTIL_OPTIONAL.equals(optionalClass.getQualifiedName())) return null;
    PsiType optionalElementType = OptionalUtil.getOptionalElementType(qualifier.getType());
    if (optionalElementType == null) return null;
    PsiMethodCallExpression nextCall = ExpressionUtils.getCallForQualifier(call);
    if (nextCall != null) {
      if (optionalClass.equals(PsiUtil.resolveClassInClassTypeOnly(nextCall.getType()))) {
        if (!LambdaGenerationUtil.canBeUncheckedLambda(nextCall)) {
          // Probably qualifier accesses non-final vars or throws exception: we will replace qualifier, so this is not a problem
          // Re-check lambda convertibility on a copy whose qualifier is
          // replaced by a plain typed expression.
          PsiMethodCallExpression copy = (PsiMethodCallExpression)nextCall.copy();
          PsiExpression copyQualifier = Objects.requireNonNull(copy.getMethodExpression().getQualifierExpression());
          try {
            copyQualifier.replace(JavaPsiFacade.getElementFactory(call.getProject())
                                    .createExpressionFromText("((" + optionalElementType.getCanonicalText() + ")null)",
                                                              copyQualifier));
          }
          catch (IncorrectOperationException e) {
            return null;
          }
          if (!LambdaGenerationUtil.canBeUncheckedLambda(copy)) {
            return null;
          }
        }
        return new UseFlatMapFix();
      }
    }
    return null;
  }

  /** Converts {@code opt.get().bar(...)} into {@code opt.flatMap(v -> v.bar(...))}. */
  private static class UseFlatMapFix implements LocalQuickFix {
    @Nls(capitalization = Nls.Capitalization.Sentence)
    @NotNull
    @Override
    public String getFamilyName() {
      return JavaBundle.message("quickfix.family.use.flatmap");
    }

    @Override
    public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
      PsiMethodCallExpression call = PsiTreeUtil.getParentOfType(descriptor.getStartElement(), PsiMethodCallExpression.class);
      if (call == null) return;
      PsiExpression qualifier = call.getMethodExpression().getQualifierExpression();
      if (qualifier == null) return;
      PsiType elementType = OptionalUtil.getOptionalElementType(qualifier.getType());
      PsiMethodCallExpression nextCall = ExpressionUtils.getCallForQualifier(call);
      if (nextCall == null) return;
      // Pick a fresh lambda parameter name for the Optional's element.
      String name = new VariableNameGenerator(qualifier, VariableKind.PARAMETER).byExpression(qualifier)
        .byType(elementType).byName("value").generate(true);
      CommentTracker ct = new CommentTracker();
      PsiReferenceExpression methodExpression = nextCall.getMethodExpression();
      // Keep comments attached to the untouched parts of the chained call.
      ct.markRangeUnchanged(Objects.requireNonNull(methodExpression.getQualifierExpression()).getNextSibling(),
                            methodExpression.getLastChild());
      ct.markRangeUnchanged(methodExpression.getNextSibling(), nextCall.getLastChild());
      // Build 'name.bar(...)' by swapping the qualifier in a copy of nextCall.
      PsiMethodCallExpression newNextCall = (PsiMethodCallExpression)nextCall.copy();
      PsiExpression newQualifier = Objects.requireNonNull(newNextCall.getMethodExpression().getQualifierExpression());
      newQualifier.replace(JavaPsiFacade.getElementFactory(project).createExpressionFromText(name, newNextCall));
      String lambda = name + "->" + newNextCall.getText();
      String replacement = ct.text(qualifier) + ".flatMap(" + lambda + ")";
      PsiMethodCallExpression result = (PsiMethodCallExpression)ct.replaceAndRestoreComments(nextCall, replacement);
      // Simplify 'v -> v.bar()' to a method reference where possible.
      LambdaCanBeMethodReferenceInspection.replaceLambdaWithMethodReference(
        (PsiLambdaExpression)result.getArgumentList().getExpressions()[0]);
    }
  }
}
| apache-2.0 |
sparkslabs/kamaelia_ | Sketches/MPS/Experiments/AlphaTests.py | 3281 | #!/usr/bin/python
#
# USAGE OF THIS IS DEPRECATED SINCE THIS CAN NOW BE USED IN THE MAIN CODE
# TREE
#
#
import pygame
import pygame.font
import time
from pygame.locals import *
from Axon.Scheduler import scheduler
from Axon.Component import component
from Kamaelia.Util.PipelineComponent import pipeline
from SubtitleColourDecoderComponent import Colour
class datasource(component):
    """Emits each whitespace-separated word of the module-level 'text'
    on its "outbox", one word per scheduler step."""
    def main(self):
        # NOTE(review): 'text' is not defined anywhere in this file as shown;
        # iterating it raises NameError the first time main() is advanced.
        # Presumably a module-level 'text' string was meant to be defined —
        # confirm against the sketch this file was copied from.
        for x in text.split():
            self.send(x,"outbox")
            yield 1
class Ticker(component):
    """Draws a bordered ticker area on a fullscreen pygame display and blits
    a semi-transparent text overlay that slides diagonally until it reaches
    a fixed 20-pixel offset. Press 'q' to quit."""

    def __init__(self, **argd):
        super(Ticker,self).__init__()
        #
        # Bunch of initial configs (all overridable via keyword arguments).
        #
        self.screen_width = argd.get("screen_width",1024)
        self.screen_height = argd.get("screen_height",768)
        self.text_height = argd.get("text_height",39)
        self.line_spacing = argd.get("line_spacing", self.text_height/7)
        self.background_colour = argd.get("background_colour", (48,48,128))
        self.ticker_background_colour = argd.get("ticker_background_colour", (128,48,128))
        self.text_colour = argd.get("text_colour", (232, 232, 48))
        self.ticker_outline_colour = argd.get("ticker_outline_colour", (128,232,128))
        self.ticker_outline_width = argd.get("ticker_outline_width", 1)
        self.render_area = pygame.Rect((argd.get("render_left",50),
                                        argd.get("render_top",200),
                                        argd.get("render_right",700),
                                        argd.get("render_bottom",300)))

    def main(self):
        """Generator main loop: set up the display, then blit/pump events
        once per scheduler step."""
        pygame.init()
        display = pygame.display.set_mode((self.screen_width, self.screen_height), FULLSCREEN )
        my_font = pygame.font.Font(None, self.text_height)
        display.fill(self.background_colour)
        # Outline of the ticker area.
        pygame.draw.rect(display,
                         self.ticker_outline_colour,
                         ( self.render_area.left-self.ticker_outline_width,
                           self.render_area.top-self.ticker_outline_width,
                           self.render_area.width+self.ticker_outline_width,
                           self.render_area.height+self.ticker_outline_width),
                         self.ticker_outline_width)
        # Filled interior of the ticker area.
        pygame.draw.rect(display,
                         self.ticker_background_colour,
                         (self.render_area.left, self.render_area.top,
                          self.render_area.width-1,self.render_area.height-1),
                         0)
        pygame.display.update()

        overlay = pygame.Surface((500,300))
        # overlay.set_colorkey((255,255,255))
        word_render= my_font.render("Testing, testing", 1, (0,0,0))
        overlay.fill((255,255,255))
        overlay.blit(word_render, (10,10))
        # NOTE(review): convert_alpha() switches the surface to per-pixel
        # alpha, which makes the preceding set_alpha(10) ineffective on most
        # pygame versions — confirm the intended transparency effect.
        overlay.set_alpha(10)
        overlay = overlay.convert_alpha()
        C = 0
        while 1:
            # Slide the overlay one pixel diagonally, clamped at offset 20.
            C = C+1
            if C == 21: C=20
            display.blit(overlay, (50+C,50+C))
            for event in pygame.event.get():
                if event.type == KEYDOWN:
                    if event.key == 113:  # 'q'
                        # Bug fix: 'raise "Quitting Program"' raised a string,
                        # which is illegal since Python 2.6; SystemExit keeps
                        # the same "abort the program" intent.
                        raise SystemExit("Quitting Program")
            pygame.display.update()
            yield 1
# Run the ticker standalone (Axon component .run() drives the scheduler).
if __name__ == "__main__":
    Ticker().run()
| apache-2.0 |
manstis/drools | drools-test-coverage/test-compiler-integration/src/test/java/org/drools/compiler/integrationtests/StreamsTest.java | 32008 | /*
* Copyright 2018 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.compiler.integrationtests;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.assertj.core.api.Assertions;
import org.drools.core.base.ClassObjectType;
import org.drools.core.common.InternalFactHandle;
import org.drools.kiesession.rulebase.InternalKnowledgeBase;
import org.drools.core.reteoo.ObjectTypeNode;
import org.drools.core.rule.EntryPointId;
import org.drools.core.spi.ObjectType;
import org.drools.core.time.impl.PseudoClockScheduler;
import org.drools.testcoverage.common.model.Cheese;
import org.drools.testcoverage.common.model.StockTick;
import org.drools.testcoverage.common.util.KieBaseTestConfiguration;
import org.drools.testcoverage.common.util.KieBaseUtil;
import org.drools.testcoverage.common.util.KieSessionTestConfiguration;
import org.drools.testcoverage.common.util.KieUtil;
import org.drools.testcoverage.common.util.TestParametersUtil;
import org.hamcrest.CoreMatchers;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.kie.api.KieBase;
import org.kie.api.builder.KieBuilder;
import org.kie.api.definition.type.FactType;
import org.kie.api.event.rule.AfterMatchFiredEvent;
import org.kie.api.event.rule.AgendaEventListener;
import org.kie.api.event.rule.MatchCreatedEvent;
import org.kie.api.event.rule.RuleRuntimeEventListener;
import org.kie.api.runtime.KieSession;
import org.kie.api.runtime.rule.EntryPoint;
import org.mockito.ArgumentCaptor;
import static org.drools.core.rule.TypeDeclaration.NEVER_EXPIRES;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertSame;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
/**
* Tests related to the stream support features
*/
@RunWith(Parameterized.class)
public class StreamsTest {
private final KieBaseTestConfiguration kieBaseTestConfiguration;
public StreamsTest(final KieBaseTestConfiguration kieBaseTestConfiguration) {
this.kieBaseTestConfiguration = kieBaseTestConfiguration;
}
/**
 * Supplies the KieBase configurations this parameterized test class runs
 * against. The boolean flag is forwarded to TestParametersUtil; its exact
 * semantics are defined there (presumably selecting stream-mode variants —
 * confirm against TestParametersUtil).
 */
@Parameterized.Parameters(name = "KieBase type={0}")
public static Collection<Object[]> getParameters() {
    return TestParametersUtil.getKieBaseStreamConfigurations(true);
}
@Test(timeout = 10000)
public void testEventAssertion() {
    // Rule matches ACME ticks with price > 10, but only on the named
    // "StockStream" entry point; matches are collected in the "results" global.
    final String drl = "package org.drools.compiler\n" +
            "\n" +
            "import " + StockTick.class.getCanonicalName() + ";\n" +
            "\n" +
            "global java.util.List results;\n" +
            "\n" +
            "declare StockTick\n" +
            "    @role( event )\n" +
            "end\n" +
            "\n" +
            "rule \"Test entry point\"\n" +
            "when\n" +
            "    $st : StockTick( company == \"ACME\", price > 10 ) from entry-point StockStream\n" +
            "then\n" +
            "    results.add( $st );\n" +
            "end";
    final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("stream-test", kieBaseTestConfiguration, drl);
    // Pseudo-clock session keeps event timing under test control.
    final KieSession session = kbase.newKieSession(KieSessionTestConfiguration.STATEFUL_PSEUDO.getKieSessionConfiguration(), null);
    try {
        final List results = new ArrayList();
        session.setGlobal("results", results);

        // Inserted into the DEFAULT entry point: the rule listens on
        // "StockStream", so none of these can match.
        final StockTick tick1 = new StockTick(1, "DROO", 50, System.currentTimeMillis());
        final StockTick tick2 = new StockTick(2, "ACME", 10, System.currentTimeMillis());
        final StockTick tick3 = new StockTick(3, "ACME", 10, System.currentTimeMillis());
        final StockTick tick4 = new StockTick(4, "DROO", 50, System.currentTimeMillis());
        final InternalFactHandle handle1 = (InternalFactHandle) session.insert(tick1);
        final InternalFactHandle handle2 = (InternalFactHandle) session.insert(tick2);
        final InternalFactHandle handle3 = (InternalFactHandle) session.insert(tick3);
        final InternalFactHandle handle4 = (InternalFactHandle) session.insert(tick4);
        assertNotNull(handle1);
        assertNotNull(handle2);
        assertNotNull(handle3);
        assertNotNull(handle4);
        // @role( event ) applies regardless of which entry point received the fact.
        assertTrue(handle1.isEvent());
        assertTrue(handle2.isEvent());
        assertTrue(handle3.isEvent());
        assertTrue(handle4.isEvent());

        session.fireAllRules();
        assertEquals(0, results.size());

        // Inserted into "StockStream": only tick7 (ACME, price 15 > 10)
        // matches; tick6 (price 10) fails the strict price constraint.
        final StockTick tick5 = new StockTick(5, "DROO", 50, System.currentTimeMillis());
        final StockTick tick6 = new StockTick(6, "ACME", 10, System.currentTimeMillis());
        final StockTick tick7 = new StockTick(7, "ACME", 15, System.currentTimeMillis());
        final StockTick tick8 = new StockTick(8, "DROO", 50, System.currentTimeMillis());
        final EntryPoint entry = session.getEntryPoint("StockStream");
        final InternalFactHandle handle5 = (InternalFactHandle) entry.insert(tick5);
        final InternalFactHandle handle6 = (InternalFactHandle) entry.insert(tick6);
        final InternalFactHandle handle7 = (InternalFactHandle) entry.insert(tick7);
        final InternalFactHandle handle8 = (InternalFactHandle) entry.insert(tick8);
        assertNotNull(handle5);
        assertNotNull(handle6);
        assertNotNull(handle7);
        assertNotNull(handle8);
        assertTrue(handle5.isEvent());
        assertTrue(handle6.isEvent());
        assertTrue(handle7.isEvent());
        assertTrue(handle8.isEvent());

        session.fireAllRules();
        assertEquals(1, results.size());
        assertSame(tick7, results.get(0));
    } finally {
        session.dispose();
    }
}
/**
 * Ticks inserted through the "stream1" entry point declared in the
 * classpath DRL resource; only the ACME tick priced above the rule's
 * threshold ends up in the "results" global.
 */
@Test
public void testEntryPointReference() {
    final KieBase kbase = KieBaseUtil.getKieBaseFromClasspathResources("stream-test", kieBaseTestConfiguration,
            "org/drools/compiler/integrationtests/test_EntryPointReference.drl");
    final KieSession ksession = kbase.newKieSession();
    try {
        final List<StockTick> matchedTicks = new ArrayList<>();
        ksession.setGlobal("results", matchedTicks);

        final StockTick drooTickA = new StockTick(5, "DROO", 50, System.currentTimeMillis());
        final StockTick acmeTickA = new StockTick(6, "ACME", 10, System.currentTimeMillis());
        final StockTick acmeTickB = new StockTick(7, "ACME", 30, System.currentTimeMillis());
        final StockTick drooTickB = new StockTick(8, "DROO", 50, System.currentTimeMillis());

        final EntryPoint stream1 = ksession.getEntryPoint("stream1");
        for (final StockTick tick : new StockTick[]{drooTickA, acmeTickA, acmeTickB, drooTickB}) {
            final InternalFactHandle handle = (InternalFactHandle) stream1.insert(tick);
            assertNotNull(handle);
            assertTrue(handle.isEvent());
        }

        ksession.fireAllRules();

        // Only tick 7 (ACME, price 30) satisfies the rule constraints.
        assertEquals(1, matchedTicks.size());
        assertSame(acmeTickB, matchedTicks.get(0));
    } finally {
        ksession.dispose();
    }
}
// NOTE(review): method name has a typo ("Retrac" -> "Retract"); left as-is
// because test names may be referenced by CI include/exclude filters.
@Test(timeout = 10000)
public void testModifyRetracOnEntryPointFacts() {
    // Rule 1 records the price of ACME ticks in (10, 100) and bumps the
    // price to 110; rule 2 then records the bumped price and deletes the fact.
    final String drl = "package org.drools.compiler\n" +
            "import " + StockTick.class.getCanonicalName() + ";\n" +
            "global java.util.List results;\n" +
            "\n" +
            "declare StockTick\n" +
            "    @role( event )\n" +
            "end\n" +
            "\n" +
            "rule \"Test entry point 1\"\n" +
            "when\n" +
            "    $st : StockTick( company == \"ACME\", price > 10 && < 100 ) from entry-point \"stream1\"\n" +
            "then\n" +
            "    results.add( Double.valueOf( $st.getPrice() ) );\n" +
            "    modify( $st ) { setPrice( 110 ) }\n" +
            "end\n" +
            "\n" +
            "rule \"Test entry point 2\"\n" +
            "when\n" +
            "    $st : StockTick( company == \"ACME\", price > 100 ) from entry-point \"stream1\"\n" +
            "then\n" +
            "    results.add( Double.valueOf( $st.getPrice() ) );\n" +
            "    delete( $st );\n" +
            "end";
    final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("stream-test", kieBaseTestConfiguration, drl);
    final KieSession session = kbase.newKieSession();
    try {
        // The DRL adds Double values reflectively, so the wildcard bound
        // is never violated from Java code.
        final List<? extends Number> results = new ArrayList<>();
        session.setGlobal("results", results);
        final StockTick tick5 = new StockTick(5, "DROO", 50, System.currentTimeMillis());
        final StockTick tick6 = new StockTick(6, "ACME", 10, System.currentTimeMillis());
        final StockTick tick7 = new StockTick(7, "ACME", 30, System.currentTimeMillis());
        final StockTick tick8 = new StockTick(8, "DROO", 50, System.currentTimeMillis());
        final EntryPoint entry = session.getEntryPoint("stream1");
        final InternalFactHandle handle5 = (InternalFactHandle) entry.insert(tick5);
        final InternalFactHandle handle6 = (InternalFactHandle) entry.insert(tick6);
        final InternalFactHandle handle7 = (InternalFactHandle) entry.insert(tick7);
        final InternalFactHandle handle8 = (InternalFactHandle) entry.insert(tick8);
        assertNotNull(handle5);
        assertNotNull(handle6);
        assertNotNull(handle7);
        assertNotNull(handle8);
        assertTrue(handle5.isEvent());
        assertTrue(handle6.isEvent());
        assertTrue(handle7.isEvent());
        assertTrue(handle8.isEvent());
        session.fireAllRules();
        // Only tick7 (ACME, 30) matches: rule 1 records 30 and sets price to
        // 110, then rule 2 records 110 and deletes it.
        assertEquals(2, results.size());
        assertEquals(30, results.get(0).intValue());
        assertEquals(110, results.get(1).intValue());
        // the 3 non-matched facts continue to exist in the entry point
        assertEquals(3, entry.getObjects().size());
        // but no fact was inserted into the main session
        assertEquals(0, session.getObjects().size());
    } finally {
        session.dispose();
    }
}
@Test
public void testModifyOnEntryPointFacts() {
    // Three structurally identical rules, each bound to its own entry point.
    // Descending salience (100/90/80) pins the firing order to R1, R2, R3.
    // Each consequence modifies the price to 50, so the price == 10
    // constraint stops matching and no rule can refire.
    final String drl = "package org.drools.compiler\n" +
            "import " + StockTick.class.getCanonicalName() + ";\n" +
            "declare StockTick\n" +
            "    @role ( event )\n" +
            "end\n" +
            "rule R1 salience 100\n" +
            "    when\n" +
            "        $s1 : StockTick( company == 'RHT', price == 10 ) from entry-point ep1\n" +
            "    then\n" +
            "        StockTick s = $s1;\n" +
            "        modify( s ) { setPrice( 50 ) };\n" +
            "end\n" +
            "rule R2 salience 90\n" +
            "    when\n" +
            "        $s1 : StockTick( company == 'RHT', price == 10 ) from entry-point ep2\n" +
            "    then\n" +
            "        StockTick s = $s1;\n" +
            "        modify( s ) { setPrice( 50 ) };\n" +
            "end\n" +
            "rule R3 salience 80\n" +
            "    when\n" +
            "        $s1 : StockTick( company == 'RHT', price == 10 ) from entry-point ep3\n" +
            "    then\n" +
            "        StockTick s = $s1;\n" +
            "        modify( s ) { setPrice( 50 ) };\n" +
            "end\n";
    final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("stream-test", kieBaseTestConfiguration, drl);
    final KieSession ksession = kbase.newKieSession();
    try {
        final org.kie.api.event.rule.AgendaEventListener ael = mock(org.kie.api.event.rule.AgendaEventListener.class);
        ksession.addEventListener(ael);
        final EntryPoint ep1 = ksession.getEntryPoint("ep1");
        final EntryPoint ep2 = ksession.getEntryPoint("ep2");
        final EntryPoint ep3 = ksession.getEntryPoint("ep3");
        ep1.insert(new StockTick(1, "RHT", 10, 1000));
        ep2.insert(new StockTick(1, "RHT", 10, 1000));
        ep3.insert(new StockTick(1, "RHT", 10, 1000));
        final int rulesFired = ksession.fireAllRules();
        assertEquals(3, rulesFired);
        // Verify both the count and the salience-driven firing order.
        final ArgumentCaptor<org.kie.api.event.rule.AfterMatchFiredEvent> captor = ArgumentCaptor.forClass(org.kie.api.event.rule.AfterMatchFiredEvent.class);
        verify(ael, times(3)).afterMatchFired(captor.capture());
        final List<org.kie.api.event.rule.AfterMatchFiredEvent> aafe = captor.getAllValues();
        assertThat(aafe.get(0).getMatch().getRule().getName(), is("R1"));
        assertThat(aafe.get(1).getMatch().getRule().getName(), is("R2"));
        assertThat(aafe.get(2).getMatch().getRule().getName(), is("R3"));
    } finally {
        ksession.dispose();
    }
}
/**
 * An accumulate CE in an MVEL-dialect rule can read facts from an entry
 * point; a single tick inserted into ep1 yields exactly one firing of R1.
 */
@Test(timeout = 10000)
public void testEntryPointWithAccumulateAndMVEL() {
    final String drl = "package org.drools.compiler\n" +
            "import " + StockTick.class.getCanonicalName() + ";\n" +
            "rule R1 dialect 'mvel'\n" +
            " when\n" +
            "    $n : Number() from accumulate( \n" +
            "        StockTick() from entry-point ep1,\n" +
            "        count(1))" +
            " then\n" +
            "end\n";
    final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("stream-test", kieBaseTestConfiguration, drl);
    final KieSession ksession = kbase.newKieSession();
    try {
        final org.kie.api.event.rule.AgendaEventListener agendaListener =
                mock(org.kie.api.event.rule.AgendaEventListener.class);
        ksession.addEventListener(agendaListener);

        ksession.getEntryPoint("ep1").insert(new StockTick(1, "RHT", 10, 1000));
        assertEquals(1, ksession.fireAllRules());

        final ArgumentCaptor<org.kie.api.event.rule.AfterMatchFiredEvent> firedCaptor =
                ArgumentCaptor.forClass(org.kie.api.event.rule.AfterMatchFiredEvent.class);
        verify(agendaListener, times(1)).afterMatchFired(firedCaptor.capture());
        assertThat(firedCaptor.getValue().getMatch().getRule().getName(), is("R1"));
    } finally {
        ksession.dispose();
    }
}
/**
 * The session exposes the default entry point plus every entry point
 * referenced by the rules in the classpath DRL resource.
 */
@Test(timeout = 10000)
public void testGetEntryPointList() {
    final KieBase kbase = KieBaseUtil.getKieBaseFromClasspathResources("stream-test", kieBaseTestConfiguration,
            "org/drools/compiler/integrationtests/test_EntryPointReference.drl");
    final KieSession ksession = kbase.newKieSession();
    try {
        final EntryPoint[] expected = {
                ksession.getEntryPoint(EntryPointId.DEFAULT.getEntryPointId()),
                ksession.getEntryPoint("stream1"),
                ksession.getEntryPoint("stream2"),
                ksession.getEntryPoint("stream3")
        };
        final Collection<? extends EntryPoint> entryPoints = ksession.getEntryPoints();
        assertEquals(4, entryPoints.size());
        for (final EntryPoint entryPoint : expected) {
            assertTrue(entryPoints.contains(entryPoint));
        }
    } finally {
        ksession.dispose();
    }
}
// NOTE(review): despite the method name, the final assertion expects the
// events to be GONE after the pseudo clock passes @expires(1s) — confirm
// whether the name or the assertion reflects the intended contract.
@Test(timeout = 10000)
public void testEventDoesNotExpireIfNotInPattern() {
    // StockTick is declared as an event with a 1s expiration, but rule X
    // uses eval(true) so no pattern ever constrains StockTick.
    final String drl = "package org.drools.compiler\n" +
            "import " + StockTick.class.getCanonicalName() + ";\n" +
            "declare StockTick\n" +
            "    @role( event )\n" +
            "    @expires( 1s )\n" +
            "end\n" +
            "\n" +
            "rule X\n" +
            "when\n" +
            "    eval( true )\n" +
            "then \n" +
            "    // no-op\n" +
            "end";
    final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("stream-test", kieBaseTestConfiguration, drl);
    final KieSession ksession = kbase.newKieSession(KieSessionTestConfiguration.STATEFUL_PSEUDO.getKieSessionConfiguration(), null);
    try {
        final RuleRuntimeEventListener wml = mock(RuleRuntimeEventListener.class);
        ksession.addEventListener(wml);
        final PseudoClockScheduler clock = ksession.getSessionClock();
        final StockTick st1 = new StockTick(1, "RHT", 100, 1000);
        final StockTick st2 = new StockTick(2, "RHT", 100, 1000);
        ksession.insert(st1);
        ksession.insert(st2);
        verify(wml, times(2)).objectInserted(any(org.kie.api.event.rule.ObjectInsertedEvent.class));
        assertThat(ksession.getObjects().size(), equalTo(2));
        assertThat((Collection<Object>) ksession.getObjects(), CoreMatchers.hasItems(st1, st2));
        ksession.fireAllRules();
        // Advance past the 1s expiration window.
        clock.advanceTime(3, TimeUnit.SECONDS);
        ksession.fireAllRules();
        assertThat(ksession.getObjects().size(), equalTo(0));
    } finally {
        ksession.dispose();
    }
}
@Test(timeout = 10000)
public void testEventExpirationSetToZero() {
    // @expires( 0 ): the events must still be matchable (both firings
    // happen), and must be removed once the pseudo clock advances.
    final String drl = "package org.drools.compiler\n" +
            "import " + StockTick.class.getCanonicalName() + ";\n" +
            "declare StockTick\n" +
            "    @role( event )\n" +
            "    @expires( 0 )\n" +
            "end\n" +
            "\n" +
            "rule X\n" +
            "when\n" +
            "    StockTick()\n" +
            "then \n" +
            "    // no-op\n" +
            "end";
    final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("stream-test", kieBaseTestConfiguration, drl);
    final KieSession ksession = kbase.newKieSession(KieSessionTestConfiguration.STATEFUL_PSEUDO.getKieSessionConfiguration(), null);
    try {
        final RuleRuntimeEventListener wml = mock(RuleRuntimeEventListener.class);
        ksession.addEventListener(wml);
        final AgendaEventListener ael = mock(AgendaEventListener.class);
        ksession.addEventListener(ael);
        final PseudoClockScheduler clock = ksession.getSessionClock();
        final StockTick st1 = new StockTick(1, "RHT", 100, 1000);
        final StockTick st2 = new StockTick(2, "RHT", 100, 1000);
        ksession.insert(st1);
        ksession.insert(st2);
        // Both events fire rule X before expiring.
        assertThat(ksession.fireAllRules(), equalTo(2));
        verify(wml, times(2)).objectInserted(any(org.kie.api.event.rule.ObjectInsertedEvent.class));
        verify(ael, times(2)).matchCreated(any(MatchCreatedEvent.class));
        assertThat(ksession.getObjects().size(), equalTo(2));
        assertThat((Collection<Object>) ksession.getObjects(), CoreMatchers.hasItems(st1, st2));
        clock.advanceTime(3, TimeUnit.SECONDS);
        ksession.fireAllRules();
        // After the clock moves on, both events have been expired.
        assertThat(ksession.getObjects().size(), equalTo(0));
    } finally {
        ksession.dispose();
    }
}
/**
 * An event type declared in two packages without an {@code @expires}
 * attribute must keep the default NEVER_EXPIRES expiration offset on its
 * object type node.
 */
@Test(timeout = 10000)
public void testEventExpirationValue() {
    final String drl1 = "package org.drools.pkg1\n" +
            "import " + StockTick.class.getCanonicalName() + ";\n" +
            "declare StockTick\n" +
            "    @role(event)\n" +
            "end\n" +
            "rule X\n" +
            "when\n" +
            "    StockTick()\n" +
            "then\n" +
            "end\n";
    final String drl2 = "package org.drools.pkg2\n" +
            "import " + StockTick.class.getCanonicalName() + ";\n" +
            "declare StockTick\n" +
            "    @role(event)\n" +
            "end\n" +
            "rule X\n" +
            "when\n" +
            "    StockTick()\n" +
            "then\n" +
            "end\n";
    final InternalKnowledgeBase kbase = (InternalKnowledgeBase) KieBaseUtil.getKieBaseFromKieModuleFromDrl(
            "stream-test", kieBaseTestConfiguration, drl1, drl2);
    final List<ObjectTypeNode> otns = kbase.getRete().getObjectTypeNodes();
    final ObjectType stot = new ClassObjectType(StockTick.class);
    boolean stockTickNodeFound = false;
    for (final ObjectTypeNode otn : otns) {
        if (otn.getObjectType().isAssignableFrom(stot)) {
            stockTickNodeFound = true;
            assertEquals(NEVER_EXPIRES, otn.getExpirationOffset());
        }
    }
    // Guard against a vacuous pass: previously, if no node matched StockTick
    // the loop body never ran and the test passed without checking anything.
    assertTrue(stockTickNodeFound);
}
/**
 * Entry points may be declared explicitly ("UnusedEntryPoint") or implied
 * by rule usage ("UsedEntryPoint"); the session must expose both.
 */
@Test(timeout = 10000)
public void testDeclaredEntryPoint() {
    final String drl = "package org.jboss.qa.brms.declaredep\n" +
            "declare entry-point UnusedEntryPoint\n" +
            "end\n" +
            "rule HelloWorld\n" +
            "    when\n" +
            "        String( ) from entry-point UsedEntryPoint\n" +
            "    then\n" +
            "        // consequences\n" +
            "end\n";
    final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("stream-test", kieBaseTestConfiguration, drl);
    final KieSession session = kbase.newKieSession();
    try {
        for (final String entryPointName : new String[]{"UsedEntryPoint", "UnusedEntryPoint"}) {
            assertNotNull(session.getEntryPoint(entryPointName));
        }
    } finally {
        session.dispose();
    }
}
@Test
public void testWindowDeclaration() {
    // Declared window: the last 5 RHT ticks arriving on entry point "ticks".
    // Rule X accumulates sum/count over window ticks with price > 20.
    final String drl = "package org.drools.compiler\n" +
            "import " + StockTick.class.getCanonicalName() + ";\n" +
            "declare StockTick\n" +
            "    @role(event)\n" +
            "end\n" +
            "declare window RedHatTicks\n" +
            "    StockTick( company == 'RHT' )\n" +
            "        over window:length(5)\n" +
            "        from entry-point ticks\n" +
            "end\n" +
            "rule X\n" +
            "when\n" +
            "    accumulate( $s : StockTick( price > 20 ) from window RedHatTicks,\n" +
            "                $sum : sum( $s.getPrice() ),\n" +
            "                $cnt : count( $s ) )\n" +
            "then\n" +
            "end\n";
    final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("stream-test", kieBaseTestConfiguration, drl);
    final KieSession ksession = kbase.newKieSession();
    try {
        final AgendaEventListener ael = mock(AgendaEventListener.class);
        ksession.addEventListener(ael);
        final EntryPoint ep = ksession.getEntryPoint("ticks");
        ep.insert(new StockTick(1, "ACME", 20, 1000)); // not in the window
        ep.insert(new StockTick(2, "RHT", 20, 1000)); // not > 20
        ep.insert(new StockTick(3, "RHT", 30, 1000));
        ep.insert(new StockTick(4, "ACME", 30, 1000)); // not in the window
        ep.insert(new StockTick(5, "RHT", 25, 1000));
        ep.insert(new StockTick(6, "ACME", 10, 1000)); // not in the window
        ep.insert(new StockTick(7, "RHT", 10, 1000)); // not > 20
        ep.insert(new StockTick(8, "RHT", 40, 1000));
        ksession.fireAllRules();
        final ArgumentCaptor<org.kie.api.event.rule.AfterMatchFiredEvent> captor = ArgumentCaptor.forClass(org.kie.api.event.rule.AfterMatchFiredEvent.class);
        verify(ael, times(1)).afterMatchFired(captor.capture());
        final AfterMatchFiredEvent aafe = captor.getValue();
        // Qualifying RHT ticks: 30 + 25 + 40 = 95, three of them.
        assertThat(((Number) aafe.getMatch().getDeclarationValue("$sum")).intValue(), is(95));
        assertThat(((Number) aafe.getMatch().getDeclarationValue("$cnt")).intValue(), is(3));
    } finally {
        ksession.dispose();
    }
}
@Test(timeout = 10000)
public void testWindowDeclaration2() {
    // A declared window over a built-in type (Double) fed from an entry
    // point; the rule sums all Doubles currently in the window.
    final String drl = "package org.drools.compiler\n" +
            "declare Double\n" +
            "    @role(event)\n" +
            "end\n" +
            "declare window Streem\n" +
            "    Double() over window:length( 10 ) from entry-point data\n" +
            "end\n" +
            "rule \"See\"\n" +
            "when\n" +
            "    $sum : Double() from accumulate (\n" +
            "        $d: Double()\n" +
            "            from window Streem,\n" +
            "        sum( $d )\n" +
            "    )\n" +
            "then\n" +
            "end";
    final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("stream-test", kieBaseTestConfiguration, drl);
    final KieSession ksession = kbase.newKieSession();
    try {
        final AgendaEventListener ael = mock(AgendaEventListener.class);
        ksession.addEventListener(ael);
        final EntryPoint ep = ksession.getEntryPoint("data");
        ep.insert(10d);
        ep.insert(11d);
        ep.insert(12d);
        ksession.fireAllRules();
        final ArgumentCaptor<AfterMatchFiredEvent> captor = ArgumentCaptor.forClass(AfterMatchFiredEvent.class);
        verify(ael, times(1)).afterMatchFired(captor.capture());
        final AfterMatchFiredEvent aafe = captor.getValue();
        // 10 + 11 + 12 = 33
        assertThat(((Number) aafe.getMatch().getDeclarationValue("$sum")).intValue(), is(33));
    } finally {
        ksession.dispose();
    }
}
@Test(timeout = 10000)
public void testMultipleWindows() {
    // Two length-1 sliding windows in the same rule, each keeping only the
    // most recent tick of its company.
    final String drl = "package org.drools.compiler\n" +
            "import " + StockTick.class.getCanonicalName() + ";\n" +
            "declare StockTick\n" +
            "    @role(event)\n" +
            "end\n" +
            "rule FaultsCoincide\n" +
            "when\n" +
            "    f1 : StockTick( company == \"RHT\" ) over window:length( 1 )\n" +
            "    f2 : StockTick( company == \"JBW\" ) over window:length( 1 )\n" +
            "then\n" +
            "end";
    final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("stream-test", kieBaseTestConfiguration, drl);
    final KieSession ksession = kbase.newKieSession();
    try {
        final AgendaEventListener ael = mock(AgendaEventListener.class);
        ksession.addEventListener(ael);
        final StockTick st1 = new StockTick(1, "RHT", 10, 1000);
        ksession.insert(st1);
        final StockTick st2 = new StockTick(2, "JBW", 10, 1000);
        ksession.insert(st2);
        ksession.fireAllRules();
        final ArgumentCaptor<org.kie.api.event.rule.AfterMatchFiredEvent> captor = ArgumentCaptor.forClass(org.kie.api.event.rule.AfterMatchFiredEvent.class);
        verify(ael, times(1)).afterMatchFired(captor.capture());
        final AfterMatchFiredEvent aafe = captor.getValue();
        // Each declaration is bound to the single tick in its window.
        assertThat(aafe.getMatch().getDeclarationValue("f1"), CoreMatchers.is(st1));
        assertThat(aafe.getMatch().getDeclarationValue("f2"), CoreMatchers.is(st2));
    } finally {
        ksession.dispose();
    }
}
/**
 * Declaring a window over a type that is not declared as an event must be
 * rejected at build time with at least one compilation message.
 */
@Test
public void testWindowWithEntryPointCompilationError() {
    final String drl = "import " + Cheese.class.getCanonicalName() + ";\n" +
            "declare window X\n" +
            "    Cheese( type == \"gorgonzola\" ) over window:time(1m) from entry-point Z\n" +
            "end\n" +
            "rule R when\n" +
            "    $c : Cheese( price < 100 ) from window X\n" +
            "then\n" +
            "    System.out.println($c);\n" +
            "end\n";
    final KieBuilder builder = KieUtil.getKieBuilderFromDrls(kieBaseTestConfiguration, false, drl);
    final Object messages = builder.getResults().getMessages();
    Assertions.assertThat((Iterable<?>) messages)
            .withFailMessage("Should have raised a compilation error as Cheese is not declared as an event.")
            .isNotEmpty();
}
@Test(timeout = 10000)
public void testAtomicActivationFiring() throws Exception {
    // JBRULES-3383 regression: a timer rule retracts the monitored event and
    // its monitor; once no Event remains, the lower-salience "halt" rule
    // stops fireUntilHalt, so the test completes instead of hanging.
    final String drl = "package org.drools.compiler.test\n" +
            "declare Event\n" +
            "    @role(event)\n" +
            "    name : String\n" +
            "end\n" +
            "declare Monitor\n" +
            "    @role(event)\n" +
            "    event : Event\n" +
            "    name : String\n" +
            "end\n" +
            "\n" +
            "rule \"start monitoring\"\n" +
            "when\n" +
            "    $e : Event( $in : name )\n" +
            "    not Monitor( name == $in )\n" +
            "then\n" +
            "    Monitor m = new Monitor( $e, $in );\n" +
            "    insert( m );\n" +
            "end\n" +
            "\n" +
            "rule \"stop monitoring\"\n" +
            "timer( int: 1s )\n" +
            "when\n" +
            "    $m : Monitor( $in : name )\n" +
            "    $e : Event( name == $in )\n" +
            "then\n" +
            "    retract( $m );\n" +
            "    retract( $m.getEvent() );\n" +
            "end\n" +
            "rule \"halt\"\n" +
            "salience -1\n" +
            "when\n" +
            "    not Event( )\n" +
            "then\n" +
            "    drools.halt();\n" +
            "end\n";
    final KieBase kbase = KieBaseUtil.getKieBaseFromKieModuleFromDrl("stream-test", kieBaseTestConfiguration, drl);
    final KieSession ksession = kbase.newKieSession();
    try {
        ksession.addEventListener(new org.kie.api.event.rule.DebugAgendaEventListener());
        // The Event type only exists in the DRL, so it is instantiated
        // reflectively through the declared FactType.
        final FactType eventType = kbase.getFactType("org.drools.compiler.test", "Event");
        final Object event = eventType.newInstance();
        eventType.set(event, "name", "myName");
        ksession.insert(event);
        // Returns when the "halt" rule calls drools.halt(); the @Test
        // timeout guards against a regression that never halts.
        ksession.fireUntilHalt();
    } finally {
        ksession.dispose();
    }
}
}
| apache-2.0 |
koo-taejin/pinpoint | web/src/main/angular/src/app/core/components/inspector-chart/agent-tps-chart-container.ts | 3747 | import { PrimitiveArray, Data, areaSpline } from 'billboard.js';
import { Observable } from 'rxjs';
import { IInspectorChartContainer } from './inspector-chart-container-factory';
import { makeYData, makeXData, getMaxTickValue, getStackedData } from 'app/core/utils/chart-util';
import { IInspectorChartData, InspectorChartDataService } from './inspector-chart-data.service';
/**
 * Inspector chart container for the agent "Transactions Per Second" chart.
 * Fetches TPS statistics for a time range and shapes them into a stacked
 * billboard.js area-spline chart.
 */
export class AgentTPSChartContainer implements IInspectorChartContainer {
    // REST endpoint serving agent transaction statistics.
    private apiUrl = 'getAgentStat/transaction/chart.pinpoint';

    // Fallback y-axis maximum used when all data points are zero.
    defaultYMax = 4;
    title = 'Transactions Per Second';

    constructor(
        private inspectorChartDataService: InspectorChartDataService
    ) {}

    // Fetches the chart payload for the given time range.
    getData(range: number[]): Observable<IInspectorChartData> {
        return this.inspectorChartDataService.getData(this.apiUrl, range);
    }

    // Converts the server payload into billboard.js column arrays: the x
    // axis plus one series per TPS category (sampled/unsampled/skipped,
    // new/continuation) and the total. Values are rounded to 2 decimals.
    makeChartData({charts}: IInspectorChartData): PrimitiveArray[] {
        return [
            ['x', ...makeXData(charts.x)],
            ['tpsSC', ...makeYData(charts.y['TPS_SAMPLED_CONTINUATION'], 2)],
            ['tpsSN', ...makeYData(charts.y['TPS_SAMPLED_NEW'], 2)],
            ['tpsUC', ...makeYData(charts.y['TPS_UNSAMPLED_CONTINUATION'], 2)],
            ['tpsUN', ...makeYData(charts.y['TPS_UNSAMPLED_NEW'], 2)],
            ['tpsSSN', ...makeYData(charts.y['TPS_SKIPPED_NEW'], 2)],
            ['tpsSSC', ...makeYData(charts.y['TPS_SKIPPED_CONTINUATION'], 2)],
            ['tpsT', ...makeYData(charts.y['TPS_TOTAL'], 2)],
        ];
    }

    // Stacks every series (including the white, effectively invisible total)
    // into one area-spline group; order: null keeps the given series order.
    makeDataOption(): Data {
        return {
            type: areaSpline(),
            names: {
                tpsSC: 'S.C',
                tpsSN: 'S.N',
                tpsUC: 'U.C',
                tpsUN: 'U.N',
                tpsSSN: 'S.S.N',
                tpsSSC: 'S.S.C',
                tpsT: 'Total'
            },
            colors: {
                tpsSC: 'rgba(214, 141, 8, 0.4)',
                tpsSN: 'rgba(252, 178, 65, 0.4)',
                tpsUC: 'rgba(90, 103, 166, 0.4)',
                tpsUN: 'rgba(160, 153, 255, 0.4)',
                tpsSSN: 'rgba(26, 188, 156, 0.4)',
                tpsSSC: 'rgba(82, 190, 128, 0.4)',
                tpsT: 'rgb(255, 255, 255)'
            },
            groups: [
                ['tpsSC', 'tpsSN', 'tpsUC', 'tpsUN', 'tpsSSN', 'tpsSSC', 'tpsT']
            ],
            order: null
        };
    }

    // No extra chart options for this container.
    makeElseOption(): {[key: string]: any} {
        return {};
    }

    makeYAxisOptions(data: PrimitiveArray[]): {[key: string]: any} {
        return {
            y: {
                label: {
                    text: 'Transaction (count)',
                    position: 'outer-middle'
                },
                tick: {
                    count: 5,
                    format: (v: number): string => this.convertWithUnit(v)
                },
                padding: {
                    top: 0,
                    bottom: 0
                },
                min: 0,
                // Max over the stacked series; the total series (last column)
                // is excluded via slice(0, -1) to avoid double counting.
                max: (() => {
                    const maxTickValue = getMaxTickValue(getStackedData(data.slice(0, -1)), 1);
                    return maxTickValue === 0 ? this.defaultYMax : maxTickValue;
                })(),
                default: [0, this.defaultYMax]
            }
        };
    }

    // Formats a count with a metric suffix (K/M/G): the reduce divides by
    // 1000 while the running value is still >= 1000; once it drops below,
    // the remaining units are spliced away (early exit) and the current
    // suffix is appended.
    // NOTE(review): for values that are still >= 1000 at the 'G' step the
    // final division leaves the number without a suffix — confirm whether
    // that edge case (>= 1e12) matters here.
    convertWithUnit(value: number): string {
        const unitList = ['', 'K', 'M', 'G'];
        return [...unitList].reduce((acc: string, curr: string, i: number, arr: string[]) => {
            const v = Number(acc);

            return v >= 1000
                ? (v / 1000).toString()
                : (arr.splice(i + 1), `${v}${curr}`);
        }, value.toString());
    }

    getTooltipFormat(v: number, columnId: string, i: number): string {
        return this.convertWithUnit(v);
    }
}
| apache-2.0 |
y-higuchi/onos | web/api/src/main/java/org/onosproject/rest/resources/MetersWebResource.java | 8249 | /*
* Copyright 2015-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.rest.resources;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import org.onosproject.net.DeviceId;
import org.onosproject.net.device.DeviceService;
import org.onosproject.net.meter.DefaultMeterRequest;
import org.onosproject.net.meter.Meter;
import org.onosproject.net.meter.MeterId;
import org.onosproject.net.meter.MeterRequest;
import org.onosproject.net.meter.MeterService;
import org.onosproject.rest.AbstractWebResource;
import org.slf4j.Logger;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
import java.io.IOException;
import java.io.InputStream;
import static org.onlab.util.Tools.nullIsNotFound;
import static org.slf4j.LoggerFactory.getLogger;
/**
* Query and program meter rules.
*/
@Path("meters")
public class MetersWebResource extends AbstractWebResource {
@Context
private UriInfo uriInfo;
private final Logger log = getLogger(getClass());
private static final String DEVICE_INVALID = "Invalid deviceId in meter creation request";
private static final String METER_NOT_FOUND = "Meter is not found for ";
private final MeterService meterService = get(MeterService.class);
private final ObjectNode root = mapper().createObjectNode();
private final ArrayNode metersNode = root.putArray("meters");
/**
 * Returns all meters of all devices.
 *
 * @return 200 OK with array of all the meters in the system
 * @onos.rsModel Meters
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
public Response getMeters() {
    // Build the response into request-local nodes (shadowing the shared
    // instance fields) so repeated invocations on a reused resource
    // instance cannot accumulate entries from earlier requests.
    final ObjectNode root = mapper().createObjectNode();
    final ArrayNode metersNode = root.putArray("meters");
    final Iterable<Meter> meters = meterService.getAllMeters();
    if (meters != null) {
        meters.forEach(meter -> metersNode.add(codec(Meter.class).encode(meter, this)));
    }
    return ok(root).build();
}
/**
 * Returns a collection of meters by the device id.
 *
 * @param deviceId device identifier
 * @return 200 OK with array of meters which belongs to specified device
 * @onos.rsModel Meters
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("{deviceId}")
public Response getMetersByDeviceId(@PathParam("deviceId") String deviceId) {
    // Request-local nodes (shadowing the shared instance fields) so a
    // reused resource instance cannot accumulate entries across requests.
    final ObjectNode root = mapper().createObjectNode();
    final ArrayNode metersNode = root.putArray("meters");
    final DeviceId did = DeviceId.deviceId(deviceId);
    final Iterable<Meter> meters = meterService.getMeters(did);
    if (meters != null) {
        meters.forEach(meter -> metersNode.add(codec(Meter.class).encode(meter, this)));
    }
    return ok(root).build();
}
/**
 * Returns a meter by the meter id.
 *
 * @param deviceId device identifier
 * @param meterId meter identifier
 * @return 200 OK with a meter, return 404 if no entry has been found
 * @onos.rsModel Meter
 */
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("{deviceId}/{meterId}")
public Response getMeterByDeviceIdAndMeterId(@PathParam("deviceId") String deviceId,
                                             @PathParam("meterId") String meterId) {
    DeviceId did = DeviceId.deviceId(deviceId);
    MeterId mid = MeterId.meterId(Long.valueOf(meterId));

    // nullIsNotFound throws when the meter is absent, which is what
    // produces the 404 documented above.
    final Meter meter = nullIsNotFound(meterService.getMeter(did, mid),
            METER_NOT_FOUND + mid.id());

    metersNode.add(codec(Meter.class).encode(meter, this));
    return ok(root).build();
}
/**
 * Creates new meter rule. Creates and installs a new meter rule for the
 * specified device.
 *
 * @param deviceId device identifier
 * @param stream meter rule JSON
 * @return status of the request - CREATED if the JSON is correct,
 * BAD_REQUEST if the JSON is invalid
 * @onos.rsModel MeterPost
 */
@POST
@Path("{deviceId}")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response createMeter(@PathParam("deviceId") String deviceId,
                            InputStream stream) {
    try {
        ObjectNode jsonTree = (ObjectNode) mapper().readTree(stream);
        JsonNode specifiedDeviceId = jsonTree.get("deviceId");

        // Reject the request when the body names a different device than
        // the URL path, or when the device is unknown to the device service.
        if ((specifiedDeviceId != null &&
                !specifiedDeviceId.asText().equals(deviceId)) ||
                get(DeviceService.class).getDevice(DeviceId.deviceId(deviceId))
                        == null) {
            throw new IllegalArgumentException(DEVICE_INVALID);
        }

        // Ensure the codec sees the path's device id even when the body
        // omitted it.
        jsonTree.put("deviceId", deviceId);
        final MeterRequest meterRequest = codec(MeterRequest.class)
                .decode(jsonTree, this);
        final Meter meter = meterService.submit(meterRequest);

        // Location header points at the newly created meter resource.
        UriBuilder locationBuilder = uriInfo.getBaseUriBuilder()
                .path("meters")
                .path(deviceId)
                .path(Long.toString(meter.id().id()));
        return Response
                .created(locationBuilder.build())
                .build();
    } catch (IOException ex) {
        // Malformed JSON in the request body.
        throw new IllegalArgumentException(ex);
    }
}
/**
 * Removes the specified meter. Withdrawing a meter that does not exist is
 * a no-op; the response is 204 either way.
 *
 * @param deviceId device identifier
 * @param meterId meter identifier
 * @return 204 NO CONTENT
 */
@DELETE
@Path("{deviceId}/{meterId}")
public Response deleteMeterByDeviceIdAndMeterId(@PathParam("deviceId") String deviceId,
                                                @PathParam("meterId") String meterId) {
    final Meter meter = meterService.getMeter(DeviceId.deviceId(deviceId),
            MeterId.meterId(Long.valueOf(meterId)));
    if (meter != null) {
        meterService.withdraw(meterToMeterRequest(meter, "REMOVE"), meter.id());
    }
    return Response.noContent().build();
}
/**
 * Converts a meter instance to a meterRequest instance carrying the given
 * operation.
 *
 * @param meter meter instance; may be null
 * @param operation operation, either "ADD" or "REMOVE"
 * @return converted meterRequest instance, or null when the meter is null
 *         or the operation is not recognized
 */
private MeterRequest meterToMeterRequest(Meter meter, String operation) {
    if (meter == null) {
        return null;
    }
    // Copy the common attributes once; the burst flag is applied on demand.
    // This removes the two near-identical builder chains of the original.
    MeterRequest.Builder builder = DefaultMeterRequest.builder()
            .fromApp(meter.appId())
            .forDevice(meter.deviceId())
            .withUnit(meter.unit())
            .withBands(meter.bands());
    if (meter.isBurst()) {
        builder = builder.burst();
    }
    switch (operation) {
        case "ADD":
            return builder.add();
        case "REMOVE":
            return builder.remove();
        default:
            log.warn("Invalid operation {}.", operation);
            return null;
    }
}
}
| apache-2.0 |
growthbeat/growthanalytics-java | src/main/java/com/growthbeat/analytics/query/filter/SegmentFilterQuery.java | 781 | package com.growthbeat.analytics.query.filter;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.growthbeat.analytics.query.segment.SegmentQuery;
// JsonDeserializer.None resets Jackson to its default bean deserializer for
// this concrete subtype (overriding whatever deserializer FilterQuery declares
// for the polymorphic base -- TODO confirm against FilterQuery's annotations).
@JsonDeserialize(using = JsonDeserializer.None.class)
public class SegmentFilterQuery extends FilterQuery {

    private static final long serialVersionUID = 1L;

    // The segment query wrapped by this filter; may be null until set.
    private SegmentQuery segmentQuery;

    /** Creates an empty filter and marks its type as {@code segment}. */
    public SegmentFilterQuery() {
        super();
        setType(FilterQueryType.segment);
    }

    /** Creates a filter wrapping the given segment query. */
    public SegmentFilterQuery(SegmentQuery segmentQuery) {
        this();
        setSegmentQuery(segmentQuery);
    }

    public SegmentQuery getSegmentQuery() {
        return segmentQuery;
    }

    public void setSegmentQuery(SegmentQuery segmentQuery) {
        this.segmentQuery = segmentQuery;
    }
}
| apache-2.0 |
feargswalsh92/feargswalsh92.github.io | node_modules/caniuse-lite/data/features/css-matches-pseudo.js | 842 | module.exports={A:{A:{"2":"J C G E B A TB"},B:{"2":"D X g H L"},C:{"16":"3 RB PB OB","548":"0 2 4 F I J C G E B A D X g H L M N O P Q R S T U V W t Y Z a b c d e f K h i j k l m n o p q v w x y z s r"},D:{"16":"F I J C G E B A D X g","164":"0 2 4 8 H L M N O P Q R S T U V W t Y Z a b c d e f K h i j k l m n o p q v w x y z s r DB AB SB BB"},E:{"2":"7 F CB","16":"I","164":"J C G EB FB GB","257":"E B A HB IB JB"},F:{"2":"1 5 6 E A D KB LB MB NB QB","164":"H L M N O P Q R S T U V W t Y Z a b c d e f K h i j k l m n o p q"},G:{"16":"7 9 u UB VB","164":"G WB XB","257":"A YB ZB aB bB"},H:{"2":"cB"},I:{"16":"3 dB eB fB","164":"F r gB u hB iB"},J:{"16":"C","164":"B"},K:{"2":"1 5 6 B A D","164":"K"},L:{"164":"8"},M:{"548":"s"},N:{"2":"B A"},O:{"164":"jB"},P:{"164":"F I"},Q:{"164":"kB"},R:{"164":"lB"}},B:5,C:":matches() CSS pseudo-class"};
| apache-2.0 |
palecur/elasticsearch | modules/reindex/src/main/java/org/elasticsearch/index/reindex/BulkIndexByScrollResponse.java | 6830 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.reindex;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static java.lang.Math.min;
import static java.util.Collections.unmodifiableList;
import static java.util.Objects.requireNonNull;
import static org.elasticsearch.action.search.ShardSearchFailure.readShardSearchFailure;
/**
* Response used for actions that index many documents using a scroll request.
*/
public class BulkIndexByScrollResponse extends ActionResponse implements ToXContent {
    // Total wall-clock time the scroll-and-bulk operation took.
    private TimeValue took;
    // Snapshot of the task status (created/updated/deleted counters etc.).
    private BulkByScrollTask.Status status;
    // Failures raised by individual bulk index operations.
    private List<Failure> indexingFailures;
    // Failures raised by the backing scroll/search requests.
    private List<ShardSearchFailure> searchFailures;
    // True when any of the sub-requests timed out.
    private boolean timedOut;

    // No-arg constructor required for wire deserialization via readFrom.
    public BulkIndexByScrollResponse() {
    }

    public BulkIndexByScrollResponse(TimeValue took, BulkByScrollTask.Status status, List<Failure> indexingFailures,
            List<ShardSearchFailure> searchFailures, boolean timedOut) {
        this.took = took;
        this.status = requireNonNull(status, "Null status not supported");
        this.indexingFailures = indexingFailures;
        this.searchFailures = searchFailures;
        this.timedOut = timedOut;
    }

    public TimeValue getTook() {
        return took;
    }

    protected BulkByScrollTask.Status getStatus() {
        return status;
    }

    // The following getters simply expose the task-status counters.
    public long getCreated() {
        return status.getCreated();
    }

    public long getDeleted() {
        return status.getDeleted();
    }

    public long getUpdated() {
        return status.getUpdated();
    }

    public int getBatches() {
        return status.getBatches();
    }

    public long getVersionConflicts() {
        return status.getVersionConflicts();
    }

    public long getNoops() {
        return status.getNoops();
    }

    /**
     * The reason that the request was canceled or null if it hasn't been.
     */
    public String getReasonCancelled() {
        return status.getReasonCancelled();
    }

    /**
     * The number of times that the request had retry bulk actions.
     */
    public long getBulkRetries() {
        return status.getBulkRetries();
    }

    /**
     * The number of times that the request had retry search actions.
     */
    public long getSearchRetries() {
        return status.getSearchRetries();
    }

    /**
     * All of the indexing failures. Version conflicts are only included if the request sets abortOnVersionConflict to true (the
     * default).
     */
    public List<Failure> getIndexingFailures() {
        return indexingFailures;
    }

    /**
     * All search failures.
     */
    public List<ShardSearchFailure> getSearchFailures() {
        return searchFailures;
    }

    /**
     * Did any of the sub-requests that were part of this request timeout?
     */
    public boolean isTimedOut() {
        return timedOut;
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        super.writeTo(out);
        took.writeTo(out);
        status.writeTo(out);
        // Both failure lists are written length-prefixed; readFrom mirrors this order exactly.
        out.writeVInt(indexingFailures.size());
        for (Failure failure: indexingFailures) {
            failure.writeTo(out);
        }
        out.writeVInt(searchFailures.size());
        for (ShardSearchFailure failure: searchFailures) {
            failure.writeTo(out);
        }
        out.writeBoolean(timedOut);
    }

    @Override
    public void readFrom(StreamInput in) throws IOException {
        super.readFrom(in);
        took = new TimeValue(in);
        status = new BulkByScrollTask.Status(in);
        int indexingFailuresCount = in.readVInt();
        List<Failure> indexingFailures = new ArrayList<>(indexingFailuresCount);
        for (int i = 0; i < indexingFailuresCount; i++) {
            indexingFailures.add(new Failure(in));
        }
        // Wrapped unmodifiable so a deserialized response cannot be mutated by callers.
        this.indexingFailures = unmodifiableList(indexingFailures);
        int searchFailuresCount = in.readVInt();
        List<ShardSearchFailure> searchFailures = new ArrayList<>(searchFailuresCount);
        for (int i = 0; i < searchFailuresCount; i++) {
            searchFailures.add(readShardSearchFailure(in));
        }
        this.searchFailures = unmodifiableList(searchFailures);
        this.timedOut = in.readBoolean();
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        builder.field("took", took.millis());
        builder.field("timed_out", timedOut);
        status.innerXContent(builder, params);
        // Indexing and search failures are flattened into a single "failures" array.
        builder.startArray("failures");
        for (Failure failure: indexingFailures) {
            builder.startObject();
            failure.toXContent(builder, params);
            builder.endObject();
        }
        for (ShardSearchFailure failure: searchFailures) {
            builder.startObject();
            failure.toXContent(builder, params);
            builder.endObject();
        }
        builder.endArray();
        return builder;
    }

    @Override
    public String toString() {
        StringBuilder builder = new StringBuilder();
        builder.append("BulkIndexByScrollResponse[");
        builder.append("took=").append(took).append(',');
        builder.append("timed_out=").append(timedOut).append(',');
        status.innerToString(builder);
        // Only the first three failures of each kind are printed to keep the string bounded.
        builder.append(",indexing_failures=").append(getIndexingFailures().subList(0, min(3, getIndexingFailures().size())));
        builder.append(",search_failures=").append(getSearchFailures().subList(0, min(3, getSearchFailures().size())));
        return builder.append(']').toString();
    }
} | apache-2.0 |
osdu/anmpserver-manager | src/Util/Temp/SelfUpdateCommand.php | 8189 | <?php
/*
* This file is part of the Symfony Installer package.
*
* (c) Fabien Potencier <fabien@symfony.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace Symfony\Installer;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Input\InputOption;
use Symfony\Component\Console\Output\OutputInterface;
use Symfony\Component\Filesystem\Exception\IOException;
/**
* This command is inspired by the self-update command included
* in the PHP-CS-Fixer library.
*
* @link https://github.com/fabpot/PHP-CS-Fixer/blob/master/Symfony/CS/Console/Command/SelfUpdateCommand.php.
*
* @author Igor Wiedler <igor@wiedler.ch>
* @author Stephane PY <py.stephane1@gmail.com>
* @author Grégoire Pineau <lyrixx@lyrixx.info>
*/
class SelfUpdateCommand extends DownloadCommand
{
    /**
     * @var string The temp dir
     */
    private $tempDir;

    /**
     * @var string The URL where the latest installer version can be downloaded
     */
    private $remoteInstallerFile;

    /**
     * @var string The filepath of the installer currently installed in the local machine
     */
    private $currentInstallerFile;

    /**
     * @var string The filepath of the new installer downloaded to replace the current installer
     */
    private $newInstallerFile;

    /**
     * @var string The filepath of the backup of the current installer in case a rollback is performed
     */
    private $currentInstallerBackupFile;

    /**
     * @var bool Flag which indicates that, in case of a rollback, it's safe to restore the installer backup because
     * it corresponds to the most recent version
     */
    private $restorePreviousInstaller;

    /**
     * {@inheritdoc}
     */
    protected function configure()
    {
        $this
            ->setName('self-update')
            ->setAliases(array('selfupdate'))
            ->addOption('force-update', 'f', InputOption::VALUE_NONE, 'It updates the installer to the latest available version without checking if it\'s older or newer than the locally installed version.')
            ->setDescription('Update the Symfony Installer to the latest version.')
            ->setHelp('The <info>%command.name%</info> command updates the installer to the latest available version.')
        ;
    }

    /**
     * The self-update command is only available when using the installer via the PHAR file.
     *
     * @return bool Whether the command is enabled
     */
    public function isEnabled()
    {
        // When running from a phar, __DIR__ is a phar:// stream path.
        return 'phar://' === substr(__DIR__, 0, 7);
    }

    /**
     * {@inheritdoc}
     */
    protected function initialize(InputInterface $input, OutputInterface $output)
    {
        parent::initialize($input, $output);
        $this->remoteInstallerFile = 'http://symfony.com/installer';
        // realpath() can return false; fall back to the raw argv value in that case.
        $this->currentInstallerFile = realpath($_SERVER['argv'][0]) ?: $_SERVER['argv'][0];
        $this->tempDir = sys_get_temp_dir();
        $this->currentInstallerBackupFile = basename($this->currentInstallerFile, '.phar').'-backup.phar';
        $this->newInstallerFile = $this->tempDir.'/'.basename($this->currentInstallerFile, '.phar').'-temp.phar';
        $this->restorePreviousInstaller = false;
    }

    /**
     * {@inheritdoc}
     */
    protected function execute(InputInterface $input, OutputInterface $output)
    {
        $forceUpdate = true === $input->getOption('force-update');
        if (!$forceUpdate && $this->isInstallerUpdated()) {
            $this->output->writeln(sprintf('// Symfony Installer is <info>already updated</info> to the latest version (%s).', $this->latestInstallerVersion));

            return;
        }

        $this->output->writeln(sprintf('// <info>updating</info> Symfony Installer to <info>%s</info> version', $this->latestInstallerVersion));

        try {
            // Each step returns $this; a failure anywhere aborts the chain via an exception.
            $this
                ->downloadNewVersion()
                ->checkNewVersionIsValid()
                ->backupCurrentVersion()
                ->replaceCurrentVersionbyNewVersion()
                ->cleanUp()
            ;
        } catch (IOException $e) {
            // Filesystem failures are almost always a permissions problem; suggest sudo.
            if ($this->output->isVeryVerbose()) {
                $this->output->writeln($e->getMessage());
            }

            throw new \RuntimeException(sprintf(
                "The installer couldn't be updated, probably because of a permissions issue.\n".
                "Try to execute the command again with super user privileges:\n".
                " sudo %s\n",
                $this->getExecutedCommand()
            ));
        } catch (\Exception $e) {
            // Any other failure restores the previous installer before reporting the error.
            $this->rollback();

            if ($this->output->isVeryVerbose()) {
                $this->output->writeln($e->getMessage());
            }

            return 1;
        }
    }

    /**
     * Downloads the new version of the Symfony installer.
     *
     * @return $this
     */
    private function downloadNewVersion()
    {
        // check for permissions in local filesystem before start downloading files
        if (!is_writable($this->currentInstallerFile)) {
            throw new IOException('Symfony Installer update failed: the "'.$this->currentInstallerFile.'" file could not be written');
        }

        if (!is_writable($this->tempDir)) {
            throw new IOException('Symfony Installer update failed: the "'.$this->tempDir.'" directory used to download files temporarily could not be written');
        }

        if (false === $newInstaller = $this->getUrlContents($this->remoteInstallerFile)) {
            throw new \RuntimeException('The new version of the Symfony Installer couldn\'t be downloaded from the server.');
        }

        // Reuse the current phar's permissions; fall back to umask-derived defaults otherwise.
        $newInstallerPermissions = $this->currentInstallerFile ? fileperms($this->currentInstallerFile) : 0777 & ~umask();
        $this->fs->dumpFile($this->newInstallerFile, $newInstaller, $newInstallerPermissions);

        return $this;
    }

    /**
     * Checks if the new version is valid.
     *
     * @return $this
     */
    private function checkNewVersionIsValid()
    {
        // creating a Phar instance for an existing file is not allowed
        // when the Phar extension is in readonly mode
        if (!ini_get('phar.readonly')) {
            // test the phar validity
            $phar = new \Phar($this->newInstallerFile);
            // free the variable to unlock the file
            unset($phar);
        }

        return $this;
    }

    /**
     * Does a backup of the current version of the Symfony installer.
     *
     * @return $this
     */
    private function backupCurrentVersion()
    {
        $this->fs->copy($this->currentInstallerFile, $this->currentInstallerBackupFile, true);
        // From here on a rollback may safely restore the backup.
        $this->restorePreviousInstaller = true;

        return $this;
    }

    /**
     * Replaces the currenct version of the Symfony installer with the new one.
     *
     * @return $this
     */
    private function replaceCurrentVersionbyNewVersion()
    {
        $this->fs->copy($this->newInstallerFile, $this->currentInstallerFile, true);

        return $this;
    }

    /**
     * Removes the temporary used files.
     */
    private function cleanUp()
    {
        $this->fs->remove(array($this->currentInstallerBackupFile, $this->newInstallerFile));
    }

    /**
     * Restores the previously installed version of the Symfony installer.
     */
    private function rollback()
    {
        $this->output->writeln(array(
            '',
            '<error>There was an error while updating the installer.</error>',
            'The previous Symfony Installer version has been restored.',
            '',
        ));

        // Always discard the half-downloaded phar; restore the backup only if one was taken.
        $this->fs->remove($this->newInstallerFile);

        if ($this->restorePreviousInstaller) {
            $this->fs->copy($this->currentInstallerBackupFile, $this->currentInstallerFile, true);
        }
    }

    /**
     * {@inheritdoc}
     */
    protected function getDownloadedApplicationType()
    {
        return 'Symfony Installer';
    }

    /**
     * {@inheritdoc}
     */
    protected function getRemoteFileUrl()
    {
        return 'http://symfony.com/installer';
    }
}
| apache-2.0 |
box/t3js | lib/dom-jquery.js | 1919 | /**
* @fileoverview DOM abstraction to use jquery to add and remove event listeners
* in T3
* @author jdivock
*/
/* eslint-env jquery */
Box.JQueryDOM = (function() {
    'use strict';

    /**
     * DOM abstraction backed by jQuery. Exposes the same interface
     * (query/queryAll/on/off) as the native adapter so T3 can swap
     * implementations transparently.
     */
    return {

        type: 'jquery',

        /**
         * Finds the first descendant of root that matches the selector.
         * @param {HTMLElement} root Element whose subtree is searched
         * @param {string} selector CSS selector to match against
         * @returns {HTMLElement} The first matching element, or null when
         *   nothing matches (mirroring native querySelector)
         */
        query: function(root, selector) {
            var matched = jQuery(root).find(selector);
            return matched.length ? matched[0] : null;
        },

        /**
         * Finds every descendant of root that matches the selector.
         * @param {HTMLElement} root Element whose subtree is searched
         * @param {string} selector CSS selector to match against
         * @returns {Array} Plain array of matching elements (possibly empty)
         */
        queryAll: function(root, selector) {
            var matched = jQuery(root).find(selector);
            return jQuery.makeArray(matched);
        },

        /**
         * Attaches an event listener via jQuery.
         * @param {HTMLElement} element Target to attach the listener to
         * @param {string} type Name of the event to listen for
         * @param {Function} listener Handler invoked when the event fires
         * @returns {void}
         */
        on: function(element, type, listener) {
            jQuery(element).on(type, listener);
        },

        /**
         * Detaches an event listener via jQuery.
         * @param {HTMLElement} element Target to remove the listener from
         * @param {string} type Name of the event the listener was bound to
         * @param {Function} listener Handler to remove
         * @returns {void}
         */
        off: function(element, type, listener) {
            jQuery(element).off(type, listener);
        }
    };
}());

Box.DOM = Box.JQueryDOM;
| apache-2.0 |
BigBoss424/portfolio | v8/development/node_modules/gatsby/dist/schema/types/pagination.js | 2909 | "use strict";
const {
getFieldsEnum
} = require(`./sort`);
const {
addDerivedType
} = require(`./derived-types`);
const {
distinct,
group
} = require(`../resolvers`);
// NOTE: this is Babel-compiled output ("dist"); edit the original source, not this file.

// Shared `PageInfo` object type used by every connection type.
const getPageInfo = ({
  schemaComposer
}) => schemaComposer.getOrCreateOTC(`PageInfo`, tc => {
  tc.addFields({
    currentPage: `Int!`,
    hasPreviousPage: `Boolean!`,
    hasNextPage: `Boolean!`,
    itemCount: `Int!`,
    pageCount: `Int!`,
    perPage: `Int`
  });
});

// Builds (or reuses) the `<Type>Edge` object type for a node type and
// registers it as a derived type of that node type.
const getEdge = ({
  schemaComposer,
  typeComposer
}) => {
  const typeName = typeComposer.getTypeName() + `Edge`;
  addDerivedType({
    typeComposer,
    derivedTypeName: typeName
  });
  return schemaComposer.getOrCreateOTC(typeName, tc => {
    tc.addFields({
      next: typeComposer,
      node: typeComposer.getTypeNonNull(),
      previous: typeComposer
    });
  });
};

// Common skeleton for connection types: totalCount/edges/nodes/pageInfo
// plus any caller-supplied extra fields.
const createPagination = ({
  schemaComposer,
  typeComposer,
  fields,
  typeName
}) => {
  const paginationTypeComposer = schemaComposer.getOrCreateOTC(typeName, tc => {
    tc.addFields({
      totalCount: `Int!`,
      edges: [getEdge({
        schemaComposer,
        typeComposer
      }).getTypeNonNull()],
      nodes: [typeComposer.getTypeNonNull()],
      pageInfo: getPageInfo({
        schemaComposer
      }).getTypeNonNull(),
      ...fields
    });
  });
  paginationTypeComposer.makeFieldNonNull(`edges`);
  paginationTypeComposer.makeFieldNonNull(`nodes`);
  addDerivedType({
    typeComposer,
    derivedTypeName: typeName
  });
  return paginationTypeComposer;
};

// `<Type>GroupConnection`: a connection enriched with the grouping key
// (field name and its value).
const getGroup = ({
  schemaComposer,
  typeComposer
}) => {
  const typeName = typeComposer.getTypeName() + `GroupConnection`;
  const fields = {
    field: `String!`,
    fieldValue: `String`
  };
  return createPagination({
    schemaComposer,
    typeComposer,
    fields,
    typeName
  });
};

// `<Type>Connection`: the top-level connection type, adding the `distinct`
// and `group` aggregation fields backed by the shared resolvers.
const getPagination = ({
  schemaComposer,
  typeComposer
}) => {
  const inputTypeComposer = typeComposer.getInputTypeComposer();
  const typeName = typeComposer.getTypeName() + `Connection`;
  const fieldsEnumTC = getFieldsEnum({
    schemaComposer,
    typeComposer,
    inputTypeComposer
  });
  const fields = {
    distinct: {
      type: [`String!`],
      args: {
        field: fieldsEnumTC.getTypeNonNull()
      },
      resolve: distinct
    },
    group: {
      type: [getGroup({
        schemaComposer,
        typeComposer
      }).getTypeNonNull()],
      args: {
        skip: `Int`,
        limit: `Int`,
        field: fieldsEnumTC.getTypeNonNull()
      },
      resolve: group
    }
  };
  const paginationTypeComposer = createPagination({
    schemaComposer,
    typeComposer,
    fields,
    typeName
  });
  paginationTypeComposer.makeFieldNonNull(`distinct`);
  paginationTypeComposer.makeFieldNonNull(`group`);
  return paginationTypeComposer;
};

module.exports = {
  getPageInfo,
  getEdge,
  getGroup,
  getPagination
};
//# sourceMappingURL=pagination.js.map | apache-2.0 |
SignalK/signalk-server-node | packages/server-admin-ui/src/routes.js | 109 | const routes = {
'/': 'Home',
'/dashboard': 'Dashboard',
'/webapps': 'Webapps'
}
export default routes
| apache-2.0 |
oscarbou/isis | core/metamodel/src/test/java/org/apache/isis/objectstore/jdo/metamodel/facets/object/auditable/GivenAuditableFacetMarkerInterfaceFactoryTest.java | 3600 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.objectstore.jdo.metamodel.facets.object.auditable;
import java.util.List;
import javax.jdo.annotations.PersistenceCapable;
import org.datanucleus.enhancement.Persistable;
import org.apache.isis.core.metamodel.facetapi.Facet;
import org.apache.isis.core.metamodel.facetapi.FeatureType;
import org.apache.isis.core.metamodel.facets.AbstractFacetFactoryTest;
import org.apache.isis.core.metamodel.facets.FacetFactory;
import org.apache.isis.core.metamodel.facets.object.audit.AuditableFacet;
import org.apache.isis.core.metamodel.facets.object.audit.markerifc.AuditableFacetMarkerInterface;
import org.apache.isis.objectstore.jdo.applib.Auditable;
import junit.framework.Assert;
public class GivenAuditableFacetMarkerInterfaceFactoryTest extends
        AbstractFacetFactoryTest {

    // Factory under test; recreated for each test method in setUp.
    private AuditableMarkerInterfaceInJdoApplibFacetFactory facetFactory;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        facetFactory = new AuditableMarkerInterfaceInJdoApplibFacetFactory();
    }

    @Override
    protected void tearDown() throws Exception {
        facetFactory = null;
        super.tearDown();
    }

    // The factory should only register for whole objects, not for members.
    public void testFeatureTypes() {
        final List<FeatureType> featureTypes = facetFactory
                .getFeatureTypes();
        Assert.assertTrue(contains(featureTypes, FeatureType.OBJECT));
        assertFalse(contains(featureTypes, FeatureType.PROPERTY));
        assertFalse(contains(featureTypes, FeatureType.COLLECTION));
        Assert.assertFalse(contains(featureTypes, FeatureType.ACTION));
        assertFalse(contains(featureTypes,
                FeatureType.ACTION_PARAMETER_SCALAR));
    }

    public void testAuditableMarkerInterfacePickedUpOnClass() {
        // Fixture: a persistable domain class carrying the Auditable marker interface.
        abstract class Customer implements Auditable, Persistable {
        }
        facetFactory.process(new FacetFactory.ProcessClassContext(Customer.class, methodRemover, facetHolder));
        final Facet facet = facetHolder.getFacet(AuditableFacet.class);
        assertNotNull(facet);
        assertTrue(facet instanceof AuditableFacetMarkerInterface);
    }

    public void testIfNoAuditableMarkerInterfaceThenNoFacet() {
        // Same fixture without the marker interface: no facet should be installed.
        abstract class Customer implements Persistable {
        }
        facetFactory.process(new FacetFactory.ProcessClassContext(Customer.class, methodRemover, facetHolder));
        final Facet facet = facetHolder.getFacet(AuditableFacet.class);
        assertNull(facet);
    }

    public void testNoMethodsRemoved() {
        // Processing a class must not strip any of its methods from introspection.
        @PersistenceCapable
        abstract class Customer implements Persistable {
        }
        facetFactory.process(new FacetFactory.ProcessClassContext(Customer.class, methodRemover, facetHolder));
        assertNoMethodsRemoved();
    }
}
| apache-2.0 |
heuermh/eggo | setup.py | 1810 | # Licensed to Big Data Genomics (BDG) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The BDG licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
def readme():
    """Return the contents of README.md for use as setup()'s long_description.

    The encoding is pinned to UTF-8 so the build does not depend on the
    machine's locale: the platform default codec can be ASCII on some
    systems and would crash on non-ASCII characters in the README.
    ``io.open`` is used so the same code works on Python 2.6/2.7 (this
    package's declared targets) as well as Python 3.
    """
    import io  # local import keeps the module-level import block untouched
    with io.open('README.md', 'r', encoding='utf-8') as ip:
        return ip.read()
# Package metadata for eggo; long_description is sourced from the README.
setup(
    name='eggo',
    version='0.1.0.dev0',
    description='Pre-formatted Hadoop-friendly public genomics datasets',
    long_description=readme(),
    author='Uri Laserson',
    author_email='laserson@cloudera.com',
    url='https://github.com/bigdatagenomics/eggo',
    packages=find_packages(),
    # Ship the template/config resources bundled inside the package.
    package_data={'eggo.resources': ['*.template', '*.conf']},
    include_package_data=True,
    install_requires=['fabric', 'boto', 'click', 'cm_api'],
    # Console entry points: cluster management and dataset tooling CLIs.
    entry_points={'console_scripts': ['eggo-cluster = eggo.cli.cluster:main',
                                      'eggo-data = eggo.cli.datasets:main']},
    keywords=('bdg adam spark eggo genomics omics public data'),
    license='Apache License, Version 2.0',
    classifiers=[
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7'
    ],
    zip_safe=False)
| apache-2.0 |
rouazana/james | mailet/mailets/src/main/java/org/apache/james/transport/mailets/RecipientRewriteTable.java | 3440 | /****************************************************************
* Licensed to the Apache Software Foundation (ASF) under one *
* or more contributor license agreements. See the NOTICE file *
* distributed with this work for additional information *
* regarding copyright ownership. The ASF licenses this file *
* to you under the Apache License, Version 2.0 (the *
* "License"); you may not use this file except in compliance *
* with the License. You may obtain a copy of the License at *
* *
* http://www.apache.org/licenses/LICENSE-2.0 *
* *
* Unless required by applicable law or agreed to in writing, *
* software distributed under the License is distributed on an *
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY *
* KIND, either express or implied. See the License for the *
* specific language governing permissions and limitations *
* under the License. *
****************************************************************/
package org.apache.james.transport.mailets;
import java.util.ArrayList;
import java.util.Collection;
import javax.inject.Inject;
import javax.mail.MessagingException;
import javax.mail.internet.MimeMessage;
import org.apache.james.rrt.api.RecipientRewriteTable.ErrorMappingException;
import org.apache.james.rrt.api.RecipientRewriteTableException;
import org.apache.mailet.MailAddress;
/**
* Mailet which should get used when using RecipientRewriteTable-Store to
* implementations for mappings of forwards and aliases.
*/
public class RecipientRewriteTable extends AbstractRecipientRewriteTableMailet {

    // Backing store that maps a recipient to its alias/forward targets.
    private org.apache.james.rrt.api.RecipientRewriteTable vut;

    /**
     * Sets the virtual table store.
     *
     * @param vut
     *            the vutStore to set, possibly null
     */
    @Inject
    public final void setRecipientRewriteTable(org.apache.james.rrt.api.RecipientRewriteTable vut) {
        this.vut = vut;
    }

    /**
     * Looks up the recipient in the rewrite table and, when mappings exist,
     * delegates the rewriting to the base class; otherwise delivers to the
     * original recipient unchanged.
     *
     * @see org.apache.james.transport.mailets.AbstractRecipientRewriteTableMailet#processMail(MailAddress, MailAddress, MimeMessage)
     */
    public Collection<MailAddress> processMail(MailAddress sender, MailAddress recipient, MimeMessage message) throws MessagingException {
        try {
            Collection<String> mappings = vut.getMappings(recipient.getLocalPart(), recipient.getDomain());
            if (mappings != null) {
                return handleMappings(mappings, sender, recipient, message);
            }
        } catch (ErrorMappingException e) {
            // A mapping deliberately configured to error out aborts processing.
            String errorBuffer = "A problem as occoured trying to alias and forward user " + recipient + ": " + e.getMessage();
            throw new MessagingException(errorBuffer);
        } catch (RecipientRewriteTableException e) {
            throw new MessagingException("Unable to access RecipientRewriteTable", e);
        }
        // No mapping found: keep the original recipient.
        Collection<MailAddress> rcpts = new ArrayList<MailAddress>();
        rcpts.add(recipient);
        return rcpts;
    }

    /**
     * @see org.apache.mailet.base.GenericMailet#getMailetInfo()
     */
    public String getMailetInfo() {
        return "RecipientRewriteTable Mailet";
    }
}
| apache-2.0 |
apache/kylin | core-metadata/src/main/java/org/apache/kylin/measure/dim/DimCountDistincSerializer.java | 2263 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.measure.dim;
import java.nio.ByteBuffer;
import java.util.Set;
import org.apache.kylin.metadata.datatype.DataType;
import org.apache.kylin.metadata.datatype.DataTypeSerializer;
import org.apache.kylin.util.KryoUtils;
public class DimCountDistincSerializer extends DataTypeSerializer<DimCountDistinctCounter> {

    // called by reflection
    public DimCountDistincSerializer(DataType type) {
    }

    @Override
    public void serialize(DimCountDistinctCounter value, ByteBuffer out) {
        byte[] serialize = KryoUtils.serialize(value.getContainer());
        // Wire layout: [total length][max cardinality][payload length][kryo payload].
        // The total length (4 + 4 + 4 + payload) counts its own 4 bytes, which is
        // exactly what peekLength reads back.
        out.putInt(4 + 4 + 4 + serialize.length);
        out.putInt(value.getMAX_CARD());
        out.putInt(serialize.length);
        out.put(serialize);
    }

    @Override
    public DimCountDistinctCounter deserialize(ByteBuffer in) {
        // The value is read but unused; the getInt advances past the length header.
        int totalLength = in.getInt();
        int maxCard = in.getInt();
        int arrayLength = in.getInt();
        byte[] data = new byte[arrayLength];
        in.get(data);
        return new DimCountDistinctCounter(KryoUtils.deserialize(data, Set.class), maxCard);
    }

    @Override
    public int peekLength(ByteBuffer in) {
        // Read the leading total-length int without consuming it.
        int mark = in.position();
        int ret = in.getInt();
        in.position(mark);
        return ret;
    }

    @Override
    public int maxLength() {
        return 8 * 1024 * 1024;
    }

    @Override
    public int getStorageBytesEstimate() {
        return 1024;
    }
}
| apache-2.0 |
tadeegan/eiger-application-aware | src/java/org/apache/cassandra/gms/EndpointState.java | 4767 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.gms;
import java.io.*;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.io.IVersionedSerializer;
import org.cliffc.high_scale_lib.NonBlockingHashMap;
/**
* This abstraction represents both the HeartBeatState and the ApplicationState in an EndpointState
* instance. Any state for a given endpoint can be retrieved from this instance.
*/
public class EndpointState
{
    protected static Logger logger = LoggerFactory.getLogger(EndpointState.class);

    private final static IVersionedSerializer<EndpointState> serializer = new EndpointStateSerializer();

    // Replaced wholesale (never mutated in place); volatile so readers on
    // other threads always observe the latest reference.
    private volatile HeartBeatState hbState;

    // Lock-free concurrent map: entries may be added while other threads
    // read or iterate without synchronization.
    final Map<ApplicationState, VersionedValue> applicationState = new NonBlockingHashMap<ApplicationState, VersionedValue>();

    /* fields below do not get serialized */

    // Local wall-clock time (millis) of the last heartbeat update.
    private volatile long updateTimestamp;
    private volatile boolean isAlive;

    public static IVersionedSerializer<EndpointState> serializer()
    {
        return serializer;
    }

    EndpointState(HeartBeatState initialHbState)
    {
        hbState = initialHbState;
        updateTimestamp = System.currentTimeMillis();
        isAlive = true;
    }

    HeartBeatState getHeartBeatState()
    {
        return hbState;
    }

    void setHeartBeatState(HeartBeatState newHbState)
    {
        // Record when the heartbeat was last replaced before publishing it.
        updateTimestamp();
        hbState = newHbState;
    }

    public VersionedValue getApplicationState(ApplicationState key)
    {
        return applicationState.get(key);
    }

    /**
     * TODO replace this with operations that don't expose private state
     */
    @Deprecated
    public Map<ApplicationState, VersionedValue> getApplicationStateMap()
    {
        return applicationState;
    }

    void addApplicationState(ApplicationState key, VersionedValue value)
    {
        applicationState.put(key, value);
    }

    /* getters and setters */

    public long getUpdateTimestamp()
    {
        return updateTimestamp;
    }

    void updateTimestamp()
    {
        updateTimestamp = System.currentTimeMillis();
    }

    public boolean isAlive()
    {
        return isAlive;
    }

    void markAlive()
    {
        isAlive = true;
    }

    void markDead()
    {
        isAlive = false;
    }
}
class EndpointStateSerializer implements IVersionedSerializer<EndpointState>
{
    private static Logger logger = LoggerFactory.getLogger(EndpointStateSerializer.class);

    /**
     * Writes {@code epState} as: HeartBeatState, the number of application
     * states, then (ordinal, VersionedValue) pairs.
     *
     * The application-state map is a concurrently-mutated NonBlockingHashMap,
     * so the entries are snapshotted before the count is written. The original
     * code wrote {@code size()} and then iterated the live entry set, so a
     * concurrent add/remove between the two could make the recorded count
     * disagree with the number of pairs actually written, corrupting the
     * stream for the reader.
     */
    public void serialize(EndpointState epState, DataOutput dos, int version) throws IOException
    {
        /* serialize the HeartBeatState */
        HeartBeatState hbState = epState.getHeartBeatState();
        HeartBeatState.serializer().serialize(hbState, dos, version);

        /* serialize the map of ApplicationState objects: snapshot first so the
         * count always matches the entries written */
        List<Map.Entry<ApplicationState, VersionedValue>> entries =
                new ArrayList<Map.Entry<ApplicationState, VersionedValue>>(epState.applicationState.entrySet());
        dos.writeInt(entries.size());
        for (Map.Entry<ApplicationState, VersionedValue> entry : entries)
        {
            VersionedValue value = entry.getValue();
            // States are keyed by enum ordinal on the wire.
            dos.writeInt(entry.getKey().ordinal());
            VersionedValue.serializer.serialize(value, dos, version);
        }
    }

    public EndpointState deserialize(DataInput dis, int version) throws IOException
    {
        HeartBeatState hbState = HeartBeatState.serializer().deserialize(dis, version);
        EndpointState epState = new EndpointState(hbState);

        int appStateSize = dis.readInt();
        for ( int i = 0; i < appStateSize; ++i )
        {
            // Gossiper.STATES maps the wire ordinal back to the enum constant.
            int key = dis.readInt();
            VersionedValue value = VersionedValue.serializer.deserialize(dis, version);
            epState.addApplicationState(Gossiper.STATES[key], value);
        }
        return epState;
    }

    public long serializedSize(EndpointState endpointState, int version)
    {
        // Not implemented; callers are expected to serialize directly.
        throw new UnsupportedOperationException();
    }
}
| apache-2.0 |
internetisalie/lua-for-idea | testdata/non-test-system-files/lua5.1-tests/pm.lua | 9250 | print('testing pattern matching')
-- Returns the substring of `s` matched by pattern `p`, or no value when
-- the pattern does not match anywhere in `s`.
function f(s, p)
  local first, last = string.find(s, p)
  if first then
    return string.sub(s, first, last)
  end
end
-- Like string.match, but returns the span of `s` covered by the whole match
-- of `p`, by injecting position captures at both ends of the pattern.
function f1(s, p)
  -- Shift every back-reference %1..%9 up by one, to make room for the
  -- position capture inserted at the front of the pattern below.
  p = string.gsub(p, "%%([0-9])", function (s) return "%" .. (s+1) end)
  -- Insert a position capture just after an optional leading '^' anchor...
  p = string.gsub(p, "^(^?)", "%1()", 1)
  -- ...and just before an optional trailing '$' anchor.
  p = string.gsub(p, "($?)$", "()%1", 1)
  local t = {string.match(s, p)}
  -- First and last captures are the positions bounding the whole match.
  return string.sub(s, t[1], t[#t] - 1)
end
a,b = string.find('', '') -- empty patterns are tricky
assert(a == 1 and b == 0);
a,b = string.find('alo', '')
assert(a == 1 and b == 0)
a,b = string.find('a\0o a\0o a\0o', 'a', 1) -- first position
assert(a == 1 and b == 1)
a,b = string.find('a\0o a\0o a\0o', 'a\0o', 2) -- starts in the midle
assert(a == 5 and b == 7)
a,b = string.find('a\0o a\0o a\0o', 'a\0o', 9) -- starts in the midle
assert(a == 9 and b == 11)
a,b = string.find('a\0a\0a\0a\0\0ab', '\0ab', 2); -- finds at the end
assert(a == 9 and b == 11);
a,b = string.find('a\0a\0a\0a\0\0ab', 'b') -- last position
assert(a == 11 and b == 11)
assert(string.find('a\0a\0a\0a\0\0ab', 'b\0') == nil) -- check ending
assert(string.find('', '\0') == nil)
assert(string.find('alo123alo', '12') == 4)
assert(string.find('alo123alo', '^12') == nil)
assert(f('aloALO', '%l*') == 'alo')
assert(f('aLo_ALO', '%a*') == 'aLo')
assert(f('aaab', 'a*') == 'aaa');
assert(f('aaa', '^.*$') == 'aaa');
assert(f('aaa', 'b*') == '');
assert(f('aaa', 'ab*a') == 'aa')
assert(f('aba', 'ab*a') == 'aba')
assert(f('aaab', 'a+') == 'aaa')
assert(f('aaa', '^.+$') == 'aaa')
assert(f('aaa', 'b+') == nil)
assert(f('aaa', 'ab+a') == nil)
assert(f('aba', 'ab+a') == 'aba')
assert(f('a$a', '.$') == 'a')
assert(f('a$a', '.%$') == 'a$')
assert(f('a$a', '.$.') == 'a$a')
assert(f('a$a', '$$') == nil)
assert(f('a$b', 'a$') == nil)
assert(f('a$a', '$') == '')
assert(f('', 'b*') == '')
assert(f('aaa', 'bb*') == nil)
assert(f('aaab', 'a-') == '')
assert(f('aaa', '^.-$') == 'aaa')
assert(f('aabaaabaaabaaaba', 'b.*b') == 'baaabaaabaaab')
assert(f('aabaaabaaabaaaba', 'b.-b') == 'baaab')
assert(f('alo xo', '.o$') == 'xo')
assert(f(' \n isto é assim', '%S%S*') == 'isto')
assert(f(' \n isto é assim', '%S*$') == 'assim')
assert(f(' \n isto é assim', '[a-z]*$') == 'assim')
assert(f('um caracter ? extra', '[^%sa-z]') == '?')
assert(f('', 'a?') == '')
assert(f('á', 'á?') == 'á')
assert(f('ábl', 'á?b?l?') == 'ábl')
assert(f(' ábl', 'á?b?l?') == '')
assert(f('aa', '^aa?a?a') == 'aa')
assert(f(']]]áb', '[^]]') == 'á')
assert(f("0alo alo", "%x*") == "0a")
assert(f("alo alo", "%C+") == "alo alo")
print('+')
assert(f1('alo alx 123 b\0o b\0o', '(..*) %1') == "b\0o b\0o")
assert(f1('axz123= 4= 4 34', '(.+)=(.*)=%2 %1') == '3= 4= 4 3')
assert(f1('=======', '^(=*)=%1$') == '=======')
assert(string.match('==========', '^([=]*)=%1$') == nil)
-- Returns the integers i, i+1, ..., j as multiple return values (built by
-- recursion); returns nothing when i > j.
local function range (i, j)
  if i <= j then
    return i, range(i+1, j)
  end
end
local abc = string.char(range(0, 255));
assert(string.len(abc) == 256)
-- Returns, in byte order, every character of `abc` (the string holding
-- chars 0..255) that matches the character-class pattern `p`.
function strset (p)
  local chars = {}
  string.gsub(abc, p, function (c) chars[#chars + 1] = c end)
  return table.concat(chars)
end
assert(string.len(strset('[\200-\210]')) == 11)
assert(strset('[a-z]') == "abcdefghijklmnopqrstuvwxyz")
assert(strset('[a-z%d]') == strset('[%da-uu-z]'))
assert(strset('[a-]') == "-a")
assert(strset('[^%W]') == strset('[%w]'))
assert(strset('[]%%]') == '%]')
assert(strset('[a%-z]') == '-az')
assert(strset('[%^%[%-a%]%-b]') == '-[]^ab')
assert(strset('%Z') == strset('[\1-\255]'))
assert(strset('.') == strset('[\1-\255%z]'))
print('+');
assert(string.match("alo xyzK", "(%w+)K") == "xyz")
assert(string.match("254 K", "(%d*)K") == "")
assert(string.match("alo ", "(%w*)$") == "")
assert(string.match("alo ", "(%w+)$") == nil)
assert(string.find("(álo)", "%(á") == 1)
local a, b, c, d, e = string.match("âlo alo", "^(((.).).* (%w*))$")
assert(a == 'âlo alo' and b == 'âl' and c == 'â' and d == 'alo' and e == nil)
a, b, c, d = string.match('0123456789', '(.+(.?)())')
assert(a == '0123456789' and b == '' and c == 11 and d == nil)
print('+')
assert(string.gsub('ülo ülo', 'ü', 'x') == 'xlo xlo')
assert(string.gsub('alo úlo ', ' +$', '') == 'alo úlo') -- trim
assert(string.gsub(' alo alo ', '^%s*(.-)%s*$', '%1') == 'alo alo') -- double trim
assert(string.gsub('alo alo \n 123\n ', '%s+', ' ') == 'alo alo 123 ')
t = "abç d"
a, b = string.gsub(t, '(.)', '%1@')
assert('@'..a == string.gsub(t, '', '@') and b == 5)
a, b = string.gsub('abçd', '(.)', '%0@', 2)
assert(a == 'a@b@çd' and b == 2)
assert(string.gsub('alo alo', '()[al]', '%1') == '12o 56o')
assert(string.gsub("abc=xyz", "(%w*)(%p)(%w+)", "%3%2%1-%0") ==
"xyz=abc-abc=xyz")
assert(string.gsub("abc", "%w", "%1%0") == "aabbcc")
assert(string.gsub("abc", "%w+", "%0%1") == "abcabc")
assert(string.gsub('áéí', '$', '\0óú') == 'áéí\0óú')
assert(string.gsub('', '^', 'r') == 'r')
assert(string.gsub('', '$', 'r') == 'r')
print('+')
assert(string.gsub("um (dois) tres (quatro)", "(%(%w+%))", string.upper) ==
"um (DOIS) tres (QUATRO)")
do
local function setglobal (n,v) rawset(_G, n, v) end
string.gsub("a=roberto,roberto=a", "(%w+)=(%w%w*)", setglobal)
assert(_G.a=="roberto" and _G.roberto=="a")
end
function f(a,b) return string.gsub(a,'.',b) end
assert(string.gsub("trocar tudo em |teste|b| é |beleza|al|", "|([^|]*)|([^|]*)|", f) ==
"trocar tudo em bbbbb é alalalalalal")
local function dostring (s) return loadstring(s)() or "" end
assert(string.gsub("alo $a=1$ novamente $return a$", "$([^$]*)%$", dostring) ==
"alo novamente 1")
x = string.gsub("$x=string.gsub('alo', '.', string.upper)$ assim vai para $return x$",
"$([^$]*)%$", dostring)
assert(x == ' assim vai para ALO')
t = {}
s = 'a alo jose joao'
r = string.gsub(s, '()(%w+)()', function (a,w,b)
assert(string.len(w) == b-a);
t[a] = b-a;
end)
assert(r == s and t[1] == 1 and t[3] == 3 and t[7] == 4 and t[13] == 4)
-- A string is balanced when, after deleting every balanced "(...)" group
-- (the %b() pattern item), no stray parenthesis remains.
function isbalanced (s)
  local stripped = string.gsub(s, "%b()", "")
  return string.find(stripped, "[()]") == nil
end
assert(isbalanced("(9 ((8))(\0) 7) \0\0 a b ()(c)() a"))
assert(not isbalanced("(9 ((8) 7) a b (\0 c) a"))
assert(string.gsub("alo 'oi' alo", "%b''", '"') == 'alo " alo')
local t = {"apple", "orange", "lime"; n=0}
assert(string.gsub("x and x and x", "x", function () t.n=t.n+1; return t[t.n] end)
== "apple and orange and lime")
t = {n=0}
string.gsub("first second word", "%w%w*", function (w) t.n=t.n+1; t[t.n] = w end)
assert(t[1] == "first" and t[2] == "second" and t[3] == "word" and t.n == 3)
t = {n=0}
assert(string.gsub("first second word", "%w+",
function (w) t.n=t.n+1; t[t.n] = w end, 2) == "first second word")
assert(t[1] == "first" and t[2] == "second" and t[3] == nil)
assert(not pcall(string.gsub, "alo", "(.", print))
assert(not pcall(string.gsub, "alo", ".)", print))
assert(not pcall(string.gsub, "alo", "(.", {}))
assert(not pcall(string.gsub, "alo", "(.)", "%2"))
assert(not pcall(string.gsub, "alo", "(%1)", "a"))
assert(not pcall(string.gsub, "alo", "(%0)", "a"))
-- big strings
local a = string.rep('a', 300000)
assert(string.find(a, '^a*.?$'))
assert(not string.find(a, '^a*.?b$'))
assert(string.find(a, '^a-.?$'))
-- deep nest of gsubs
-- Reverses `s` via recursive gsub: each match swaps the first character
-- with the recursively reversed remainder. Strings shorter than two
-- characters match nothing and come back unchanged. Note: like gsub,
-- this propagates (string, match-count) as multiple return values.
function rev (s)
  return string.gsub(s, "(.)(.+)", function (head, tail) return rev(tail) .. head end)
end
local x = string.rep('012345', 10)
assert(rev(rev(x)) == x)
-- gsub with tables
assert(string.gsub("alo alo", ".", {}) == "alo alo")
assert(string.gsub("alo alo", "(.)", {a="AA", l=""}) == "AAo AAo")
assert(string.gsub("alo alo", "(.).", {a="AA", l="K"}) == "AAo AAo")
assert(string.gsub("alo alo", "((.)(.?))", {al="AA", o=false}) == "AAo AAo")
assert(string.gsub("alo alo", "().", {2,5,6}) == "256 alo")
t = {}; setmetatable(t, {__index = function (t,s) return string.upper(s) end})
assert(string.gsub("a alo b hi", "%w%w+", t) == "a ALO b HI")
-- tests for gmatch
assert(string.gfind == string.gmatch)
local a = 0
for i in string.gmatch('abcde', '()') do assert(i == a+1); a=i end
assert(a==6)
t = {n=0}
for w in string.gmatch("first second word", "%w+") do
t.n=t.n+1; t[t.n] = w
end
assert(t[1] == "first" and t[2] == "second" and t[3] == "word")
t = {3, 6, 9}
for i in string.gmatch ("xuxx uu ppar r", "()(.)%2") do
assert(i == table.remove(t, 1))
end
assert(table.getn(t) == 0)
t = {}
for i,j in string.gmatch("13 14 10 = 11, 15= 16, 22=23", "(%d+)%s*=%s*(%d+)") do
t[i] = j
end
a = 0
for k,v in pairs(t) do assert(k+1 == v+0); a=a+1 end
assert(a == 3)
-- tests for `%f' (`frontiers')
assert(string.gsub("aaa aa a aaa a", "%f[%w]a", "x") == "xaa xa x xaa x")
assert(string.gsub("[[]] [][] [[[[", "%f[[].", "x") == "x[]] x]x] x[[[")
assert(string.gsub("01abc45de3", "%f[%d]", ".") == ".01abc.45de.3")
assert(string.gsub("01abc45 de3x", "%f[%D]%w", ".") == "01.bc45 de3.")
assert(string.gsub("function", "%f[\1-\255]%w", ".") == ".unction")
assert(string.gsub("function", "%f[^\1-\255]", ".") == "function.")
local i, e = string.find(" alo aalo allo", "%f[%S].-%f[%s].-%f[%S]")
assert(i == 2 and e == 5)
local k = string.match(" alo aalo allo", "%f[%S](.-%f[%s].-%f[%S])")
assert(k == 'alo ')
local a = {1, 5, 9, 14, 17,}
for k in string.gmatch("alo alo th02 is 1hat", "()%f[%w%d]") do
assert(table.remove(a, 1) == k)
end
assert(table.getn(a) == 0)
print('OK')
| apache-2.0 |
gradle/gradle | subprojects/ivy/src/main/java/org/gradle/api/publish/ivy/internal/artifact/ArchiveTaskBasedIvyArtifact.java | 2460 | /*
* Copyright 2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.publish.ivy.internal.artifact;
import com.google.common.collect.ImmutableSet;
import org.gradle.api.internal.tasks.DefaultTaskDependency;
import org.gradle.api.internal.tasks.TaskDependencyInternal;
import org.gradle.api.publish.ivy.internal.publisher.IvyPublicationIdentity;
import org.gradle.api.tasks.TaskDependency;
import org.gradle.api.tasks.bundling.AbstractArchiveTask;
import java.io.File;
/**
 * An Ivy artifact backed by an {@link AbstractArchiveTask}: type, extension,
 * classifier and the published file are all derived from the archive task,
 * the default name comes from the publication identity, and building the
 * artifact depends on running the task.
 */
public class ArchiveTaskBasedIvyArtifact extends AbstractIvyArtifact {
    private final AbstractArchiveTask archiveTask;
    private final IvyPublicationIdentity identity;
    private final TaskDependencyInternal buildDependencies;

    public ArchiveTaskBasedIvyArtifact(AbstractArchiveTask archiveTask, IvyPublicationIdentity identity) {
        this.archiveTask = archiveTask;
        this.identity = identity;
        // The artifact can only be published after the archive task has run.
        this.buildDependencies = new DefaultTaskDependency(null, ImmutableSet.<Object>of(archiveTask));
    }

    @Override
    protected String getDefaultName() {
        return identity.getModule();
    }

    // Both type and extension intentionally default to the archive's
    // extension (e.g. "jar", "zip").
    @Override
    protected String getDefaultType() {
        return archiveTask.getArchiveExtension().getOrNull();
    }

    @Override
    protected String getDefaultExtension() {
        return archiveTask.getArchiveExtension().getOrNull();
    }

    @Override
    protected String getDefaultClassifier() {
        return archiveTask.getArchiveClassifier().getOrNull();
    }

    @Override
    protected String getDefaultConf() {
        // No default Ivy configuration for archive-backed artifacts.
        return null;
    }

    @Override
    protected TaskDependency getDefaultBuildDependencies() {
        return buildDependencies;
    }

    @Override
    public File getFile() {
        return archiveTask.getArchiveFile().get().getAsFile();
    }

    @Override
    public boolean shouldBePublished() {
        // A disabled archive task produces no output, so skip publication.
        return archiveTask.isEnabled();
    }
}
| apache-2.0 |
cushon/error-prone | check_api/src/main/java/com/google/errorprone/fixes/AppliedFix.java | 4791 | /*
* Copyright 2011 The Error Prone Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.errorprone.fixes;
import static com.google.common.base.Preconditions.checkArgument;
import com.sun.tools.javac.tree.EndPosTable;
import java.io.IOException;
import java.io.LineNumberReader;
import java.io.StringReader;
import java.util.HashSet;
import java.util.Set;
import javax.annotation.Nullable;
/**
* Represents the corrected source which we think was intended, by applying a Fix. This is used to
* generate the "Did you mean?" snippet in the error message.
*
* @author alexeagle@google.com (Alex Eagle)
*/
public class AppliedFix {
  // Single-line preview of the fixed source (trimmed, trailing // comment
  // stripped), or the fixed phrase "to remove this line".
  private final String snippet;
  // True when the fix's net effect on the snippet line was to empty it.
  private final boolean isRemoveLine;

  private AppliedFix(String snippet, boolean isRemoveLine) {
    this.snippet = snippet;
    this.isRemoveLine = isRemoveLine;
  }

  public CharSequence getNewCodeSnippet() {
    return snippet;
  }

  public boolean isRemoveLine() {
    return isRemoveLine;
  }

  /** Applies {@link Fix}es to one fixed source text / end-position table pair. */
  public static class Applier {
    private final CharSequence source;
    private final EndPosTable endPositions;

    public Applier(CharSequence source, EndPosTable endPositions) {
      this.source = source;
      this.endPositions = endPositions;
    }

    /**
     * Applies the suggestedFix to the source. Returns null if applying the fix results in no change
     * to the source, or a change only to imports.
     */
    @Nullable
    public AppliedFix apply(Fix suggestedFix) {
      StringBuilder replaced = new StringBuilder(source);

      // We have to apply the replacements in descending order, since otherwise the positions in
      // subsequent replacements are invalidated by earlier replacements.
      Set<Replacement> replacements = descending(suggestedFix.getReplacements(endPositions));

      // Line numbers (in the ORIGINAL source) touched by any replacement.
      Set<Integer> modifiedLines = new HashSet<>();
      for (Replacement repl : replacements) {
        checkArgument(
            repl.endPosition() <= source.length(),
            "End [%s] should not exceed source length [%s]",
            repl.endPosition(),
            source.length());
        replaced.replace(repl.startPosition(), repl.endPosition(), repl.replaceWith());

        // Find the line number(s) being modified
        // TODO: this could be more efficient
        try {
          LineNumberReader lineNumberReader =
              new LineNumberReader(new StringReader(source.toString()));
          // Skipping to the replacement's start offset leaves the reader's
          // line counter on the line containing that offset.
          lineNumberReader.skip(repl.startPosition());
          modifiedLines.add(lineNumberReader.getLineNumber());
        } catch (IOException e) {
          // impossible since source is in-memory
        }
      }

      // Not sure this is really the right behavior, but otherwise we can end up with an infinite
      // loop below.
      if (modifiedLines.isEmpty()) {
        return null;
      }

      // Scan the FIXED text until the reader reaches a line whose (original)
      // number was modified; the next line read is used as the preview.
      LineNumberReader lineNumberReader =
          new LineNumberReader(new StringReader(replaced.toString()));
      String snippet = null;
      boolean isRemoveLine = false;
      try {
        while (!modifiedLines.contains(lineNumberReader.getLineNumber())) {
          lineNumberReader.readLine();
        }
        // TODO: this is over-simplified; need a failing test case
        snippet = lineNumberReader.readLine();
        if (snippet == null) {
          // The file's last line was removed.
          snippet = "";
        } else {
          snippet = snippet.trim();
          // snip comment from line
          if (snippet.contains("//")) {
            snippet = snippet.substring(0, snippet.indexOf("//")).trim();
          }
        }
        if (snippet.isEmpty()) {
          isRemoveLine = true;
          snippet = "to remove this line";
        }
      } catch (IOException e) {
        // impossible since source is in-memory
      }
      return new AppliedFix(snippet, isRemoveLine);
    }

    /** Get the replacements in an appropriate order to apply correctly. */
    private static Set<Replacement> descending(Set<Replacement> set) {
      Replacements replacements = new Replacements();
      set.forEach(replacements::add);
      return replacements.descending();
    }
  }

  public static Applier fromSource(CharSequence source, EndPosTable endPositions) {
    return new Applier(source, endPositions);
  }
}
| apache-2.0 |
diennea/majordodo | majordodo-core/src/test/java/majordodo/task/FileCommitLogSimpleTest.java | 4316 | /*
Licensed to Diennea S.r.l. under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. Diennea S.r.l. licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package majordodo.task;
import majordodo.task.BrokerStatusSnapshot;
import majordodo.task.FileCommitLog;
import majordodo.task.LogSequenceNumber;
import majordodo.task.Task;
import majordodo.task.StatusEdit;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
/**
* Simple tests for FileCommit Log
*
* @author enrico.olivelli
*/
public class FileCommitLogSimpleTest {

    @Rule
    public TemporaryFolder folderSnapshots = new TemporaryFolder();

    @Rule
    public TemporaryFolder folderLogs = new TemporaryFolder();

    /**
     * Round-trip check: writes four status edits to a fresh FileCommitLog,
     * closes it, then reopens the same directories and verifies that recovery
     * replays exactly those edits, in order, with strictly increasing
     * sequence numbers on ledger 1.
     */
    @Test
    public void test() throws Exception {
        try (FileCommitLog log = new FileCommitLog(folderSnapshots.getRoot().toPath(), folderLogs.getRoot().toPath(), 1024 * 1024);) {
            BrokerStatusSnapshot snapshot = log.loadBrokerStatusSnapshot();
            // A brand-new log must have nothing to replay.
            log.recovery(snapshot.getActualLogSequenceNumber(), (a, b) -> {
                fail();
            }, false);
            log.startWriting();
            // (-1, -1) marks the "no snapshot taken yet" position.
            assertEquals(snapshot.getActualLogSequenceNumber().ledgerId, -1);
            assertEquals(snapshot.getActualLogSequenceNumber().sequenceNumber, -1);
            assertTrue(snapshot.getTasks().isEmpty());
            StatusEdit edit1 = StatusEdit.ADD_TASK(1, "mytype", "param1", "myuser", 0, 0, 0, null, 0, null, null);
            StatusEdit edit2 = StatusEdit.WORKER_CONNECTED("node1", "psasa", "localhost", new HashSet<>(), System.currentTimeMillis());
            StatusEdit edit3 = StatusEdit.ASSIGN_TASK_TO_WORKER(1, "worker1", 1, "db1,db2");
            StatusEdit edit4 = StatusEdit.TASK_STATUS_CHANGE(1, "node1", Task.STATUS_FINISHED, "theresult");
            LogSequenceNumber logStatusEdit1 = log.logStatusEdit(edit1);
            LogSequenceNumber logStatusEdit2 = log.logStatusEdit(edit2);
            LogSequenceNumber logStatusEdit3 = log.logStatusEdit(edit3);
            LogSequenceNumber logStatusEdit4 = log.logStatusEdit(edit4);
        }
        try (FileCommitLog log = new FileCommitLog(folderSnapshots.getRoot().toPath(), folderLogs.getRoot().toPath(), 1024 * 1024);) {
            BrokerStatusSnapshot snapshot = log.loadBrokerStatusSnapshot();
            System.out.println("snapshot:" + snapshot);
            // no snapshot was taken...
            assertEquals(snapshot.getActualLogSequenceNumber().ledgerId, -1);
            assertEquals(snapshot.getActualLogSequenceNumber().sequenceNumber, -1);
            List<StatusEdit> edits = new ArrayList<>();
            // Tracks the last sequence number seen, to assert monotonicity.
            AtomicLong last = new AtomicLong(-1);
            log.recovery(snapshot.getActualLogSequenceNumber(), (a, b) -> {
                System.out.println("entry:" + a + ", " + b);
                assertEquals(1, a.ledgerId);
                assertTrue(a.sequenceNumber > last.get());
                edits.add(b);
                last.set(a.sequenceNumber);
            }, false);
            log.startWriting();
            // Replayed edits must match what was written, in write order.
            assertEquals(StatusEdit.TYPE_ADD_TASK, edits.get(0).editType);
            assertEquals(StatusEdit.TYPE_WORKER_CONNECTED, edits.get(1).editType);
            assertEquals(StatusEdit.TYPE_ASSIGN_TASK_TO_WORKER, edits.get(2).editType);
            assertEquals("db1,db2", edits.get(2).resources);
            assertEquals(StatusEdit.TYPE_TASK_STATUS_CHANGE, edits.get(3).editType);
        }
    }
}
| apache-2.0 |
request/request | tests/test-isUrl.js | 2291 | 'use strict'
var http = require('http')
var request = require('../index')
var tape = require('tape')
// Minimal HTTP server that answers 200 "ok" to every request; the tests
// below only check that the request was built and completed successfully.
var s = http.createServer(function (req, res) {
  res.statusCode = 200
  res.end('ok')
})

tape('setup', function (t) {
  // Listen on an OS-assigned free port and remember the resulting URL
  // for the other tests.
  s.listen(0, function () {
    s.port = this.address().port
    s.url = 'http://localhost:' + s.port
    t.end()
  })
})
tape('lowercase', function (t) {
request(s.url, function (err, resp, body) {
t.equal(err, null)
t.equal(body, 'ok')
t.end()
})
})
tape('uppercase', function (t) {
request(s.url.replace('http', 'HTTP'), function (err, resp, body) {
t.equal(err, null)
t.equal(body, 'ok')
t.end()
})
})
tape('mixedcase', function (t) {
request(s.url.replace('http', 'HtTp'), function (err, resp, body) {
t.equal(err, null)
t.equal(body, 'ok')
t.end()
})
})
tape('hostname and port', function (t) {
request({
uri: {
protocol: 'http:',
hostname: 'localhost',
port: s.port
}
}, function (err, res, body) {
t.equal(err, null)
t.equal(body, 'ok')
t.end()
})
})
// NOTE(review): this test is byte-identical to the 'hostname and port' test
// above — presumably it was meant to exercise a variant (e.g. a string port
// value); confirm the intent or drop the duplicate.
tape('hostname and port 1', function (t) {
  request({
    uri: {
      protocol: 'http:',
      hostname: 'localhost',
      port: s.port
    }
  }, function (err, res, body) {
    t.equal(err, null)
    t.equal(body, 'ok')
    t.end()
  })
})
tape('hostname and port 2', function (t) {
request({
protocol: 'http:',
hostname: 'localhost',
port: s.port
}, {
// need this empty options object, otherwise request thinks no uri was set
}, function (err, res, body) {
t.equal(err, null)
t.equal(body, 'ok')
t.end()
})
})
tape('hostname and port 3', function (t) {
request({
protocol: 'http:',
hostname: 'localhost',
port: s.port
}, function (err, res, body) {
t.notEqual(err, null)
t.equal(err.message, 'options.uri is a required argument')
t.equal(body, undefined)
t.end()
})
})
tape('hostname and query string', function (t) {
request({
uri: {
protocol: 'http:',
hostname: 'localhost',
port: s.port
},
qs: {
test: 'test'
}
}, function (err, res, body) {
t.equal(err, null)
t.equal(body, 'ok')
t.end()
})
})
// Shut the shared server down so the test process can exit cleanly.
tape('cleanup', function (t) {
  s.close(function () {
    t.end()
  })
})
| apache-2.0 |
Thingee/cinder | cinder/tests/zonemanager/test_brcd_fc_zone_client_cli.py | 12985 | # (c) Copyright 2014 Brocade Communications Systems Inc.
# All Rights Reserved.
#
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Unit tests for brcd fc zone client cli."""
import mock
from cinder import exception
from cinder.openstack.common import log as logging
from cinder.openstack.common import processutils
from cinder import test
from cinder.zonemanager.drivers.brocade.brcd_fc_zone_client_cli \
import BrcdFCZoneClientCLI
import cinder.zonemanager.drivers.brocade.fc_zone_constants as ZoneConstant
from mock import patch
LOG = logging.getLogger(__name__)
nsshow = '20:1a:00:05:1e:e8:e3:29'
switch_data = [' N 011a00;2,3;20:1a:00:05:1e:e8:e3:29;\
20:1a:00:05:1e:e8:e3:29;na',
' Fabric Port Name: 20:1a:00:05:1e:e8:e3:29']
cfgactvshow = ['Effective configuration:\n',
' cfg:\tOpenStack_Cfg\t\n',
' zone:\topenstack50060b0000c26604201900051ee8e329\t\n',
'\t\t50:06:0b:00:00:c2:66:04\n',
'\t\t20:19:00:05:1e:e8:e3:29\n']
active_zoneset = {
'zones': {
'openstack50060b0000c26604201900051ee8e329':
['50:06:0b:00:00:c2:66:04', '20:19:00:05:1e:e8:e3:29']},
'active_zone_config': 'OpenStack_Cfg'}
active_zoneset_multiple_zones = {
'zones': {
'openstack50060b0000c26604201900051ee8e329':
['50:06:0b:00:00:c2:66:04', '20:19:00:05:1e:e8:e3:29'],
'openstack50060b0000c26602201900051ee8e327':
['50:06:0b:00:00:c2:66:02', '20:19:00:05:1e:e8:e3:27']},
'active_zone_config': 'OpenStack_Cfg'}
new_zone = {'openstack10000012345678902001009876543210':
['10:00:00:12:34:56:78:90', '20:01:00:98:76:54:32:10']}
new_zones = {'openstack10000012345678902001009876543210':
['10:00:00:12:34:56:78:90', '20:01:00:98:76:54:32:10'],
'openstack10000011111111112001001111111111':
['10:00:00:11:11:11:11:11', '20:01:00:11:11:11:11:11']}
zone_names_to_delete = 'openstack50060b0000c26604201900051ee8e329'
supported_firmware = ['Kernel: 2.6', 'Fabric OS: v7.0.1']
unsupported_firmware = ['Fabric OS: v6.2.1']
class TestBrcdFCZoneClientCLI(BrcdFCZoneClientCLI, test.TestCase):
def setUp(self):
    super(TestBrcdFCZoneClientCLI, self).setUp()

# Override __init__ so only the TestCase machinery is initialized; this
# deliberately bypasses BrcdFCZoneClientCLI.__init__ (the test class mixes
# the client in so its methods can be called directly on `self`).
def __init__(self, *args, **kwargs):
    test.TestCase.__init__(self, *args, **kwargs)
@patch.object(BrcdFCZoneClientCLI, '_get_switch_info')
def test_get_active_zone_set(self, get_switch_info_mock):
    # cfgactvshow CLI output must be parsed into the active_zoneset dict.
    cmd_list = [ZoneConstant.GET_ACTIVE_ZONE_CFG]
    get_switch_info_mock.return_value = cfgactvshow
    active_zoneset_returned = self.get_active_zone_set()
    get_switch_info_mock.assert_called_once_with(cmd_list)
    self.assertDictMatch(active_zoneset_returned, active_zoneset)

@patch.object(BrcdFCZoneClientCLI, '_run_ssh')
def test_get_active_zone_set_ssh_error(self, run_ssh_mock):
    # An SSH failure must surface as BrocadeZoningCliException.
    run_ssh_mock.side_effect = processutils.ProcessExecutionError
    self.assertRaises(exception.BrocadeZoningCliException,
                      self.get_active_zone_set)
@mock.patch.object(BrcdFCZoneClientCLI, 'get_active_zone_set')
@mock.patch.object(BrcdFCZoneClientCLI, 'apply_zone_change')
@mock.patch.object(BrcdFCZoneClientCLI, '_cfg_save')
def test_add_zones_new_zone_no_activate(self, cfg_save_mock,
                                        apply_zone_change_mock,
                                        get_active_zs_mock):
    """add_zones(activate=False) must save the cfg but never activate it.

    Stacked @patch decorators are applied bottom-up, so the first mock
    argument belongs to the *bottom* decorator (_cfg_save). The original
    parameter order was reversed, which made the return_value below land
    on the _cfg_save mock instead of get_active_zone_set's.
    """
    get_active_zs_mock.return_value = active_zoneset
    self.add_zones(new_zones, False)
    # The original used assert_called_twice(), which is not a real Mock
    # method (the call silently passed); use explicit checks instead.
    self.assertTrue(get_active_zs_mock.called)
    self.assertTrue(apply_zone_change_mock.called)
    self.assertTrue(cfg_save_mock.called)
@mock.patch.object(BrcdFCZoneClientCLI, 'get_active_zone_set')
@mock.patch.object(BrcdFCZoneClientCLI, 'apply_zone_change')
@mock.patch.object(BrcdFCZoneClientCLI, '_cfg_save')
@mock.patch.object(BrcdFCZoneClientCLI, 'activate_zoneset')
def test_add_zones_new_zone_activate(self, activate_zoneset_mock,
                                     cfg_save_mock,
                                     apply_zone_change_mock,
                                     get_active_zs_mock):
    """add_zones(activate=True) must save and then activate the zone cfg.

    Stacked @patch decorators are applied bottom-up, so the first mock
    argument belongs to the *bottom* decorator (activate_zoneset); the
    original parameter order was reversed.
    """
    get_active_zs_mock.return_value = active_zoneset
    self.add_zones(new_zone, True)
    # Replaces the original no-op style assert_called_once() calls with
    # explicit checks on the correctly-named mocks.
    self.assertTrue(get_active_zs_mock.called)
    self.assertTrue(apply_zone_change_mock.called)
    self.assertTrue(cfg_save_mock.called)
    self.assertTrue(activate_zoneset_mock.called)
@mock.patch.object(BrcdFCZoneClientCLI, '_ssh_execute')
def test_activate_zoneset(self, ssh_execute_mock):
    # activate_zoneset() relays the SSH execution result to the caller.
    ssh_execute_mock.return_value = True
    return_value = self.activate_zoneset('zoneset1')
    self.assertTrue(return_value)

@mock.patch.object(BrcdFCZoneClientCLI, '_ssh_execute')
def test_deactivate_zoneset(self, ssh_execute_mock):
    # Same relay behavior for deactivation.
    ssh_execute_mock.return_value = True
    return_value = self.deactivate_zoneset()
    self.assertTrue(return_value)
@mock.patch.object(BrcdFCZoneClientCLI, 'get_active_zone_set')
@mock.patch.object(BrcdFCZoneClientCLI, 'apply_zone_change')
@mock.patch.object(BrcdFCZoneClientCLI, '_cfg_save')
def test_delete_zones_activate_false(self, cfg_save_mock,
                                     apply_zone_change_mock,
                                     get_active_zs_mock):
    """delete_zones(activate=False) deletes the zone and saves the cfg.

    Stacked @patch decorators are applied bottom-up, so the first mock
    argument belongs to the *bottom* decorator (_cfg_save). The original
    parameter order was reversed, so active_zoneset_multiple_zones was
    assigned to the _cfg_save mock instead of get_active_zone_set's.
    """
    get_active_zs_mock.return_value = active_zoneset_multiple_zones
    with mock.patch.object(self, '_zone_delete') \
            as zone_delete_mock:
        self.delete_zones(zone_names_to_delete, False)
        self.assertTrue(get_active_zs_mock.called)
        self.assertTrue(apply_zone_change_mock.called)
        zone_delete_mock.assert_called_once_with(zone_names_to_delete)
        self.assertTrue(cfg_save_mock.called)
@patch.object(BrcdFCZoneClientCLI, 'get_active_zone_set')
@patch.object(BrcdFCZoneClientCLI, 'apply_zone_change')
@patch.object(BrcdFCZoneClientCLI, '_cfg_save')
@patch.object(BrcdFCZoneClientCLI, 'activate_zoneset')
def test_delete_zones_activate_true(self, activate_zs_mock,
                                    cfg_save_mock,
                                    apply_zone_change_mock,
                                    get_active_zs_mock):
    """delete_zones(activate=True) deletes, saves and re-activates the cfg.

    Stacked @patch decorators are applied bottom-up, so the first mock
    argument belongs to the *bottom* decorator (activate_zoneset); the
    original parameter order was reversed.
    """
    get_active_zs_mock.return_value = active_zoneset_multiple_zones
    with mock.patch.object(self, '_zone_delete') \
            as zone_delete_mock:
        self.delete_zones(zone_names_to_delete, True)
        self.assertTrue(get_active_zs_mock.called)
        self.assertTrue(apply_zone_change_mock.called)
        zone_delete_mock.assert_called_once_with(zone_names_to_delete)
        self.assertTrue(cfg_save_mock.called)
        self.assertTrue(activate_zs_mock.called)
@patch.object(BrcdFCZoneClientCLI, '_get_switch_info')
def test_get_nameserver_info(self, get_switch_info_mock):
    # Both the N-port line and the 'Fabric Port Name' line of the nsshow
    # fixture carry a WWN, so two (identical) entries are expected.
    ns_info_list = []
    ns_info_list_expected = ['20:1a:00:05:1e:e8:e3:29',
                             '20:1a:00:05:1e:e8:e3:29']
    get_switch_info_mock.return_value = (switch_data)
    ns_info_list = self.get_nameserver_info()
    self.assertEqual(ns_info_list, ns_info_list_expected)

@patch.object(BrcdFCZoneClientCLI, '_run_ssh')
def test_get_nameserver_info_ssh_error(self, run_ssh_mock):
    # SSH failures must surface as BrocadeZoningCliException.
    run_ssh_mock.side_effect = processutils.ProcessExecutionError
    self.assertRaises(exception.BrocadeZoningCliException,
                      self.get_nameserver_info)

@patch.object(BrcdFCZoneClientCLI, '_ssh_execute')
def test__cfg_save(self, ssh_execute_mock):
    # _cfg_save must issue the cfgsave command over SSH.
    cmd_list = [ZoneConstant.CFG_SAVE]
    self._cfg_save()
    ssh_execute_mock.assert_called_once_with(cmd_list, True, 1)

@patch.object(BrcdFCZoneClientCLI, 'apply_zone_change')
def test__zone_delete(self, apply_zone_change_mock):
    # The zone name must be quoted in the generated CLI command.
    zone_name = 'testzone'
    cmd_list = ['zonedelete', '"testzone"']
    self._zone_delete(zone_name)
    apply_zone_change_mock.assert_called_once_with(cmd_list)

@patch.object(BrcdFCZoneClientCLI, 'apply_zone_change')
def test__cfg_trans_abort(self, apply_zone_change_mock):
    # The abort command is only issued when a transaction is abortable.
    # NOTE(review): assert_called_once() (no args) is not a real method on
    # older mock releases and silently passes; consider call_count checks.
    cmd_list = [ZoneConstant.CFG_ZONE_TRANS_ABORT]
    with mock.patch.object(self, '_is_trans_abortable') \
            as is_trans_abortable_mock:
        is_trans_abortable_mock.return_value = True
        self._cfg_trans_abort()
        is_trans_abortable_mock.assert_called_once()
        apply_zone_change_mock.assert_called_once_with(cmd_list)

@patch.object(BrcdFCZoneClientCLI, '_run_ssh')
def test__is_trans_abortable_true(self, run_ssh_mock):
    # 'Stream' is a test helper class, presumably defined later in this
    # module (outside this excerpt).
    cmd_list = [ZoneConstant.CFG_SHOW_TRANS]
    run_ssh_mock.return_value = (Stream(ZoneConstant.TRANS_ABORTABLE),
                                 None)
    data = self._is_trans_abortable()
    self.assertTrue(data)
    run_ssh_mock.assert_called_once_with(cmd_list, True, 1)

@patch.object(BrcdFCZoneClientCLI, '_run_ssh')
def test__is_trans_abortable_ssh_error(self, run_ssh_mock):
    # Empty stdout/stderr streams must be treated as a CLI failure.
    run_ssh_mock.return_value = (Stream(), Stream())
    self.assertRaises(exception.BrocadeZoningCliException,
                      self._is_trans_abortable)

@patch.object(BrcdFCZoneClientCLI, '_run_ssh')
def test__is_trans_abortable_false(self, run_ssh_mock):
    # "no outstanding zoning transaction" means nothing to abort.
    cmd_list = [ZoneConstant.CFG_SHOW_TRANS]
    cfgtransshow = 'There is no outstanding zoning transaction'
    run_ssh_mock.return_value = (Stream(cfgtransshow), None)
    data = self._is_trans_abortable()
    self.assertFalse(data)
    run_ssh_mock.assert_called_once_with(cmd_list, True, 1)

@patch.object(BrcdFCZoneClientCLI, '_run_ssh')
def test_apply_zone_change(self, run_ssh_mock):
    # apply_zone_change just forwards the command list over SSH.
    cmd_list = [ZoneConstant.CFG_SAVE]
    run_ssh_mock.return_value = (None, None)
    self.apply_zone_change(cmd_list)
    run_ssh_mock.assert_called_once_with(cmd_list, True, 1)

@patch.object(BrcdFCZoneClientCLI, '_run_ssh')
def test__get_switch_info(self, run_ssh_mock):
    cmd_list = [ZoneConstant.NS_SHOW]
    nsshow_list = [nsshow]
    run_ssh_mock.return_value = (Stream(nsshow), Stream())
    # NOTE(review): this local shadows the module-level `switch_data`
    # fixture of the same name.
    switch_data = self._get_switch_info(cmd_list)
    self.assertEqual(switch_data, nsshow_list)
    run_ssh_mock.assert_called_once_with(cmd_list, True, 1)
def test__parse_ns_output(self):
invalid_switch_data = [' N 011a00;20:1a:00:05:1e:e8:e3:29']
return_wwn_list = []
expected_wwn_list = ['20:1a:00:05:1e:e8:e3:29']
return_wwn_list = self._parse_ns_output(switch_data)
self.assertEqual(return_wwn_list, expected_wwn_list)
self.assertRaises(exception.InvalidParameterValue,
self._parse_ns_output, invalid_switch_data)
@patch.object(BrcdFCZoneClientCLI, '_execute_shell_cmd')
def test_is_supported_firmware(self, exec_shell_cmd_mock):
exec_shell_cmd_mock.return_value = (supported_firmware, None)
self.assertTrue(self.is_supported_firmware())
@patch.object(BrcdFCZoneClientCLI, '_execute_shell_cmd')
def test_is_supported_firmware_invalid(self, exec_shell_cmd_mock):
exec_shell_cmd_mock.return_value = (unsupported_firmware, None)
self.assertFalse(self.is_supported_firmware())
@patch.object(BrcdFCZoneClientCLI, '_execute_shell_cmd')
def test_is_supported_firmware_no_ssh_response(self, exec_shell_cmd_mock):
exec_shell_cmd_mock.return_value = (None, Stream())
self.assertFalse(self.is_supported_firmware())
@patch.object(BrcdFCZoneClientCLI, '_execute_shell_cmd')
def test_is_supported_firmware_ssh_error(self, exec_shell_cmd_mock):
exec_shell_cmd_mock.side_effect = processutils.ProcessExecutionError
self.assertRaises(exception.BrocadeZoningCliException,
self.is_supported_firmware)
class Channel(object):
def recv_exit_status(self):
return 0
class Stream(object):
def __init__(self, buffer=''):
self.buffer = buffer
self.channel = Channel()
def readlines(self):
return self.buffer
def splitlines(self):
return self.buffer.splitlines()
def close(self):
pass
def flush(self):
self.buffer = ''
| apache-2.0 |
gpiotti/tsflask | jqueryui/development-bundle/ui/minified/jquery.ui.effect-slide.min.js | 775 | /*! jQuery UI - v1.10.3 - 2013-10-05
* http://jqueryui.com
* Copyright 2013 jQuery Foundation and other contributors; Licensed MIT */
(function(e){e.effects.effect.slide=function(t,i){var o,s=e(this),n=["position","top","bottom","left","right","width","height"],r=e.effects.setMode(s,t.mode||"show"),f="show"===r,c=t.direction||"left",a="up"===c||"down"===c?"top":"left",h="up"===c||"left"===c,u={};e.effects.save(s,n),s.show(),o=t.distance||s["top"===a?"outerHeight":"outerWidth"](!0),e.effects.createWrapper(s).css({overflow:"hidden"}),f&&s.css(a,h?isNaN(o)?"-"+o:-o:o),u[a]=(f?h?"+=":"-=":h?"-=":"+=")+o,s.animate(u,{queue:!1,duration:t.duration,easing:t.easing,complete:function(){"hide"===r&&s.hide(),e.effects.restore(s,n),e.effects.removeWrapper(s),i()}})}})(jQuery); | apache-2.0 |
drusellers/MassTransit | src/MassTransit/Configuration/PipeConfigurators/LogPipeSpecification.cs | 1702 | // Copyright 2007-2015 Chris Patterson, Dru Sellers, Travis Smith, et. al.
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
namespace MassTransit.PipeConfigurators
{
using System.Collections.Generic;
using System.IO;
using Configurators;
using PipeBuilders;
using Pipeline.Filters;
public class LogPipeSpecification<T> :
IPipeSpecification<T>
where T : class, PipeContext
{
readonly LogFormatter<T> _formatter;
readonly TextWriter _writer;
public LogPipeSpecification(TextWriter writer, LogFormatter<T> formatter)
{
_writer = writer;
_formatter = formatter;
}
void IPipeSpecification<T>.Apply(IPipeBuilder<T> builder)
{
builder.AddFilter(new LogFilter<T>(_writer, _formatter));
}
IEnumerable<ValidationResult> Configurator.Validate()
{
if (_writer == null)
yield return this.Failure("TextWriter", "must not be null");
if (_formatter == null)
yield return this.Failure("Formatter", "must not be null");
}
}
} | apache-2.0 |
drunklite/spring-loaded | springloaded/src/test/java/org/springsource/loaded/test/InterfaceExtractorTest.java | 4681 | /*
* Copyright 2010-2012 VMware and contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springsource.loaded.test;
import org.junit.Test;
import org.springsource.loaded.InterfaceExtractor;
import org.springsource.loaded.TypeDescriptor;
import org.springsource.loaded.TypeDescriptorExtractor;
import org.springsource.loaded.TypeRegistry;
/**
* Tests for interface extraction.
*
* @author Andy Clement
* @since 1.0
*/
public class InterfaceExtractorTest extends SpringLoadedTests {
/**
* Attempt simple interface extraction for a class with one void no-arg method
*/
@Test
public void simpleExtraction() {
TypeRegistry registry = getTypeRegistry(null);
byte[] classBytes = loadBytesForClass("data.SimpleClass");
TypeDescriptor td = new TypeDescriptorExtractor(registry).extract(classBytes, true);
// @formatter:off
checkType(classBytes,
"CLASS: data/SimpleClass v50 0x0020(synchronized) super java/lang/Object\n" +
"SOURCE: SimpleClass.java null\n" +
"METHOD: 0x0000() <init>()V\n" +
" CODE\n" +
" L0\n" +
" ALOAD 0\n" +
" INVOKESPECIAL java/lang/Object.<init>()V\n" +
" RETURN\n" +
" L1\n" +
"METHOD: 0x0001(public) foo()V\n" +
" CODE\n" +
" L0\n" +
" RETURN\n" +
" L1\n" +
"\n");
// @formatter:on
byte[] bytes = InterfaceExtractor.extract(classBytes, registry, td);
// @formatter:off
checkType(
bytes,
"CLASS: data/SimpleClass__I v50 0x0601(public abstract interface) super java/lang/Object\n"
+
"METHOD: 0x0401(public abstract) ___init___(Ldata/SimpleClass;)V\n"
+
"METHOD: 0x0401(public abstract) foo(Ldata/SimpleClass;)V\n"
+
"METHOD: 0x0401(public abstract) __execute([Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/Object;\n"
+
"METHOD: 0x0401(public abstract) ___clinit___()V\n"
+
"METHOD: 0x0401(public abstract) hashCode(Ldata/SimpleClass;)I\n"
+
"METHOD: 0x0401(public abstract) equals(Ldata/SimpleClass;Ljava/lang/Object;)Z\n"
+
"METHOD: 0x0401(public abstract) clone(Ldata/SimpleClass;)Ljava/lang/Object; java/lang/CloneNotSupportedException\n"
+
"METHOD: 0x0401(public abstract) toString(Ldata/SimpleClass;)Ljava/lang/String;\n" +
"\n");
// @formatter:on
}
@Test
public void varietyOfMethods() {
TypeRegistry registry = getTypeRegistry(null);
byte[] classBytes = loadBytesForClass("data.SimpleClassFour");
TypeDescriptor td = new TypeDescriptorExtractor(registry).extract(classBytes, true);
byte[] bytes = InterfaceExtractor.extract(classBytes, registry, td);
// @formatter:off
checkType(
bytes,
"CLASS: data/SimpleClassFour__I v50 0x0601(public abstract interface) super java/lang/Object\n"
+
"METHOD: 0x0401(public abstract) ___init___(Ldata/SimpleClassFour;I)V\n"
+
"METHOD: 0x0401(public abstract) ___init___(Ldata/SimpleClassFour;Ljava/lang/String;)V\n"
+
"METHOD: 0x0401(public abstract) boo(Ldata/SimpleClassFour;)V\n"
+
"METHOD: 0x0401(public abstract) foo(Ldata/SimpleClassFour;)V\n"
+
"METHOD: 0x0401(public abstract) goo(Ldata/SimpleClassFour;IDLjava/lang/String;)Ljava/lang/String;\n"
+
"METHOD: 0x0401(public abstract) hoo(Ldata/SimpleClassFour;J)I\n"
+
"METHOD: 0x0401(public abstract) woo(Ldata/SimpleClassFour;)V java/lang/RuntimeException java/lang/IllegalStateException\n"
+
"METHOD: 0x0401(public abstract) __execute([Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/Object;\n"
+
"METHOD: 0x0401(public abstract) ___clinit___()V\n"
+
"METHOD: 0x0401(public abstract) hashCode(Ldata/SimpleClassFour;)I\n"
+
"METHOD: 0x0401(public abstract) equals(Ldata/SimpleClassFour;Ljava/lang/Object;)Z\n"
+
"METHOD: 0x0401(public abstract) clone(Ldata/SimpleClassFour;)Ljava/lang/Object; java/lang/CloneNotSupportedException\n"
+
"METHOD: 0x0401(public abstract) toString(Ldata/SimpleClassFour;)Ljava/lang/String;\n" +
"\n");
// @formatter:on
}
}
| apache-2.0 |
Pluckypan/bigAndroid | api/src/main/java/com/example/android/apis/content/MediaContentJob.java | 4048 | /**
* Copyright (c) 2016, The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.apis.content;
import android.app.job.JobInfo;
import android.app.job.JobParameters;
import android.app.job.JobScheduler;
import android.app.job.JobService;
import android.content.ComponentName;
import android.content.Context;
import android.net.Uri;
import android.os.Handler;
import android.provider.MediaStore;
import android.util.Log;
import android.widget.Toast;
import com.example.android.apis.R;
import java.util.List;
/**
* Example stub job to monitor when there is a change to any media: content URI.
*/
public class MediaContentJob extends JobService {
static final Uri MEDIA_URI = Uri.parse("content://" + MediaStore.AUTHORITY + "/");
final Handler mHandler = new Handler();
final Runnable mWorker = new Runnable() {
@Override public void run() {
scheduleJob(MediaContentJob.this);
jobFinished(mRunningParams, false);
}
};
JobParameters mRunningParams;
public static void scheduleJob(Context context) {
JobScheduler js = context.getSystemService(JobScheduler.class);
JobInfo.Builder builder = new JobInfo.Builder(JobIds.MEDIA_CONTENT_JOB,
new ComponentName(context, MediaContentJob.class));
builder.addTriggerContentUri(new JobInfo.TriggerContentUri(MEDIA_URI,
JobInfo.TriggerContentUri.FLAG_NOTIFY_FOR_DESCENDANTS));
js.schedule(builder.build());
Log.i("MediaContentJob", "JOB SCHEDULED!");
}
public static boolean isScheduled(Context context) {
JobScheduler js = context.getSystemService(JobScheduler.class);
List<JobInfo> jobs = js.getAllPendingJobs();
if (jobs == null) {
return false;
}
for (int i=0; i<jobs.size(); i++) {
if (jobs.get(i).getId() == JobIds.MEDIA_CONTENT_JOB) {
return true;
}
}
return false;
}
public static void cancelJob(Context context) {
JobScheduler js = context.getSystemService(JobScheduler.class);
js.cancel(JobIds.MEDIA_CONTENT_JOB);
}
@Override
public boolean onStartJob(JobParameters params) {
Log.i("MediaContentJob", "JOB STARTED!");
mRunningParams = params;
StringBuilder sb = new StringBuilder();
sb.append("Media content has changed:\n");
if (params.getTriggeredContentAuthorities() != null) {
sb.append("Authorities: ");
boolean first = true;
for (String auth : params.getTriggeredContentAuthorities()) {
if (first) {
first = false;
} else {
sb.append(", ");
}
sb.append(auth);
}
if (params.getTriggeredContentUris() != null) {
for (Uri uri : params.getTriggeredContentUris()) {
sb.append("\n");
sb.append(uri);
}
}
} else {
sb.append("(No content)");
}
Toast.makeText(this, sb.toString(), Toast.LENGTH_LONG).show();
// We will emulate taking some time to do this work, so we can see batching happen.
mHandler.postDelayed(mWorker, 10*1000);
return true;
}
@Override
public boolean onStopJob(JobParameters params) {
mHandler.removeCallbacks(mWorker);
return false;
}
}
| apache-2.0 |
Rikkola/kie-wb-common | kie-wb-common-screens/kie-wb-common-library/kie-wb-common-library-client/src/test/java/org/kie/workbench/common/screens/library/client/settings/sections/knowledgebases/KnowledgeBasesPresenterTest.java | 4605 | package org.kie.workbench.common.screens.library.client.settings.sections.knowledgebases;
import java.util.HashMap;
import java.util.Map;
import javax.enterprise.event.Event;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.screens.library.client.settings.SettingsSectionChange;
import org.kie.workbench.common.screens.library.client.settings.util.sections.MenuItem;
import org.kie.workbench.common.screens.library.client.settings.util.modal.single.AddSingleValueModal;
import org.kie.workbench.common.screens.projecteditor.model.ProjectScreenModel;
import org.kie.workbench.common.services.shared.kmodule.KBaseModel;
import org.kie.workbench.common.services.shared.kmodule.KModuleModel;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.uberfire.client.promise.Promises;
import org.uberfire.promise.SyncPromises;
import static java.util.Collections.emptyList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonList;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
@RunWith(MockitoJUnitRunner.class)
public class KnowledgeBasesPresenterTest {
private KnowledgeBasesPresenter knowledgeBasesPresenter;
@Mock
private Event<SettingsSectionChange<ProjectScreenModel>> settingsSectionChangeEvent;
@Mock
private KnowledgeBasesPresenter.View view;
@Mock
private MenuItem<ProjectScreenModel> menuItem;
@Mock
private KnowledgeBasesPresenter.KnowledgeBaseListPresenter knowledgeBaseListPresenter;
private final Promises promises = new SyncPromises();
@Before
public void before() {
knowledgeBasesPresenter = spy(new KnowledgeBasesPresenter(view,
settingsSectionChangeEvent,
promises,
menuItem,
knowledgeBaseListPresenter));
}
@Test
public void testSetup() {
final ProjectScreenModel model = mock(ProjectScreenModel.class);
when(model.getKModule()).thenReturn(spy(new KModuleModel()));
when(model.getKModule().getKBases()).thenReturn(emptyMap());
knowledgeBasesPresenter.setup(model).catch_(i -> {
Assert.fail("Promise should've been resolved!");
return promises.resolve();
});
verify(view).init(eq(knowledgeBasesPresenter));
verify(knowledgeBaseListPresenter).setup(any(), any(), any());
}
@Test
public void testSave() {
final KModuleModel kModuleModel = spy(new KModuleModel());
knowledgeBasesPresenter.kModuleModel = kModuleModel;
final Map<String, KBaseModel> kBasesMap = spy(new HashMap<>());
doReturn(kBasesMap).when(kModuleModel).getKBases();
knowledgeBasesPresenter.save("Test comment", null).catch_(i -> {
Assert.fail("Promise should've been resolved!");
return promises.resolve();
});
verify(kBasesMap).clear();
verify(kBasesMap).putAll(any());
}
@Test
public void testAddKnowledgeBase() {
knowledgeBasesPresenter.addKnowledgeBase();
final KBaseModel kBaseModel = new KBaseModel();
kBaseModel.setName("");
kBaseModel.setDefault(knowledgeBaseListPresenter.getObjectsList().isEmpty());
verify(knowledgeBaseListPresenter).add(kBaseModel);
verify(knowledgeBasesPresenter).fireChangeEvent();
}
@Test
public void testNewKBaseModelEmptyMap() {
doReturn(emptyList()).when(knowledgeBaseListPresenter).getObjectsList();
final KBaseModel kBaseModel = knowledgeBasesPresenter.newKBaseModel("Name");
Assert.assertEquals("Name", kBaseModel.getName());
Assert.assertEquals(true, kBaseModel.isDefault());
}
@Test
public void testNewKBaseModelNonEmptyMap() {
doReturn(singletonList(new KBaseModel())).when(knowledgeBaseListPresenter).getObjectsList();
final KBaseModel kBaseModel = knowledgeBasesPresenter.newKBaseModel("Name");
Assert.assertEquals("Name", kBaseModel.getName());
Assert.assertEquals(false, kBaseModel.isDefault());
}
} | apache-2.0 |
petehunt/react | docs/_js/examples/timer.js | 677 | /**
* @jsx React.DOM
*/
var TIMER_COMPONENT = "\
var Timer = React.createClass({\n\
getInitialState: function() {\n\
return {secondsElapsed: 0};\n\
},\n\
tick: React.autoBind(function() {\n\
this.setState({secondsElapsed: this.state.secondsElapsed + 1});\n\
}),\n\
componentDidMount: function() {\n\
setInterval(this.tick, 1000);\n\
},\n\
render: function() {\n\
return React.DOM.div({},\n\
'Seconds Elapsed: ' + this.state.secondsElapsed\n\
);\n\
}\n\
});\n\
\n\
React.renderComponent(Timer({}), mountNode);\
";
React.renderComponent(
<ReactPlayground codeText={TIMER_COMPONENT} />,
document.getElementById('timerExample')
);
| apache-2.0 |
apache/incubator-taverna-workbench | taverna-report-impl/src/main/java/org/apache/taverna/workbench/report/config/impl/ReportManagerConfigurationImpl.java | 2787 | /**
*
*/
package org.apache.taverna.workbench.report.config.impl;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.HashMap;
import java.util.Map;
import org.apache.taverna.configuration.AbstractConfigurable;
import org.apache.taverna.configuration.ConfigurationManager;
import org.apache.taverna.workbench.report.config.ReportManagerConfiguration;
import org.apache.taverna.workflowmodel.health.RemoteHealthChecker;
/**
* @author alanrw
*
*/
public final class ReportManagerConfigurationImpl extends AbstractConfigurable implements ReportManagerConfiguration {
private static final int DEFAULT_TIMEOUT = 10;
private Map<String, String> defaultPropertyMap;
public ReportManagerConfigurationImpl(ConfigurationManager configurationManager) {
super(configurationManager);
}
public String getCategory() {
return "general";
}
public Map<String, String> getDefaultPropertyMap() {
if (defaultPropertyMap == null) {
defaultPropertyMap = new HashMap<String, String>();
defaultPropertyMap.put(TIMEOUT, Integer.toString(DEFAULT_TIMEOUT));
defaultPropertyMap.put(ON_EDIT, QUICK_CHECK);
defaultPropertyMap.put(ON_OPEN, QUICK_CHECK);
defaultPropertyMap.put(BEFORE_RUN, FULL_CHECK);
defaultPropertyMap.put(QUERY_BEFORE_RUN, ERRORS_OR_WARNINGS);
defaultPropertyMap.put(REPORT_EXPIRATION, Integer.toString(DEFAULT_REPORT_EXPIRATION));
}
return defaultPropertyMap;
}
public String getDisplayName() {
return "Validation report";
}
public String getFilePrefix() {
return "ReportManager";
}
public String getUUID() {
return "F86378E5-0EC4-4DE9-8A55-6098595413DC";
}
@Override
public void applySettings() {
RemoteHealthChecker.setTimeoutInSeconds(Integer.parseInt(this.getProperty(TIMEOUT)));
}
public void setProperty(String key, String value) {
super.setProperty(key, value);
if (key.equals(TIMEOUT)) {
applySettings();
}
}
}
| apache-2.0 |
gawkermedia/googleads-java-lib | modules/dfp_axis/src/main/java/com/google/api/ads/dfp/axis/v201508/TargetingValue.java | 4372 | /**
* TargetingValue.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.dfp.axis.v201508;
/**
* Contains a {@link Targeting} value.
* <p>
* <b>This object is experimental!
* <code>TargetingValue</code> is an experimental, innovative,
* and rapidly
* changing new feature for DFP. Unfortunately, being on
* the bleeding edge means that we may make
* backwards-incompatible changes to
* <code>TargetingValue</code>. We will inform the community
* when this feature
* is no longer experimental.</b>
*/
public class TargetingValue extends com.google.api.ads.dfp.axis.v201508.ObjectValue implements java.io.Serializable {
/* The {@code Targeting} value. */
private com.google.api.ads.dfp.axis.v201508.Targeting value;
public TargetingValue() {
}
public TargetingValue(
com.google.api.ads.dfp.axis.v201508.Targeting value) {
this.value = value;
}
/**
* Gets the value value for this TargetingValue.
*
* @return value * The {@code Targeting} value.
*/
public com.google.api.ads.dfp.axis.v201508.Targeting getValue() {
return value;
}
/**
* Sets the value value for this TargetingValue.
*
* @param value * The {@code Targeting} value.
*/
public void setValue(com.google.api.ads.dfp.axis.v201508.Targeting value) {
this.value = value;
}
private java.lang.Object __equalsCalc = null;
public synchronized boolean equals(java.lang.Object obj) {
if (!(obj instanceof TargetingValue)) return false;
TargetingValue other = (TargetingValue) obj;
if (obj == null) return false;
if (this == obj) return true;
if (__equalsCalc != null) {
return (__equalsCalc == obj);
}
__equalsCalc = obj;
boolean _equals;
_equals = super.equals(obj) &&
((this.value==null && other.getValue()==null) ||
(this.value!=null &&
this.value.equals(other.getValue())));
__equalsCalc = null;
return _equals;
}
private boolean __hashCodeCalc = false;
public synchronized int hashCode() {
if (__hashCodeCalc) {
return 0;
}
__hashCodeCalc = true;
int _hashCode = super.hashCode();
if (getValue() != null) {
_hashCode += getValue().hashCode();
}
__hashCodeCalc = false;
return _hashCode;
}
// Type metadata
private static org.apache.axis.description.TypeDesc typeDesc =
new org.apache.axis.description.TypeDesc(TargetingValue.class, true);
static {
typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201508", "TargetingValue"));
org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
elemField.setFieldName("value");
elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201508", "value"));
elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201508", "Targeting"));
elemField.setMinOccurs(0);
elemField.setNillable(false);
typeDesc.addFieldDesc(elemField);
}
/**
* Return type metadata object
*/
public static org.apache.axis.description.TypeDesc getTypeDesc() {
return typeDesc;
}
/**
* Get Custom Serializer
*/
public static org.apache.axis.encoding.Serializer getSerializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanSerializer(
_javaType, _xmlType, typeDesc);
}
/**
* Get Custom Deserializer
*/
public static org.apache.axis.encoding.Deserializer getDeserializer(
java.lang.String mechType,
java.lang.Class _javaType,
javax.xml.namespace.QName _xmlType) {
return
new org.apache.axis.encoding.ser.BeanDeserializer(
_javaType, _xmlType, typeDesc);
}
}
| apache-2.0 |
simplegeo/hadoop-pig | src/org/apache/pig/newplan/logical/optimizer/LogicalPlanPrinter.java | 8151 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.newplan.logical.optimizer;
import java.util.ArrayList;
import java.util.List;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.OutputStream;
import java.io.IOException;
import org.apache.pig.newplan.DepthFirstWalker;
import org.apache.pig.newplan.Operator;
import org.apache.pig.newplan.OperatorPlan;
import org.apache.pig.newplan.PlanVisitor;
import org.apache.pig.newplan.PlanWalker;
import org.apache.pig.newplan.ReverseDependencyOrderWalker;
import org.apache.pig.newplan.logical.expression.LogicalExpressionPlan;
import org.apache.pig.newplan.logical.relational.LOCogroup;
import org.apache.pig.newplan.logical.relational.LOFilter;
import org.apache.pig.newplan.logical.relational.LOForEach;
import org.apache.pig.newplan.logical.relational.LOGenerate;
import org.apache.pig.newplan.logical.relational.LOJoin;
import org.apache.pig.newplan.logical.relational.LOSort;
import org.apache.pig.newplan.logical.relational.LOSplitOutput;
import org.apache.pig.newplan.logical.relational.LogicalPlan;
import org.apache.pig.impl.logicalLayer.FrontendException;
import org.apache.pig.impl.plan.VisitorException;
import org.apache.pig.impl.util.MultiMap;
/**
* A visitor mechanism printing out the logical plan.
*/
public class LogicalPlanPrinter extends PlanVisitor {
private PrintStream mStream = null;
private String TAB1 = " ";
private String TABMore = "| ";
private String LSep = "|\n|---";
private String USep = "| |\n| ";
static public String SEPERATE = "\t";
/**
* @param ps PrintStream to output plan information to
* @param plan Logical plan to print
*/
public LogicalPlanPrinter(OperatorPlan plan, PrintStream ps) throws FrontendException {
super(plan, null);
mStream = ps;
}
@Override
public void visit() throws FrontendException {
try {
if (plan instanceof LogicalPlan) {
mStream.write(depthFirstLP().getBytes());
}
else {
mStream.write(reverseDepthFirstLP().getBytes());
}
} catch (IOException e) {
throw new FrontendException(e);
}
}
protected String depthFirstLP() throws FrontendException, IOException {
StringBuilder sb = new StringBuilder();
List<Operator> leaves = plan.getSinks();
for (Operator leaf : leaves) {
sb.append(depthFirst(leaf));
sb.append("\n");
}
return sb.toString();
}
private String depthFirst(Operator node) throws FrontendException, IOException {
String nodeString = printNode(node);
List<Operator> originalPredecessors = plan.getPredecessors(node);
if (originalPredecessors == null)
return nodeString;
StringBuffer sb = new StringBuffer(nodeString);
List<Operator> predecessors = new ArrayList<Operator>(originalPredecessors);
int i = 0;
for (Operator pred : predecessors) {
i++;
String DFStr = depthFirst(pred);
if (DFStr != null) {
sb.append(LSep);
if (i < predecessors.size())
sb.append(shiftStringByTabs(DFStr, 2));
else
sb.append(shiftStringByTabs(DFStr, 1));
}
}
return sb.toString();
}
protected String reverseDepthFirstLP() throws FrontendException, IOException {
StringBuilder sb = new StringBuilder();
List<Operator> roots = plan.getSources();
for (Operator root : roots) {
sb.append(reverseDepthFirst(root));
sb.append("\n");
}
return sb.toString();
}
private String reverseDepthFirst(Operator node) throws FrontendException, IOException {
String nodeString = printNode(node);
List<Operator> originalSuccessors = plan.getSuccessors(node);
if (originalSuccessors == null)
return nodeString;
StringBuffer sb = new StringBuffer(nodeString);
List<Operator> successors = new ArrayList<Operator>(originalSuccessors);
int i = 0;
for (Operator succ : successors) {
i++;
String DFStr = reverseDepthFirst(succ);
if (DFStr != null) {
sb.append(LSep);
if (i < successors.size())
sb.append(shiftStringByTabs(DFStr, 2));
else
sb.append(shiftStringByTabs(DFStr, 1));
}
}
return sb.toString();
}
private String planString(OperatorPlan lp) throws VisitorException, IOException {
StringBuilder sb = new StringBuilder();
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
if(lp!=null) {
LogicalPlanPrinter printer = new LogicalPlanPrinter(lp, ps);
printer.visit();
}
else
return "";
sb.append(USep);
sb.append(shiftStringByTabs(baos.toString(), 2));
return sb.toString();
}
private String printNode(Operator node) throws FrontendException, IOException {
StringBuilder sb = new StringBuilder(node.toString()+"\n");
if(node instanceof LOFilter){
sb.append(planString(((LOFilter)node).getFilterPlan()));
}
else if(node instanceof LOForEach){
sb.append(planString(((LOForEach)node).getInnerPlan()));
}
else if(node instanceof LOCogroup){
MultiMap<Integer, LogicalExpressionPlan> plans = ((LOCogroup)node).getExpressionPlans();
for (int i : plans.keySet()) {
// Visit the associated plans
for (OperatorPlan plan : plans.get(i)) {
sb.append(planString(plan));
}
}
}
else if(node instanceof LOJoin){
MultiMap<Integer, LogicalExpressionPlan> plans = ((LOJoin)node).getExpressionPlans();
for (int i: plans.keySet()) {
// Visit the associated plans
for (OperatorPlan plan : plans.get(i)) {
sb.append(planString(plan));
}
}
}
else if(node instanceof LOSort){
for (OperatorPlan plan : ((LOSort)node).getSortColPlans())
sb.append(planString(plan));
}
else if(node instanceof LOSplitOutput){
sb.append(planString(((LOSplitOutput)node).getFilterPlan()));
}
else if(node instanceof LOGenerate){
for (OperatorPlan plan : ((LOGenerate)node).getOutputPlans()) {
sb.append(planString(plan));
}
}
return sb.toString();
}
private String shiftStringByTabs(String DFStr, int TabType) {
StringBuilder sb = new StringBuilder();
String[] spl = DFStr.split("\n");
String tab = (TabType == 1) ? TAB1 : TABMore;
sb.append(spl[0] + "\n");
for (int i = 1; i < spl.length; i++) {
sb.append(tab);
sb.append(spl[i]);
sb.append("\n");
}
return sb.toString();
}
}
| apache-2.0 |
massakam/pulsar | pulsar-client/src/test/java/org/apache/pulsar/client/impl/RoundRobinPartitionMessageRouterImplTest.java | 5523 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.client.impl;
import static org.mockito.Mockito.mock;
import static org.powermock.api.mockito.PowerMockito.when;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
import org.apache.pulsar.client.api.HashingScheme;
import org.apache.pulsar.client.api.Message;
import org.testng.annotations.Test;
/**
 * Unit test of {@link RoundRobinPartitionMessageRouterImpl}.
 *
 * <p>Note: TestNG's {@code Assert.assertEquals} signature is
 * {@code (actual, expected)}; arguments are ordered accordingly so that
 * failure messages report actual/expected values correctly.
 */
public class RoundRobinPartitionMessageRouterImplTest {

    @Test
    public void testChoosePartitionWithoutKey() {
        Message<?> msg = mock(Message.class);
        when(msg.getKey()).thenReturn(null);

        RoundRobinPartitionMessageRouterImpl router = new RoundRobinPartitionMessageRouterImpl(
                HashingScheme.JavaStringHash, 0, false, 0);
        // Keyless, non-batching routing is plain round-robin over all partitions.
        for (int i = 0; i < 10; i++) {
            assertEquals(router.choosePartition(msg, new TopicMetadataImpl(5)), i % 5);
        }
    }

    @Test
    public void testChoosePartitionWithoutKeyWithBatching() {
        Message<?> msg = mock(Message.class);
        when(msg.getKey()).thenReturn(null);

        // Fake clock, simulate 1 millisecond passes for each invocation
        Clock clock = new Clock() {
            private long current = 0;

            @Override
            public Clock withZone(ZoneId zone) {
                return null;
            }

            @Override
            public long millis() {
                return current++;
            }

            @Override
            public Instant instant() {
                return Instant.ofEpochMilli(millis());
            }

            @Override
            public ZoneId getZone() {
                return ZoneId.systemDefault();
            }
        };

        RoundRobinPartitionMessageRouterImpl router = new RoundRobinPartitionMessageRouterImpl(
                HashingScheme.JavaStringHash, 0, true, 5, clock);

        // Since the batching time is 5millis, first 5 messages will go on partition 0 and next five would go on
        // partition 1
        for (int i = 0; i < 5; i++) {
            assertEquals(router.choosePartition(msg, new TopicMetadataImpl(5)), 0);
        }
        for (int i = 5; i < 10; i++) {
            assertEquals(router.choosePartition(msg, new TopicMetadataImpl(5)), 1);
        }
    }

    @Test
    public void testChoosePartitionWithNegativeTime() {
        Message<?> msg = mock(Message.class);
        when(msg.getKey()).thenReturn(null);

        // Fake clock, simulate timestamp that resolves into a negative Integer value
        Clock clock = mock(Clock.class);
        when(clock.millis()).thenReturn((long) Integer.MAX_VALUE);
        RoundRobinPartitionMessageRouterImpl router = new RoundRobinPartitionMessageRouterImpl(
                HashingScheme.JavaStringHash, 3, true, 5, clock);

        // Even when the clock value overflows int, the partition index must stay in range.
        int idx = router.choosePartition(msg, new TopicMetadataImpl(5));
        assertTrue(idx >= 0);
        assertTrue(idx < 5);
    }

    @Test
    public void testChoosePartitionWithKey() {
        String key1 = "key1";
        String key2 = "key2";
        Message<?> msg1 = mock(Message.class);
        when(msg1.hasKey()).thenReturn(true);
        when(msg1.getKey()).thenReturn(key1);
        Message<?> msg2 = mock(Message.class);
        when(msg2.hasKey()).thenReturn(true);
        when(msg2.getKey()).thenReturn(key2);

        RoundRobinPartitionMessageRouterImpl router = new RoundRobinPartitionMessageRouterImpl(
                HashingScheme.JavaStringHash, 0, false, 0);
        TopicMetadataImpl metadata = new TopicMetadataImpl(100);

        // Keyed messages are routed by the key's hash, not round-robin.
        assertEquals(router.choosePartition(msg1, metadata), key1.hashCode() % 100);
        assertEquals(router.choosePartition(msg2, metadata), key2.hashCode() % 100);
    }

    @Test
    public void testBatchingAwareness() throws Exception {
        Message<?> msg = mock(Message.class);
        when(msg.getKey()).thenReturn(null);

        Clock clock = mock(Clock.class);

        RoundRobinPartitionMessageRouterImpl router = new RoundRobinPartitionMessageRouterImpl(
                HashingScheme.JavaStringHash, 0, true, 10, clock);
        TopicMetadataImpl metadata = new TopicMetadataImpl(100);

        // time at `12345*` milliseconds
        for (int i = 0; i < 10; i++) {
            when(clock.millis()).thenReturn(123450L + i);
            assertEquals(router.choosePartition(msg, metadata), 45);
        }

        // time at `12346*` milliseconds
        for (int i = 0; i < 10; i++) {
            when(clock.millis()).thenReturn(123460L + i);
            assertEquals(router.choosePartition(msg, metadata), 46);
        }
    }
}
| apache-2.0 |
gustavoanatoly/hbase | hbase-server/src/test/java/org/apache/hadoop/hbase/quotas/policies/TestNoWritesCompactionsViolationPolicyEnforcement.java | 2015 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.quotas.policies;
import org.apache.hadoop.hbase.quotas.SpaceLimitingException;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/**
 * Test class for {@link NoWritesCompactionsViolationPolicyEnforcement}.
 */
@Category(SmallTests.class)
public class TestNoWritesCompactionsViolationPolicyEnforcement
    extends BaseViolationPolicyEnforcement {

  // Fresh instance per test; @Before guarantees no state leaks between tests.
  private NoWritesCompactionsViolationPolicyEnforcement enforcement;

  @Before
  public void setup() {
    enforcement = new NoWritesCompactionsViolationPolicyEnforcement();
  }

  // APPEND/DELETE/INCREMENT/PUT fixtures come from BaseViolationPolicyEnforcement;
  // every write type must be rejected with SpaceLimitingException under this policy.
  @Test(expected = SpaceLimitingException.class)
  public void testCheckAppend() throws Exception {
    enforcement.check(APPEND);
  }

  @Test(expected = SpaceLimitingException.class)
  public void testCheckDelete() throws Exception {
    enforcement.check(DELETE);
  }

  @Test(expected = SpaceLimitingException.class)
  public void testCheckIncrement() throws Exception {
    enforcement.check(INCREMENT);
  }

  @Test(expected = SpaceLimitingException.class)
  public void testCheckPut() throws Exception {
    enforcement.check(PUT);
  }
}
| apache-2.0 |
afeiluo/twitter-kit-android | tweet-ui/src/debug/java/com/twitter/sdk/android/tweetui/TestTweetRepository.java | 940 | /*
* Copyright (C) 2015 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.sdk.android.tweetui;
import android.os.Handler;
/**
 * Subclass widening access to {@link TweetRepository}'s constructor.
 * NOTE(review): lives in the debug source set, presumably so tests can
 * construct a repository directly - confirm against test usages.
 */
public class TestTweetRepository extends TweetRepository {
    // Pass-through constructor; adds no behavior of its own.
    TestTweetRepository(Handler mainHandler, TweetUiAuthRequestQueue userAuthQueue,
            TweetUiAuthRequestQueue guestAuthQueue) {
        super(mainHandler, userAuthQueue, guestAuthQueue);
    }
}
| apache-2.0 |
sacjaya/siddhi | modules/siddhi-extensions/eval-script/src/main/scala/org.wso2.siddhi.extension.evalscript/ScalaEvaluationEngine.scala | 956 | /*
* Copyright (c) 2015, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.siddhi.extension.evalscript
import com.googlecode.scalascriptengine.EvalCode
/**
 * Compiles Scala source snippets into callable functions via
 * ScalaScriptEngine's `EvalCode`.
 */
class ScalaEvaluationEngine {

  /**
   * Evaluate `code` as a zero-argument expression whose result is a
   * function from `Array[Any]` to `Any`.
   *
   * @param code Scala source that evaluates to an `(Array[Any]) => Any`
   * @return the freshly compiled and instantiated function
   */
  def eval(code: String) : (Array[Any]) => Any = {
    val ect = EvalCode.withoutArgs[(Array[Any]) => Any](code)
    // newInstance compiles/loads the snippet; applying it yields the function value.
    val f = ect.newInstance
    f()
  }
}
justinsb/kops | vendor/github.com/jetstack/cert-manager/pkg/client/clientset/versioned/typed/certmanager/v1alpha2/fake/fake_issuer.go | 5217 | /*
Copyright The cert-manager Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by client-gen. DO NOT EDIT.
package fake
import (
"context"
v1alpha2 "github.com/jetstack/cert-manager/pkg/apis/certmanager/v1alpha2"
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
labels "k8s.io/apimachinery/pkg/labels"
schema "k8s.io/apimachinery/pkg/runtime/schema"
types "k8s.io/apimachinery/pkg/types"
watch "k8s.io/apimachinery/pkg/watch"
testing "k8s.io/client-go/testing"
)
// FakeIssuers implements IssuerInterface
type FakeIssuers struct {
	// Fake is the parent fake clientset; it records every action for assertions.
	Fake *FakeCertmanagerV1alpha2
	// ns is the namespace all operations on this interface are scoped to.
	ns string
}

// Resource/Kind identifiers used to key actions in the fake object tracker.
var issuersResource = schema.GroupVersionResource{Group: "cert-manager.io", Version: "v1alpha2", Resource: "issuers"}

var issuersKind = schema.GroupVersionKind{Group: "cert-manager.io", Version: "v1alpha2", Kind: "Issuer"}
// Get takes name of the issuer, and returns the corresponding issuer object, and an error if there is any.
func (c *FakeIssuers) Get(ctx context.Context, name string, options v1.GetOptions) (result *v1alpha2.Issuer, err error) {
	obj, err := c.Fake.
		Invokes(testing.NewGetAction(issuersResource, c.ns, name), &v1alpha2.Issuer{})

	// A nil object from the tracker means the action failed; propagate err as-is.
	if obj == nil {
		return nil, err
	}
	return obj.(*v1alpha2.Issuer), err
}
// List takes label and field selectors, and returns the list of Issuers that match those selectors.
func (c *FakeIssuers) List(ctx context.Context, opts v1.ListOptions) (result *v1alpha2.IssuerList, err error) {
	obj, err := c.Fake.
		Invokes(testing.NewListAction(issuersResource, issuersKind, c.ns, opts), &v1alpha2.IssuerList{})

	if obj == nil {
		return nil, err
	}

	// The fake tracker does not filter; apply the label selector here.
	label, _, _ := testing.ExtractFromListOptions(opts)
	if label == nil {
		label = labels.Everything()
	}
	list := &v1alpha2.IssuerList{ListMeta: obj.(*v1alpha2.IssuerList).ListMeta}
	for _, item := range obj.(*v1alpha2.IssuerList).Items {
		if label.Matches(labels.Set(item.Labels)) {
			list.Items = append(list.Items, item)
		}
	}
	return list, err
}
// Watch returns a watch.Interface that watches the requested issuers.
func (c *FakeIssuers) Watch(ctx context.Context, opts v1.ListOptions) (watch.Interface, error) {
	// Delegates entirely to the fake's watch recorder.
	return c.Fake.
		InvokesWatch(testing.NewWatchAction(issuersResource, c.ns, opts))
}
// Create takes the representation of a issuer and creates it. Returns the server's representation of the issuer, and an error, if there is any.
func (c *FakeIssuers) Create(ctx context.Context, issuer *v1alpha2.Issuer, opts v1.CreateOptions) (result *v1alpha2.Issuer, err error) {
	obj, err := c.Fake.
		Invokes(testing.NewCreateAction(issuersResource, c.ns, issuer), &v1alpha2.Issuer{})

	// nil object means the recorded action produced an error.
	if obj == nil {
		return nil, err
	}
	return obj.(*v1alpha2.Issuer), err
}
// Update takes the representation of a issuer and updates it. Returns the server's representation of the issuer, and an error, if there is any.
func (c *FakeIssuers) Update(ctx context.Context, issuer *v1alpha2.Issuer, opts v1.UpdateOptions) (result *v1alpha2.Issuer, err error) {
	obj, err := c.Fake.
		Invokes(testing.NewUpdateAction(issuersResource, c.ns, issuer), &v1alpha2.Issuer{})

	// nil object means the recorded action produced an error.
	if obj == nil {
		return nil, err
	}
	return obj.(*v1alpha2.Issuer), err
}
// UpdateStatus was generated because the type contains a Status member.
// Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus().
func (c *FakeIssuers) UpdateStatus(ctx context.Context, issuer *v1alpha2.Issuer, opts v1.UpdateOptions) (*v1alpha2.Issuer, error) {
	// Records an update against the "status" subresource.
	obj, err := c.Fake.
		Invokes(testing.NewUpdateSubresourceAction(issuersResource, "status", c.ns, issuer), &v1alpha2.Issuer{})

	if obj == nil {
		return nil, err
	}
	return obj.(*v1alpha2.Issuer), err
}
// Delete takes name of the issuer and deletes it. Returns an error if one occurs.
func (c *FakeIssuers) Delete(ctx context.Context, name string, opts v1.DeleteOptions) error {
	// Only the error is meaningful for deletes; the returned object is discarded.
	_, err := c.Fake.
		Invokes(testing.NewDeleteAction(issuersResource, c.ns, name), &v1alpha2.Issuer{})

	return err
}
// DeleteCollection deletes a collection of objects.
func (c *FakeIssuers) DeleteCollection(ctx context.Context, opts v1.DeleteOptions, listOpts v1.ListOptions) error {
	action := testing.NewDeleteCollectionAction(issuersResource, c.ns, listOpts)

	// Only the error is meaningful; the returned list is discarded.
	_, err := c.Fake.Invokes(action, &v1alpha2.IssuerList{})

	return err
}
// Patch applies the patch and returns the patched issuer.
func (c *FakeIssuers) Patch(ctx context.Context, name string, pt types.PatchType, data []byte, opts v1.PatchOptions, subresources ...string) (result *v1alpha2.Issuer, err error) {
	obj, err := c.Fake.
		Invokes(testing.NewPatchSubresourceAction(issuersResource, c.ns, name, pt, data, subresources...), &v1alpha2.Issuer{})

	// nil object means the recorded action produced an error.
	if obj == nil {
		return nil, err
	}
	return obj.(*v1alpha2.Issuer), err
}
| apache-2.0 |
plusplusjiajia/hadoop | hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java | 15088 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.retry;
import com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.retry.FailoverProxyProvider.ProxyInfo;
import org.apache.hadoop.io.retry.RetryPolicy.RetryAction;
import org.apache.hadoop.ipc.*;
import org.apache.hadoop.ipc.Client.ConnectionId;
import org.apache.hadoop.util.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.Arrays;
import java.util.Collections;
import java.util.Map;
/**
* A {@link RpcInvocationHandler} which supports client side retry .
*/
@InterfaceAudience.Private
public class RetryInvocationHandler<T> implements RpcInvocationHandler {
public static final Logger LOG = LoggerFactory.getLogger(
RetryInvocationHandler.class);
  /**
   * A single method invocation bound to its retry state: per-call counters
   * and the pending retry decision, if any.
   */
  static class Call {
    private final Method method;
    private final Object[] args;
    private final boolean isRpc;
    private final int callId;
    private final Counters counters = new Counters();

    private final RetryPolicy retryPolicy;
    private final RetryInvocationHandler<?> retryInvocationHandler;

    // Non-null while a retry decision is pending (i.e. between a failure and
    // the next attempt); cleared by processRetryInfo().
    private RetryInfo retryInfo;

    Call(Method method, Object[] args, boolean isRpc, int callId,
        RetryInvocationHandler<?> retryInvocationHandler) {
      this.method = method;
      this.args = args;
      this.isRpc = isRpc;
      this.callId = callId;

      this.retryPolicy = retryInvocationHandler.getRetryPolicy(method);
      this.retryInvocationHandler = retryInvocationHandler;
    }

    int getCallId() {
      return callId;
    }

    Counters getCounters() {
      return counters;
    }

    /** @return the remaining wait time in ms, or null if no retry is pending. */
    synchronized Long getWaitTime(final long now) {
      return retryInfo == null? null: retryInfo.retryTime - now;
    }

    /** Invoke the call once without retrying. */
    synchronized CallReturn invokeOnce() {
      try {
        if (retryInfo != null) {
          // A retry decision is already pending: wait (if needed), then retry.
          return processWaitTimeAndRetryInfo();
        }

        // The number of times this invocation handler has ever been failed over
        // before this method invocation attempt. Used to prevent concurrent
        // failed method invocations from triggering multiple failover attempts.
        final long failoverCount = retryInvocationHandler.getFailoverCount();
        try {
          return invoke();
        } catch (Exception e) {
          if (LOG.isTraceEnabled()) {
            LOG.trace(toString(), e);
          }
          if (Thread.currentThread().isInterrupted()) {
            // If interrupted, do not retry.
            throw e;
          }
          // Ask the handler/policy what to do; may throw when the policy says FAIL.
          retryInfo = retryInvocationHandler.handleException(
              method, callId, retryPolicy, counters, failoverCount, e);
          return processWaitTimeAndRetryInfo();
        }
      } catch(Throwable t) {
        // Non-retriable failure: hand the throwable back to the caller.
        return new CallReturn(t);
      }
    }

    /**
     * It first processes the wait time, if there is any,
     * and then invokes {@link #processRetryInfo()}.
     *
     * If the wait time is positive, it either sleeps for synchronous calls
     * or immediately returns for asynchronous calls.
     *
     * @return {@link CallReturn#RETRY} if the retryInfo is processed;
     *         otherwise, return {@link CallReturn#WAIT_RETRY}.
     */
    CallReturn processWaitTimeAndRetryInfo() throws InterruptedIOException {
      final Long waitTime = getWaitTime(Time.monotonicNow());
      LOG.trace("#{} processRetryInfo: retryInfo={}, waitTime={}",
          callId, retryInfo, waitTime);
      if (waitTime != null && waitTime > 0) {
        try {
          Thread.sleep(retryInfo.delay);
        } catch (InterruptedException e) {
          // Restore the interrupt status and surface as InterruptedIOException.
          Thread.currentThread().interrupt();
          if (LOG.isDebugEnabled()) {
            LOG.debug("Interrupted while waiting to retry", e);
          }
          InterruptedIOException intIOE = new InterruptedIOException(
              "Retry interrupted");
          intIOE.initCause(e);
          throw intIOE;
        }
      }
      processRetryInfo();
      return CallReturn.RETRY;
    }

    /** Consume the pending retry decision, performing a failover if required. */
    synchronized void processRetryInfo() {
      counters.retries++;
      if (retryInfo.isFailover()) {
        retryInvocationHandler.proxyDescriptor.failover(
            retryInfo.expectedFailoverCount, method, callId);
        counters.failovers++;
      }
      retryInfo = null;
    }

    CallReturn invoke() throws Throwable {
      return new CallReturn(invokeMethod());
    }

    Object invokeMethod() throws Throwable {
      if (isRpc) {
        // Tag the RPC client with this call's id and retry count before invoking.
        Client.setCallIdAndRetryCount(callId, counters.retries,
            retryInvocationHandler.asyncCallHandler);
      }
      return retryInvocationHandler.invokeMethod(method, args);
    }

    @Override
    public String toString() {
      return getClass().getSimpleName() + "#" + callId + ": "
          + method.getDeclaringClass().getSimpleName() + "." + method.getName()
          + "(" + (args == null || args.length == 0? "": Arrays.toString(args))
          + ")";
    }
  }
  /** Retry/failover counters for a single {@link Call}. */
  static class Counters {
    /** Counter for retries. */
    private int retries;
    /** Counter for method invocation has been failed over. */
    private int failovers;

    // True iff this call has neither retried nor failed over yet.
    boolean isZeros() {
      return retries == 0 && failovers == 0;
    }
  }
  /**
   * Wraps the {@link FailoverProxyProvider} together with the current proxy
   * and the number of failovers performed so far. All accessors are
   * synchronized so concurrent calls observe a consistent proxy/count pair.
   */
  private static class ProxyDescriptor<T> {
    private final FailoverProxyProvider<T> fpp;
    /** Count the associated proxy provider has ever been failed over. */
    private long failoverCount = 0;
    private ProxyInfo<T> proxyInfo;

    ProxyDescriptor(FailoverProxyProvider<T> fpp) {
      this.fpp = fpp;
      this.proxyInfo = fpp.getProxy();
    }

    synchronized ProxyInfo<T> getProxyInfo() {
      return proxyInfo;
    }

    synchronized T getProxy() {
      return proxyInfo.proxy;
    }

    synchronized long getFailoverCount() {
      return failoverCount;
    }

    /** Fail over only if no other call has already done so in the meantime. */
    synchronized void failover(long expectedFailoverCount, Method method,
        int callId) {
      // Make sure that concurrent failed invocations only cause a single
      // actual failover.
      if (failoverCount == expectedFailoverCount) {
        fpp.performFailover(proxyInfo.proxy);
        failoverCount++;
      } else {
        LOG.warn("A failover has occurred since the start of call #" + callId
            + " " + proxyInfo.getString(method.getName()));
      }
      proxyInfo = fpp.getProxy();
    }

    // Looks up the declared method on the provider's interface to read its
    // @Idempotent / @AtMostOnce annotations.
    boolean idempotentOrAtMostOnce(Method method) throws NoSuchMethodException {
      final Method m = fpp.getInterface()
          .getMethod(method.getName(), method.getParameterTypes());
      return m.isAnnotationPresent(Idempotent.class)
          || m.isAnnotationPresent(AtMostOnce.class);
    }

    void close() throws IOException {
      fpp.close();
    }
  }
  /**
   * Immutable retry decision for one failed attempt: the delay, the chosen
   * {@link RetryAction}, and (when the action is FAIL) the exception to throw.
   */
  private static class RetryInfo {
    // Absolute (monotonic) time at which the retry may proceed.
    private final long retryTime;
    // Delay in milliseconds before retrying.
    private final long delay;
    private final RetryAction action;
    private final long expectedFailoverCount;
    // Non-null only when the decided action is FAIL.
    private final Exception failException;

    RetryInfo(long delay, RetryAction action, long expectedFailoverCount,
        Exception failException) {
      this.delay = delay;
      this.retryTime = Time.monotonicNow() + delay;
      this.action = action;
      this.expectedFailoverCount = expectedFailoverCount;
      this.failException = failException;
    }

    boolean isFailover() {
      return action != null
          && action.action == RetryAction.RetryDecision.FAILOVER_AND_RETRY;
    }

    boolean isFail() {
      return action != null
          && action.action == RetryAction.RetryDecision.FAIL;
    }

    Exception getFailException() {
      return failException;
    }

    /**
     * Combine the retry decisions for all underlying exceptions (a
     * {@link MultiException} may carry several) into a single RetryInfo,
     * keeping the longest delay and the strongest action (per the
     * RetryDecision enum's declaration order, via compareTo).
     */
    static RetryInfo newRetryInfo(RetryPolicy policy, Exception e,
        Counters counters, boolean idempotentOrAtMostOnce,
        long expectedFailoverCount) throws Exception {
      RetryAction max = null;
      long maxRetryDelay = 0;
      Exception ex = null;

      final Iterable<Exception> exceptions = e instanceof MultiException ?
          ((MultiException) e).getExceptions().values()
          : Collections.singletonList(e);
      for (Exception exception : exceptions) {
        final RetryAction a = policy.shouldRetry(exception,
            counters.retries, counters.failovers, idempotentOrAtMostOnce);
        if (a.action != RetryAction.RetryDecision.FAIL) {
          // must be a retry or failover
          if (a.delayMillis > maxRetryDelay) {
            maxRetryDelay = a.delayMillis;
          }
        }

        if (max == null || max.action.compareTo(a.action) < 0) {
          max = a;
          if (a.action == RetryAction.RetryDecision.FAIL) {
            ex = exception;
          }
        }
      }
      return new RetryInfo(maxRetryDelay, max, expectedFailoverCount, ex);
    }

    @Override
    public String toString() {
      return "RetryInfo{" +
          "retryTime=" + retryTime +
          ", delay=" + delay +
          ", action=" + action +
          ", expectedFailoverCount=" + expectedFailoverCount +
          ", failException=" + failException +
          '}';
    }
  }
  private final ProxyDescriptor<T> proxyDescriptor;

  // Set once any invocation succeeds; raises later failure logs to INFO level.
  private volatile boolean hasSuccessfulCall = false;

  // Policy applied when no per-method override exists in methodNameToPolicyMap.
  private final RetryPolicy defaultPolicy;
  private final Map<String,RetryPolicy> methodNameToPolicyMap;

  private final AsyncCallHandler asyncCallHandler = new AsyncCallHandler();
  /** Construct with a single retry policy applied to every method. */
  protected RetryInvocationHandler(FailoverProxyProvider<T> proxyProvider,
      RetryPolicy retryPolicy) {
    this(proxyProvider, retryPolicy, Collections.<String, RetryPolicy>emptyMap());
  }
  /**
   * Construct with a default policy plus per-method-name policy overrides.
   */
  protected RetryInvocationHandler(FailoverProxyProvider<T> proxyProvider,
      RetryPolicy defaultPolicy,
      Map<String, RetryPolicy> methodNameToPolicyMap) {
    this.proxyDescriptor = new ProxyDescriptor<>(proxyProvider);
    this.defaultPolicy = defaultPolicy;
    this.methodNameToPolicyMap = methodNameToPolicyMap;
  }
private RetryPolicy getRetryPolicy(Method method) {
final RetryPolicy policy = methodNameToPolicyMap.get(method.getName());
return policy != null? policy: defaultPolicy;
}
  /** @return how many times the underlying proxy provider has failed over. */
  private long getFailoverCount() {
    return proxyDescriptor.getFailoverCount();
  }
private Call newCall(Method method, Object[] args, boolean isRpc,
int callId) {
if (Client.isAsynchronousMode()) {
return asyncCallHandler.newAsyncCall(method, args, isRpc, callId, this);
} else {
return new Call(method, args, isRpc, callId, this);
}
}
  /**
   * Entry point for all proxied invocations: builds a {@link Call} and drives
   * it until it either completes, fails permanently, or is handed off to the
   * asynchronous path.
   */
  @Override
  public Object invoke(Object proxy, Method method, Object[] args)
      throws Throwable {
    // Only real RPC proxies get a fresh call id; others use INVALID_CALL_ID.
    final boolean isRpc = isRpcInvocation(proxyDescriptor.getProxy());
    final int callId = isRpc? Client.nextCallId(): RpcConstants.INVALID_CALL_ID;
    final Call call = newCall(method, args, isRpc, callId);
    while (true) {
      final CallReturn c = call.invokeOnce();
      final CallReturn.State state = c.getState();
      if (state == CallReturn.State.ASYNC_INVOKED) {
        return null; // return null for async calls
      } else if (c.getState() != CallReturn.State.RETRY) {
        // Either a normal return value or a rethrown failure.
        return c.getReturnValue();
      }
    }
  }
  /**
   * Translate a failed invocation into a {@link RetryInfo} describing the
   * retry/failover decision, or rethrow when the policy says FAIL.
   */
  private RetryInfo handleException(final Method method, final int callId,
      final RetryPolicy policy, final Counters counters,
      final long expectFailoverCount, final Exception e) throws Exception {
    final RetryInfo retryInfo = RetryInfo.newRetryInfo(policy, e,
        counters, proxyDescriptor.idempotentOrAtMostOnce(method),
        expectFailoverCount);
    if (retryInfo.isFail()) {
      // fail.
      if (retryInfo.action.reason != null) {
        if (LOG.isDebugEnabled()) {
          LOG.debug("Exception while invoking call #" + callId + " "
              + proxyDescriptor.getProxyInfo().getString(method.getName())
              + ". Not retrying because " + retryInfo.action.reason, e);
        }
      }
      throw retryInfo.getFailException();
    }

    // Retry or failover: log the decision before handing it back to the Call.
    log(method, retryInfo.isFailover(), counters.failovers, retryInfo.delay, e);
    return retryInfo;
  }
private void log(final Method method, final boolean isFailover,
final int failovers, final long delay, final Exception ex) {
// log info if this has made some successful calls or
// this is not the first failover
final boolean info = hasSuccessfulCall || failovers != 0
|| asyncCallHandler.hasSuccessfulCall();
if (!info && !LOG.isDebugEnabled()) {
return;
}
final StringBuilder b = new StringBuilder()
.append(ex + ", while invoking ")
.append(proxyDescriptor.getProxyInfo().getString(method.getName()));
if (failovers > 0) {
b.append(" after ").append(failovers).append(" failover attempts");
}
b.append(isFailover? ". Trying to failover ": ". Retrying ");
b.append(delay > 0? "after sleeping for " + delay + "ms.": "immediately.");
if (info) {
LOG.info(b.toString());
} else {
LOG.debug(b.toString(), ex);
}
}
  /**
   * Invoke the method directly on the current proxy, with no retry logic.
   * Marks the handler as having had a successful call on success.
   */
  protected Object invokeMethod(Method method, Object[] args) throws Throwable {
    try {
      if (!method.isAccessible()) {
        // NOTE(review): setAccessible may be restricted under JPMS strong
        // encapsulation on newer JDKs - confirm target JDK compatibility.
        method.setAccessible(true);
      }
      final Object r = method.invoke(proxyDescriptor.getProxy(), args);
      hasSuccessfulCall = true;
      return r;
    } catch (InvocationTargetException e) {
      // Unwrap so the retry policy sees the real underlying exception.
      throw e.getCause();
    }
  }
@VisibleForTesting
static boolean isRpcInvocation(Object proxy) {
if (proxy instanceof ProtocolTranslator) {
proxy = ((ProtocolTranslator) proxy).getUnderlyingProxyObject();
}
if (!Proxy.isProxyClass(proxy.getClass())) {
return false;
}
final InvocationHandler ih = Proxy.getInvocationHandler(proxy);
return ih instanceof RpcInvocationHandler;
}
  @Override
  public void close() throws IOException {
    // Delegates to the proxy provider so it can release its resources.
    proxyDescriptor.close();
  }
  @Override //RpcInvocationHandler
  public ConnectionId getConnectionId() {
    // Connection id of whichever proxy is currently active.
    return RPC.getConnectionIdForProxy(proxyDescriptor.getProxy());
  }
  /** @return the underlying failover proxy provider (test hook). */
  @VisibleForTesting
  public FailoverProxyProvider<T> getProxyProvider() {
    return proxyDescriptor.fpp;
  }
}
| apache-2.0 |
GoogleCloudPlatform/sap-deployment-automation | third_party/github.com/ansible/awx/awx/ui_next/src/components/ErrorDetail/getErrorMessage.test.js | 1743 | import getErrorMessage from './getErrorMessage';
describe('getErrorMessage', () => {
  // A plain string payload is returned unchanged.
  test('should return data string', () => {
    const response = {
      data: 'error response',
    };
    expect(getErrorMessage(response)).toEqual('error response');
  });

  // A `detail` field takes precedence as the message.
  test('should return detail string', () => {
    const response = {
      data: {
        detail: 'detail string',
      },
    };
    expect(getErrorMessage(response)).toEqual('detail string');
  });

  // Field-level errors come back as an array of strings.
  test('should return an array of strings', () => {
    const response = {
      data: {
        project: ['project error response'],
      },
    };
    expect(getErrorMessage(response)).toEqual(['project error response']);
  });

  test('should consolidate error messages from multiple keys into an array', () => {
    const response = {
      data: {
        project: ['project error response'],
        inventory: ['inventory error response'],
        organization: ['org error response'],
      },
    };
    expect(getErrorMessage(response)).toEqual([
      'project error response',
      'inventory error response',
      'org error response',
    ]);
  });

  // Missing response.data must not throw; null is returned instead.
  test('should handle no response.data', () => {
    const response = {};
    expect(getErrorMessage(response)).toEqual(null);
  });

  // Multiple messages under one key are flattened alongside other keys.
  test('should consolidate multiple error messages from multiple keys into an array', () => {
    const response = {
      data: {
        project: ['project error response'],
        inventory: [
          'inventory error response',
          'another inventory error response',
        ],
      },
    };
    expect(getErrorMessage(response)).toEqual([
      'project error response',
      'inventory error response',
      'another inventory error response',
    ]);
  });
});
| apache-2.0 |
dain/presto | core/trino-main/src/main/java/io/trino/metadata/AbstractPropertyManager.java | 7289 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.metadata;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import io.trino.Session;
import io.trino.connector.CatalogName;
import io.trino.security.AccessControl;
import io.trino.spi.ErrorCodeSupplier;
import io.trino.spi.TrinoException;
import io.trino.spi.block.BlockBuilder;
import io.trino.spi.session.PropertyMetadata;
import io.trino.spi.type.Type;
import io.trino.sql.planner.ParameterRewriter;
import io.trino.sql.tree.Expression;
import io.trino.sql.tree.ExpressionTreeRewriter;
import io.trino.sql.tree.NodeRef;
import io.trino.sql.tree.Parameter;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import static com.google.common.base.Preconditions.checkState;
import static io.trino.spi.StandardErrorCode.NOT_FOUND;
import static io.trino.spi.type.TypeUtils.writeNativeValue;
import static io.trino.sql.planner.ExpressionInterpreter.evaluateConstantExpression;
import static java.lang.String.format;
import static java.util.Locale.ENGLISH;
import static java.util.Objects.requireNonNull;
abstract class AbstractPropertyManager
{
    // Registered property metadata per catalog, keyed by lower-case property name.
    private final ConcurrentMap<CatalogName, Map<String, PropertyMetadata<?>>> connectorProperties = new ConcurrentHashMap<>();
    // Human-readable kind of property (used in error messages).
    private final String propertyType;
    // Error code reported for unknown properties or invalid property values.
    private final ErrorCodeSupplier propertyError;
protected AbstractPropertyManager(String propertyType, ErrorCodeSupplier propertyError)
{
requireNonNull(propertyType, "propertyType is null");
this.propertyType = propertyType;
this.propertyError = requireNonNull(propertyError, "propertyError is null");
}
    /**
     * Register the supported properties for a catalog. May be called at most
     * once per catalog; re-registration indicates a wiring bug.
     */
    public final void addProperties(CatalogName catalogName, List<PropertyMetadata<?>> properties)
    {
        requireNonNull(catalogName, "catalogName is null");
        requireNonNull(properties, "properties is null");

        // Index by name; uniqueIndex throws if two properties share a name.
        Map<String, PropertyMetadata<?>> propertiesByName = Maps.uniqueIndex(properties, PropertyMetadata::getName);

        checkState(connectorProperties.putIfAbsent(catalogName, propertiesByName) == null, "Properties for connector '%s' are already registered", catalogName);
    }
    /** Drop the property registration for a catalog (no-op if absent). */
    public final void removeProperties(CatalogName catalogName)
    {
        connectorProperties.remove(catalogName);
    }
    /**
     * Evaluate and decode user-supplied property expressions for a catalog,
     * then fill in non-null defaults for any properties the user did not set.
     *
     * @param catalog only used for error messages
     * @throws TrinoException if the catalog is unknown, a property is not
     *         supported, or a value cannot be evaluated or decoded
     */
    public final Map<String, Object> getProperties(
            CatalogName catalogName,
            String catalog, // only use this for error messages
            Map<String, Expression> sqlPropertyValues,
            Session session,
            Metadata metadata,
            AccessControl accessControl,
            Map<NodeRef<Parameter>, Expression> parameters)
    {
        Map<String, PropertyMetadata<?>> supportedProperties = connectorProperties.get(catalogName);
        if (supportedProperties == null) {
            throw new TrinoException(NOT_FOUND, "Catalog not found: " + catalog);
        }

        ImmutableMap.Builder<String, Object> properties = ImmutableMap.builder();

        // Fill in user-specified properties
        for (Map.Entry<String, Expression> sqlProperty : sqlPropertyValues.entrySet()) {
            // Property names are matched case-insensitively (stored lower-case).
            String propertyName = sqlProperty.getKey().toLowerCase(ENGLISH);
            PropertyMetadata<?> property = supportedProperties.get(propertyName);
            if (property == null) {
                throw new TrinoException(
                        propertyError,
                        format("Catalog '%s' does not support %s property '%s'",
                                catalog,
                                propertyType,
                                propertyName));
            }

            // First evaluate the SQL expression to a constant of the declared SQL type...
            Object sqlObjectValue;
            try {
                sqlObjectValue = evaluatePropertyValue(sqlProperty.getValue(), property.getSqlType(), session, metadata, accessControl, parameters);
            }
            catch (TrinoException e) {
                throw new TrinoException(
                        propertyError,
                        format("Invalid value for %s property '%s': Cannot convert [%s] to %s",
                                propertyType,
                                property.getName(),
                                sqlProperty.getValue(),
                                property.getSqlType()),
                        e);
            }

            // ...then decode it into the connector's Java representation.
            Object value;
            try {
                value = property.decode(sqlObjectValue);
            }
            catch (Exception e) {
                throw new TrinoException(
                        propertyError,
                        format(
                                "Unable to set %s property '%s' to [%s]: %s",
                                propertyType,
                                property.getName(),
                                sqlProperty.getValue(),
                                e.getMessage()),
                        e);
            }

            properties.put(property.getName(), value);
        }
        Map<String, Object> userSpecifiedProperties = properties.build();

        // Fill in the remaining properties with non-null defaults
        for (PropertyMetadata<?> propertyMetadata : supportedProperties.values()) {
            if (!userSpecifiedProperties.containsKey(propertyMetadata.getName())) {
                Object value = propertyMetadata.getDefaultValue();
                if (value != null) {
                    properties.put(propertyMetadata.getName(), value);
                }
            }
        }
        return properties.build();
    }
public Map<CatalogName, Map<String, PropertyMetadata<?>>> getAllProperties()
{
return ImmutableMap.copyOf(connectorProperties);
}
private Object evaluatePropertyValue(
Expression expression,
Type expectedType,
Session session,
Metadata metadata,
AccessControl accessControl,
Map<NodeRef<Parameter>, Expression> parameters)
{
Expression rewritten = ExpressionTreeRewriter.rewriteWith(new ParameterRewriter(parameters), expression);
Object value = evaluateConstantExpression(rewritten, expectedType, metadata, session, accessControl, parameters);
// convert to object value type of SQL type
BlockBuilder blockBuilder = expectedType.createBlockBuilder(null, 1);
writeNativeValue(expectedType, blockBuilder, value);
Object objectValue = expectedType.getObjectValue(session.toConnectorSession(), blockBuilder, 0);
if (objectValue == null) {
throw new TrinoException(propertyError, format("Invalid null value for %s property", propertyType));
}
return objectValue;
}
}
| apache-2.0 |
virgo-agent-toolkit/rackspace-monitoring-agent | hostinfo/network.lua | 1790 | --[[
Copyright 2015 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS-IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--]]
local HostInfo = require('./base').HostInfo
local sigar = require('sigar')
local table = require('table')
--[[ Info ]]--

-- Interface configuration fields copied from sigar's netif info() table.
-- Hoisted to module scope so they are not rebuilt on every loop iteration.
local INFO_FIELDS = {
  'address',
  'address6',
  'broadcast',
  'flags',
  'hwaddr',
  'mtu',
  'name',
  'netmask',
  'type'
}

-- Traffic counter fields copied from sigar's netif usage() table.
local USAGE_FIELDS = {
  'rx_packets',
  'rx_bytes',
  'rx_errors',
  'rx_overruns',
  'rx_dropped',
  'tx_packets',
  'tx_bytes',
  'tx_errors',
  'tx_overruns',
  'tx_dropped',
  'tx_collisions',
  'tx_carrier',
}

-- Hostinfo provider reporting per-interface network configuration and
-- traffic counters, as exposed by sigar.
local Info = HostInfo:extend()

function Info:initialize()
  HostInfo.initialize(self)
end

-- Collects one record per network interface into self._params, then invokes
-- callback. A nil info or usage table for an interface is tolerated: the
-- corresponding fields are simply omitted. (The original dereferenced
-- info.name before the nil-check, which would error on a nil info table.)
function Info:_run(callback)
  local ctx = sigar:new()
  local netifs = ctx:netifs()
  for i = 1, #netifs do
    local info = netifs[i]:info()
    local usage = netifs[i]:usage()
    local obj = {}
    if info then
      for _, field in ipairs(INFO_FIELDS) do
        obj[field] = info[field]
      end
      obj['name'] = info.name
    end
    if usage then
      for _, field in ipairs(USAGE_FIELDS) do
        obj[field] = usage[field]
      end
    end
    table.insert(self._params, obj)
  end
  callback()
end

function Info:getType()
  return 'NETWORK'
end

return Info
| apache-2.0 |
joshualitt/incubator-beam | sdks/java/core/src/test/java/org/apache/beam/sdk/transforms/MaxTest.java | 2518 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.transforms;
import static org.apache.beam.sdk.TestUtils.checkCombineFn;
import static org.apache.beam.sdk.transforms.display.DisplayDataMatchers.hasDisplayItem;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import com.google.common.collect.Lists;
import org.apache.beam.sdk.transforms.display.DisplayData;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * Unit tests for the {@link Max} combine transforms.
 */
@RunWith(JUnit4.class)
public class MaxTest {

  /**
   * Verifies the display names of the globally and per-key variants.
   * (Renamed from {@code testMeanGetNames}, a copy-paste leftover from the
   * corresponding Mean test; this class tests Max.)
   */
  @Test
  public void testMaxGetNames() {
    assertEquals("Max.Globally", Max.integersGlobally().getName());
    assertEquals("Max.Globally", Max.doublesGlobally().getName());
    assertEquals("Max.Globally", Max.longsGlobally().getName());
    assertEquals("Max.PerKey", Max.integersPerKey().getName());
    assertEquals("Max.PerKey", Max.doublesPerKey().getName());
    assertEquals("Max.PerKey", Max.longsPerKey().getName());
  }

  /** The integer combine fn returns the largest element. */
  @Test
  public void testMaxIntegerFn() {
    checkCombineFn(
        new Max.MaxIntegerFn(),
        Lists.newArrayList(1, 2, 3, 4),
        4);
  }

  /** The long combine fn returns the largest element. */
  @Test
  public void testMaxLongFn() {
    checkCombineFn(
        new Max.MaxLongFn(),
        Lists.newArrayList(1L, 2L, 3L, 4L),
        4L);
  }

  /** The double combine fn returns the largest element. */
  @Test
  public void testMaxDoubleFn() {
    checkCombineFn(
        new Max.MaxDoubleFn(),
        Lists.newArrayList(1.0, 2.0, 3.0, 4.0),
        4.0);
  }

  /** The comparator used by Max.globally is exposed via display data. */
  @Test
  public void testDisplayData() {
    Top.Largest<Integer> comparer = new Top.Largest<>();
    Combine.Globally<Integer, Integer> max = Max.globally(comparer);
    assertThat(DisplayData.from(max), hasDisplayItem("comparer", comparer.getClass()));
  }
}
| apache-2.0 |
appcelerator/titanium_mobile_tizen | tests/samples/KitchenSink/Resources/ui/handheld/ios/baseui/coverflow_remote.js | 1873 | function cf_remote() {
var win = Titanium.UI.createWindow();
var images = [];
var prefix = 'http://grin.hq.nasa.gov/IMAGES/SMALL/GPN-2000-0000';
var start = 38;
for (var c=0;c<30;c++)
{
var name = prefix + (start+c) + '.jpg';
images[c]= {image:name, width:225, height:225};
}
// create coverflow view with images
var view = Titanium.UI.iOS.createCoverFlowView({
images:images,
backgroundColor:'#000'
});
// click listener - when image is clicked
view.addEventListener('click',function(e)
{
Titanium.API.info("image clicked: "+e.index+', selected is '+view.selected);
});
// change listener when active image changes
view.addEventListener('change',function(e)
{
Titanium.API.info("image changed: "+e.index+', selected is '+view.selected);
});
win.add(view);
// change button to dynamically change the image
var change = Titanium.UI.createButton({
title:'Change Image',
style:Titanium.UI.iPhone.SystemButtonStyle.BORDERED
});
change.addEventListener('click',function()
{
Titanium.API.info("selected is = "+view.selected);
view.setImage(view.selected,'/images/imageview/28.jpg');
});
// move scroll view left
var left = Titanium.UI.createButton({
image:'/images/icon_arrow_left.png'
});
left.addEventListener('click', function(e)
{
var i = view.selected - 1;
if (i < 0)
{
i = 0;
}
view.selected = i;
});
// move scroll view right
var right = Titanium.UI.createButton({
image:'/images/icon_arrow_right.png'
});
right.addEventListener('click', function(e)
{
var i = view.selected + 1;
if (i >= images.length)
{
i = images.length - 1;
}
view.selected = i;
});
var flexSpace = Titanium.UI.createButton({
systemButton:Titanium.UI.iPhone.SystemButton.FLEXIBLE_SPACE
});
win.setToolbar([flexSpace,left,change,right,flexSpace]);
return win;
};
module.exports = cf_remote; | apache-2.0 |
sohaniwso2/devstudio-tooling-esb | plugins/org.wso2.developerstudio.esb.form.editors/src/org/wso2/developerstudio/esb/form/editors/article/providers/TaskPropertyDialog.java | 14688 | /*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.esb.form.editors.article.providers;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.eclipse.emf.common.command.CompoundCommand;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.TableEditor;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.layout.FormAttachment;
import org.eclipse.swt.layout.FormData;
import org.eclipse.swt.layout.FormLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Combo;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.swt.widgets.TableItem;
import org.eclipse.swt.widgets.Text;
import org.wso2.developerstudio.eclipse.gmf.esb.EsbFactory;
import org.wso2.developerstudio.eclipse.gmf.esb.TaskProperty;
import org.wso2.developerstudio.eclipse.gmf.esb.TaskPropertyType;
/**
 * SWT dialog for adding, editing and removing the properties of an ESB task.
 *
 * <p>For the default Synapse message-injector task implementation
 * ({@code org.apache.synapse.startup.tasks.MessageInjector}) a fixed set of
 * required property names is pre-populated and their names are not editable;
 * the "New..." button is only enabled when some of those required properties
 * are missing from the supplied list.
 */
public class TaskPropertyDialog extends Dialog {

    // True when the task implementation is the default Synapse MessageInjector.
    private boolean defaultESBtask;

    // Default parameter type shown for newly pre-populated rows.
    private static final String LITERAL = "LITERAL";

    // Backing model for the table rows; the edited result is exposed via
    // getTaskPropertyList() after the dialog closes.
    private List<TaskProperty> taskPropertyList = new ArrayList<TaskProperty>();

    // Set when the "New..." button is enabled; also unlocks inline editing of
    // the property-name column (see setupTableEditor).
    private boolean newButtonSelected = false;

    /**
     * Table for add/edit/remove parameters.
     */
    private Table propertyTable;

    /**
     * Table Editor for inline property edit
     */
    private TableEditor propertyTypeEditor;

    /**
     * Combo box for select parameter type.
     */
    private Combo cmbPropertyType;

    /**
     * Button for add new parameter.
     */
    private Button newPropertyButton;

    /**
     * Button for remove parameter.
     */
    private Button removePropertyButton;

    /**
     * Command for recording user operations.
     */
    private CompoundCommand resultCommand;

    // Required property names of the default message-injector task.
    private String[] properties = { "format", "message", "soapAction", "to", "proxyName", "sequenceName", "injectTo" };

    /**
     * @param parentShell parent shell for the dialog
     * @param taskImpel   fully-qualified class name of the task implementation
     * @param list        existing properties to edit, or null when adding for the first time
     */
    public TaskPropertyDialog(Shell parentShell, String taskImpel, List<TaskProperty> list) {
        super(parentShell);
        // When updating an existing property, then get the property list
        if (list != null) {
            taskPropertyList = list;
        } else {
            // When adding properties initially, create a new list
            // NOTE(review): this assigns to the local parameter only; the field
            // already defaults to a fresh ArrayList, so this branch is a no-op.
            list = new ArrayList<TaskProperty>();
        }
        defaultESBtask = (taskImpel.equals("org.apache.synapse.startup.tasks.MessageInjector"));
    }

    protected void configureShell(Shell newShell) {
        super.configureShell(newShell);
        newShell.setText("Task Properties");
    }

    /**
     * Builds the dialog body: the New/Remove buttons and the three-column
     * property table, pre-populated from the existing property list (or from
     * the required default names on first use).
     */
    protected Control createDialogArea(Composite parent) {
        Composite container = (Composite) super.createDialogArea(parent);
        FormLayout mainLayout = new FormLayout();
        mainLayout.marginHeight = 5;
        mainLayout.marginWidth = 5;
        container.setLayout(mainLayout);
        // Button for add new parameter.
        newPropertyButton = new Button(container, SWT.NONE);
        newPropertyButton.setText("New...");
        FormData newTaskPropertyButtonLayoutData = new FormData(80, SWT.DEFAULT);
        newTaskPropertyButtonLayoutData.right = new FormAttachment(100);
        newPropertyButton.setLayoutData(newTaskPropertyButtonLayoutData);
        newPropertyButton.addListener(SWT.Selection, new Listener() {
            public void handleEvent(Event event) {
                // Add an empty row and select it for editing.
                TableItem item = bindPram(EsbFactory.eINSTANCE.createTaskProperty());
                propertyTable.select(propertyTable.indexOf(item));
            }
        });
        // Enable new button
        enableNewButton();
        // Button for remove Property.
        removePropertyButton = new Button(container, SWT.NONE);
        removePropertyButton.setText("Remove");
        FormData removeTaskPropertyButtonLayoutData = new FormData();
        removeTaskPropertyButtonLayoutData.top = new FormAttachment(newPropertyButton, 5);
        removeTaskPropertyButtonLayoutData.right = new FormAttachment(100);
        removeTaskPropertyButtonLayoutData.left = new FormAttachment(newPropertyButton, 0, SWT.LEFT);
        removePropertyButton.setLayoutData(removeTaskPropertyButtonLayoutData);
        removePropertyButton.addListener(SWT.Selection, new Listener() {
            public void handleEvent(Event event) {
                int selectedIndex = propertyTable.getSelectionIndex();
                if (-1 != selectedIndex) {
                    unbindParam(selectedIndex);
                    // Select the next available candidate for deletion.
                    if (selectedIndex < propertyTable.getItemCount()) {
                        propertyTable.select(selectedIndex);
                    } else {
                        propertyTable.select(selectedIndex - 1);
                    }
                }
            }
        });
        // Table for show the parameters.
        propertyTable = new Table(container, SWT.BORDER | SWT.FULL_SELECTION | SWT.HIDE_SELECTION);
        TableColumn nameColumn = new TableColumn(propertyTable, SWT.LEFT);
        TableColumn typeColumn = new TableColumn(propertyTable, SWT.LEFT);
        TableColumn valueColumn = new TableColumn(propertyTable, SWT.LEFT);
        nameColumn.setText("Parameter Name");
        nameColumn.setWidth(150);
        valueColumn.setText("Value/Expression");
        valueColumn.setWidth(200);
        typeColumn.setText("Parameter Type");
        typeColumn.setWidth(150);
        propertyTable.setHeaderVisible(true);
        propertyTable.setLinesVisible(true);
        // Selecting a row opens the inline type combo for its "Parameter Type" cell.
        Listener tblPropertiesListener = new Listener() {
            public void handleEvent(Event evt) {
                if (null != evt.item) {
                    if (evt.item instanceof TableItem) {
                        TableItem item = (TableItem) evt.item;
                        editItem(item);
                    }
                }
            }
        };
        propertyTable.addListener(SWT.Selection, tblPropertiesListener);
        // When updating an existing property
        if (taskPropertyList.size() > 0) {
            if (getMissingProperties() != null) {
                taskPropertyList.addAll(getMissingProperties());
            }
            for (TaskProperty property : taskPropertyList) {
                bindPram(property);
            }
        } else {
            // when adding properties for the first time
            for (int i = 0; i < properties.length; i++) {
                bindPram(properties[i]);
            }
        }
        setupTableEditor(propertyTable);
        FormData taskPropertiesTableLayoutData = new FormData(SWT.DEFAULT, 150);
        taskPropertiesTableLayoutData.top = new FormAttachment(newPropertyButton, 0, SWT.TOP);
        taskPropertiesTableLayoutData.left = new FormAttachment(0);
        taskPropertiesTableLayoutData.right = new FormAttachment(newPropertyButton, -5);
        taskPropertiesTableLayoutData.bottom = new FormAttachment(100);
        propertyTable.setLayoutData(taskPropertiesTableLayoutData);
        return parent;
    }

    /**
     * Get missing required properties
     */
    // Returns the required default-task properties that are absent from the
    // current list, each created with a LITERAL type and null value.
    private List<TaskProperty> getMissingProperties() {
        List<TaskProperty> newList = new ArrayList<TaskProperty>();
        for (String prop : properties) {
            boolean isAvailable = false;
            for (TaskProperty property : taskPropertyList) {
                if (prop.equals(property.getPropertyName())) {
                    isAvailable = true;
                    break;
                }
            }
            if (!isAvailable) {
                TaskProperty tskProperty = createProperty(prop);
                newList.add(tskProperty);
            }
        }
        return newList;
    }

    /**
     * Creates a new property
     *
     * @param name property name for the new LITERAL-typed, valueless property
     * @return the created property
     */
    private TaskProperty createProperty(String name) {
        TaskProperty newPrp = EsbFactory.eINSTANCE.createTaskProperty();
        newPrp.setPropertyName(name);
        newPrp.setPropertyType(TaskPropertyType.LITERAL);
        newPrp.setPropertyValue(null);
        return newPrp;
    }

    /**
     * Enable and disable the New button based on the property values
     */
    private void enableNewButton() {
        if (taskPropertyList.size() > 0) {
            String[] checkProperties = new String[taskPropertyList.size()];
            for (int i = 0; i < taskPropertyList.size(); i++) {
                checkProperties[i] = String.valueOf(taskPropertyList.get(i).getPropertyName());
            }
            // Compare the existing names against the required set, ignoring order.
            Arrays.sort(checkProperties);
            Arrays.sort(properties);
            if (!Arrays.equals(checkProperties, properties)) {
                // Enable New button if required properties are missing
                newPropertyButton.setEnabled(true);
                newButtonSelected = true;
            } else {
                newPropertyButton.setEnabled(false);
            }
        } else {
            // Disable the button when adding properties for the first time
            newPropertyButton.setEnabled(false);
        }
    }

    // Commits the table rows back into taskPropertyList (replacing rows with
    // matching names), then closes the dialog.
    protected void okPressed() {
        for (TableItem item : propertyTable.getItems()) {
            TaskProperty param = null;
            if (item.getData() == null) {
                // Row added via "New..." — no backing model object yet.
                param = EsbFactory.eINSTANCE.createTaskProperty();
                param.setPropertyName(item.getText(0));
                if (item.getText(1).equals(TaskPropertyType.LITERAL.toString())) {
                    param.setPropertyValue(item.getText(2));
                    param.setPropertyType(TaskPropertyType.LITERAL);
                }
                if (item.getText(1).equals(TaskPropertyType.XML.toString())) {
                    param.setPropertyValue(item.getText(2));
                    param.setPropertyType(TaskPropertyType.XML);
                }
            } else {
                // Existing row — update its backing model object in place.
                param = (TaskProperty) item.getData();
                param.setPropertyName(item.getText(0));
                if (item.getText(1).equals(TaskPropertyType.LITERAL.toString())) {
                    param.setPropertyValue(item.getText(2));
                    param.setPropertyType(TaskPropertyType.LITERAL);
                }
                if (item.getText(1).equals(TaskPropertyType.XML.toString())) {
                    param.setPropertyValue(item.getText(2));
                    param.setPropertyType(TaskPropertyType.XML);
                }
            }
            for (TaskProperty propertyItem : taskPropertyList) {
                // When updating the existing properties, remove the old
                // property
                if (propertyItem.getPropertyName().equals(param.getPropertyName())) {
                    taskPropertyList.remove(propertyItem);
                    break;
                }
            }
            taskPropertyList.add(param);
        }
        setTaskPropertyList(taskPropertyList);
        finalizeDefaultMessageInjecttorTask();
        super.okPressed();
    }

    public void setTaskPropertyList(List<TaskProperty> taskPropertyList) {
        this.taskPropertyList = taskPropertyList;
    }

    /** Returns the (possibly edited) property list after the dialog closes. */
    public List<TaskProperty> getTaskPropertyList() {
        return taskPropertyList;
    }

    protected void cancelPressed() {
        finalizeDefaultMessageInjecttorTask();
        super.cancelPressed();
    }

    // Adds a table row for a bare property name (LITERAL type, no value, no model object).
    private TableItem bindPram(String value) {
        TableItem item = new TableItem(propertyTable, SWT.NONE);
        item.setText(new String[] { value, LITERAL, null });
        return item;
    }

    // Adds a table row backed by an existing TaskProperty model object.
    private TableItem bindPram(TaskProperty param) {
        TableItem item = new TableItem(propertyTable, SWT.NONE);
        item.setText(new String[] { param.getPropertyName(), param.getPropertyType().getLiteral(),
                param.getPropertyValue() });
        item.setData(param);
        return item;
    }

    // Removes the row at itemIndex from both the table and the backing list.
    private void unbindParam(int itemIndex) {
        TableItem item = propertyTable.getItem(itemIndex);
        TaskProperty param = (TaskProperty) item.getData();
        removeTaskProperty(param);
        propertyTable.remove(propertyTable.indexOf(item));
    }

    // Removes the list entry whose name matches the given property, if any.
    private void removeTaskProperty(TaskProperty param) {
        if (param != null) {
            for (TaskProperty propertyItem : taskPropertyList) {
                if (propertyItem.getPropertyName().equals(param.getPropertyName())) {
                    taskPropertyList.remove(propertyItem);
                    break;
                }
            }
        }
    }

    // Installs a double-click-to-edit text editor on the table cells.
    private void setupTableEditor(final Table table) {
        final TableEditor cellEditor = new TableEditor(table);
        cellEditor.grabHorizontal = true;
        cellEditor.minimumWidth = 50;
        table.addMouseListener(new MouseAdapter() {
            /**
             * Setup a new cell editor control at double click event.
             */
            public void mouseDoubleClick(MouseEvent e) {
                // Dispose the old editor control (if one is setup).
                Control oldEditorControl = cellEditor.getEditor();
                if (null != oldEditorControl)
                    oldEditorControl.dispose();
                // Mouse location.
                Point mouseLocation = new Point(e.x, e.y);
                // Grab the selected row.
                TableItem item = (TableItem) table.getItem(mouseLocation);
                if (null == item)
                    return;
                // Determine which column was selected.
                int selectedColumn = -1;
                for (int i = 0, n = table.getColumnCount(); i < n; i++) {
                    if (item.getBounds(i).contains(mouseLocation)) {
                        selectedColumn = i;
                        break;
                    }
                }
                // Setup a new editor control.
                if (-1 != selectedColumn) {
                    // for default message-injector-task don't allow
                    // to edit the property names
                    if (selectedColumn == 0 && !newButtonSelected) {
                        return; // for default message-injector-task don't allow
                                // to edit the property names
                    }
                    Text editorControl = new Text(table, SWT.NONE);
                    final int editorControlColumn = selectedColumn;
                    editorControl.setText(item.getText(selectedColumn));
                    editorControl.addModifyListener(new ModifyListener() {
                        public void modifyText(ModifyEvent e) {
                            // Mirror each keystroke back into the table cell.
                            Text text = (Text) cellEditor.getEditor();
                            cellEditor.getItem().setText(editorControlColumn, text.getText());
                        }
                    });
                    editorControl.selectAll();
                    editorControl.setFocus();
                    cellEditor.setEditor(editorControl, item, selectedColumn);
                }
            }

            /**
             * Dispose cell editor control at mouse down (otherwise the control keep showing).
             */
            public void mouseDown(MouseEvent e) {
                Control oldEditorControl = cellEditor.getEditor();
                if (null != oldEditorControl)
                    oldEditorControl.dispose();
            }
        });
    }

    // Opens an inline read-only combo (LITERAL/XML) in the "Parameter Type"
    // cell of the given row; the selection is written back into the row text.
    private void editItem(final TableItem item) {
        propertyTypeEditor = initTableEditor(propertyTypeEditor, item.getParent());
        cmbPropertyType = new Combo(item.getParent(), SWT.READ_ONLY);
        cmbPropertyType.setItems(new String[] { TaskPropertyType.LITERAL.toString(), TaskPropertyType.XML.toString() });
        cmbPropertyType.setText(item.getText(1));
        propertyTypeEditor.setEditor(cmbPropertyType, item, 1);
        item.getParent().redraw();
        item.getParent().layout();
        cmbPropertyType.addListener(SWT.Selection, new Listener() {
            public void handleEvent(Event evt) {
                item.setText(1, cmbPropertyType.getText());
            }
        });
    }

    // Disposes the previous editor's control (if any) and returns a fresh
    // left-aligned TableEditor for the given table.
    private TableEditor initTableEditor(TableEditor editor, Table table) {
        if (null != editor) {
            Control lastCtrl = editor.getEditor();
            if (null != lastCtrl) {
                lastCtrl.dispose();
            }
        }
        editor = new TableEditor(table);
        editor.horizontalAlignment = SWT.LEFT;
        editor.grabHorizontal = true;
        return editor;
    }

    // NOTE(review): method name is misspelled ("Injecttor") and the
    // defaultESBtask branch is empty — this currently only clears
    // resultCommand. Presumably a placeholder for command recording; confirm
    // before removing.
    private void finalizeDefaultMessageInjecttorTask() {
        resultCommand = null;
        if (defaultESBtask) {
        }
    }
}
| apache-2.0 |
dsrbecky/gapid | gapis/memory/types.go | 2297 | // Copyright (C) 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package memory
import "reflect"
// Cached reflect.Type values for the memory value interfaces defined in this
// package. Not referenced in this file — presumably consumed by
// reflection-based helpers elsewhere in the package; confirm before removing.
var (
	tyPointer = reflect.TypeOf((*Pointer)(nil)).Elem()
	tyCharTy = reflect.TypeOf((*CharTy)(nil)).Elem()
	tyIntTy = reflect.TypeOf((*IntTy)(nil)).Elem()
	tyUintTy = reflect.TypeOf((*UintTy)(nil)).Elem()
	tySizeTy = reflect.TypeOf((*SizeTy)(nil)).Elem()
)
// Int is a signed integer type.
type Int int64

// IntTy is the interface implemented by types that should be treated as int type.
type IntTy interface {
	IsInt()
}

// IsInt marks Int as implementing the IntTy marker interface.
func (Int) IsInt() {}

// Uint is an unsigned integer type.
type Uint uint64

// UintTy is the interface implemented by types that should be treated as uint type.
type UintTy interface {
	IsUint()
}

// IsUint marks Uint as implementing the UintTy marker interface.
func (Uint) IsUint() {}

// Char is the possibly signed but maybe unsigned C/C++ char.
type Char uint8

// CharTy is the interface implemented by types that should be treated as char type.
type CharTy interface {
	IsChar()
}

// IsChar marks Char as implementing the CharTy marker interface.
func (Char) IsChar() {}

// CharToBytes returns the byte representation of the given Char slice.
func CharToBytes(chars []Char) []byte {
	// Idiomatic range copy (param renamed from the non-ASCII 'ϟchars').
	bytes := make([]byte, len(chars))
	for i, c := range chars {
		bytes[i] = byte(c)
	}
	return bytes
}

// Size is a size_t type.
type Size uint64

// SizeTy is the interface implemented by types that should be treated as size_t type.
type SizeTy interface {
	IsMemorySize()
}

// IsMemorySize marks Size as implementing the SizeTy marker interface.
func (Size) IsMemorySize() {}

// IsSize returns true if v is a Size or alias to a Size.
func IsSize(v interface{}) bool {
	_, ok := v.(SizeTy)
	return ok
}
| apache-2.0 |
romartin/dashbuilder | dashbuilder-client/dashbuilder-common-client/src/test/java/org/dashbuilder/common/client/editor/list/DropDownEditorTest.java | 3893 | /**
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dashbuilder.common.client.editor.list;
import java.util.ArrayList;
import java.util.Collection;
import javax.enterprise.event.Event;
import org.dashbuilder.common.client.event.ValueChangeEvent;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.uberfire.ext.widgets.common.client.dropdown.LiveSearchDropDown;
import static junit.framework.Assert.*;
import static org.mockito.Mockito.*;
/**
 * Unit tests for {@link DropDownEditor}: entry lookup, selection events,
 * value setting and clearing, against mocked view/drop-down collaborators.
 */
@RunWith(MockitoJUnitRunner.class)
public class DropDownEditorTest {

    @Mock
    DropDownEditor.View view;

    @Mock
    LiveSearchDropDown liveSearchDropDown;

    @Mock
    Event<ValueChangeEvent<String>> valueChangeEvent;

    DropDownEditor presenter;
    Collection<DropDownEditor.Entry> entries = new ArrayList<>();

    @Before
    public void setup() {
        presenter = new DropDownEditor(view, liveSearchDropDown, valueChangeEvent);
        entries.add(presenter.newEntry("entry1", "Entry 1"));
        entries.add(presenter.newEntry("entry2", "Entry 2"));
        presenter.setEntries(entries);
        presenter.init();
    }

    /** init() configures the drop-down (no search) and starts with no value. */
    @Test
    public void testInit() {
        verify(liveSearchDropDown).setSearchEnabled(false);
        verify(liveSearchDropDown).setSearchService(any());
        assertNull(presenter.getValue());
    }

    /** The registered entries are exposed through the search callback in order. */
    @Test
    public void testEntries() {
        presenter.getDropDownEntries("", -1, itemList -> {
            assertEquals(itemList.size(), 2);
            assertEquals(itemList.get(0).getKey(), "entry1");
            assertEquals(itemList.get(1).getKey(), "entry2");
        });
    }

    /** Selecting an entry updates the value and fires a change event carrying old and new values. */
    @Test
    public void testSelect() {
        when(liveSearchDropDown.getSelectedValue()).thenReturn("Entry 1");
        presenter.setValue("entry2");
        presenter.onEntrySelected();
        ArgumentCaptor<ValueChangeEvent> ac = ArgumentCaptor.forClass(ValueChangeEvent.class);
        verify(valueChangeEvent).fire(ac.capture());
        assertEquals(presenter.getValue(), "entry1");
        ValueChangeEvent event = ac.getValue();
        assertEquals(event.getValue(), "entry1");
        assertEquals(event.getOldValue(), "entry2");
    }

    /** setValue() resolves the entry description; null falls back to the select hint. */
    @Test
    public void testSetValue() {
        presenter.setValue("entry2");
        verify(liveSearchDropDown).setSelectedItem("entry2", "Entry 2");
        assertEquals(presenter.getValue(), "entry2");
        presenter.setSelectHint("- select - ");
        presenter.setValue(null);
        verify(liveSearchDropDown).setSelectedItem("- select - ", "- select - ");
        assertNull(presenter.getValue());
        // Re-setting the entries re-applies a previously set value...
        reset(liveSearchDropDown);
        presenter.clear();
        presenter.setValue("entry2");
        presenter.setEntries(entries);
        verify(liveSearchDropDown).setSelectedItem("entry2", "Entry 2");
        // ...but does nothing when no value was set.
        reset(liveSearchDropDown);
        presenter.clear();
        presenter.setEntries(entries);
        verify(liveSearchDropDown, never()).setSelectedItem(anyString(), anyString());
    }

    /** clear() resets both the drop-down widget and the current value. */
    @Test
    public void testClear() {
        presenter.setValue("entry2");
        presenter.clear();
        verify(liveSearchDropDown).clear();
        assertNull(presenter.getValue());
    }
}
| apache-2.0 |
lxp521125/weixin | game/more/games/pigu/js/touch.js | 5957 | // Zepto.js
// (c) 2010-2014 Thomas Fuchs
// Zepto.js may be freely distributed under the MIT license.
// Vendored Zepto touch module: synthesizes tap / singleTap / doubleTap /
// longTap / swipe{Left,Right,Up,Down} events from raw touch and pointer
// events, and registers shorthand methods ($.fn.tap, $.fn.swipeLeft, ...).
;(function($){
  // `touch` holds the state of the in-flight gesture (element, start/end
  // coordinates, last-tap timestamp); the timeouts gate deferred event firing.
  var touch = {},
    touchTimeout, tapTimeout, swipeTimeout, longTapTimeout,
    longTapDelay = 750,
    gesture

  // Classifies a movement as Left/Right/Up/Down by its dominant axis.
  function swipeDirection(x1, x2, y1, y2) {
    return Math.abs(x1 - x2) >=
      Math.abs(y1 - y2) ? (x1 - x2 > 0 ? 'Left' : 'Right') : (y1 - y2 > 0 ? 'Up' : 'Down')
  }

  // Fires 'longTap' if a touch is still held when the 750ms timer expires.
  function longTap() {
    longTapTimeout = null
    if (touch.last) {
      touch.el.trigger('longTap')
      touch = {}
    }
  }

  function cancelLongTap() {
    if (longTapTimeout) clearTimeout(longTapTimeout)
    longTapTimeout = null
  }

  // Aborts every pending synthetic event and resets gesture state.
  function cancelAll() {
    if (touchTimeout) clearTimeout(touchTimeout)
    if (tapTimeout) clearTimeout(tapTimeout)
    if (swipeTimeout) clearTimeout(swipeTimeout)
    if (longTapTimeout) clearTimeout(longTapTimeout)
    touchTimeout = tapTimeout = swipeTimeout = longTapTimeout = null
    touch = {}
  }

  // True for the primary contact of a (MS)Pointer touch event.
  function isPrimaryTouch(event){
    return (event.pointerType == 'touch' ||
      event.pointerType == event.MSPOINTER_TYPE_TOUCH)
      && event.isPrimary
  }

  // True when the event is the pointer/MSPointer flavour of the given phase
  // ('down' | 'move' | 'up').
  function isPointerEventType(e, type){
    return (e.type == 'pointer'+type ||
      e.type.toLowerCase() == 'mspointer'+type)
  }

  $(document).ready(function(){
    var now, delta, deltaX = 0, deltaY = 0, firstTouch, _isPointerType

    // IE10/11 gesture recognizer, used only for velocity-based swipes.
    if ('MSGesture' in window) {
      gesture = new MSGesture()
      gesture.target = document.body
    }

    $(document)
      .bind('MSGestureEnd', function(e){
        var swipeDirectionFromVelocity =
          e.velocityX > 1 ? 'Right' : e.velocityX < -1 ? 'Left' : e.velocityY > 1 ? 'Down' : e.velocityY < -1 ? 'Up' : null;
        if (swipeDirectionFromVelocity) {
          touch.el.trigger('swipe')
          touch.el.trigger('swipe'+ swipeDirectionFromVelocity)
        }
      })
      .on('touchstart MSPointerDown pointerdown', function(e){
        if((_isPointerType = isPointerEventType(e, 'down')) &&
          !isPrimaryTouch(e)) return
        firstTouch = _isPointerType ? e : e.touches[0]
        if (e.touches && e.touches.length === 1 && touch.x2) {
          // Clear out touch movement data if we have it sticking around
          // This can occur if touchcancel doesn't fire due to preventDefault, etc.
          touch.x2 = undefined
          touch.y2 = undefined
        }
        now = Date.now()
        delta = now - (touch.last || now)
        // Text nodes have no tagName; fall back to their parent element.
        touch.el = $('tagName' in firstTouch.target ?
          firstTouch.target : firstTouch.target.parentNode)
        touchTimeout && clearTimeout(touchTimeout)
        touch.x1 = firstTouch.pageX
        touch.y1 = firstTouch.pageY
        // Two taps within 250ms count as a double tap.
        if (delta > 0 && delta <= 250) touch.isDoubleTap = true
        touch.last = now
        longTapTimeout = setTimeout(longTap, longTapDelay)
        // adds the current touch contact for IE gesture recognition
        if (gesture && _isPointerType) gesture.addPointer(e.pointerId);
      })
      .on('touchmove MSPointerMove pointermove', function(e){
        // NOTE(review): unconditional preventDefault() on every touchmove —
        // this blocks native scrolling document-wide and diverges from
        // upstream Zepto; looks like a local modification, confirm it is
        // intentional for this game before reusing elsewhere.
        e.preventDefault()
        if((_isPointerType = isPointerEventType(e, 'move')) &&
          !isPrimaryTouch(e)) return
        firstTouch = _isPointerType ? e : e.touches[0]
        cancelLongTap()
        touch.x2 = firstTouch.pageX
        touch.y2 = firstTouch.pageY
        // Accumulated absolute movement, used to veto taps after a drag.
        deltaX += Math.abs(touch.x1 - touch.x2)
        deltaY += Math.abs(touch.y1 - touch.y2)
      })
      .on('touchend MSPointerUp pointerup', function(e){
        if((_isPointerType = isPointerEventType(e, 'up')) &&
          !isPrimaryTouch(e)) return
        cancelLongTap()
        // swipe
        if ((touch.x2 && Math.abs(touch.x1 - touch.x2) > 30) ||
            (touch.y2 && Math.abs(touch.y1 - touch.y2) > 30))
          swipeTimeout = setTimeout(function() {
            touch.el.trigger('swipe')
            touch.el.trigger('swipe' + (swipeDirection(touch.x1, touch.x2, touch.y1, touch.y2)))
            touch = {}
          }, 0)
        // normal tap
        else if ('last' in touch)
          // don't fire tap when delta position changed by more than 30 pixels,
          // for instance when moving to a point and back to origin
          if (deltaX < 30 && deltaY < 30) {
            // delay by one tick so we can cancel the 'tap' event if 'scroll' fires
            // ('tap' fires before 'scroll')
            tapTimeout = setTimeout(function() {
              // trigger universal 'tap' with the option to cancelTouch()
              // (cancelTouch cancels processing of single vs double taps for faster 'tap' response)
              var event = $.Event('tap')
              event.cancelTouch = cancelAll
              touch.el.trigger(event)
              // trigger double tap immediately
              if (touch.isDoubleTap) {
                if (touch.el) touch.el.trigger('doubleTap')
                touch = {}
              }
              // trigger single tap after 250ms of inactivity
              else {
                touchTimeout = setTimeout(function(){
                  touchTimeout = null
                  if (touch.el) touch.el.trigger('singleTap')
                  touch = {}
                }, 250)
              }
            }, 0)
          } else {
            touch = {}
          }
        deltaX = deltaY = 0
      })
      // when the browser window loses focus,
      // for example when a modal dialog is shown,
      // cancel all ongoing events
      .on('touchcancel MSPointerCancel pointercancel', cancelAll)

    // scrolling the window indicates intention of the user
    // to scroll, not tap or swipe, so cancel all ongoing events
    $(window).on('scroll', cancelAll)
  })

  // Register $.fn shortcuts, e.g. $(el).tap(fn) === $(el).on('tap', fn).
  ;['swipe', 'swipeLeft', 'swipeRight', 'swipeUp', 'swipeDown',
    'doubleTap', 'tap', 'singleTap', 'longTap'].forEach(function(eventName){
    $.fn[eventName] = function(callback){ return this.on(eventName, callback) }
  })
})(Zepto)