repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
gigaal/calendarwrapper | src/org/joda/time/IllegalFieldValueException.java | 213 | package org.joda.time;
/**
 * Exception thrown when a field value is rejected as illegal.
 * <p>
 * This trimmed variant carries no field/value details; it exists so callers
 * can catch the joda-time type. Extends {@link IllegalArgumentException} so
 * generic argument-validation handlers also catch it.
 */
public class IllegalFieldValueException extends IllegalArgumentException {
    /** Serialization lock. */
    private static final long serialVersionUID = 6305711765985447737L;
}
| apache-2.0 |
open-pay/openpay-dotnet | OpenpayTest/MerchantServiceTest.cs | 982 | using Microsoft.VisualStudio.TestTools.UnitTesting;
using Openpay;
using Openpay.Entities;
using Openpay.Entities.Request;
using System;
using System.Collections.Generic;
namespace OpenpayTest
{
[TestClass]
public class MerchantServiceTest
{
[TestMethod]
public void TestMerchant_Get()
{
OpenpayAPI openpayAPI = new OpenpayAPI(Constants.API_KEY, Constants.MERCHANT_ID);
Merchant merchant = openpayAPI.MerchantService.Get();
Assert.IsNotNull(merchant);
Assert.IsNotNull(merchant.Name);
Assert.IsNotNull(merchant.Email);
Assert.IsNotNull(merchant.CreationDate);
Assert.IsNotNull(merchant.Status);
Assert.IsNull(merchant.CLABE);
Assert.IsNotNull(merchant.Phone);
Assert.IsTrue(merchant.Balance.CompareTo(1000.00M) > 0);
Assert.IsTrue(merchant.AvailableFunds.CompareTo(1000.00M) > 0);
}
}
}
| apache-2.0 |
Talend/ui | packages/components/src/VirtualizedList/index.js | 1880 | import Content from './Content.component';
import VirtualizedList from './VirtualizedList.component';
import { cellDictionary, headerDictionary } from './utils/dictionary';
import { ActionsColumn } from './CellActions';
import { BadgeColumn } from './CellBadge';
import { CheckboxColumn } from './CellCheckbox';
import { DatetimeColumn } from './CellDatetime';
import { TextIconColumn } from './CellTextIcon';
import { TitleColumn } from './CellTitle';
import { BooleanColumn } from './CellBoolean';
import { LabelColumn } from './CellLabel';
import { IconTextColumn } from './CellIconText';
import { QualityBarColumn } from './CellQualityBar';
import { MappedDataColumn } from './CellMappedData';
import HeaderResizable from './HeaderResizable';
import RowCollapsiblePanel from './RowCollapsiblePanel';
import { listTypes, SORT_BY, SELECTION_MODE } from './utils/constants';
import * as rowUtils from './utils/gridrow';
// For compatibility: expose every column/cell component, the dictionaries and
// the constants as static properties of the main component, so consumers can
// do `VirtualizedList.Title`, `VirtualizedList.SORT_BY`, etc.
Object.assign(VirtualizedList, {
	Content,
	Actions: ActionsColumn,
	Badge: BadgeColumn,
	Checkbox: CheckboxColumn,
	Datetime: DatetimeColumn,
	Text: Content,
	TextIcon: TextIconColumn,
	Title: TitleColumn,
	Boolean: BooleanColumn,
	Label: LabelColumn,
	IconText: IconTextColumn,
	QualityBar: QualityBarColumn,
	MappedData: MappedDataColumn,
	RowCollapsiblePanel,
	HeaderResizable,
	cellDictionary,
	headerDictionary,
	rowUtils,
	LIST_TYPES: listTypes,
	SORT_BY,
	SELECTION_MODE,
});
export default VirtualizedList;
| apache-2.0 |
javabilities/kafka-demo | consumer/src/main/java/com/javabilities/consumer/service/KafkaService.java | 6681 | package com.javabilities.consumer.service;
import com.javabilities.consumer.config.KafkaProperties;
import com.javabilities.consumer.config.ZookeeperProperties;
import kafka.admin.AdminUtils;
import kafka.common.TopicExistsException;
import kafka.utils.ZKStringSerializer$;
import org.I0Itec.zkclient.ZkClient;
import org.apache.kafka.common.serialization.StringSerializer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.SmartLifecycle;
import org.springframework.context.annotation.Bean;
import org.springframework.expression.common.LiteralExpression;
import org.springframework.integration.annotation.ServiceActivator;
import org.springframework.integration.channel.QueueChannel;
import org.springframework.integration.kafka.core.*;
import org.springframework.integration.kafka.inbound.KafkaMessageDrivenChannelAdapter;
import org.springframework.integration.kafka.listener.KafkaMessageListenerContainer;
import org.springframework.integration.kafka.listener.KafkaTopicOffsetManager;
import org.springframework.integration.kafka.listener.OffsetManager;
import org.springframework.integration.kafka.outbound.KafkaProducerMessageHandler;
import org.springframework.integration.kafka.serializer.common.StringDecoder;
import org.springframework.integration.kafka.support.*;
import org.springframework.messaging.MessageHandler;
import org.springframework.messaging.PollableChannel;
import org.springframework.stereotype.Service;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;
/**
 * Wires the Kafka demo consumer together with Spring Integration:
 * an outbound producer endpoint on the "toKafka" channel, a message-driven
 * inbound adapter feeding the "received" queue channel, and a lifecycle bean
 * that creates the topic on context startup. Broker and Zookeeper addresses
 * come from the injected configuration properties.
 */
@Service
public class KafkaService {

    private final Logger logger = LoggerFactory.getLogger(KafkaService.class);

    @Autowired
    ZookeeperProperties zookeeperProperties;

    @Autowired
    KafkaProperties kafkaProperties;

    // Topic shared by the producer, the listener container and the topic creator.
    @Value("${consumer.topic}")
    private String topic;

    /**
     * Outbound endpoint: messages sent to the "toKafka" channel are published
     * to the configured topic.
     */
    @ServiceActivator(inputChannel = "toKafka")
    @Bean
    public MessageHandler handler() throws Exception {
        KafkaProducerMessageHandler handler = new KafkaProducerMessageHandler(producerContext());
        handler.setTopicExpression(new LiteralExpression(this.topic));
        // handler.setMessageKeyExpression(new LiteralExpression(this.messageKey));
        return handler;
    }

    /** Connection factory pointing at the single configured broker. */
    @Bean
    public ConnectionFactory kafkaBrokerConnectionFactory() throws Exception {
        return new DefaultConnectionFactory(kafkaConfiguration());
    }

    /** Broker-address list configuration with a short (500 ms) socket timeout. */
    @Bean
    public Configuration kafkaConfiguration() {
        BrokerAddressListConfiguration configuration = new BrokerAddressListConfiguration(
                BrokerAddress.fromAddress(getBrokerAddress()));
        configuration.setSocketTimeout(500);
        return configuration;
    }

    /**
     * Producer context holding a single String/String producer for the topic.
     * "linger.ms" lets the producer batch sends for up to one second.
     */
    @Bean
    public KafkaProducerContext producerContext() throws Exception {
        KafkaProducerContext kafkaProducerContext = new KafkaProducerContext();
        ProducerMetadata<String, String> producerMetadata = new ProducerMetadata<>(this.topic, String.class,
                String.class, new StringSerializer(), new StringSerializer());
        Properties props = new Properties();
        props.put("linger.ms", "1000");
        ProducerFactoryBean<String, String> producer = new ProducerFactoryBean<>(producerMetadata, getBrokerAddress(), props);
        ProducerConfiguration<String, String> config = new ProducerConfiguration<>(producerMetadata, producer.getObject());
        Map<String, ProducerConfiguration<?, ?>> producerConfigurationMap =
                Collections.<String, ProducerConfiguration<?, ?>>singletonMap(this.topic, config);
        kafkaProducerContext.setProducerConfigurations(producerConfigurationMap);
        return kafkaProducerContext;
    }

    /** Persists consumer offsets in the "si-offsets" Kafka topic via Zookeeper. */
    @Bean
    public OffsetManager offsetManager() {
        return new KafkaTopicOffsetManager(new ZookeeperConnect(getZookeeperConnect()), "si-offsets");
    }

    /** Listener container consuming partition 0 of the topic, single-threaded. */
    @Bean
    public KafkaMessageListenerContainer container(OffsetManager offsetManager) throws Exception {
        final KafkaMessageListenerContainer kafkaMessageListenerContainer = new KafkaMessageListenerContainer(
                kafkaBrokerConnectionFactory(), new Partition(this.topic, 0));
        kafkaMessageListenerContainer.setOffsetManager(offsetManager);
        kafkaMessageListenerContainer.setMaxFetch(100);
        kafkaMessageListenerContainer.setConcurrency(1);
        return kafkaMessageListenerContainer;
    }

    /** Adapter decoding keys and payloads as Strings, forwarding to received(). */
    @Bean
    public KafkaMessageDrivenChannelAdapter adapter(KafkaMessageListenerContainer container) {
        KafkaMessageDrivenChannelAdapter kafkaMessageDrivenChannelAdapter = new KafkaMessageDrivenChannelAdapter(container);
        StringDecoder decoder = new StringDecoder();
        kafkaMessageDrivenChannelAdapter.setKeyDecoder(decoder);
        kafkaMessageDrivenChannelAdapter.setPayloadDecoder(decoder);
        kafkaMessageDrivenChannelAdapter.setOutputChannel(received());
        return kafkaMessageDrivenChannelAdapter;
    }

    /** Pollable channel holding the consumed messages. */
    @Bean
    public PollableChannel received() {
        return new QueueChannel();
    }

    /** Lifecycle bean ensuring the topic exists before message flow starts. */
    @Bean
    public TopicCreator topicCreator() {
        return new TopicCreator(this.topic, getZookeeperConnect());
    }

    /**
     * {@link SmartLifecycle} bean that creates the topic (if absent) very early
     * in the context lifecycle (phase {@code Integer.MIN_VALUE}), before any
     * Kafka endpoints start.
     */
    public static class TopicCreator implements SmartLifecycle {

        private final String topic;
        private final String zkConnect;
        private volatile boolean running;

        public TopicCreator(String topic, String zkConnect) {
            this.topic = topic;
            this.zkConnect = zkConnect;
        }

        @Override
        public void start() {
            ZkClient client = new ZkClient(this.zkConnect, 10000, 10000, ZKStringSerializer$.MODULE$);
            try {
                AdminUtils.createTopic(client, this.topic, 1, 1, new Properties());
            } catch (TopicExistsException ignored) {
                // Topic already exists — creation is deliberately best-effort.
            } finally {
                // Fix: the original leaked the Zookeeper connection here.
                client.close();
            }
            this.running = true;
        }

        @Override
        public void stop() {
        }

        @Override
        public boolean isRunning() {
            return this.running;
        }

        @Override
        public int getPhase() {
            return Integer.MIN_VALUE;
        }

        @Override
        public boolean isAutoStartup() {
            return true;
        }

        @Override
        public void stop(Runnable callback) {
            callback.run();
        }
    }

    /** @return host:port of the Kafka broker, from configuration. */
    public String getBrokerAddress() {
        return kafkaProperties.getHost() + ":" + kafkaProperties.getPort();
    }

    /** @return host:port of Zookeeper, from configuration. */
    public String getZookeeperConnect() {
        return zookeeperProperties.getHost() + ":" + zookeeperProperties.getPort();
    }
}
| apache-2.0 |
agentmilindu/stratos | components/org.apache.stratos.autoscaler/src/main/java/org/apache/stratos/autoscaler/services/impl/AutoscalerServiceImpl.java | 53740 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.stratos.autoscaler.services.impl;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.stratos.autoscaler.algorithms.networkpartition.NetworkPartitionAlgorithmContext;
import org.apache.stratos.autoscaler.applications.ApplicationHolder;
import org.apache.stratos.autoscaler.applications.parser.ApplicationParser;
import org.apache.stratos.autoscaler.applications.parser.DefaultApplicationParser;
import org.apache.stratos.autoscaler.applications.pojo.*;
import org.apache.stratos.autoscaler.applications.topic.ApplicationBuilder;
import org.apache.stratos.autoscaler.context.AutoscalerContext;
import org.apache.stratos.autoscaler.context.InstanceContext;
import org.apache.stratos.autoscaler.context.cluster.ClusterInstanceContext;
import org.apache.stratos.autoscaler.context.partition.ClusterLevelPartitionContext;
import org.apache.stratos.autoscaler.context.partition.network.ClusterLevelNetworkPartitionContext;
import org.apache.stratos.autoscaler.exception.AutoScalerException;
import org.apache.stratos.autoscaler.exception.AutoScalingPolicyAlreadyExistException;
import org.apache.stratos.autoscaler.exception.CloudControllerConnectionException;
import org.apache.stratos.autoscaler.exception.InvalidArgumentException;
import org.apache.stratos.autoscaler.exception.application.ApplicationDefinitionException;
import org.apache.stratos.autoscaler.exception.application.InvalidApplicationPolicyException;
import org.apache.stratos.autoscaler.exception.application.InvalidServiceGroupException;
import org.apache.stratos.autoscaler.exception.policy.*;
import org.apache.stratos.autoscaler.monitor.cluster.ClusterMonitor;
import org.apache.stratos.autoscaler.monitor.component.ApplicationMonitor;
import org.apache.stratos.autoscaler.pojo.Dependencies;
import org.apache.stratos.autoscaler.pojo.ServiceGroup;
import org.apache.stratos.autoscaler.pojo.policy.PolicyManager;
import org.apache.stratos.autoscaler.pojo.policy.autoscale.AutoscalePolicy;
import org.apache.stratos.autoscaler.pojo.policy.deployment.ApplicationPolicy;
import org.apache.stratos.autoscaler.pojo.policy.deployment.DeploymentPolicy;
import org.apache.stratos.autoscaler.registry.RegistryManager;
import org.apache.stratos.autoscaler.services.AutoscalerService;
import org.apache.stratos.autoscaler.util.AutoscalerUtil;
import org.apache.stratos.cloud.controller.stub.CloudControllerServiceInvalidCartridgeTypeExceptionException;
import org.apache.stratos.cloud.controller.stub.CloudControllerServiceInvalidPartitionExceptionException;
import org.apache.stratos.cloud.controller.stub.domain.MemberContext;
import org.apache.stratos.common.Properties;
import org.apache.stratos.common.client.CloudControllerServiceClient;
import org.apache.stratos.common.client.StratosManagerServiceClient;
import org.apache.stratos.common.constants.StratosConstants;
import org.apache.stratos.common.partition.NetworkPartition;
import org.apache.stratos.common.partition.Partition;
import org.apache.stratos.common.util.CommonUtil;
import org.apache.stratos.manager.service.stub.domain.application.signup.ApplicationSignUp;
import org.apache.stratos.manager.service.stub.domain.application.signup.ArtifactRepository;
import org.apache.stratos.messaging.domain.application.Application;
import org.apache.stratos.messaging.domain.application.ClusterDataHolder;
import org.apache.stratos.messaging.domain.topology.Cluster;
import org.apache.stratos.messaging.domain.topology.Member;
import org.apache.stratos.messaging.message.receiver.application.ApplicationManager;
import org.apache.stratos.messaging.message.receiver.topology.TopologyManager;
import org.wso2.carbon.registry.api.RegistryException;
import java.rmi.RemoteException;
import java.text.MessageFormat;
import java.util.*;
/**
* Auto Scaler Service API is responsible getting Partitions and Policies.
*/
public class AutoscalerServiceImpl implements AutoscalerService {
private static final Log log = LogFactory.getLog(AutoscalerServiceImpl.class);
public AutoscalePolicy[] getAutoScalingPolicies() {
return PolicyManager.getInstance().getAutoscalePolicyList();
}
@Override
public boolean addAutoScalingPolicy(AutoscalePolicy autoscalePolicy) throws AutoScalingPolicyAlreadyExistException {
return PolicyManager.getInstance().addAutoscalePolicy(autoscalePolicy);
}
@Override
public boolean updateAutoScalingPolicy(AutoscalePolicy autoscalePolicy) throws InvalidPolicyException {
return PolicyManager.getInstance().updateAutoscalePolicy(autoscalePolicy);
}
@Override
public boolean removeAutoScalingPolicy(String autoscalePolicyId) throws UnremovablePolicyException,
PolicyDoesNotExistException {
if (removableAutoScalerPolicy(autoscalePolicyId)) {
return PolicyManager.getInstance().removeAutoscalePolicy(autoscalePolicyId);
} else {
throw new UnremovablePolicyException("This autoscaler policy cannot be removed, since it is used in " +
"applications.");
}
}
/**
* Validate the Auto Scalar policy removal
*
* @param autoscalePolicyId Auto Scalar policy id boolean
* @return
*/
private boolean removableAutoScalerPolicy(String autoscalePolicyId) {
boolean canRemove = true;
Collection<ApplicationContext> appContexts = AutoscalerContext.getInstance().getApplicationContexts();
for (ApplicationContext app : appContexts) {
CartridgeContext[] cartrideContexts = app.getComponents().getCartridgeContexts();
for (CartridgeContext cartridgeContext : cartrideContexts) {
SubscribableInfoContext subscribableInfoContexts = cartridgeContext.getSubscribableInfoContext();
if (subscribableInfoContexts.getAutoscalingPolicy().equals(autoscalePolicyId)) {
canRemove = false;
}
}
}
return canRemove;
}
@Override
public AutoscalePolicy getAutoscalingPolicy(String autoscalingPolicyId) {
return PolicyManager.getInstance().getAutoscalePolicy(autoscalingPolicyId);
}
    /**
     * Adds a new application definition.
     * <p>
     * Parses the context into an {@link Application} (so invalid definitions
     * fail before anything is persisted), persists the application, records the
     * parser's cluster contexts on the context object for later deployment, and
     * registers the context in CREATED state.
     *
     * @param applicationContext application definition to add
     * @throws ApplicationDefinitionException if the definition fails to parse
     */
    @Override
    public void addApplication(ApplicationContext applicationContext)
            throws ApplicationDefinitionException {
        if (log.isInfoEnabled()) {
            log.info(String.format("Adding application: [application-id] %s",
                    applicationContext.getApplicationId()));
        }
        ApplicationParser applicationParser = new DefaultApplicationParser();
        Application application = applicationParser.parse(applicationContext);
        ApplicationHolder.persistApplication(application);
        // Keep the cluster contexts produced by the parser; deployApplication
        // reads them later from the stored ApplicationContext.
        List<ApplicationClusterContext> applicationClusterContexts = applicationParser.getApplicationClusterContexts();
        ApplicationClusterContext[] applicationClusterContextsArray = applicationClusterContexts.toArray(
                new ApplicationClusterContext[applicationClusterContexts.size()]);
        applicationContext.getComponents().setApplicationClusterContexts(applicationClusterContextsArray);
        applicationContext.setStatus(ApplicationContext.STATUS_CREATED);
        AutoscalerContext.getInstance().addApplicationContext(applicationContext);
        if (log.isInfoEnabled()) {
            log.info(String.format("Application added successfully: [application-id] %s",
                    applicationContext.getApplicationId()));
        }
    }
@Override
public void updateApplication(ApplicationContext applicationContext)
throws ApplicationDefinitionException {
String applicationId = applicationContext.getApplicationId();
if (log.isInfoEnabled()) {
log.info(String.format("Updating application: [application-id] %s",
applicationContext.getApplicationId()));
}
if (AutoscalerContext.getInstance().getApplicationContext(applicationId) == null) {
String msg = "Application is not found as ApplicationContext. Please add application before updating it";
log.error(msg);
throw new ApplicationDefinitionException(msg);
}
if (ApplicationHolder.getApplications().getApplication(applicationId) == null) {
String msg = "Application is not found as Application. Please add application before updating it";
log.error(msg);
throw new ApplicationDefinitionException(msg);
}
ApplicationParser applicationParser = new DefaultApplicationParser();
Application application = applicationParser.parse(applicationContext);
//Need to update the application
AutoscalerUtil.getInstance().updateApplicationsTopology(application);
//Update the clusterMonitors
AutoscalerUtil.getInstance().updateClusterMonitor(application);
List<ApplicationClusterContext> applicationClusterContexts = applicationParser.getApplicationClusterContexts();
ApplicationClusterContext[] applicationClusterContextsArray = applicationClusterContexts.toArray(
new ApplicationClusterContext[applicationClusterContexts.size()]);
applicationContext.getComponents().setApplicationClusterContexts(applicationClusterContextsArray);
ApplicationContext existingApplicationContext = AutoscalerContext.getInstance().
getApplicationContext(applicationId);
applicationContext.setStatus(existingApplicationContext.getStatus());
//updating the applicationContext
AutoscalerContext.getInstance().updateApplicationContext(applicationContext);
if (log.isInfoEnabled()) {
log.info(String.format("Application added successfully: [application-id] %s",
applicationId));
}
}
@Override
public ApplicationContext getApplication(String applicationId) {
return AutoscalerContext.getInstance().getApplicationContext(applicationId);
}
@Override
public boolean existApplication(String applicationId) {
return AutoscalerContext.getInstance().getApplicationContext(applicationId) != null;
}
@Override
public ApplicationContext[] getApplications() {
return AutoscalerContext.getInstance().getApplicationContexts().
toArray(new ApplicationContext[AutoscalerContext.getInstance().getApplicationContexts().size()]);
}
    /**
     * Deploys a previously-added application with the given application policy.
     * <p>
     * Order matters here: clusters are created in the cloud controller first,
     * then the policy is validated and attached under the applications write
     * lock, the network-partition algorithm context is registered, single-tenant
     * applications get an automatic signup, and only then is the context marked
     * DEPLOYED. On any failure the status is rolled back to CREATED so the
     * deployment can be retried.
     *
     * @param applicationId       application to deploy
     * @param applicationPolicyId policy governing the deployment
     * @return true when the deployment process was started
     * @throws ApplicationDefinitionException declared; failures surface as RuntimeException
     */
    @Override
    public boolean deployApplication(String applicationId, String applicationPolicyId) throws ApplicationDefinitionException {
        try {
            Application application = ApplicationHolder.getApplications().getApplication(applicationId);
            if (application == null) {
                throw new RuntimeException("Application not found: " + applicationId);
            }
            ApplicationContext applicationContext = RegistryManager.getInstance().getApplicationContext(applicationId);
            if (applicationContext == null) {
                throw new RuntimeException("Application context not found: " + applicationId);
            }
            // Create application clusters in cloud controller and send application created event
            ApplicationBuilder.handleApplicationCreatedEvent(application, applicationContext.getComponents().getApplicationClusterContexts());
            // validating application policy against the application
            AutoscalerUtil.validateApplicationPolicyAgainstApplication(applicationId, applicationPolicyId);
            // Setting application policy id in application object
            try {
                ApplicationHolder.acquireWriteLock();
                // Re-read under the write lock so the policy id lands on the latest copy.
                application = ApplicationHolder.getApplications().getApplication(applicationId);
                application.setApplicationPolicyId(applicationPolicyId);
                ApplicationHolder.persistApplication(application);
            } finally {
                ApplicationHolder.releaseWriteLock();
            }
            // adding network partition algorithm context to registry
            ApplicationPolicy applicationPolicy = PolicyManager.getInstance().getApplicationPolicy(applicationPolicyId);
            NetworkPartitionAlgorithmContext algorithmContext = new NetworkPartitionAlgorithmContext(applicationId, applicationPolicy, 0);
            AutoscalerContext.getInstance().addNetworkPartitionAlgorithmContext(algorithmContext);
            if (!applicationContext.isMultiTenant()) {
                // Add application signup for single tenant applications
                addApplicationSignUp(applicationContext, application.getKey(), findApplicationClusterIds(application));
            }
            applicationContext.setStatus(ApplicationContext.STATUS_DEPLOYED);
            AutoscalerContext.getInstance().updateApplicationContext(applicationContext);
            log.info("Waiting for application clusters to be created: [application] " + applicationId);
            return true;
        } catch (Exception e) {
            // Roll the status back to CREATED so a retry is possible.
            ApplicationContext applicationContext = RegistryManager.getInstance().getApplicationContext(applicationId);
            if (applicationContext != null) {
                // Revert application status
                applicationContext.setStatus(ApplicationContext.STATUS_CREATED);
                AutoscalerContext.getInstance().updateApplicationContext(applicationContext);
            }
            String message = "Application deployment failed";
            log.error(message, e);
            throw new RuntimeException(message, e);
        }
    }
/**
* Find application cluster ids.
*
* @param application
* @return
*/
private List<String> findApplicationClusterIds(Application application) {
List<String> clusterIds = new ArrayList<String>();
for (ClusterDataHolder clusterDataHolder : application.getClusterDataRecursively()) {
clusterIds.add(clusterDataHolder.getClusterId());
}
return clusterIds;
}
    /**
     * Add application signup.
     * <p>
     * Builds an {@link ApplicationSignUp} from the application context —
     * tenant id, cluster ids, and the artifact (git) repositories of all
     * cartridges including those nested in groups — encrypts the repository
     * passwords with the application key, and registers the signup with the
     * stratos manager service.
     *
     * @param applicationContext application being deployed
     * @param applicationKey     key used to encrypt repository passwords
     * @param clusterIds         ids of all clusters of the application
     */
    private void addApplicationSignUp(ApplicationContext applicationContext, String applicationKey,
                                      List<String> clusterIds) {
        try {
            if (log.isInfoEnabled()) {
                log.info(String.format("Adding application signup: [application-id] %s",
                        applicationContext.getApplicationId()));
            }
            ComponentContext components = applicationContext.getComponents();
            if (components != null) {
                ApplicationSignUp applicationSignUp = new ApplicationSignUp();
                applicationSignUp.setApplicationId(applicationContext.getApplicationId());
                applicationSignUp.setTenantId(applicationContext.getTenantId());
                String[] clusterIdsArray = clusterIds.toArray(new String[clusterIds.size()]);
                applicationSignUp.setClusterIds(clusterIdsArray);
                // Gather artifact repositories from top-level cartridges ...
                List<ArtifactRepository> artifactRepositoryList = new ArrayList<ArtifactRepository>();
                CartridgeContext[] cartridgeContexts = components.getCartridgeContexts();
                if (cartridgeContexts != null) {
                    updateArtifactRepositoryList(artifactRepositoryList, cartridgeContexts);
                }
                // ... and from cartridges nested inside (possibly nested) groups.
                if (components.getGroupContexts() != null) {
                    CartridgeContext[] cartridgeContextsOfGroups = getCartridgeContextsOfGroupsRecursively(
                            components.getGroupContexts());
                    if (cartridgeContextsOfGroups != null) {
                        updateArtifactRepositoryList(artifactRepositoryList, cartridgeContextsOfGroups);
                    }
                }
                ArtifactRepository[] artifactRepositoryArray = artifactRepositoryList.toArray(
                        new ArtifactRepository[artifactRepositoryList.size()]);
                applicationSignUp.setArtifactRepositories(artifactRepositoryArray);
                // Encrypt artifact repository passwords
                encryptRepositoryPasswords(applicationSignUp, applicationKey);
                StratosManagerServiceClient serviceClient = StratosManagerServiceClient.getInstance();
                serviceClient.addApplicationSignUp(applicationSignUp);
                if (log.isInfoEnabled()) {
                    log.info(String.format("Application signup added successfully: [application-id] %s",
                            applicationContext.getApplicationId()));
                }
            }
        } catch (Exception e) {
            String message = "Could not add application signup";
            log.error(message, e);
            throw new RuntimeException(message, e);
        }
    }
private CartridgeContext[] getCartridgeContextsOfGroupsRecursively(GroupContext[] passedGroupContexts) {
List<CartridgeContext> cartridgeContextsList = new ArrayList<CartridgeContext>();
for (GroupContext groupContext : passedGroupContexts) {
if (groupContext.getCartridgeContexts() != null) {
for (CartridgeContext cartridgeContext : groupContext.getCartridgeContexts()) {
cartridgeContextsList.add(cartridgeContext);
}
}
if (groupContext.getGroupContexts() != null) {
for (CartridgeContext cartridgeContext : getCartridgeContextsOfGroupsRecursively(groupContext.getGroupContexts())) {
cartridgeContextsList.add(cartridgeContext);
}
}
}
return cartridgeContextsList.toArray(new CartridgeContext[0]);
}
private void removeApplicationSignUp(ApplicationContext applicationContext) {
try {
if (log.isInfoEnabled()) {
log.info(String.format("Removing application signup: [application-id] %s",
applicationContext.getApplicationId()));
}
StratosManagerServiceClient serviceClient = StratosManagerServiceClient.getInstance();
ApplicationSignUp applicationSignUp[] = serviceClient.getApplicationSignUps(applicationContext.getApplicationId());
if (applicationSignUp != null) {
for (ApplicationSignUp appSignUp : applicationSignUp) {
if (appSignUp != null) {
serviceClient.removeApplicationSignUp(appSignUp.getApplicationId(), appSignUp.getTenantId());
}
}
}
} catch (Exception e) {
String message = "Could not remove application signup(s)";
log.error(message, e);
throw new RuntimeException(message, e);
}
}
/**
* Encrypt artifact repository passwords.
*
* @param applicationSignUp
* @param applicationKey
*/
private void encryptRepositoryPasswords(ApplicationSignUp applicationSignUp, String applicationKey) {
if (applicationSignUp.getArtifactRepositories() != null) {
for (ArtifactRepository artifactRepository : applicationSignUp.getArtifactRepositories()) {
String repoPassword = artifactRepository.getRepoPassword();
if ((artifactRepository != null) && (StringUtils.isNotBlank(repoPassword))) {
String encryptedRepoPassword = CommonUtil.encryptPassword(repoPassword,
applicationKey);
artifactRepository.setRepoPassword(encryptedRepoPassword);
if (log.isDebugEnabled()) {
log.debug(String.format("Artifact repository password encrypted: [application-id] %s " +
"[tenant-id] %d [repo-url] %s", applicationSignUp.getApplicationId(),
applicationSignUp.getTenantId(), artifactRepository.getRepoUrl()));
}
}
}
}
}
private void updateArtifactRepositoryList(List<ArtifactRepository> artifactRepositoryList, CartridgeContext[] cartridgeContexts) {
if (cartridgeContexts == null) {
return;
}
for (CartridgeContext cartridgeContext : cartridgeContexts) {
SubscribableInfoContext subscribableInfoContext = cartridgeContext.getSubscribableInfoContext();
ArtifactRepositoryContext artifactRepositoryContext = subscribableInfoContext.getArtifactRepositoryContext();
if (artifactRepositoryContext != null) {
ArtifactRepository artifactRepository = new ArtifactRepository();
artifactRepository.setCartridgeType(cartridgeContext.getType());
artifactRepository.setAlias(subscribableInfoContext.getAlias());
artifactRepository.setRepoUrl(artifactRepositoryContext.getRepoUrl());
artifactRepository.setPrivateRepo(artifactRepositoryContext.isPrivateRepo());
artifactRepository.setRepoUsername(artifactRepositoryContext.getRepoUsername());
artifactRepository.setRepoPassword(artifactRepositoryContext.getRepoPassword());
artifactRepositoryList.add(artifactRepository);
}
}
}
public void undeployApplication(String applicationId, boolean force) {
AutoscalerContext asCtx = AutoscalerContext.getInstance();
ApplicationMonitor appMonitor = asCtx.getAppMonitor(applicationId);
if (appMonitor == null) {
log.info(String.format("Could not find application monitor for the application %s, hence returning", applicationId));
return;
}
if (!force) {
// Gracefull undeployment flow
if (appMonitor.isTerminating()) {
log.info("Application monitor is already in terminating, graceful undeployment is has already been attempted thus not invoking again");
return;
} else {
log.info(String.format("Gracefully undeploying the application " + applicationId));
undeployApplicationGracefully(applicationId);
}
} else {
// force undeployment flow
if (appMonitor.isTerminating()) {
if (appMonitor.isForce()) {
log.warn("Force undeployment is already in progress, hence not invoking again");
return;
} else {
log.info(String.format("Previous gracefull undeployment is in progress for [application-id] %s , thus terminating instances directly", applicationId));
appMonitor.setForce(true);
terminateAllApplicationMembersForcefully(applicationId);
}
} else {
log.info(String.format("Forcefully undeploying the application " + applicationId));
appMonitor.setForce(true);
undeployApplicationGracefully(applicationId);
}
}
}
    /**
     * Kicks off the graceful undeployment process for a DEPLOYED application:
     * removes its signups in the stratos manager, drops the network-partition
     * algorithm context, and fires the application-undeployed event. Fails if
     * the application is unknown or not currently deployed.
     *
     * @param applicationId application to undeploy
     */
    private void undeployApplicationGracefully(String applicationId) {
        try {
            if (log.isInfoEnabled()) {
                log.info("Starting to undeploy application: [application-id] " + applicationId);
            }
            ApplicationContext applicationContext = AutoscalerContext.getInstance().getApplicationContext(applicationId);
            Application application = ApplicationHolder.getApplications().getApplication(applicationId);
            if ((applicationContext == null) || (application == null)) {
                String msg = String.format("Application not found: [application-id] %s", applicationId);
                throw new RuntimeException(msg);
            }
            // Only DEPLOYED applications can be undeployed.
            if (!applicationContext.getStatus().equals(ApplicationContext.STATUS_DEPLOYED)) {
                String message = String.format("Application is not deployed: [application-id] %s", applicationId);
                log.error(message);
                throw new RuntimeException(message);
            }
            // Remove application signup(s) in stratos manager
            removeApplicationSignUp(applicationContext);
            // Remove network partition algorithm context
            AutoscalerContext.getInstance().removeNetworkPartitionAlgorithmContext(applicationId);
            ApplicationBuilder.handleApplicationUnDeployedEvent(applicationId);
            if (log.isInfoEnabled()) {
                log.info("Application undeployment process started: [application-id] " + applicationId);
            }
        } catch (Exception e) {
            String message = "Could not start application undeployment process: [application-id] " + applicationId;
            log.error(message, e);
            throw new RuntimeException(message, e);
        }
    }
    /**
     * Deletes an application definition. Refuses when the application is still
     * DEPLOYED (must be undeployed first) or while undeployment is in progress
     * (instance contexts remain). Failures are rethrown as RuntimeException.
     *
     * @param applicationId application to delete
     */
    @Override
    public void deleteApplication(String applicationId) {
        try {
            ApplicationContext applicationContext = AutoscalerContext.getInstance().getApplicationContext(applicationId);
            Application application = ApplicationHolder.getApplications().getApplication(applicationId);
            if ((applicationContext == null) || (application == null)) {
                String msg = String.format("Application cannot be deleted, application not found: [application-id] %s",
                        applicationId);
                throw new RuntimeException(msg);
            }
            if (ApplicationContext.STATUS_DEPLOYED.equals(applicationContext.getStatus())) {
                String msg = String.format("Application is in deployed state, please undeploy it before deleting: " +
                        "[application-id] %s", applicationId);
                throw new AutoScalerException(msg);
            }
            // Instance contexts still present means undeployment hasn't finished.
            if (application.getInstanceContextCount() > 0) {
                String message = String.format("Application cannot be deleted, undeployment process is still in " +
                        "progress: [application-id] %s", applicationId);
                log.error(message);
                throw new RuntimeException(message);
            }
            ApplicationBuilder.handleApplicationRemoval(applicationId);
            log.info(String.format("Application deleted successfully: [application-id] %s", applicationId));
        } catch (Exception e) {
            String message = String.format("Could not delete application: [application-id] %s", applicationId);
            log.error(message, e);
            throw new RuntimeException(message, e);
        }
    }
public void updateClusterMonitor(String clusterId, Properties properties) throws InvalidArgumentException {
    // Push updated properties to the monitor of the given cluster, if one exists.
    if (log.isDebugEnabled()) {
        log.debug(String.format("Updating Cluster monitor [Cluster id] %s ", clusterId));
    }
    ClusterMonitor monitor = AutoscalerContext.getInstance().getClusterMonitor(clusterId);
    if (monitor == null) {
        // Nothing to update: this cluster has no active monitor.
        log.debug(String.format("Updating Cluster monitor failed: Cluster monitor [Cluster id] %s not found.",
                clusterId));
        return;
    }
    monitor.handleDynamicUpdates(properties);
}
/**
 * Validates and persists a new cartridge (service) group definition.
 *
 * @param servicegroup group definition to add; must be non-null with a non-empty name
 * @throws IllegalArgumentException  if the group is null or its name is empty
 * @throws InvalidServiceGroupException if a group with the same name already exists
 */
public void addServiceGroup(ServiceGroup servicegroup) throws InvalidServiceGroupException {
    if (servicegroup == null || StringUtils.isEmpty(servicegroup.getName())) {
        String msg = "Cartridge group can not be null service name can not be empty.";
        log.error(msg);
        throw new IllegalArgumentException(msg);
    }
    if (log.isInfoEnabled()) {
        log.info(String.format("Adding cartridge group: [group-name] %s", servicegroup.getName()));
    }
    // Reject duplicates before doing any further work.
    String groupName = servicegroup.getName();
    if (RegistryManager.getInstance().serviceGroupExist(groupName)) {
        throw new InvalidServiceGroupException("Cartridge group with the name " + groupName + " already exists.");
    }
    if (log.isDebugEnabled()) {
        log.debug(MessageFormat.format("Adding cartridge group {0}", servicegroup.getName()));
    }
    // The remaining debug logging only inspects the definition; it does not alter it.
    String[] subGroups = servicegroup.getCartridges();
    if (log.isDebugEnabled()) {
        log.debug("SubGroups" + Arrays.toString(subGroups));
        if (subGroups != null) {
            log.debug("subGroups:size" + subGroups.length);
        } else {
            log.debug("subGroups: are null");
        }
    }
    Dependencies dependencies = servicegroup.getDependencies();
    if (log.isDebugEnabled()) {
        log.debug("Dependencies" + dependencies);
    }
    if (dependencies != null) {
        String[] startupOrders = dependencies.getStartupOrders();
        if (log.isDebugEnabled()) {
            log.debug("StartupOrders " + Arrays.toString(startupOrders));
            if (startupOrders != null) {
                log.debug("StartupOrder:size " + startupOrders.length);
            } else {
                log.debug("StartupOrder: is null");
            }
        }
        String[] scalingDependents = dependencies.getScalingDependants();
        if (log.isDebugEnabled()) {
            log.debug("ScalingDependent " + Arrays.toString(scalingDependents));
            if (scalingDependents != null) {
                log.debug("ScalingDependents:size " + scalingDependents.length);
            } else {
                log.debug("ScalingDependent: is null");
            }
        }
    }
    // Persist the (validated) definition.
    RegistryManager.getInstance().persistServiceGroup(servicegroup);
    if (log.isInfoEnabled()) {
        log.info(String.format("Cartridge group successfully added: [group-name] %s", servicegroup.getName()));
    }
}
@Override
public void removeServiceGroup(String groupName) {
    // Delete the cartridge group from the registry; a missing group is only
    // logged as a warning, while registry failures become runtime exceptions.
    try {
        if (log.isInfoEnabled()) {
            log.info(String.format("Starting to remove cartridge group: [group-name] %s", groupName));
        }
        boolean groupExists = RegistryManager.getInstance().serviceGroupExist(groupName);
        if (!groupExists) {
            if (log.isWarnEnabled()) {
                log.warn(String.format("Cartridge group not found: [group-name] %s", groupName));
            }
            return;
        }
        RegistryManager.getInstance().removeServiceGroup(groupName);
        if (log.isInfoEnabled()) {
            log.info(String.format("Cartridge group removed: [group-name] %s", groupName));
        }
    } catch (org.wso2.carbon.registry.core.exceptions.RegistryException e) {
        String message = "Could not remove cartridge group: " + groupName;
        log.error(message, e);
        throw new RuntimeException(message, e);
    }
}
public ServiceGroup getServiceGroup(String name) {
    // A blank name can never match a stored group.
    if (StringUtils.isEmpty(name)) {
        return null;
    }
    try {
        ServiceGroup group = RegistryManager.getInstance().getServiceGroup(name);
        return group;
    } catch (Exception e) {
        throw new AutoScalerException("Error occurred while retrieving cartridge group", e);
    }
}
@Override
public String findClusterId(String applicationId, String alias) {
    // Resolve the cluster id backing the given alias within an application.
    // Returns null when either the application or the alias is unknown.
    try {
        Application application = ApplicationManager.getApplications().getApplication(applicationId);
        if (application == null) {
            return null;
        }
        ClusterDataHolder clusterData = application.getClusterDataHolderRecursivelyByAlias(alias);
        return (clusterData != null) ? clusterData.getClusterId() : null;
    } catch (Exception e) {
        String message = String.format("Could not find cluster id: [application-id] %s [alias] %s",
                applicationId, alias);
        throw new AutoScalerException(message, e);
    }
}
public ServiceGroup[] getServiceGroups() throws AutoScalerException {
    // All persisted cartridge groups, straight from the registry.
    RegistryManager registryManager = RegistryManager.getInstance();
    return registryManager.getServiceGroups();
}
/**
 * Checks whether a cartridge group with the given name has been persisted.
 *
 * @param serviceName name of the cartridge group to look up
 * @return {@code true} if the group exists in the registry
 */
public boolean serviceGroupExist(String serviceName) {
    // Bug fix: this method previously ignored its argument and always
    // returned false. Delegate to the registry, mirroring the existence
    // check already used in addServiceGroup().
    return RegistryManager.getInstance().serviceGroupExist(serviceName);
}
public void undeployServiceGroup(String name) throws AutoScalerException {
    // Remove the group from the registry, translating registry failures
    // into the autoscaler's own exception type.
    RegistryManager registryManager = RegistryManager.getInstance();
    try {
        registryManager.removeServiceGroup(name);
    } catch (RegistryException e) {
        throw new AutoScalerException("Error occurred while removing the cartridge groups", e);
    }
}
@Override
public String[] getApplicationNetworkPartitions(String applicationId)
        throws AutoScalerException {
    // Network partition ids referenced by the application, as an array
    // (null is propagated when the lookup itself yields null).
    List<String> ids = AutoscalerUtil.getNetworkPartitionIdsReferedInApplication(applicationId);
    return (ids == null) ? null : ids.toArray(new String[ids.size()]);
}
/**
 * Validates and persists a new application policy.
 *
 * @param applicationPolicy policy definition to add
 * @throws RemoteException                   on communication failure during validation
 * @throws InvalidApplicationPolicyException if the policy definition is invalid
 */
@Override
public void addApplicationPolicy(ApplicationPolicy applicationPolicy)
        throws RemoteException, InvalidApplicationPolicyException {
    // validating application policy
    AutoscalerUtil.validateApplicationPolicy(applicationPolicy);
    // Add application policy to the registry
    PolicyManager.getInstance().addApplicationPolicy(applicationPolicy);
}
@Override
public ApplicationPolicy getApplicationPolicy(String applicationPolicyId) {
    // Lookup is delegated entirely to the policy manager singleton.
    PolicyManager policyManager = PolicyManager.getInstance();
    return policyManager.getApplicationPolicy(applicationPolicyId);
}
@Override
public void removeApplicationPolicy(String applicationPolicyId) throws InvalidPolicyException {
    // Deletion is delegated entirely to the policy manager singleton.
    PolicyManager policyManager = PolicyManager.getInstance();
    policyManager.removeApplicationPolicy(applicationPolicyId);
}
/**
 * Validates and updates an existing application policy.
 *
 * @param applicationPolicy updated policy definition; must be non-null
 * @throws InvalidApplicationPolicyException    if the policy is null or fails validation
 * @throws RemoteException                      on communication failure during validation
 * @throws ApplicatioinPolicyNotExistsException if no policy with the given id exists
 */
@Override
public void updateApplicationPolicy(ApplicationPolicy applicationPolicy)
        throws InvalidApplicationPolicyException, RemoteException, ApplicatioinPolicyNotExistsException {
    if (applicationPolicy == null) {
        String msg = "Application policy is null";
        log.error(msg);
        throw new InvalidApplicationPolicyException(msg);
    }
    String applicationPolicyId = applicationPolicy.getId();
    ApplicationPolicy existingApplicationPolicy = PolicyManager.getInstance().getApplicationPolicy(applicationPolicyId);
    if (existingApplicationPolicy == null) {
        // Bug fix: corrected typo in the user-facing message ("poliicy" -> "policy").
        String msg = String.format("No such application policy found [application-policy-id] %s", applicationPolicyId);
        log.error(msg);
        throw new ApplicatioinPolicyNotExistsException(msg);
    }
    // validating application policy
    AutoscalerUtil.validateApplicationPolicy(applicationPolicy);
    // updating application policy
    PolicyManager.getInstance().updateApplicationPolicy(applicationPolicy);
}
@Override
public ApplicationPolicy[] getApplicationPolicies() {
    // All persisted application policies, straight from the policy manager.
    PolicyManager policyManager = PolicyManager.getInstance();
    return policyManager.getApplicationPolicies();
}
/**
 * Forcefully terminates every member of every cluster belonging to the given
 * application. A termination failure for one member is logged and skipped so
 * that the remaining members are still attempted.
 *
 * @param applicationId id of the application whose members are terminated
 * @throws IllegalArgumentException if {@code applicationId} is empty
 */
private void terminateAllApplicationMembersForcefully(String applicationId) {
    if (StringUtils.isEmpty(applicationId)) {
        throw new IllegalArgumentException("Application Id can not be empty");
    }
    Application application;
    try {
        ApplicationManager.acquireReadLockForApplication(applicationId);
        application = ApplicationManager.getApplications().getApplication(applicationId);
        if (application == null) {
            log.warn(String.format("Could not find application, thus no members to be terminated [application-id] %s", applicationId));
            return;
        }
    } finally {
        ApplicationManager.releaseReadLockForApplication(applicationId);
    }
    Set<ClusterDataHolder> allClusters = application.getClusterDataRecursively();
    for (ClusterDataHolder clusterDataHolder : allClusters) {
        String serviceType = clusterDataHolder.getServiceType();
        String clusterId = clusterDataHolder.getClusterId();
        Cluster cluster;
        try {
            TopologyManager.acquireReadLockForCluster(serviceType, clusterId);
            cluster = TopologyManager.getTopology().getService(serviceType).getCluster(clusterId);
        } finally {
            TopologyManager.releaseReadLockForCluster(serviceType, clusterId);
        }
        // Snapshot member ids first so termination happens outside any iteration
        // over the live member collection.
        List<String> memberListToTerminate = new LinkedList<String>();
        for (Member member : cluster.getMembers()) {
            memberListToTerminate.add(member.getMemberId());
        }
        for (String memberIdToTerminate : memberListToTerminate) {
            try {
                // Bug fix: log the application id, not the Application object
                // (the original interpolated 'application' into [application-id]).
                log.info(String.format("Terminating member forcefully [member-id] %s of the cluster [cluster-id] %s [application-id] %s", memberIdToTerminate, clusterId, applicationId));
                CloudControllerServiceClient.getInstance().terminateInstanceForcefully(memberIdToTerminate);
            } catch (Exception e) {
                // Bug fix: include the exception so the failure cause is not lost.
                log.error(String.format("Forcefull termination of member %s is failed, but continuing forcefull deletion of other members", memberIdToTerminate), e);
            }
        }
    }
}
/**
 * Validates and persists a new deployment policy.
 *
 * <p>Note: the misspelled method name ("Deployement") is part of the public
 * service interface and must be kept for compatibility with existing callers.
 *
 * @param deploymentPolicy policy definition to add
 * @throws RemoteException                       on communication failure during validation
 * @throws InvalidDeploymentPolicyException      if the policy definition is invalid
 * @throws DeploymentPolicyAlreadyExistsException if a policy with the same id exists
 */
@Override
public void addDeployementPolicy(DeploymentPolicy deploymentPolicy) throws RemoteException,
        InvalidDeploymentPolicyException, DeploymentPolicyAlreadyExistsException {
    validateDeploymentPolicy(deploymentPolicy);
    if (log.isInfoEnabled()) {
        log.info("Adding deployment policy: [deployment-policy-id] " + deploymentPolicy.getDeploymentPolicyID());
    }
    if (log.isDebugEnabled()) {
        log.debug("Deployment policy definition: " + deploymentPolicy.toString());
    }
    // Duplicate ids are rejected before persisting.
    String deploymentPolicyID = deploymentPolicy.getDeploymentPolicyID();
    if (PolicyManager.getInstance().getDeploymentPolicy(deploymentPolicyID) != null) {
        String message = "Deployment policy already exists: [deployment-policy-id] " + deploymentPolicyID;
        log.error(message);
        throw new DeploymentPolicyAlreadyExistsException(message);
    }
    // Add cartridge to the cloud controller context and persist
    PolicyManager.getInstance().addDeploymentPolicy(deploymentPolicy);
    // PolicyManager.getInstance().persist();
    if (log.isInfoEnabled()) {
        log.info("Successfully added deployment policy: [deployment-policy-id] " + deploymentPolicyID);
    }
}
/**
 * Validates a deployment policy definition before it is added or updated.
 *
 * <p>Checks, in order: the policy is non-null, has a non-empty id, references
 * at least one network partition, and every referenced network partition has
 * a valid id, is already registered with the cloud controller, uses a known
 * partition algorithm, and contains at least one partition reference.
 *
 * @param deploymentPolicy policy to validate
 * @throws InvalidDeploymentPolicyException if any of the above checks fails
 * @throws RemoteException                  if the cloud controller cannot be reached
 */
private void validateDeploymentPolicy(DeploymentPolicy deploymentPolicy) throws
        InvalidDeploymentPolicyException, RemoteException {
    // deployment policy can't be null
    if (null == deploymentPolicy) {
        String msg = "Invalid deployment policy. Cause -> Deployment policy is null";
        log.error(msg);
        throw new InvalidDeploymentPolicyException(msg);
    }
    if (log.isInfoEnabled()) {
        log.info(String.format("Validating deployment policy %s", deploymentPolicy.toString()));
    }
    // deployment policy id can't be null or empty
    if (null == deploymentPolicy.getDeploymentPolicyID() || deploymentPolicy.getDeploymentPolicyID().isEmpty()) {
        String msg = String.format("Invalid deployment policy. Cause -> Invalid deployment policy id [deployment-policy-id] %s",
                deploymentPolicy.getDeploymentPolicyID());
        log.error(msg);
        throw new InvalidDeploymentPolicyException(msg);
    }
    // deployment policy should contain at least one network partition reference
    if (null == deploymentPolicy.getNetworkPartitions() || deploymentPolicy.getNetworkPartitions().length == 0) {
        String msg = String.format("Invalid deployment policy - [deployment-policy-id] %s. "
                + "Cause -> Deployment policy doesn't have at least one network partition reference",
                deploymentPolicy.getDeploymentPolicyID());
        log.error(msg);
        throw new InvalidDeploymentPolicyException(msg);
    }
    // validate each network partition references
    for (NetworkPartition networkPartition : deploymentPolicy.getNetworkPartitions()) {
        // network partition id can't be null or empty
        if (null == networkPartition.getId() || networkPartition.getId().isEmpty()) {
            String msg = String.format("Invalid deployment policy - [deployment-policy-id] %s. "
                    + "Cause -> Invalid network partition id in network partition references section",
                    deploymentPolicy.getDeploymentPolicyID());
            log.error(msg);
            throw new InvalidDeploymentPolicyException(msg);
        }
        // network partitions should be already added
        if (null == CloudControllerServiceClient.getInstance().getNetworkPartition(networkPartition.getId())) {
            String msg = String.format("Invalid deployment policy - [deployment-policy-id] %s. "
                    + "Cause -> Network partition is not added - [network-partition-id] %s",
                    deploymentPolicy.getDeploymentPolicyID(), networkPartition.getId());
            log.error(msg);
            throw new InvalidDeploymentPolicyException(msg);
        }
        // partition algorithm can't be null or empty
        if (null == networkPartition.getPartitionAlgo() || networkPartition.getPartitionAlgo().isEmpty()) {
            String msg = String.format("Invalid deployment policy - [deployment-policy-id] %s. "
                    + "Cause -> Invalid partition algorithm - [network-partition-id] %s [partition-algo] %s",
                    deploymentPolicy.getDeploymentPolicyID(), networkPartition.getId(), networkPartition.getPartitionAlgo());
            log.error(msg);
            throw new InvalidDeploymentPolicyException(msg);
        }
        // partition algorithm should be either one-after-another or round-robin
        if (!StratosConstants.PARTITION_ROUND_ROBIN_ALGORITHM_ID.equals(networkPartition.getPartitionAlgo())
                && !StratosConstants.PARTITION_ONE_AFTER_ANOTHER_ALGORITHM_ID.equals(networkPartition.getPartitionAlgo())) {
            String msg = String.format("Invalid deployment policy - [deployment-policy-id] %s. "
                    + "Cause -> Invalid partition algorithm - [network-partition-id] %s [partition-algo] %s",
                    deploymentPolicy.getDeploymentPolicyID(), networkPartition.getId(), networkPartition.getPartitionAlgo());
            log.error(msg);
            throw new InvalidDeploymentPolicyException(msg);
        }
        // a network partition reference should contain at least one partition reference
        if (null == networkPartition.getPartitions() || networkPartition.getPartitions().length == 0) {
            String msg = String.format("Invalid deployment policy - [deployment-policy-id] %s. "
                    + "Cause -> Network partition reference doesn't have at lease one partition reference - "
                    + "[network-partition-id] %s", deploymentPolicy.getDeploymentPolicyID(), networkPartition.getId());
            log.error(msg);
            throw new InvalidDeploymentPolicyException(msg);
        }
    }
}
@Override
public void updateDeploymentPolicy(DeploymentPolicy deploymentPolicy) throws RemoteException,
        InvalidDeploymentPolicyException, DeploymentPolicyNotExistsException, InvalidPolicyException, CloudControllerConnectionException {
    // Validate first so malformed policies are rejected before any lookup.
    validateDeploymentPolicy(deploymentPolicy);
    String deploymentPolicyID = deploymentPolicy.getDeploymentPolicyID();
    if (log.isInfoEnabled()) {
        log.info("Updating deployment policy: [deployment-policy-id] " + deploymentPolicyID);
    }
    if (log.isDebugEnabled()) {
        log.debug("Updating Deployment policy definition: " + deploymentPolicy.toString());
    }
    PolicyManager policyManager = PolicyManager.getInstance();
    if (policyManager.getDeploymentPolicy(deploymentPolicyID) == null) {
        String message = "Deployment policy not exists: [deployment-policy-id] " + deploymentPolicyID;
        log.error(message);
        throw new DeploymentPolicyNotExistsException(message);
    }
    // Persist the updated definition, then refresh the affected cluster monitors.
    policyManager.updateDeploymentPolicy(deploymentPolicy);
    updateClusterMonitors(deploymentPolicy);
    if (log.isInfoEnabled()) {
        log.info("Successfully updated deployment policy: [deployment-policy-id] " + deploymentPolicyID);
    }
}
/**
 * Propagates an updated deployment policy to every cluster monitor that uses
 * it: new partitions are added to the monitors (and validated with the cloud
 * controller), and partitions removed from the policy are retired.
 *
 * @param deploymentPolicy the freshly updated policy
 * @throws InvalidDeploymentPolicyException  if the cloud controller rejects a
 *                                           partition or cartridge type
 * @throws CloudControllerConnectionException if the cloud controller is unreachable
 */
private void updateClusterMonitors(DeploymentPolicy deploymentPolicy) throws InvalidDeploymentPolicyException,
        CloudControllerConnectionException {
    for (ClusterMonitor clusterMonitor : AutoscalerContext.getInstance().getClusterMonitors().values()) {
        //Following if statement checks the relevant clusters for the updated deployment policy
        if (deploymentPolicy.getDeploymentPolicyID().equals(clusterMonitor.getDeploymentPolicyId())) {
            for (NetworkPartition networkPartition : deploymentPolicy.getNetworkPartitions()) {
                ClusterLevelNetworkPartitionContext clusterLevelNetworkPartitionContext
                        = clusterMonitor.getClusterContext().getNetworkPartitionCtxt(networkPartition.getId());
                try {
                    // Each remote failure mode is translated to a distinct
                    // service-level exception with the policy id in the message.
                    addNewPartitionsToClusterMonitor(clusterLevelNetworkPartitionContext, networkPartition,
                            deploymentPolicy.getDeploymentPolicyID(), clusterMonitor.getClusterContext().getServiceId());
                } catch (RemoteException e) {
                    String message = "Connection to cloud controller failed, Cluster monitor update failed for" +
                            " [deployment-policy] " + deploymentPolicy.getDeploymentPolicyID();
                    log.error(message);
                    throw new CloudControllerConnectionException(message, e);
                } catch (CloudControllerServiceInvalidPartitionExceptionException e) {
                    String message = "Invalid partition, Cluster monitor update failed for [deployment-policy] "
                            + deploymentPolicy.getDeploymentPolicyID();
                    log.error(message);
                    throw new InvalidDeploymentPolicyException(message, e);
                } catch (CloudControllerServiceInvalidCartridgeTypeExceptionException e) {
                    String message = "Invalid cartridge type, Cluster monitor update failed for [deployment-policy] "
                            + deploymentPolicy.getDeploymentPolicyID() + " [cartridge] "
                            + clusterMonitor.getClusterContext().getServiceId();
                    log.error(message);
                    throw new InvalidDeploymentPolicyException(message, e);
                }
                // Additions succeeded; now retire partitions dropped by the update.
                removeOldPartitionsFromClusterMonitor(clusterLevelNetworkPartitionContext, networkPartition);
            }
        }
    }
}
/**
 * Marks partitions that no longer appear in the updated deployment policy as
 * obsolete in the cluster monitor, and drains their members: active members
 * move to termination-pending, pending members move to obsolete.
 *
 * @param clusterLevelNetworkPartitionContext monitor-side context to prune
 * @param networkPartition                    the updated policy's network partition
 */
private void removeOldPartitionsFromClusterMonitor(ClusterLevelNetworkPartitionContext clusterLevelNetworkPartitionContext,
                                                   NetworkPartition networkPartition) {
    for (InstanceContext instanceContext : clusterLevelNetworkPartitionContext.getInstanceIdToInstanceContextMap().values()) {
        ClusterInstanceContext clusterInstanceContext = (ClusterInstanceContext) instanceContext;
        for (ClusterLevelPartitionContext clusterLevelPartitionContext : clusterInstanceContext.getPartitionCtxts()) {
            if (null == networkPartition.getPartition(clusterLevelPartitionContext.getPartitionId())) {
                // This partition exists in the monitor but was removed from the
                // updated policy: retire it and drain its members.
                clusterLevelPartitionContext.setIsObsoletePartition(true);
                // Bug fix: snapshot the member ids before moving them. The
                // move* calls mutate the underlying member collections, and the
                // original code invoked them while iterating those collections
                // directly, risking a ConcurrentModificationException.
                List<String> activeMemberIds = new LinkedList<String>();
                for (MemberContext memberContext : clusterLevelPartitionContext.getActiveMembers()) {
                    activeMemberIds.add(memberContext.getMemberId());
                }
                for (String memberId : activeMemberIds) {
                    clusterLevelPartitionContext.moveActiveMemberToTerminationPendingMembers(memberId);
                }
                List<String> pendingMemberIds = new LinkedList<String>();
                for (MemberContext memberContext : clusterLevelPartitionContext.getPendingMembers()) {
                    pendingMemberIds.add(memberContext.getMemberId());
                }
                for (String memberId : pendingMemberIds) {
                    clusterLevelPartitionContext.movePendingMemberToObsoleteMembers(memberId);
                }
            }
        }
    }
}
private void addNewPartitionsToClusterMonitor(ClusterLevelNetworkPartitionContext clusterLevelNetworkPartitionContext,
                                              NetworkPartition networkPartition, String deploymentPolicyID,
                                              String cartridgeType) throws RemoteException,
        CloudControllerServiceInvalidPartitionExceptionException,
        CloudControllerServiceInvalidCartridgeTypeExceptionException {
    // Register any partition that appears in the updated policy but is not yet
    // known to the cluster instance contexts of this network partition.
    boolean newPartitionAdded = false;
    for (Partition partition : networkPartition.getPartitions()) {
        for (InstanceContext instanceContext : clusterLevelNetworkPartitionContext.getInstanceIdToInstanceContextMap().values()) {
            ClusterInstanceContext clusterInstanceContext = (ClusterInstanceContext) instanceContext;
            if (clusterInstanceContext.getPartitionCtxt(partition.getId()) != null) {
                continue;
            }
            // Unknown partition: create a context for it within this instance.
            clusterInstanceContext.addPartitionCtxt(new ClusterLevelPartitionContext(
                    partition, networkPartition.getId(), deploymentPolicyID));
            newPartitionAdded = true;
        }
    }
    if (newPartitionAdded) {
        // Validate with the cloud controller once, and only when the monitor
        // actually gained at least one new partition.
        CloudControllerServiceClient.getInstance().validateNetworkPartitionOfDeploymentPolicy(cartridgeType,
                clusterLevelNetworkPartitionContext.getId());
    }
}
@Override
public void removeDeployementPolicy(String deploymentPolicyID) throws DeploymentPolicyNotExistsException {
    // Note: the misspelled method name is part of the public interface.
    if (log.isInfoEnabled()) {
        log.info("Removing deployment policy: [deployment-policy_id] " + deploymentPolicyID);
    }
    PolicyManager policyManager = PolicyManager.getInstance();
    if (policyManager.getDeploymentPolicy(deploymentPolicyID) == null) {
        String message = "Deployment policy not exists: [deployment-policy-id] " + deploymentPolicyID;
        log.error(message);
        throw new DeploymentPolicyNotExistsException(message);
    }
    policyManager.removeDeploymentPolicy(deploymentPolicyID);
    if (log.isInfoEnabled()) {
        log.info("Successfully removed deployment policy: [deployment_policy_id] " + deploymentPolicyID);
    }
}
@Override
public DeploymentPolicy getDeploymentPolicy(String deploymentPolicyID) {
    if (log.isDebugEnabled()) {
        log.debug("Getting deployment policy: [deployment-policy_id] " + deploymentPolicyID);
    }
    // Lookup is delegated entirely to the policy manager singleton.
    PolicyManager policyManager = PolicyManager.getInstance();
    return policyManager.getDeploymentPolicy(deploymentPolicyID);
}
/**
 * Returns all persisted deployment policies.
 *
 * @return the deployment policies as an array (possibly empty)
 * @throws AutoScalerException if the policies cannot be read
 */
@Override
public DeploymentPolicy[] getDeploymentPolicies() {
    try {
        Collection<DeploymentPolicy> deploymentPolicies = PolicyManager.getInstance().getDeploymentPolicies();
        return deploymentPolicies.toArray(new DeploymentPolicy[deploymentPolicies.size()]);
    } catch (Exception e) {
        String message = "Could not get deployment policies";
        // Bug fix: include the exception in the log call so the stack trace
        // is not lost (the original logged only the message).
        log.error(message, e);
        throw new AutoScalerException(message, e);
    }
}
}
| apache-2.0 |
Communote/communote-server | communote/api/src/main/java/com/communote/server/model/client/ClientConstants.java | 856 | package com.communote.server.model.client;
/**
* This class holds constants of attributes and associations of the entity <code>Client</code>.
*
* @author Communote GmbH - <a href="http://www.communote.com/">http://www.communote.com/</a>
*/
public class ClientConstants {
    /** Fully-qualified class name of the {@code Client} entity. */
    public final static String CLASS_NAME = Client.class.getName();
    // Attribute (property) names of the Client entity, for use in queries
    // and property access instead of hard-coded strings.
    public final static String CLIENTID = "clientId";
    public final static String NAME = "name";
    public final static String CLIENTSTATUS = "clientStatus";
    public final static String CREATIONVERSION = "creationVersion";
    public final static String CREATIONTIME = "creationTime";
    public final static String CREATIONREVISION = "creationRevision";
    public final static String MOREDATAREQUIRED = "moreDataRequired";
    public final static String ID = "id";
} | apache-2.0 |
CORDEA/analysis_of_1000genomes-data | programs/machine_learning/homo_hetero.py | 960 | #!/bin/env python
# encoding:utf-8
#
#
#
__Author__ = "CORDEA"
__date__ = "2014-09-05"
def count_genotypes(lines):
    """Accumulate per-sample genotype category counts.

    ``lines`` are comma-separated rows. The first row is a header whose
    columns after the first are sample names; every later row carries one
    colon-separated 4-tuple of integer counts per sample (exact category
    meaning depends on the producer of proc_input.*.vcf -- TODO confirm).

    Returns a dict mapping each sample name to its four counts summed over
    all data rows.
    """
    totals = {}
    samples = []
    for row_index, line in enumerate(lines):
        fields = [field.rstrip("\r\n") for field in line.split(",")]
        if row_index == 0:
            samples = fields[1:]
            continue
        for sample, cell in zip(samples, fields[1:]):
            counts = [int(part) for part in cell.split(":")]
            if sample in totals:
                # Explicit membership test replaces the original bare
                # try/except, which silently reset a sample's running totals
                # on any error (e.g. a malformed short tuple).
                accumulated = totals[sample]
                accumulated[0] += counts[0]
                accumulated[1] += counts[1]
                accumulated[2] += counts[2]
                accumulated[3] += counts[3]
            else:
                totals[sample] = [counts[0], counts[1], counts[2], counts[3]]
    return totals


def main():
    """Print the per-chromosome, per-sample count totals for chrX, chr1..chr22."""
    chromosomes = ["chrX"] + ["chr%d" % i for i in range(1, 23)]
    for chromosome in chromosomes:
        # 'with' guarantees the handle is closed even if parsing fails.
        with open("proc/proc_input." + chromosome + ".vcf", "r") as infile:
            lines = infile.readlines()
        print(count_genotypes(lines))


if __name__ == "__main__":
    main()
| apache-2.0 |
jentfoo/aws-sdk-java | aws-java-sdk-kinesisvideo/src/main/java/com/amazonaws/services/kinesisvideo/model/ListTagsForStreamResult.java | 7169 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.kinesisvideo.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/kinesisvideo-2017-09-30/ListTagsForStream" target="_top">AWS API
* Documentation</a>
*/
// NOTE: This class is produced by the AWS Java SDK code generator; manual
// edits to its logic would be overwritten on regeneration, so only
// documentation is added here.
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListTagsForStreamResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * If you specify this parameter and the result of a <code>ListTags</code> call is truncated, the response includes
     * a token that you can use in the next request to fetch the next set of tags.
     * </p>
     */
    private String nextToken;
    /**
     * <p>
     * A map of tag keys and values associated with the specified stream.
     * </p>
     */
    private java.util.Map<String, String> tags;

    /**
     * <p>
     * If you specify this parameter and the result of a <code>ListTags</code> call is truncated, the response includes
     * a token that you can use in the next request to fetch the next set of tags.
     * </p>
     *
     * @param nextToken
     *        If you specify this parameter and the result of a <code>ListTags</code> call is truncated, the response
     *        includes a token that you can use in the next request to fetch the next set of tags.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * <p>
     * If you specify this parameter and the result of a <code>ListTags</code> call is truncated, the response includes
     * a token that you can use in the next request to fetch the next set of tags.
     * </p>
     *
     * @return If you specify this parameter and the result of a <code>ListTags</code> call is truncated, the response
     *         includes a token that you can use in the next request to fetch the next set of tags.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * <p>
     * If you specify this parameter and the result of a <code>ListTags</code> call is truncated, the response includes
     * a token that you can use in the next request to fetch the next set of tags.
     * </p>
     *
     * @param nextToken
     *        If you specify this parameter and the result of a <code>ListTags</code> call is truncated, the response
     *        includes a token that you can use in the next request to fetch the next set of tags.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListTagsForStreamResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * <p>
     * A map of tag keys and values associated with the specified stream.
     * </p>
     *
     * @return A map of tag keys and values associated with the specified stream.
     */
    public java.util.Map<String, String> getTags() {
        return tags;
    }

    /**
     * <p>
     * A map of tag keys and values associated with the specified stream.
     * </p>
     *
     * @param tags
     *        A map of tag keys and values associated with the specified stream.
     */
    public void setTags(java.util.Map<String, String> tags) {
        this.tags = tags;
    }

    /**
     * <p>
     * A map of tag keys and values associated with the specified stream.
     * </p>
     *
     * @param tags
     *        A map of tag keys and values associated with the specified stream.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListTagsForStreamResult withTags(java.util.Map<String, String> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Adds a single tag entry, lazily creating the backing map. Rejects keys
     * that were already added via this method or present in the current map.
     */
    public ListTagsForStreamResult addTagsEntry(String key, String value) {
        if (null == this.tags) {
            this.tags = new java.util.HashMap<String, String>();
        }
        if (this.tags.containsKey(key))
            throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided.");
        this.tags.put(key, value);
        return this;
    }

    /**
     * Removes all the entries added into Tags.
     *
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListTagsForStreamResult clearTagsEntries() {
        this.tags = null;
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken()).append(",");
        if (getTags() != null)
            sb.append("Tags: ").append(getTags());
        sb.append("}");
        return sb.toString();
    }

    // equals/hashCode consider only nextToken and tags, consistent with each other.
    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof ListTagsForStreamResult == false)
            return false;
        ListTagsForStreamResult other = (ListTagsForStreamResult) obj;
        if (other.getNextToken() == null ^ this.getNextToken() == null)
            return false;
        if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
            return false;
        if (other.getTags() == null ^ this.getTags() == null)
            return false;
        if (other.getTags() != null && other.getTags().equals(this.getTags()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        hashCode = prime * hashCode + ((getTags() == null) ? 0 : getTags().hashCode());
        return hashCode;
    }

    @Override
    public ListTagsForStreamResult clone() {
        try {
            return (ListTagsForStreamResult) super.clone();
        } catch (CloneNotSupportedException e) {
            // Unreachable: the class implements Cloneable.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| apache-2.0 |
rowantyj/Jabber | droidparts-develop/droidparts-samples/droidparts-sample/src/org/droidparts/sample/DependencyProvider.java | 1558 | /**
* Copyright 2014 Alex Yanchenko
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.droidparts.sample;
import org.droidparts.AbstractDependencyProvider;
import org.droidparts.persist.sql.AbstractDBOpenHelper;
import org.droidparts.sample.db.DBOpenHelper;
import org.droidparts.sample.json.EntrySerializer;
import org.droidparts.util.ui.AbstractDialogFactory;
import android.content.Context;
/**
 * DroidParts dependency provider for the sample app: wires up the database
 * helper and JSON serializer singletons and a per-injection dialog factory.
 */
public class DependencyProvider extends AbstractDependencyProvider {

    // Created once in the constructor and reused for every injection.
    private final DBOpenHelper dbOpenHelper;
    private final EntrySerializer entrySerializer;

    public DependencyProvider(Context ctx) {
        super(ctx);
        dbOpenHelper = new DBOpenHelper(ctx);
        entrySerializer = new EntrySerializer(ctx);
    }

    @Override
    public AbstractDBOpenHelper getDBOpenHelper() {
        return dbOpenHelper;
    }

    public EntrySerializer getEntrySerializer() {
        // singleton
        return entrySerializer;
    }

    public AbstractDialogFactory getDialogFactory(Context ctx) {
        // new instance each time injected
        return new AbstractDialogFactory(ctx);
    }
}
| apache-2.0 |
XLabs/xlabs.github.io | html/dir_0fa41e00b0d61de812967c86e24504f5.js | 252 | var dir_0fa41e00b0d61de812967c86e24504f5 =
// Doxygen-generated directory navigation data: one entry per child directory,
// as [display name, target HTML page, child directory id]. Do not edit by hand.
[
    [ "Debug", "dir_810c4b35534b0047586dbdbdc390a7cf.html", "dir_810c4b35534b0047586dbdbdc390a7cf" ],
    [ "Release", "dir_ad0f63bef6a48e03dbf8615952642cf9.html", "dir_ad0f63bef6a48e03dbf8615952642cf9" ]
];
spring-projects/spring-framework | spring-webmvc/src/main/java/org/springframework/web/servlet/config/ViewResolversBeanDefinitionParser.java | 8644 | /*
* Copyright 2002-2021 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.web.servlet.config;
import java.util.List;
import org.w3c.dom.Element;
import org.springframework.beans.MutablePropertyValues;
import org.springframework.beans.factory.config.BeanDefinition;
import org.springframework.beans.factory.parsing.BeanComponentDefinition;
import org.springframework.beans.factory.parsing.CompositeComponentDefinition;
import org.springframework.beans.factory.support.ManagedList;
import org.springframework.beans.factory.support.RootBeanDefinition;
import org.springframework.beans.factory.xml.BeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.springframework.core.Ordered;
import org.springframework.util.xml.DomUtils;
import org.springframework.web.servlet.view.BeanNameViewResolver;
import org.springframework.web.servlet.view.ContentNegotiatingViewResolver;
import org.springframework.web.servlet.view.InternalResourceViewResolver;
import org.springframework.web.servlet.view.ViewResolverComposite;
import org.springframework.web.servlet.view.freemarker.FreeMarkerViewResolver;
import org.springframework.web.servlet.view.groovy.GroovyMarkupViewResolver;
import org.springframework.web.servlet.view.script.ScriptTemplateViewResolver;
/**
* Parses the {@code view-resolvers} MVC namespace element and registers
* {@link org.springframework.web.servlet.ViewResolver} bean definitions.
*
* <p>All registered resolvers are wrapped in a single (composite) ViewResolver
* with its order property set to 0 so that other external resolvers may be ordered
* before or after it.
*
* <p>When content negotiation is enabled the order property is set to highest priority
* instead with the ContentNegotiatingViewResolver encapsulating all other registered
* view resolver instances. That way the resolvers registered through the MVC namespace
* form a self-encapsulated resolver chain.
*
* @author Sivaprasad Valluru
* @author Sebastien Deleuze
* @author Rossen Stoyanchev
* @since 4.1
* @see FreeMarkerConfigurerBeanDefinitionParser
* @see GroovyMarkupConfigurerBeanDefinitionParser
* @see ScriptTemplateConfigurerBeanDefinitionParser
*/
public class ViewResolversBeanDefinitionParser implements BeanDefinitionParser {

    /**
     * The bean name used for the {@code ViewResolverComposite}.
     */
    public static final String VIEW_RESOLVER_BEAN_NAME = "mvcViewResolver";


    /**
     * Parses the {@code <view-resolvers>} element, registering a single
     * {@code ViewResolverComposite} bean that wraps all configured resolvers.
     *
     * @param element the {@code <view-resolvers>} element
     * @param context the current parser context
     * @return always {@code null}; the composite is registered directly with the registry
     */
    @Override
    public BeanDefinition parse(Element element, ParserContext context) {
        Object source = context.extractSource(element);
        context.pushContainingComponent(new CompositeComponentDefinition(element.getTagName(), source));

        ManagedList<Object> resolvers = new ManagedList<>(4);
        resolvers.setSource(context.extractSource(element));

        // Child element names recognized inside <view-resolvers>.
        // FIX: removed "tiles" from this list — this class has no branch (and no
        // TilesViewResolver import) to handle it, so a <tiles> child would have
        // fallen through to the "should never happen" branch below and failed
        // with an IllegalStateException at parse time.
        String[] names = new String[] {
                "jsp", "bean-name", "freemarker", "groovy", "script-template", "bean", "ref"};

        for (Element resolverElement : DomUtils.getChildElementsByTagName(element, names)) {
            String name = resolverElement.getLocalName();
            // <bean> and <ref> contribute user-defined resolver beans as-is.
            if ("bean".equals(name) || "ref".equals(name)) {
                resolvers.add(context.getDelegate().parsePropertySubElement(resolverElement, null));
                continue;
            }
            RootBeanDefinition resolverBeanDef;
            if ("jsp".equals(name)) {
                // JSP resolver defaults to /WEB-INF/*.jsp unless overridden by attributes.
                resolverBeanDef = new RootBeanDefinition(InternalResourceViewResolver.class);
                resolverBeanDef.getPropertyValues().add("prefix", "/WEB-INF/");
                resolverBeanDef.getPropertyValues().add("suffix", ".jsp");
                addUrlBasedViewResolverProperties(resolverElement, resolverBeanDef);
            }
            else if ("freemarker".equals(name)) {
                resolverBeanDef = new RootBeanDefinition(FreeMarkerViewResolver.class);
                resolverBeanDef.getPropertyValues().add("suffix", ".ftl");
                addUrlBasedViewResolverProperties(resolverElement, resolverBeanDef);
            }
            else if ("groovy".equals(name)) {
                resolverBeanDef = new RootBeanDefinition(GroovyMarkupViewResolver.class);
                resolverBeanDef.getPropertyValues().add("suffix", ".tpl");
                addUrlBasedViewResolverProperties(resolverElement, resolverBeanDef);
            }
            else if ("script-template".equals(name)) {
                resolverBeanDef = new RootBeanDefinition(ScriptTemplateViewResolver.class);
                addUrlBasedViewResolverProperties(resolverElement, resolverBeanDef);
            }
            else if ("bean-name".equals(name)) {
                resolverBeanDef = new RootBeanDefinition(BeanNameViewResolver.class);
            }
            else {
                // Should never happen: the element names above are exhaustive.
                throw new IllegalStateException("Unexpected element name: " + name);
            }
            resolverBeanDef.setSource(source);
            resolverBeanDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
            resolvers.add(resolverBeanDef);
        }

        String beanName = VIEW_RESOLVER_BEAN_NAME;
        RootBeanDefinition compositeResolverBeanDef = new RootBeanDefinition(ViewResolverComposite.class);
        compositeResolverBeanDef.setSource(source);
        compositeResolverBeanDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);

        names = new String[] {"content-negotiation"};
        List<Element> contentNegotiationElements = DomUtils.getChildElementsByTagName(element, names);
        if (contentNegotiationElements.isEmpty()) {
            // No content negotiation: expose the configured resolvers directly.
            compositeResolverBeanDef.getPropertyValues().add("viewResolvers", resolvers);
        }
        else if (contentNegotiationElements.size() == 1) {
            // Wrap all resolvers in a ContentNegotiatingViewResolver and give the
            // composite highest precedence so the chain is self-encapsulated.
            BeanDefinition beanDef = createContentNegotiatingViewResolver(contentNegotiationElements.get(0), context);
            beanDef.getPropertyValues().add("viewResolvers", resolvers);
            ManagedList<Object> list = new ManagedList<>(1);
            list.add(beanDef);
            compositeResolverBeanDef.getPropertyValues().add("order", Ordered.HIGHEST_PRECEDENCE);
            compositeResolverBeanDef.getPropertyValues().add("viewResolvers", list);
        }
        else {
            throw new IllegalArgumentException("Only one <content-negotiation> element is allowed.");
        }

        // An explicit order attribute overrides the default / negotiated order.
        if (element.hasAttribute("order")) {
            compositeResolverBeanDef.getPropertyValues().add("order", element.getAttribute("order"));
        }

        context.getReaderContext().getRegistry().registerBeanDefinition(beanName, compositeResolverBeanDef);
        context.registerComponent(new BeanComponentDefinition(compositeResolverBeanDef, beanName));
        context.popAndRegisterContainingComponent();
        return null;
    }

    /**
     * Copies the common URL-based resolver attributes ({@code prefix}, {@code suffix},
     * {@code cache-views}, {@code view-class}, {@code view-names}) from the XML
     * element onto the bean definition, if present.
     */
    private void addUrlBasedViewResolverProperties(Element element, RootBeanDefinition beanDefinition) {
        if (element.hasAttribute("prefix")) {
            beanDefinition.getPropertyValues().add("prefix", element.getAttribute("prefix"));
        }
        if (element.hasAttribute("suffix")) {
            beanDefinition.getPropertyValues().add("suffix", element.getAttribute("suffix"));
        }
        if (element.hasAttribute("cache-views")) {
            beanDefinition.getPropertyValues().add("cache", element.getAttribute("cache-views"));
        }
        if (element.hasAttribute("view-class")) {
            beanDefinition.getPropertyValues().add("viewClass", element.getAttribute("view-class"));
        }
        if (element.hasAttribute("view-names")) {
            beanDefinition.getPropertyValues().add("viewNames", element.getAttribute("view-names"));
        }
    }

    /**
     * Builds a {@code ContentNegotiatingViewResolver} bean definition from the
     * {@code <content-negotiation>} element, wiring its default views, the
     * {@code use-not-acceptable} flag and the shared ContentNegotiationManager.
     */
    private BeanDefinition createContentNegotiatingViewResolver(Element resolverElement, ParserContext context) {
        RootBeanDefinition beanDef = new RootBeanDefinition(ContentNegotiatingViewResolver.class);
        beanDef.setSource(context.extractSource(resolverElement));
        beanDef.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
        MutablePropertyValues values = beanDef.getPropertyValues();

        List<Element> elements = DomUtils.getChildElementsByTagName(resolverElement, "default-views");
        if (!elements.isEmpty()) {
            ManagedList<Object> list = new ManagedList<>();
            for (Element element : DomUtils.getChildElementsByTagName(elements.get(0), "bean", "ref")) {
                list.add(context.getDelegate().parsePropertySubElement(element, null));
            }
            values.add("defaultViews", list);
        }
        if (resolverElement.hasAttribute("use-not-acceptable")) {
            values.add("useNotAcceptableStatusCode", resolverElement.getAttribute("use-not-acceptable"));
        }
        // Falls back to the namespace-wide ContentNegotiationManager, if registered.
        Object manager = MvcNamespaceUtils.getContentNegotiationManager(context);
        if (manager != null) {
            values.add("contentNegotiationManager", manager);
        }
        return beanDef;
    }

}
| apache-2.0 |
junkdog/ecs-matrix | matrix/src/main/java/net/onedaybeard/ecs/model/ComponentDependencyMatrix.java | 2811 | package net.onedaybeard.ecs.model;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
import java.util.SortedMap;
import net.onedaybeard.ecs.util.MatrixStringUtil;
import org.objectweb.asm.Opcodes;
import com.x5.template.Chunk;
import com.x5.template.Theme;
import static java.util.Arrays.asList;
public class ComponentDependencyMatrix implements Opcodes {
    // Class files / jars to inspect for ECS types.
    private final List<URI> files;
    // Destination file for the rendered HTML matrix.
    private final File output;
    // Project name displayed in the generated page.
    private final String projectName;

    public ComponentDependencyMatrix(String projectName, List<URI> files, File output) {
        this.projectName = projectName;
        this.files = files;
        this.output = output;
    }

    /**
     * Tries each known ECS framework in turn and renders the matrix for the
     * first one whose classes are found among {@code files}.
     *
     * @return a human-readable message naming the detected framework, or a
     *         failure message if none matched
     */
    public String detectAndProcess() {
        EcsTypeInspector typeInspector;
        // TODO: get classpath and/or deps
        for (String ecs : asList("artemis", "ashley")) {
            typeInspector = new EcsTypeInspector(files, "/" + ecs);
            if (typeInspector.foundEcsClasses()) {
                process(typeInspector);
                return "Found ECS framework: " + ecs;
            }
        }
        return "Failed finding any ECS related classes.";
    }

    /** Renders the matrix with no resource prefix. */
    public void process() {
        process("");
    }

    /** Renders the matrix, scanning classes under the given resource prefix. */
    public void process(String resourcePrefix) {
        process(new EcsTypeInspector(files, resourcePrefix));
    }

    private void process(EcsTypeInspector typeInspector) {
        write(typeInspector.getTypeMap(), typeInspector.getMatrixData());
    }

    /**
     * Renders the "matrix" Chunk template with the collected rows and column
     * headers and writes the result to {@code output}.
     */
    private void write(SortedMap<String, List<RowTypeMapping>> mappedSystems, MatrixData matrix) {
        Theme theme = new Theme();
        Chunk chunk = theme.makeChunk("matrix");

        // Flatten the (package -> mappings) map into one row list, inserting a
        // header row (package name only) before each group.
        List<RowTypeMapping> rows = new ArrayList<RowTypeMapping>();
        for (Entry<String,List<RowTypeMapping>> entry : mappedSystems.entrySet()) {
            rows.add(new RowTypeMapping(entry.getKey()));
            rows.addAll(entry.getValue());
        }

        // Underscore placeholder as wide as the longest class name, used by the
        // template for column sizing. Note: "." is a regex here, so every
        // character is replaced.
        chunk.set("longestName", MatrixStringUtil.findLongestClassName(mappedSystems).replaceAll(".", "_") + "______");
        chunk.set("rows", rows);
        chunk.set("headersComponents", matrix.componentColumns);
        chunk.set("componentCount", matrix.componentColumns.size());
        chunk.set("headersManagers", matrix.managerColumns);
        chunk.set("managerCount", matrix.managerColumns.size());
        chunk.set("headersSystems", matrix.systemColumns);
        chunk.set("systemCount", matrix.systemColumns.size());
        chunk.set("factoryCount", matrix.factoryColumns.size());
        chunk.set("headersFactories", matrix.factoryColumns);
        chunk.set("project", projectName);

        BufferedWriter out = null;
        try {
            out = new BufferedWriter(new FileWriter(output));
            chunk.render(out);
        } catch (IOException e) {
            // NOTE(review): a failed render is only printed to stderr and the
            // method returns normally — confirm callers do not need a signal
            // that no output file was produced.
            e.printStackTrace();
        } finally {
            // Failing to close, however, is escalated to a RuntimeException.
            if (out != null) try {
                out.close();
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        }
    }
}
| apache-2.0 |
Erik-Yim/spring_boot_demo | springboot-transaction/src/main/java/cn/itcast/zt/SpringbootTransactionConfiguration.java | 1920 | package cn.itcast.zt;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.transaction.annotation.TransactionManagementConfigurer;
import javax.persistence.EntityManagerFactory;
/**
* Created by zhangtian on 2017/4/12.
*/
// Enables annotation-driven transaction management; equivalent to
// <tx:annotation-driven /> in an XML configuration file.
@EnableTransactionManagement
@Configuration
public class SpringbootTransactionConfiguration implements TransactionManagementConfigurer {

    // The JPA transaction manager defined below, injected by qualifier.
    @Autowired
    @Qualifier(value = "transactionManager")
    private PlatformTransactionManager transactionManager;

    // Two transaction managers cannot be active at the same time.
    /*// Create transaction manager 1 (DataSource-based)
    @Bean(name = "dataSourceTransactionManager")
    public PlatformTransactionManager dataSourceTransactionManager(DataSource dataSource) {
        return new DataSourceTransactionManager(dataSource);
    }*/

    // Create transaction manager 2: JPA-based, built on the EntityManagerFactory.
    @Bean(name = "transactionManager")
    public PlatformTransactionManager jpaTransactionManager(EntityManagerFactory factory) {
        return new JpaTransactionManager(factory);
    }

    // Implements TransactionManagementConfigurer: the returned manager is the
    // default used when multiple transaction managers are present.
    @Override
    public PlatformTransactionManager annotationDrivenTransactionManager() {
        return transactionManager;
    }

    // public static void main(String[] args) {
    //     SpringApplication.run(SpringbootTransactionConfiguration.class, args);
    // }
}
| apache-2.0 |
adrapereira/jena | jena-permissions/src/main/java/org/apache/jena/permissions/model/impl/SecuredSelector.java | 2510 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.permissions.model.impl;
import org.apache.jena.permissions.SecurityEvaluator.SecNode;
import org.apache.jena.permissions.SecurityEvaluator.SecTriple;
import org.apache.jena.permissions.impl.SecuredItem;
import org.apache.jena.permissions.impl.SecuredItemImpl;
import org.apache.jena.rdf.model.* ;
/**
 * A {@link Selector} decorator that only accepts statements the current
 * principal is permitted to read, as decided by the wrapped {@link SecuredItem}.
 */
public class SecuredSelector implements Selector {

    private final SecuredItem item;
    private final Selector delegate;

    public SecuredSelector(final SecuredItem securedItem) {
        this(securedItem, new SimpleSelector());
    }

    public SecuredSelector(final SecuredItem securedItem, final Selector selector) {
        this.item = securedItem;
        this.delegate = selector;
    }

    /** Maps a possibly-null RDFNode to its security node; null means "any". */
    private SecNode toSecNode(final RDFNode node) {
        if (node == null) {
            return SecNode.ANY;
        }
        return SecuredItemImpl.convert(node.asNode());
    }

    @Override
    public RDFNode getObject() {
        return delegate.getObject();
    }

    @Override
    public Property getPredicate() {
        return delegate.getPredicate();
    }

    @Override
    public Resource getSubject() {
        return delegate.getSubject();
    }

    @Override
    public boolean isSimple() {
        return item.canRead(SecTriple.ANY);
    }

    /**
     * This method is designed to be over ridden by subclasses to define
     * application specific constraints on the statements selected.
     *
     * @param s
     *            the statement to be tested
     * @return true if the statement satisfies the constraint
     */
    @Override
    public boolean test(final Statement s) {
        if (!delegate.test(s)) {
            return false;
        }
        final SecTriple triple = new SecTriple(toSecNode(s.getSubject()),
                toSecNode(s.getPredicate()), toSecNode(s.getObject()));
        return item.canRead(triple);
    }
}
| apache-2.0 |
google/or-tools | ortools/base/threadpool.cc | 2495 | // Copyright 2010-2021 Google LLC
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "ortools/base/threadpool.h"
#include "ortools/base/logging.h"
namespace operations_research {
// Worker-thread entry point: `data` is the owning ThreadPool. Repeatedly pulls
// tasks and runs them until GetNextTask() returns null, which signals shutdown.
void RunWorker(void* data) {
  ThreadPool* const thread_pool = reinterpret_cast<ThreadPool*>(data);
  std::function<void()> work = thread_pool->GetNextTask();
  // nullptr (not NULL) for consistency with the rest of this file.
  while (work != nullptr) {
    work();
    work = thread_pool->GetNextTask();
  }
}
// Constructs a pool that will run `num_workers` threads once StartWorkers()
// is called. NOTE(review): the `prefix` argument (presumably a thread-name
// prefix) is currently unused — confirm whether names should be applied.
ThreadPool::ThreadPool(const std::string& prefix, int num_workers)
    : num_workers_(num_workers) {}
// Asks all workers to finish once the queue drains, then joins them.
// Destroying a pool whose workers were never started is a no-op.
ThreadPool::~ThreadPool() {
  if (started_) {
    std::unique_lock<std::mutex> mutex_lock(mutex_);
    waiting_to_finish_ = true;  // Read by GetNextTask() under the same mutex.
    mutex_lock.unlock();
    condition_.notify_all();
    for (int i = 0; i < num_workers_; ++i) {
      all_workers_[i].join();
    }
  }
}
// Sets the maximum number of pending tasks; Schedule() blocks when full.
// Must be called before StartWorkers(), and the capacity must exceed the
// worker count (both enforced by CHECKs).
void ThreadPool::SetQueueCapacity(int capacity) {
  CHECK_GT(capacity, num_workers_);
  CHECK(!started_);
  queue_capacity_ = capacity;
}
// Launches the worker threads; each runs RunWorker() until it receives a
// null task from GetNextTask().
void ThreadPool::StartWorkers() {
  started_ = true;
  for (int i = 0; i < num_workers_; ++i) {
    all_workers_.push_back(std::thread(&RunWorker, this));
  }
}
// Blocks until a task is available or shutdown has been requested.
// Returns the next queued task, or nullptr when the pool is draining and the
// queue is empty (which tells the calling worker thread to exit).
std::function<void()> ThreadPool::GetNextTask() {
  std::unique_lock<std::mutex> lock(mutex_);
  for (;;) {
    if (!tasks_.empty()) {
      std::function<void()> task = tasks_.front();
      tasks_.pop_front();
      // Wake producers blocked in Schedule() once capacity is available again.
      if (tasks_.size() < queue_capacity_ && waiting_for_capacity_) {
        waiting_for_capacity_ = false;
        capacity_condition_.notify_all();
      }
      return task;
    }
    if (waiting_to_finish_) {
      return nullptr;
    }
    condition_.wait(lock);  // Releases the lock while waiting.
  }
  // FIX: removed the unreachable `return nullptr;` that followed the infinite
  // loop — the loop only exits via the returns above.
}
// Enqueues `closure` for execution, blocking while the queue is at capacity.
// If the workers are already running, wakes them to pick up the new task.
void ThreadPool::Schedule(std::function<void()> closure) {
  std::unique_lock<std::mutex> lock(mutex_);
  while (tasks_.size() >= queue_capacity_) {
    waiting_for_capacity_ = true;
    capacity_condition_.wait(lock);  // Signaled by GetNextTask().
  }
  tasks_.push_back(closure);
  if (started_) {
    lock.unlock();  // Unlock before notify to avoid waking into a held mutex.
    condition_.notify_all();
  }
}
} // namespace operations_research
| apache-2.0 |
kenaiX/kenai-common-android | src/main/java/com/kenai/function/view/XViewManager.java | 543 | package com.kenai.function.view;
import android.content.Context;
import android.util.DisplayMetrics;
import android.view.WindowManager;
public class XViewManager {

    /**
     * Returns the device screen size in pixels, as reported by the default
     * display of the window manager.
     * <p>
     * [0]: width<br>
     * [1]: height
     *
     * @param context any Context able to access the window service
     * @return a two-element array {width, height}
     */
    public static int[] getScreenWidth_Height(Context context) {
        DisplayMetrics metric = new DisplayMetrics();
        // Use the Context.WINDOW_SERVICE constant instead of the magic string
        // "window" (same value, but typo-proof and self-documenting).
        ((WindowManager) context.getSystemService(Context.WINDOW_SERVICE))
                .getDefaultDisplay().getMetrics(metric);
        int[] i = { metric.widthPixels, metric.heightPixels };
        return i;
    }
}
| apache-2.0 |
dlzhangxg/cloud-ml-sdk | cloud_ml_samples/paddlepaddle/linear_regression/trainer/task.py | 2119 | # Copyright 2017 Xiaomi, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle.v2 as paddle
import paddle.v2.dataset.uci_housing as uci_housing
def main():
  """Train and evaluate a linear regression model on the UCI housing data.

  Runs on CPU with a single trainer; logs training cost every 100 batches
  and the test cost after each pass.
  """
  paddle.init(use_gpu=False, trainer_count=1)

  # Network: 13 input features -> single linear output, MSE loss against y.
  x = paddle.layer.data(name='x', type=paddle.data_type.dense_vector(13))
  y_predict = paddle.layer.fc(input=x, size=1, act=paddle.activation.Linear())
  y = paddle.layer.data(name='y', type=paddle.data_type.dense_vector(1))
  cost = paddle.layer.mse_cost(input=y_predict, label=y)

  parameters = paddle.parameters.create(cost)

  # Plain SGD: momentum of 0 disables the momentum term.
  optimizer = paddle.optimizer.Momentum(momentum=0)
  trainer = paddle.trainer.SGD(cost=cost,
                               parameters=parameters,
                               update_equation=optimizer)

  # Maps reader tuple positions to layer names: index 0 -> 'x', index 1 -> 'y'.
  feeding = {'x': 0, 'y': 1}

  def event_handler(event):
    # Progress callback: batch-level cost logging and per-pass evaluation.
    if isinstance(event, paddle.event.EndIteration):
      if event.batch_id % 100 == 0:
        print "Pass %d, Batch %d, Cost %f" % (event.pass_id, event.batch_id,
                                              event.cost)
    if isinstance(event, paddle.event.EndPass):
      result = trainer.test(reader=paddle.batch(uci_housing.test(),
                                                batch_size=2),
                            feeding=feeding)
      print "Test %d, Cost %f" % (event.pass_id, result.cost)

  # Shuffled mini-batches of size 2 (shuffle buffer 500), 30 passes.
  trainer.train(reader=paddle.batch(
      paddle.reader.shuffle(uci_housing.train(),
                            buf_size=500),
      batch_size=2),
                feeding=feeding,
                event_handler=event_handler,
                num_passes=30)
# Standard script entry point: run training only when executed directly.
if __name__ == "__main__":
  main()
| apache-2.0 |
daniel-de-vries/OpenLEGO | openlego/core/exec_comp.py | 1643 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Copyright 2019 I. van Gent
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
This file contains the definition the `ExecComp` class.
"""
from __future__ import absolute_import, division, print_function
from openmdao.api import ExecComp as OpenmdaoExecComp
import time
class ExecComp(OpenmdaoExecComp):
    """Executable component based on a mathematical expression, with the added
    option of sleeping after each evaluation to simulate longer execution
    times. Also counts how many times :meth:`compute` has run."""

    def __init__(self, exprs, sleep_time=None, **kwargs):
        """Initialize the component.

        Parameters
        ----------
        exprs : str or list of str
            Assignment expression(s), forwarded to OpenMDAO's ``ExecComp``.
        sleep_time : float, optional
            Seconds to sleep after every ``compute()`` call; ``None`` (default)
            disables the artificial delay.
        **kwargs
            Additional keyword arguments forwarded to OpenMDAO's ``ExecComp``.
        """
        # Number of times compute() has been executed (useful in tests).
        self.number_of_computes = 0
        self.sleep_time = sleep_time
        super(ExecComp, self).__init__(exprs, **kwargs)

    def compute(self, inputs, outputs):
        """
        Execute this component's assignment statements.

        Parameters
        ----------
        inputs : `Vector`
            `Vector` containing inputs.

        outputs : `Vector`
            `Vector` containing outputs.
        """
        # Delegate the actual evaluation to the OpenMDAO implementation.
        OpenmdaoExecComp.compute(self, inputs, outputs)
        self.number_of_computes += 1
        # Optionally simulate a longer-running analysis.
        if self.sleep_time is not None:
            time.sleep(self.sleep_time)
| apache-2.0 |
hubo1016/namedstruct | tests/testNamedstruct.py | 10498 | '''
Created on 2016/1/19
:author: hubo
'''
from __future__ import print_function
import unittest
from namedstruct import *
from pprint import pprint
# 32-bit bitfield: 1-bit 'a', then 9/11/11-bit 'r'/'g'/'b' channels.
# Newly created instances default to a = 1 via the init pack.
bitfield_test = bitfield(uint32,
                         (1, 'a'),
                         (9, 'r'),
                         (11, 'g'),
                         (11, 'b'),
                         name = 'bitfield_test',
                         init = packvalue(1, 'a'))

# 64-bit bitfield: 3-bit 'pre', a 50-element array of 1-bit flags, a 4-bit
# anonymous (padding) field, and a 7-bit 'post'.
bitfield_array = bitfield(uint64,
                          (3, 'pre'),
                          (1, 'bits', 50),
                          (4,),
                          (7, 'post'),
                          name = 'bitfield_array'
                          )

# uint8-based enum with three flag values (the True argument presumably marks
# it as a bitwise enum — TODO confirm against namedstruct's enum() signature).
pre_enum = enum('pre_enum', globals(), uint8, True,
                PRE_A = 0x1,
                PRE_B = 0x2,
                PRE_C = 0x4
                )

# Struct combining a bitfield header, a fixed array of two colors and a
# variable-length tail of bitfield_array elements. The total size is carried
# in s1.post (packsize on prepack, sizefromlen on parse), and s1.pre is
# extended/interpreted via pre_enum.
test_struct = nstruct((bitfield_array, 's1'),
                      (bitfield_test[2], 'colors'),
                      (bitfield_array[0], 'extras'),
                      size = sizefromlen(128, 's1', 'post'),
                      prepack = packsize('s1', 'post'),
                      name = 'test_struct',
                      extend = {('s1', 'pre'): pre_enum}
                      )
class Test(unittest.TestCase):

    def testBitfield(self):
        """Round-trip bitfield values through tobytes/parse/create and check
        exact on-wire byte layouts, including defaults and embedded arrays."""
        c = bitfield_test.new(a = 0, r = 0x77, g = 0x312, b = 0x57a)
        # 0b00011101110110001001010101111010
        self.assertEqual(bitfield_test.tobytes(c), b'\x1d\xd8\x95\x7a')
        self.assertEqual(dump(c, False), dump(bitfield_test.parse(bitfield_test.tobytes(c))[0], False))
        self.assertEqual(dump(c, False), dump(bitfield_test.create(bitfield_test.tobytes(c)), False))
        # Default init packs a = 1 (see the type definition).
        c2 = bitfield_test.new()
        self.assertEqual(bitfield_test.tobytes(c2), b'\x80\x00\x00\x00')
        # Alternating bit pattern through the 50-element bits array.
        c3 = bitfield_array.new(pre=2, bits = [(r & 1) for r in range(0,50)], post = 0x3f)
        self.assertEqual(c3._tobytes(), b'\x4a\xaa\xaa\xaa\xaa\xaa\xa8\x3f')
        self.assertEqual(dump(c3, False), dump(bitfield_array.parse(bitfield_array.tobytes(c3))[0], False))
        self.assertEqual(dump(c3, False), dump(bitfield_array.create(bitfield_array.tobytes(c3)), False))
        # Full struct: bitfield header + fixed color array + variable tail.
        c4 = test_struct.new()
        # 0b0100000000000000000010000000000010000000000000000000000000100000
        c4.s1.pre = 2
        c4.s1.bits[17] = 1
        c4.s1.bits[29] = 1
        # 0b10000010100000000000000000001100
        c4.colors[0].r = 10
        c4.colors[0].b = 12
        # 0b00000000000000000100100000000000
        c4.colors[1].a = 0
        c4.colors[1].g = 9
        c4.extras.append(bitfield_array.new(pre=1, post = 0x1f))
        c4.extras.append(bitfield_array.new(pre=2, bits = [1] * 50, post = 0x17))
        b = c4._tobytes()
        self.assertEqual(b, b'\x40\x00\x08\x00\x80\x00\x00\x20\x82\x80\x00\x0c\x00\x00\x48\x00'\
                            b'\x20\x00\x00\x00\x00\x00\x00\x1f\x5f\xff\xff\xff\xff\xff\xf8\x17')
        self.assertEqual(dump(c4, False), dump(test_struct.parse(b)[0], False))
        self.assertEqual(dump(c4, False), dump(test_struct.create(b), False))
        pprint(dump(test_struct.create(b)))

    def testDarray(self):
        """Dynamic arrays: a length-prefixed list of length-prefixed strings
        must pack and parse symmetrically."""
        s1 = nstruct((uint8, 'length'),
                     (raw, 'data'),
                     size = lambda x: x.length + 1,
                     prepack = packexpr(lambda x: len(x.data), 'length'),
                     name = 's1',
                     padding = 1
                     )
        s2 = nstruct((uint16, 'size'),
                     (darray(s1, 'strings', lambda x: x.size),),
                     name = 's2',
                     prepack = packexpr(lambda x:len(x.strings), 'size'),
                     padding = 1)
        array = s2()
        array.strings.append(s1(data=b'abc'))
        array.strings.append(s1(data=b'defghi'))
        b = s2.tobytes(array)
        self.assertEqual(b, b'\x00\x02\x03abc\x06defghi')
        array2, size = s2.parse(b)
        self.assertEqual(size, len(b))
        self.assertEqual(dump(array, False), dump(array2, False))

    def testEmptyBase(self):
        """A base struct with no fields of its own can still be classified
        into subtypes and embedded into an outer struct."""
        s1 = nstruct(name = 's1', padding = 1, classifier = lambda x: x.type)
        s2 = nstruct((uint16, 'a'), base = s1, classifyby = (1,), name = 's2', init = packvalue(1, 'type'))
        s3 = nstruct((uint8,'type'),(s1,),padding = 1, name = 's3', lastextra = True)
        r = s3.create(b'\x01\x00\x02')
        self.assertEqual(r.a, 2)
        self.assertEqual(r._tobytes(), b'\x01\x00\x02')
        s = s3((s1, s2), a = 3)
        self.assertEqual(s._tobytes(), b'\x01\x00\x03')

    def testEmbeddedTypes(self):
        """Embedded structs in several configurations: direct embedding,
        embedding inside an inherited type, nested embedding, and subtype
        replacement of an embedded type, plus _get_embedded access."""
        s1 = nstruct(name = 's1', padding = 1, classifier = lambda x: x.type, size = lambda x: 2 if x.type == 1 else 0)
        s2 = nstruct((uint16, 'a'), base = s1, classifyby = (1,), name = 's2', init = packvalue(1, 'type'))
        # Embedded struct
        s3 = nstruct((uint8,'type'),(s1,),padding = 1, name = 's3')
        # Embedded struct in an inherited type
        s4 = nstruct((uint8, 'maintype'), (uint8, 'type'), padding = 1, name = 's4')
        s5 = nstruct((s1,), base = s4, criteria = lambda x: x.maintype == 1, init = packvalue(1, 'maintype'),
                     name = 's5', lastextra = True)
        # Embedded struct in another embedded type
        s6 = nstruct((s1,),padding = 1, name = 's6', inline = False)
        s7 = nstruct((uint8,'type'),(uint8,'type2'),(s6,),padding = 1, name = 's7', lastextra = True)
        # Replace after replace
        s8 = nstruct((uint16, 'b'), base = s6, name = 's8', criteria = lambda x: x.type2 == 3, init = packvalue(3, 'type2'))
        s = s3((s1,s2), a = 3)
        b = s._tobytes()
        self.assertEqual(b, b'\x01\x00\x03')
        self.assertEqual(dump(s3.create(b), False), dump(s, False))
        s = s5((s1,s2), a = 3)
        b = s._tobytes()
        self.assertEqual(b, b'\x01\x01\x00\x03')
        self.assertEqual(dump(s5.create(b), False), dump(s, False))
        s = s7((s1,s2), a = 3)
        b = s._tobytes()
        self.assertEqual(b, b'\x01\x00\x00\x03')
        self.assertEqual(dump(s7.create(b), False), dump(s, False))
        s = s7((s6,s8), (s1,s2), a = 2, b = 6)
        b = s._tobytes()
        self.assertEqual(b, b'\x01\x03\x00\x02\x00\x06')
        self.assertEqual(dump(s7.create(b), False), dump(s, False))
        # _get_embedded
        s = s7()
        b = s._get_embedded(s1)._tobytes()
        self.assertEqual(b, b'')
        s = s7((s1, s2), a = 3)
        b = s._get_embedded(s1)._tobytes()
        self.assertEqual(b, b'\x00\x03')
        b = s._get_embedded(s1)._tobytes(True)
        self.assertEqual(b, b'\x00\x03')
        s = s7((s6,s8), (s1,s2), a = 2, b = 6)
        b = s._get_embedded(s1)._tobytes()
        self.assertEqual(b, b'\x00\x02')
        b = s._get_embedded(s1)._tobytes(True)
        self.assertEqual(b, b'\x00\x02')

    def testVariant(self):
        """Variant types: classification by a type byte, sub-classification by
        criteria, unknown-type passthrough, and variant arrays with an outer
        size prefix."""
        vtype = enum('vtype', None, uint8,
                     TYPE_A = 1,
                     TYPE_B = 2,
                     TYPE_C = 3)
        v1 = nvariant('v1',
                      nstruct((vtype, 'type'),
                              name = 'v1h',
                              padding = 1),
                      classifier = lambda x: x.type)
        n1 = nstruct((uint32, 'a'), name = 'n1', base = v1, classifyby = (1,), init = packvalue(1, 'type'))
        # NOTE(review): n2 is declared with name = 'n1' as well — looks like a
        # copy-paste slip; confirm whether it should be 'n2'.
        n2 = nstruct((uint16, 'b'), name = 'n1', base = v1, classifyby = (2,), init = packvalue(2, 'type'))
        n3 = nstruct((uint16, 'sublen'),
                     (uint16, 'subtype'),
                     name = 'n3',
                     base = v1,
                     classifyby = (3,),
                     init = packvalue(3, 'type'),
                     prepack = packrealsize('sublen'),
                     size = lambda x: x.sublen,
                     padding = 1
                     )
        n3_sub1 = nstruct((uint16[0], 'subarray'),
                          name = 'n3_sub1',
                          base = n3,
                          criteria = lambda x: x.subtype == 1,
                          init = packvalue(1, 'subtype'))
        n3_sub2 = nstruct((raw, 'text'),
                          name = 'n3_sub2',
                          base = n3,
                          criteria = lambda x: x.subtype == 2,
                          init = packvalue(2, 'subtype'))
        np_array = nstruct((uint32, 'length'),
                           (v1[0], 'array'),
                           name = 'np_array',
                           padding = 1,
                           size = lambda x: x.length,
                           prepack = packsize('length'))
        s = n1(a = 1)
        b = s._tobytes()
        self.assertEqual(b, b'\x01\x00\x00\x00\x01')
        # Truncated input must fail to parse (returns None).
        r = v1.parse(b'\x01\x00')
        self.assertIsNone(r)
        r = v1.parse(b)
        self.assertIsNotNone(r)
        s2, l = r
        self.assertEqual(l, len(b))
        self.assertEqual(s2.a, 1)
        self.assertEqual(dump(s2, typeinfo = DUMPTYPE_NONE), {'type': 'TYPE_A', 'a': 1})
        s = n2(b = 2)
        b = s._tobytes()
        self.assertEqual(b, b'\x02\x00\x02')
        r = v1.parse(b)
        self.assertIsNotNone(r)
        s2, l = r
        self.assertEqual(l, len(b))
        self.assertEqual(s2.b, 2)
        s = n3_sub1(subarray = [1,2,3])
        b = s._tobytes()
        self.assertEqual(b, b'\x03\x00\x0a\x00\x01\x00\x01\x00\x02\x00\x03')
        r = v1.parse(b)
        self.assertIsNotNone(r)
        s2, l = r
        self.assertEqual(l, len(b))
        self.assertEqual(s2.subarray, [1,2,3])
        # Unknown type value (4) still round-trips through the base variant.
        s = v1(type = 4)
        b = s._tobytes()
        self.assertEqual(b, b'\x04')
        r = v1.parse(b)
        self.assertIsNotNone(r)
        s2, l = r
        self.assertEqual(l, len(b))
        self.assertEqual(s2.type, 4)
        # Heterogeneous variant array with an outer length prefix.
        s = np_array(array = [n1(a = 1), n2(b = 2), n1(a = 3), n3_sub2(text = b'def'), n3_sub1(subarray = [1,2,3])])
        b = s._tobytes()
        self.assertEqual(b, b'\x00\x00\x00\x24\x01\x00\x00\x00\x01\x02\x00\x02\x01\x00\x00\x00\x03\x03\x00\x07\x00\x02def\x03\x00\x0a\x00\x01\x00\x01\x00\x02\x00\x03')
        s2, l = np_array.parse(b)
        self.assertEqual(l, len(b))
        self.assertEqual(s2.array[0].a, 1)
        self.assertEqual(s2.array[1].b, 2)
        self.assertEqual(s2.array[2].a, 3)
        self.assertEqual(s2.array[3].text, b'def')
        self.assertEqual(s2.array[4].subarray, [1,2,3])
# Allow running this test module directly.
if __name__ == "__main__":
    #import sys;sys.argv = ['', 'Test.testName']
    unittest.main()
haonancool/OnlineJudge | leetcode/cpp/recover_tree.cc | 977 | class Solution {
public:
// Restores a BST in which exactly two nodes' values were swapped, using a
// Morris (threaded) in-order traversal so the walk needs only O(1) extra
// space beyond the tree itself.
//
// `last` tracks the previously visited node in in-order sequence; any place
// where last->val > cur->val is an inversion. The first inversion's left
// node and the final inversion's right node are the swapped pair (`first`
// and `second`), whose values are exchanged at the end.
// NOTE(review): assumes the usual TreeNode {val, left, right} definition.
void recoverTree(TreeNode* root) {
    TreeNode *pre = 0, *cur = root, *last = 0, *first = 0, *second = 0;
    while (cur) {
        if (cur->left) {
            // Find cur's in-order predecessor in its left subtree.
            pre = cur->left;
            while (pre->right && pre->right != cur)
                pre = pre->right;
            if (pre->right) {
                // Thread already exists: left subtree is done, so visit cur.
                pre->right = 0;  // Remove the temporary thread.
                if (last && last->val > cur->val) {
                    if (!first)
                        first = last;
                    second = cur;
                }
                last = cur;
                cur = cur->right;
            } else {
                // Create a thread back to cur, then descend left.
                pre->right = cur;
                cur = cur->left;
            }
        } else {
            // No left child: visit cur directly.
            if (last && last->val > cur->val) {
                if (!first)
                    first = last;
                second = cur;
            }
            last = cur;
            cur = cur->right;
        }
    }
    // Swap the two misplaced values (if an inversion was found).
    if (first && second) {
        int temp = first->val;
        first->val = second->val;
        second->val = temp;
    }
}
}; | apache-2.0 |
sylow/google-adwords-api | examples/v201101/get_all_ad_group_criteria.rb | 3182 | #!/usr/bin/ruby
#
# Author:: api.sgomes@gmail.com (Sérgio Gomes)
#
# Copyright:: Copyright 2011, Google Inc. All Rights Reserved.
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example illustrates how to retrieve all the criteria for an ad group.
# To add criteria to an existing ad group, run add_ad_group_criteria.rb.
#
# Tags: AdGroupCriterionService.get
require 'rubygems'
gem 'google-adwords-api'
require 'adwords_api'
API_VERSION = :v201101
# Fetches and prints all criteria belonging to the ad group configured below.
# Requires AdWords credentials in ENV['HOME']/adwords_api.yml.
def get_all_ad_group_criteria()
  # AdwordsApi::Api will read a config file from ENV['HOME']/adwords_api.yml
  # when called without parameters.
  adwords = AdwordsApi::Api.new
  ad_group_criterion_srv =
      adwords.service(:AdGroupCriterionService, API_VERSION)

  # Placeholder: replace with a real ad group id before running.
  ad_group_id = 'INSERT_AD_GROUP_ID_HERE'.to_i

  # Get all the criteria for this ad group: only the Id field, ordered by
  # ad group, restricted to the single ad group id above.
  selector = {
    :fields => ['Id'],
    :ordering => [{
      :field => 'AdGroupId',
      :sort_order => 'ASCENDING'
    }],
    :predicates => [{
      :field => 'AdGroupId',
      :operator => 'IN',
      :values => [ad_group_id]
    }]
  }
  response = ad_group_criterion_srv.get(selector)
  if response and response[:entries]
    ad_group_criteria = response[:entries]
    puts "Ad group ##{ad_group_id} has #{ad_group_criteria.length} criteria."
    ad_group_criteria.each do |ad_group_criterion|
      puts " Criterion id is #{ad_group_criterion[:criterion][:id]} and " +
          "type is #{ad_group_criterion[:criterion][:xsi_type]}."
    end
  else
    puts "No criteria found for ad group ##{ad_group_id}."
  end
end
# Script entry point: run the example with layered error reporting.
if __FILE__ == $0
  # To enable logging of SOAP requests, set the ADWORDSAPI_DEBUG environment
  # variable to 'true'. This can be done either from your operating system
  # environment or via code, as done below.
  ENV['ADWORDSAPI_DEBUG'] = 'false'

  begin
    get_all_ad_group_criteria()

  # Connection error. Likely transitory.
  rescue Errno::ECONNRESET, SOAP::HTTPStreamError, SocketError => e
    puts 'Connection Error: %s' % e
    puts 'Source: %s' % e.backtrace.first

  # API Error.
  rescue AdwordsApi::Errors::ApiException => e
    puts 'API Exception caught.'
    puts 'Message: %s' % e.message
    puts 'Code: %d' % e.code if e.code
    puts 'Trigger: %s' % e.trigger if e.trigger
    puts 'Errors:'
    if e.errors
      # Print each API error and all of its fields (except the type marker).
      e.errors.each_with_index do |error, index|
        puts ' %d. Error type is %s. Fields:' % [index + 1, error[:xsi_type]]
        error.each_pair do |field, value|
          if field != :xsi_type
            puts ' %s: %s' % [field, value]
          end
        end
      end
    end
  end
end
| apache-2.0 |
lsimons/phloc-schematron-standalone | phloc-commons/src/main/java/com/phloc/commons/vminit/IVirtualMachineEventSPI.java | 1551 | /**
* Copyright (C) 2006-2013 phloc systems
* http://www.phloc.com
* office[at]phloc[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.phloc.commons.vminit;
import com.phloc.commons.annotations.IsSPIInterface;
/**
* The callback to implemented by VM startup SPIs. Unfortunately you need to
* manually call {@link VirtualMachineInitializer#runInitialization()} to get
* this SPI up and running!
*
* @author Philip Helger
*/
@IsSPIInterface
public interface IVirtualMachineEventSPI
{
  /**
   * Called upon Java VM initialization. Note that, per the class Javadoc,
   * these callbacks only fire if
   * {@link VirtualMachineInitializer#runInitialization()} is invoked manually.
   *
   * @throws Exception
   *         in case of an error.
   * @see VirtualMachineInitializer#runInitialization()
   */
  void onVirtualMachineStart () throws Exception;

  /**
   * Called upon Java VM shutdown.<br>
   * Note for web applications: this happens when the application server is shut
   * down and not when an application is shut down!
   *
   * @throws Exception
   *         in case of an error.
   */
  void onVirtualMachineStop () throws Exception;
}
| apache-2.0 |
openstack/sahara | sahara/utils/api.py | 14624 | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import traceback
import flask
import microversion_parse
from oslo_log import log as logging
from oslo_middleware import request_id as oslo_req_id
import six
from werkzeug import datastructures
from sahara.api import microversion as mv
from sahara import context
from sahara import exceptions as ex
from sahara.i18n import _
from sahara.utils import types
from sahara.utils import wsgi
LOG = logging.getLogger(__name__)
class Rest(flask.Blueprint):
    """Flask Blueprint with Sahara-specific REST routing helpers.

    Each HTTP-verb helper registers a rule through ``_mroute``/``route``.
    The generated view wrapper builds a sahara request context from the
    Keystone auth headers, deserializes the request body for mutating
    methods, and converts Sahara exceptions into JSON error responses.
    """

    def get(self, rule, status_code=200):
        return self._mroute('GET', rule, status_code)

    def post(self, rule, status_code=202):
        return self._mroute('POST', rule, status_code)

    def post_file(self, rule, status_code=202):
        return self._mroute('POST', rule, status_code, file_upload=True)

    def put(self, rule, status_code=202):
        return self._mroute('PUT', rule, status_code)

    def put_file(self, rule, status_code=202):
        return self._mroute('PUT', rule, status_code, file_upload=True)

    def delete(self, rule, status_code=204):
        return self._mroute('DELETE', rule, status_code)

    def patch(self, rule, status_code=202):
        return self._mroute('PATCH', rule, status_code)

    def _mroute(self, methods, rule, status_code=None, **kw):
        # Accept a single method name as a plain string for convenience.
        if isinstance(methods, six.string_types):
            methods = [methods]
        return self.route(rule, methods=methods, status_code=status_code, **kw)

    def route(self, rule, **options):
        """Register ``rule`` wrapped in the Sahara request/response plumbing.

        :param rule: URL rule; registered both bare and with a
            ``/<tenant_id>`` prefix, each with and without a trailing
            ``.json`` suffix (which forces a JSON response type).
        """
        status = options.pop('status_code', None)
        file_upload = options.pop('file_upload', False)

        def decorator(func):
            endpoint = options.pop('endpoint', func.__name__)

            def handler(**kwargs):
                # Reset any context left over from a previous request on
                # this thread before building a fresh one.
                context.set_ctx(None)

                LOG.debug("Rest.route.decorator.handler, kwargs={kwargs}"
                          .format(kwargs=kwargs))

                _init_resp_type(file_upload)

                # update status code
                if status:
                    flask.request.status_code = status

                kwargs.pop("tenant_id", None)
                req_id = flask.request.environ.get(oslo_req_id.ENV_REQUEST_ID)
                auth_plugin = flask.request.environ.get('keystone.token_auth')
                # Build the per-request context from Keystone middleware
                # headers.
                ctx = context.Context(
                    flask.request.headers['X-User-Id'],
                    flask.request.headers['X-Tenant-Id'],
                    flask.request.headers['X-Auth-Token'],
                    flask.request.headers['X-Service-Catalog'],
                    flask.request.headers['X-User-Name'],
                    flask.request.headers['X-Tenant-Name'],
                    flask.request.headers['X-Roles'].split(','),
                    auth_plugin=auth_plugin,
                    request_id=req_id)
                context.set_ctx(ctx)

                try:
                    # Mutating methods carry a body; pass it to the view
                    # as the 'data' keyword.
                    if flask.request.method in ['POST', 'PUT', 'PATCH']:
                        kwargs['data'] = request_data()
                    return func(**kwargs)
                except ex.Forbidden as e:
                    return access_denied(e)
                except ex.SaharaException as e:
                    return bad_request(e)
                except Exception as e:
                    return internal_error(500, 'Internal Server Error', e)

            f_rule = "/<tenant_id>" + rule
            self.add_url_rule(rule, endpoint, handler, **options)
            self.add_url_rule(rule + '.json', endpoint, handler, **options)
            self.add_url_rule(f_rule, endpoint, handler, **options)
            self.add_url_rule(f_rule + '.json', endpoint, handler, **options)

            return func

        return decorator
def check_microversion_header():
    """before_request hook: validate the requested API microversion.

    Aborts with 400 if the version string is malformed, or with 406 if
    it is well-formed but not one of the supported versions.
    """
    version = get_requested_microversion()
    if not re.match(mv.VERSION_STRING_REGEX, version):
        bad_request_microversion(version)
    if version not in mv.API_VERSIONS:
        not_acceptable_microversion(version)
def add_vary_header(response):
    """after_request hook: advertise the negotiated microversion.

    Sets a Vary header naming the microversion header, plus the
    negotiated "<service> <version>" value itself, and returns the
    (mutated) response.
    """
    negotiated = '%s %s' % (mv.SAHARA_SERVICE_TYPE,
                            get_requested_microversion())
    response.headers[mv.VARY_HEADER] = mv.OPENSTACK_API_VERSION_HEADER
    response.headers[mv.OPENSTACK_API_VERSION_HEADER] = negotiated
    return response
class RestV2(Rest):
    """v2 API blueprint: microversion negotiation on top of ``Rest``.

    The before/after-request hooks validate the requested microversion
    and stamp the negotiated version onto every response.

    The previous version of this class also overrode ``route()`` with a
    body that was a line-for-line duplicate of ``Rest.route``; that dead
    duplication has been removed — routing behaviour is inherited
    unchanged from ``Rest``.
    """

    def __init__(self, *args, **kwargs):
        super(RestV2, self).__init__(*args, **kwargs)
        self.before_request(check_microversion_header)
        self.after_request(add_vary_header)
# The only response MIME type the API serves: application/json, quality 1.
RT_JSON = datastructures.MIMEAccept([("application/json", 1)])
def _init_resp_type(file_upload):
    """Record the negotiated response type and upload mode on the request.

    A trailing '.json' in the URL forces a JSON response regardless of
    the Accept header.
    """
    if flask.request.path.endswith('.json'):
        resp_type = RT_JSON
    else:
        resp_type = flask.request.accept_mimetypes
    flask.request.resp_type = resp_type

    # set file upload flag
    flask.request.file_upload = file_upload
def render(res=None, resp_type=None, status=None, name=None, **kwargs):
    """Serialize a result object into a flask Response.

    :param res: a dict, a ``types.Page`` of objects exposing
        ``to_dict()``, or None/falsy (treated as an empty dict).
    :param resp_type: explicit response MIME type; defaults to the type
        stored on the request by ``_init_resp_type`` (JSON is the only
        supported serialization).
    :param status: explicit HTTP status; falls back to the status code
        stored on the request by the route decorator, then to 200.
    :param name: key under which Page items are nested in the body.
    :param kwargs: extra top-level fields merged into the body.
    """
    if not res and type(res) is not types.Page:
        res = {}

    if type(res) is dict:
        res.update(kwargs)
    elif type(res) is types.Page:
        result = {name: [item.to_dict() for item in res]}
        result.update(kwargs)
        # Expose paging markers when this page participates in a
        # marker-based paginated listing.
        if res.prev or res.next or ('marker' in get_request_args()):
            result["markers"] = {"prev": res.prev, "next": res.next}
        res = result
    elif kwargs:
        # can't merge kwargs into the non-dict res
        abort_and_log(500,
                      _("Non-dict and non-empty kwargs passed to render"))

    status_code = getattr(flask.request, 'status_code', None)
    if status:
        status_code = status
    if not status_code:
        status_code = 200

    if not resp_type:
        resp_type = getattr(flask.request, 'resp_type', RT_JSON)
    if not resp_type:
        resp_type = RT_JSON

    serializer = None
    if "application/json" in resp_type:
        resp_type = RT_JSON
        serializer = wsgi.JSONDictSerializer()
    else:
        raise ex.InvalidDataException(
            _("Content type '%s' isn't supported") % resp_type)

    body = serializer.serialize(res)
    resp_type = str(resp_type)

    return flask.Response(response=body, status=status_code,
                          mimetype=resp_type)
def request_data():
    """Deserialize the current request body.

    The parsed result is cached on the request object so repeated calls
    do not re-parse. File uploads are returned as the raw request data;
    an empty body yields an empty dict.

    :raises ex.InvalidDataException: for unsupported content types.
    """
    if hasattr(flask.request, 'parsed_data'):
        return flask.request.parsed_data

    if (flask.request.content_length is None
            or not flask.request.content_length > 0):
        LOG.debug("Empty body provided in request")
        return dict()

    if flask.request.file_upload:
        return flask.request.data

    deserializer = None
    content_type = flask.request.mimetype
    if not content_type or content_type in RT_JSON:
        deserializer = wsgi.JSONDeserializer()
    else:
        raise ex.InvalidDataException(
            _("Content type '%s' isn't supported") % content_type)

    # parsed request data to avoid unwanted re-parsings
    parsed_data = deserializer.deserialize(flask.request.data)['body']
    flask.request.parsed_data = parsed_data

    return flask.request.parsed_data
def get_request_args():
    """Return the query-string arguments of the current request."""
    return flask.request.args
def get_requested_microversion():
    """Return the effective API microversion for the current request.

    A missing version header maps to the minimum supported version;
    the special 'latest' value maps to the maximum supported version.
    """
    requested = microversion_parse.get_version(
        flask.request.headers,
        mv.SAHARA_SERVICE_TYPE
    )
    if requested is None:
        return mv.MIN_API_VERSION
    if requested == mv.LATEST:
        return mv.MAX_API_VERSION
    return requested
def abort_and_log(status_code, descr, exc=None):
    """Log the failure (plus traceback, if an exception is given) and abort."""
    log_line = ("Request aborted with status code {code} and "
                "message '{message}'".format(code=status_code, message=descr))
    LOG.error(log_line)

    if exc is not None:
        LOG.error(traceback.format_exc())

    flask.abort(status_code, description=descr)
def render_error_message(error_code, error_message, error_name, **msg_kwargs):
    """Build a JSON error response whose HTTP status equals ``error_code``.

    Extra keyword arguments are merged into the body after the standard
    fields (so they take precedence on key collisions).
    """
    body = {
        "error_code": error_code,
        "error_message": error_message,
        "error_name": error_name,
    }
    body.update(msg_kwargs)

    response = render(body)
    response.status_code = error_code

    return response
def not_acceptable_microversion(requested_version):
    """Abort with 406 for a well-formed but unsupported microversion."""
    message = (
        "Version {} is not supported by the API. "
        "Minimum is {} and maximum is {}."
    ).format(requested_version, mv.MIN_API_VERSION, mv.MAX_API_VERSION)

    error = render_error_message(
        mv.NOT_ACCEPTABLE_STATUS_CODE,
        message,
        mv.NOT_ACCEPTABLE_STATUS_NAME,
        max_version=mv.MAX_API_VERSION,
        min_version=mv.MIN_API_VERSION
    )
    flask.abort(error)
def bad_request_microversion(requested_version):
    """Abort with 400 for a syntactically invalid microversion string."""
    message = (
        "API Version String {} is of invalid format. Must be of format"
        " MajorNum.MinorNum."
    ).format(requested_version)

    error = render_error_message(
        mv.BAD_REQUEST_STATUS_CODE,
        message,
        mv.BAD_REQUEST_STATUS_NAME,
        max_version=mv.MAX_API_VERSION,
        min_version=mv.MIN_API_VERSION
    )
    flask.abort(error)
def invalid_param_error(status_code, descr, exc=None):
    """Return an error response for invalid request parameters."""
    log_line = ("Request aborted with status code {code} and "
                "message '{message}'".format(code=status_code, message=descr))
    LOG.error(log_line)

    if exc is not None:
        LOG.error(traceback.format_exc())

    return render_error_message(status_code, descr,
                                "INVALID_PARAMS_ON_REQUEST")
def internal_error(status_code, descr, exc=None):
    """Return an error response for an unexpected server-side failure."""
    log_line = ("Request aborted with status code {code} and "
                "message '{message}'".format(code=status_code, message=descr))
    LOG.error(log_line)

    if exc is not None:
        LOG.error(traceback.format_exc())

    # 501 gets its own error name; everything else is a generic internal error.
    if status_code == 501:
        name = "NOT_IMPLEMENTED_ERROR"
    else:
        name = "INTERNAL_SERVER_ERROR"

    return render_error_message(status_code, descr, name)
def bad_request(error):
    """Render a SaharaException as a 400 (validation error) response."""
    code = 400
    LOG.error("Validation Error occurred: "
              "error_code={code}, error_message={message}, "
              "error_name={name}".format(code=code,
                                         message=error.message,
                                         name=error.code))
    return render_error_message(code, error.message, error.code)
def access_denied(error):
    """Render a Forbidden exception as a 403 response."""
    code = 403
    LOG.error("Access Denied: error_code={code}, error_message={message}, "
              "error_name={name}".format(code=code,
                                         message=error.message,
                                         name=error.code))
    return render_error_message(code, error.message, error.code)
def not_found(error):
    """Render a not-found exception as a 404 response."""
    code = 404
    LOG.error("Not Found exception occurred: "
              "error_code={code}, error_message={message}, "
              "error_name={name}".format(code=code,
                                         message=error.message,
                                         name=error.code))
    return render_error_message(code, error.message, error.code)
def to_wrapped_dict(func, id, *args, **kwargs):
    """Fetch an object via ``func(id, ...)`` and render its wrapped dict.

    Missing-object handling is delegated to ``to_wrapped_dict_no_render``.
    """
    return render(to_wrapped_dict_no_render(func, id, *args, **kwargs))
def to_wrapped_dict_no_render(func, id, *args, **kwargs):
    """Fetch an object via ``func(id, ...)``; 404 response if it is missing."""
    obj = func(id, *args, **kwargs)
    if obj is None:
        missing = ex.NotFoundException(
            {'id': id}, _('Object with %s not found'))
        return not_found(missing)
    return obj.to_wrapped_dict()
def _replace_hadoop_version_plugin_version(obj):
dict.update(obj, {'plugin_version': obj['hadoop_version']})
dict.pop(obj, 'hadoop_version')
def _replace_tenant_id_project_id(obj):
dict.update(obj, {'project_id': obj['tenant_id']})
dict.pop(obj, 'tenant_id')
| apache-2.0 |
allenz8512/AndroidAppLog | android-app-log/src/main/java/me/allenz/androidapplog/Configure.java | 4260 | package me.allenz.androidapplog;
import java.io.File;
import java.util.ArrayList;
import java.util.List;
/**
 * Mutable configuration bean for the logging library.
 * <p>
 * Defaults describe a development setup (debug on, LogCat appender enabled,
 * verbose root level). Use {@link #releaseConfigure()} for a preset with all
 * logging disabled, and {@link #applyConfigure(Repository)} to push the
 * settings into a {@code Repository}.
 */
public class Configure {

    /** Root log level used when nothing else is configured. */
    public static final LogLevel DEFAULT_ROOT_LOG_LEVEL = LogLevel.VERBOSE;

    /** Default rollover threshold for log files: 1 MiB. */
    private static final long DEFAULT_LOG_FILE_ROLLING_SIZE = 1024 * 1024;

    /** Name under which the root logger configuration is registered. */
    private static final String ROOT_LOGGER_NAME = "root";

    private boolean debug = true;

    private LogLevel rootLogLevel = DEFAULT_ROOT_LOG_LEVEL;

    private String rootTag = null;

    private boolean rootShowThread = false;

    // Per-logger overrides collected via addLoggerConfig().
    private List<LoggerConfig> loggerConfigs;

    private boolean handleException = true;

    private boolean useLogCatAppender = true;

    private boolean useFileAppender = false;

    private File logFileDir;

    private long logFileRollingSize = DEFAULT_LOG_FILE_ROLLING_SIZE;

    private boolean compressLogFiles = true;

    private boolean useTextViewAppender = false;

    public Configure() {
        loggerConfigs = new ArrayList<LoggerConfig>();
    }

    public boolean isDebug() {
        return debug;
    }

    public void setDebug(final boolean debug) {
        this.debug = debug;
    }

    public LogLevel getRootLogLevel() {
        return rootLogLevel;
    }

    public void setRootLogLevel(final LogLevel rootLogLevel) {
        this.rootLogLevel = rootLogLevel;
    }

    public String getRootTag() {
        return rootTag;
    }

    public void setRootTag(final String rootTag) {
        this.rootTag = rootTag;
    }

    public boolean isRootShowThread() {
        return rootShowThread;
    }

    public void setRootShowThread(final boolean rootShowThread) {
        this.rootShowThread = rootShowThread;
    }

    public boolean isHandleException() {
        return handleException;
    }

    public void setHandleException(final boolean handleException) {
        this.handleException = handleException;
    }

    public boolean isUseLogCatAppender() {
        return useLogCatAppender;
    }

    public void setUseLogCatAppender(final boolean useLogCatAppender) {
        this.useLogCatAppender = useLogCatAppender;
    }

    public boolean isUseFileAppender() {
        return useFileAppender;
    }

    public void setUseFileAppender(final boolean useFileAppender) {
        this.useFileAppender = useFileAppender;
    }

    public File getLogFileDir() {
        return logFileDir;
    }

    public void setLogFileDir(final File logFileDir) {
        this.logFileDir = logFileDir;
    }

    public long getLogFileRollingSize() {
        return logFileRollingSize;
    }

    public void setLogFileRollingSize(final long logFileRollingSize) {
        this.logFileRollingSize = logFileRollingSize;
    }

    public boolean isCompressLogFiles() {
        return compressLogFiles;
    }

    public void setCompressLogFiles(final boolean compressLogFiles) {
        this.compressLogFiles = compressLogFiles;
    }

    public boolean isUseTextViewAppender() {
        return useTextViewAppender;
    }

    public void setUseTextViewAppender(final boolean useTextViewAppender) {
        this.useTextViewAppender = useTextViewAppender;
    }

    public List<LoggerConfig> getLoggerConfigs() {
        return loggerConfigs;
    }

    /** Registers a per-logger configuration override. */
    public void addLoggerConfig(final String name, final LogLevel level,
            final String tag, final boolean showThreadName) {
        loggerConfigs.add(new LoggerConfig(name, tag, level, showThreadName));
    }

    /** Preset: all defaults (development/debug setup). */
    public static Configure defaultConfigure() {
        final Configure configure = new Configure();
        return configure;
    }

    /** Preset for release builds: logging fully disabled. */
    public static Configure releaseConfigure() {
        final Configure configure = new Configure();
        configure.setDebug(false);
        configure.setRootLogLevel(LogLevel.OFF);
        configure.setUseLogCatAppender(false);
        return configure;
    }

    /**
     * Pushes this configuration into the given repository: adjusts the
     * library's internal logger level, registers the root logger config,
     * optionally installs uncaught-exception logging, and adds the enabled
     * appenders plus all per-logger configs.
     */
    public void applyConfigure(final Repository repository) {
        // The library's internal logger is silenced entirely outside debug.
        if (!debug) {
            LoggerFactory.getInternalLogger().setLogLevel(LogLevel.OFF);
        } else {
            LoggerFactory.getInternalLogger().setLogLevel(
                    DEFAULT_ROOT_LOG_LEVEL);
        }
        repository.setRootLoggerConfig(new LoggerConfig(ROOT_LOGGER_NAME,
                rootTag, rootLogLevel, rootShowThread));
        if (handleException) {
            LoggerFactory.enableLoggingUncaughtException(null);
        }
        if (useLogCatAppender) {
            repository.addAppender(new LogCatAppender());
        }
        if (useFileAppender) {
            repository.addAppender(new RollingFileAppender(logFileDir,
                    logFileRollingSize, compressLogFiles));
        }
        if (useTextViewAppender) {
            repository.addAppender(new TextViewAppender());
        }
        for (final LoggerConfig loggerConfig : loggerConfigs) {
            repository.addLoggerConfig(loggerConfig);
        }
    }
}
| apache-2.0 |
spinnaker/deck | packages/core/src/pipeline/status/Artifact.tsx | 1701 | import React from 'react';
import { ArtifactIconService } from '../../artifact';
import type { IArtifact } from '../../domain';
import './artifact.less';
export interface IArtifactProps {
  /** The artifact to display. */
  artifact: IArtifact;
  /** Whether this is the default artifact (adds a note to the tooltip). */
  isDefault?: boolean;
  /** Optional ordering hint; not referenced by the Artifact component itself. */
  sequence?: number;
}
export class Artifact extends React.Component<IArtifactProps> {
private tooltip(artifact: IArtifact, isDefault: boolean): string {
const tooltipEntries = [];
if (isDefault) {
tooltipEntries.push('Default Artifact');
}
if (artifact.name) {
tooltipEntries.push(`Name: ${artifact.name}`);
}
if (artifact.type) {
tooltipEntries.push(`Type: ${artifact.type}`);
}
if (artifact.version) {
tooltipEntries.push(`Version: ${artifact.version}`);
}
if (artifact.reference) {
tooltipEntries.push(`Reference: ${artifact.reference}`);
}
return tooltipEntries.join('\n');
}
public render() {
const { artifact, isDefault } = this.props;
const { name, reference, version, type } = artifact;
return (
<div className="artifact-details">
<dl title={this.tooltip(artifact, isDefault)}>
<div className="artifact-detail">
<dt>
{ArtifactIconService.getPath(type) ? (
<img className="artifact-icon" src={ArtifactIconService.getPath(type)} width="18" height="18" />
) : (
<span>[{type}] </span>
)}
</dt>
<dd>
<div className="artifact-name">{name || reference}</div>
{version && <div className="artifact-version"> - {version}</div>}
</dd>
</div>
</dl>
</div>
);
}
}
| apache-2.0 |
JunaidSarfraz/SchoolSystem | SchoolSystem/Exam_Management/ClassWiseExamSchedule.Designer.cs | 1131 | namespace SchoolSystem.Exam_Management
{
// NOTE: Windows Forms designer file — hand edits to InitializeComponent are
// overwritten when the form is changed in the designer.
partial class ClassWiseExamSchedule
{
    /// <summary>
    /// Required designer variable.
    /// </summary>
    private System.ComponentModel.IContainer components = null;

    /// <summary>
    /// Clean up any resources being used.
    /// </summary>
    /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
    protected override void Dispose(bool disposing)
    {
        if (disposing && (components != null))
        {
            components.Dispose();
        }
        base.Dispose(disposing);
    }

    #region Component Designer generated code

    /// <summary>
    /// Required method for Designer support - do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent()
    {
        components = new System.ComponentModel.Container();
        this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
    }

    #endregion
}
}
| apache-2.0 |
WestCoastInformatics/UMLS-Terminology-Server | jpa-model/src/main/java/com/wci/umls/server/jpa/helpers/meta/GeneralMetadataEntryListJpa.java | 923 | /*
* Copyright 2016 West Coast Informatics, LLC
*/
package com.wci.umls.server.jpa.helpers.meta;
import java.util.List;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import com.wci.umls.server.helpers.AbstractResultList;
import com.wci.umls.server.helpers.meta.GeneralMetadataEntryList;
import com.wci.umls.server.jpa.meta.GeneralMetadataEntryJpa;
import com.wci.umls.server.model.meta.GeneralMetadataEntry;
/**
* JAXB enabled implementation of {@link GeneralMetadataEntryList}.
*/
@XmlRootElement(name = "relationshipTypeList")
public class GeneralMetadataEntryListJpa extends
AbstractResultList<GeneralMetadataEntry> implements
GeneralMetadataEntryList {
/* see superclass */
@Override
@XmlElement(type = GeneralMetadataEntryJpa.class, name = "types")
public List<GeneralMetadataEntry> getObjects() {
return super.getObjectsTransient();
}
}
| apache-2.0 |
JavaMoney/javamoney-shelter | retired/currencies/src/main/java/org/javamoney/currencies/internal/data/package-info.java | 963 | /*
* Copyright (c) 2012, 2013, Credit Suisse (Anatole Tresch), Werner Keil.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
/**
 * Contains the currency data provider implementations. Clients should not
 * use this functionality directly; instead they should access currencies
 * through the public API, e.g. by calling methods on the
 * {@code org.javamoney.currencies.MonetaryCurrencies} singleton.
 */
package org.javamoney.currencies.internal.data; | apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-sqs/src/main/java/com/amazonaws/services/sqs/package-info.java | 2907 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
/**
* <p>
* Welcome to the <i>Amazon SQS API Reference</i>.
* </p>
* <p>
* Amazon SQS is a reliable, highly-scalable hosted queue for storing messages as they travel between applications or
* microservices. Amazon SQS moves data between distributed application components and helps you decouple these
* components.
* </p>
* <p>
* For information on the permissions you need to use this API, see <a href=
* "https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-authentication-and-access-control.html"
* >Identity and access management</a> in the <i>Amazon SQS Developer Guide.</i>
* </p>
* <p>
* You can use <a href="http://aws.amazon.com/tools/#sdk">Amazon Web Services SDKs</a> to access Amazon SQS using your
* favorite programming language. The SDKs perform tasks such as the following automatically:
* </p>
* <ul>
* <li>
* <p>
* Cryptographically sign your service requests
* </p>
* </li>
* <li>
* <p>
* Retry requests
* </p>
* </li>
* <li>
* <p>
* Handle error responses
* </p>
* </li>
* </ul>
* <p>
* <b>Additional information</b>
* </p>
* <ul>
* <li>
* <p>
* <a href="http://aws.amazon.com/sqs/">Amazon SQS Product Page</a>
* </p>
* </li>
* <li>
* <p>
* <i>Amazon SQS Developer Guide</i>
* </p>
* <ul>
* <li>
* <p>
* <a href="https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-making-api-requests.html">
* Making API Requests</a>
* </p>
* </li>
* <li>
* <p>
* <a href=
* "https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-message-metadata.html#sqs-message-attributes"
* >Amazon SQS Message Attributes</a>
* </p>
* </li>
* <li>
* <p>
* <a href="https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/sqs-dead-letter-queues.html">
* Amazon SQS Dead-Letter Queues</a>
* </p>
* </li>
* </ul>
* </li>
* <li>
* <p>
* <a href="http://docs.aws.amazon.com/cli/latest/reference/sqs/index.html">Amazon SQS in the <i>Command Line
* Interface</i> </a>
* </p>
* </li>
* <li>
* <p>
* <i>Amazon Web Services General Reference</i>
* </p>
* <ul>
* <li>
* <p>
* <a href="https://docs.aws.amazon.com/general/latest/gr/rande.html#sqs_region">Regions and Endpoints</a>
* </p>
* </li>
* </ul>
* </li>
* </ul>
*/
package com.amazonaws.services.sqs;
| apache-2.0 |
rfcx/rfcx-guardian-android | lib-audio/src/main/java/org/rfcx/guardian/audio/wav/ReadExample.java | 1154 | package org.rfcx.guardian.audio.wav;
import java.io.*;
/**
 * Example: reads the wav file named by the first command-line argument,
 * prints its header information, then scans all samples and prints the
 * minimum and maximum sample values.
 */
public class ReadExample
{
    public static void main(String[] args)
    {
        if (args.length < 1)
        {
            // Fail fast with a usage hint instead of an
            // ArrayIndexOutOfBoundsException from args[0].
            System.err.println("Usage: ReadExample <wav-file>");
            return;
        }

        try
        {
            // Open the wav file specified as the first argument
            WavFile wavFile = WavFile.openWavFile(new File(args[0]));

            // Display information about the wav file
            wavFile.display();

            // Get the number of audio channels in the wav file
            int numChannels = wavFile.getNumChannels();

            // Create a buffer of 100 frames
            double[] buffer = new double[100 * numChannels];

            int framesRead;
            // BUG FIX: the original initialized max to Double.MIN_VALUE, which
            // is the smallest POSITIVE double (~4.9e-324) — an all-negative
            // signal would report a wrong maximum. Use infinities so the first
            // sample always replaces both sentinels.
            double min = Double.POSITIVE_INFINITY;
            double max = Double.NEGATIVE_INFINITY;

            do
            {
                // Read frames into buffer
                framesRead = wavFile.readFrames(buffer, 100);

                // Loop through frames and look for minimum and maximum value
                for (int s = 0; s < framesRead * numChannels; s++)
                {
                    if (buffer[s] > max) max = buffer[s];
                    if (buffer[s] < min) min = buffer[s];
                }
            }
            while (framesRead != 0);

            // Close the wavFile
            wavFile.close();

            // Output the minimum and maximum value
            System.out.printf("Min: %f, Max: %f\n", min, max);
        }
        catch (Exception e)
        {
            System.err.println(e);
        }
    }
}
| apache-2.0 |
o3project/openflowj-otn | src/main/java/org/projectfloodlight/openflow/protocol/ver14/OFOxmUdpDstVer14.java | 9080 | // Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver14;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import org.jboss.netty.buffer.ChannelBuffer;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * Generated OpenFlow 1.4 OXM TLV for an exact-match UDP destination port
 * (type/length word 0x80002002; the port value is encoded in 2 bytes, giving
 * a total wire length of 6 bytes). Produced by the LoxiGen compiler — do not
 * edit by hand; see the file header.
 */
class OFOxmUdpDstVer14 implements OFOxmUdpDst {
    private static final Logger logger = LoggerFactory.getLogger(OFOxmUdpDstVer14.class);
    // version: 1.4
    final static byte WIRE_VERSION = 5;
    final static int LENGTH = 6;

    private final static TransportPort DEFAULT_VALUE = TransportPort.NONE;

    // OF message fields
    private final TransportPort value;
//
    // Immutable default instance
    final static OFOxmUdpDstVer14 DEFAULT = new OFOxmUdpDstVer14(
        DEFAULT_VALUE
    );

    // package private constructor - used by readers, builders, and factory
    OFOxmUdpDstVer14(TransportPort value) {
        if(value == null) {
            throw new NullPointerException("OFOxmUdpDstVer14: property value cannot be null");
        }
        this.value = value;
    }

    // Accessors for OF message fields
    @Override
    public long getTypeLen() {
        return 0x80002002L;
    }

    @Override
    public TransportPort getValue() {
        return value;
    }

    @Override
    public MatchField<TransportPort> getMatchField() {
        return MatchField.UDP_DST;
    }

    @Override
    public boolean isMasked() {
        return false;
    }

    public OFOxm<TransportPort> getCanonical() {
        // exact match OXM is always canonical
        return this;
    }

    @Override
    public TransportPort getMask()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property mask not supported in version 1.4");
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }



    public OFOxmUdpDst.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder that inherits unset properties from an existing instance.
    static class BuilderWithParent implements OFOxmUdpDst.Builder {
        final OFOxmUdpDstVer14 parentMessage;

        // OF message fields
        private boolean valueSet;
        private TransportPort value;

        BuilderWithParent(OFOxmUdpDstVer14 parentMessage) {
            this.parentMessage = parentMessage;
        }

    @Override
    public long getTypeLen() {
        return 0x80002002L;
    }

    @Override
    public TransportPort getValue() {
        return value;
    }

    @Override
    public OFOxmUdpDst.Builder setValue(TransportPort value) {
        this.value = value;
        this.valueSet = true;
        return this;
    }

    @Override
    public MatchField<TransportPort> getMatchField() {
        return MatchField.UDP_DST;
    }

    @Override
    public boolean isMasked() {
        return false;
    }

    @Override
    public OFOxm<TransportPort> getCanonical()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
    }

    @Override
    public TransportPort getMask()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property mask not supported in version 1.4");
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }



        @Override
        public OFOxmUdpDst build() {
                TransportPort value = this.valueSet ? this.value : parentMessage.value;
                if(value == null)
                    throw new NullPointerException("Property value must not be null");

                //
                return new OFOxmUdpDstVer14(
                    value
                );
        }
    }

    // Standalone builder starting from the default value.
    static class Builder implements OFOxmUdpDst.Builder {
        // OF message fields
        private boolean valueSet;
        private TransportPort value;

    @Override
    public long getTypeLen() {
        return 0x80002002L;
    }

    @Override
    public TransportPort getValue() {
        return value;
    }

    @Override
    public OFOxmUdpDst.Builder setValue(TransportPort value) {
        this.value = value;
        this.valueSet = true;
        return this;
    }

    @Override
    public MatchField<TransportPort> getMatchField() {
        return MatchField.UDP_DST;
    }

    @Override
    public boolean isMasked() {
        return false;
    }

    @Override
    public OFOxm<TransportPort> getCanonical()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property canonical not supported in version 1.4");
    }

    @Override
    public TransportPort getMask()throws UnsupportedOperationException {
        throw new UnsupportedOperationException("Property mask not supported in version 1.4");
    }

    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_14;
    }

//
        @Override
        public OFOxmUdpDst build() {
            TransportPort value = this.valueSet ? this.value : DEFAULT_VALUE;
            if(value == null)
                throw new NullPointerException("Property value must not be null");


            return new OFOxmUdpDstVer14(
                    value
                );
        }
    }


    final static Reader READER = new Reader();
    // Deserializes the 6-byte TLV from the wire, validating the type/len word.
    static class Reader implements OFMessageReader<OFOxmUdpDst> {
        @Override
        public OFOxmUdpDst readFrom(ChannelBuffer bb) throws OFParseError {
            // fixed value property typeLen == 0x80002002L
            int typeLen = bb.readInt();
            if(typeLen != (int) 0x80002002)
                throw new OFParseError("Wrong typeLen: Expected=0x80002002L(0x80002002L), got="+typeLen);
            TransportPort value = TransportPort.read2Bytes(bb);

            OFOxmUdpDstVer14 oxmUdpDstVer14 = new OFOxmUdpDstVer14(
                    value
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", oxmUdpDstVer14);
            return oxmUdpDstVer14;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFOxmUdpDstVer14Funnel FUNNEL = new OFOxmUdpDstVer14Funnel();
    static class OFOxmUdpDstVer14Funnel implements Funnel<OFOxmUdpDstVer14> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFOxmUdpDstVer14 message, PrimitiveSink sink) {
            // fixed value property typeLen = 0x80002002L
            sink.putInt((int) 0x80002002);
            message.value.putTo(sink);
        }
    }

    public void writeTo(ChannelBuffer bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();
    // Serializes the fixed type/len word followed by the 2-byte port value.
    static class Writer implements OFMessageWriter<OFOxmUdpDstVer14> {
        @Override
        public void write(ChannelBuffer bb, OFOxmUdpDstVer14 message) {
            // fixed value property typeLen = 0x80002002L
            bb.writeInt((int) 0x80002002);
            message.value.write2Bytes(bb);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFOxmUdpDstVer14(");
        b.append("value=").append(value);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFOxmUdpDstVer14 other = (OFOxmUdpDstVer14) obj;

        if (value == null) {
            if (other.value != null)
                return false;
        } else if (!value.equals(other.value))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;

        result = prime * result + ((value == null) ? 0 : value.hashCode());
        return result;
    }
}
| apache-2.0 |
marcovc/procure | procure/kernel/interval.hpp | 8320 | /*
* interval.hpp
*
* Created on: May 15, 2013
* Author: marco
*/
#ifndef PROCURE_KERNEL_INTERVAL_HPP_
#define PROCURE_KERNEL_INTERVAL_HPP_
#include <procure/kernel/common.hpp>
#include <procure/kernel/rounding.hpp>
#include <string>
#include <iostream>
#include <sstream>
#include <assert.h>
#include <limits>
#include <iterator>
#include <utility>
namespace Procure {
namespace Detail {
/**
 * Thin, library-agnostic wrapper around an interval-arithmetic implementation.
 *
 * @tparam Impl the wrapped interval type (GAOL, Boost.Interval or PROFIL,
 *         selected by the preprocessor at the bottom of this header).
 */
template<class Impl>
struct IntervalWrapper
{
	// How operator<< renders an interval.
	enum OutputFormat {
		bounds,	// [lb,ub]
		width,	// median (+/- width/2)
		prefix	// common_prefix[ltail,rtail]
	};
	/// Initializes an interval to the universe [-inf, +inf].
	IntervalWrapper();
	/// Initializes an interval to [val,val].
	IntervalWrapper(const Real& val);
	/// Initializes an interval to [lb,ub].
	IntervalWrapper(const Real& lb,const Real& ub);
	/// Initializes an interval to [p.first,p.second].
	IntervalWrapper(const std::pair<Real,Real>& p) :
		IntervalWrapper(p.first,p.second) {}
	/// Initializes an interval from an interval of the wrapped type.
	IntervalWrapper(const Impl&);
	/**
	 * Initializes an interval the enclosure of the real number
	 * represented by a string in some floating-point format.
	 */
	IntervalWrapper(const std::string& s);
	/// Copy constructor.
	IntervalWrapper(const IntervalWrapper& i);
	/// Get lower bound.
	Real getLb() const;
	/// Get upper bound.
	Real getUb() const;
	/// Get size of interval.
	Real getWidth() const;
	/// Get mignitude of interval.
	Real getMig() const;
	/// Get magnitude of interval.
	Real getMag() const;
	/// Get signed mignitude of interval.
	Real getSignedMig() const;
	/// Get median of interval.
	Real getMedian() const;
	/// Get an interval enclosure of the median of interval.
	IntervalWrapper getMedianInterval() const;
	/// Check whether interval is empty.
	bool isEmpty() const;
	/// Check whether interval is a point.
	bool isSingleton() const;
	/// Check whether interval has finite bounds.
	bool isFinite() const;
	/// Check whether interval contains at most two floating point numbers.
	bool isCanonical() const;
	/// Check whether the lower bound is the opposite of the upper bound.
	bool isSymmetric() const;
	/// Check whether interval \a i is a subset of this interval.
	bool contains(const IntervalWrapper& i) const;
	/// Check whether real \a r is a member of this interval.
	bool contains(const Real& r) const;
	/// Check whether interval \a i is a proper subset of this interval.
	bool strictlyContains(const IntervalWrapper& i) const;
	/// Check whether this interval and interval \a i are disjoint.
	bool isDisjoint (const IntervalWrapper& i) const;
	/// Assignment operator
	IntervalWrapper& operator=(const IntervalWrapper& s);
	/// Addition with assignment.
	IntervalWrapper& operator+=(const IntervalWrapper& i);
	/// Subtraction with assignment.
	IntervalWrapper& operator-=(const IntervalWrapper& i);
	/// Multiplication with assignment.
	IntervalWrapper& operator*=(const IntervalWrapper& i);
	/// Division with assignment.
	IntervalWrapper& operator/=(const IntervalWrapper& i);
	/// Intersection with assignment.
	IntervalWrapper& operator&=(const IntervalWrapper& i);
	/// Hull of the union with assignment.
	IntervalWrapper& operator|=(const IntervalWrapper& i);
	/// Unary plus (identity).
	const IntervalWrapper& operator+() const;
	/// Symmetric.
	IntervalWrapper operator-() const;
	/// Inverse.
	IntervalWrapper inverse() const;
	/// Returns an empty interval.
	static IntervalWrapper empty();
	/// Returns the interval [-inf,+inf].
	static IntervalWrapper whole();
	/// Bisects interval using median().
	std::pair<IntervalWrapper,IntervalWrapper> bisect() const;
	/// Bisects interval using median() and returns lower interval.
	IntervalWrapper getLowerHalf() const;
	/// Bisects interval using median() and returns upper interval.
	IntervalWrapper getUpperHalf() const;
	/// Set output format
	static void setOutputFormat(const OutputFormat& format);
	/// Get output format
	static OutputFormat getOutputFormat();
	/// Set output precision
	static void setOutputPrecision(unsigned int precision);
	/// Get output precision
	static unsigned int getOutputPrecision();
	/// Return wrapped interval
	Impl& getImpl();
	/// Return wrapped interval
	const Impl& getImpl() const;

	private:

	friend struct Detail::StaticInitializer;

	/// Used to initialize wrapped library
	static void initLib();

	/// The wrapped implementation-specific interval value.
	Impl impl;
	/// Process-wide formatting state shared by all instances.
	static OutputFormat outputFormat;
	static unsigned int outputPrecision;
};
// default output format
template<class Impl>
typename IntervalWrapper<Impl>::OutputFormat IntervalWrapper<Impl>::
outputFormat = IntervalWrapper<Impl>::bounds;

// default output precision
template<class Impl>
unsigned int IntervalWrapper<Impl>::outputPrecision =
		std::numeric_limits<Real>::digits10;

// Sets the process-wide output format used by operator<<.
template<class Impl>
void IntervalWrapper<Impl>::setOutputFormat(const OutputFormat& format)
{
	outputFormat = format;
}

template<class Impl>
typename IntervalWrapper<Impl>::OutputFormat IntervalWrapper<Impl>::
getOutputFormat()
{
	return outputFormat;
}

// Sets the process-wide number of significant digits used when printing.
template<class Impl>
void IntervalWrapper<Impl>::setOutputPrecision(unsigned int precision)
{
	outputPrecision = precision;
}

template<class Impl>
unsigned int IntervalWrapper<Impl>::getOutputPrecision()
{
	return outputPrecision;
}

// Accessors for the wrapped implementation-specific interval.
template<class Impl>
Impl& IntervalWrapper<Impl>::getImpl()
{	return impl; }

template<class Impl>
const Impl& IntervalWrapper<Impl>::getImpl() const
{	return impl; }

// Bound printers with directed rounding; defined elsewhere in the library.
std::ostream& outputLb(std::ostream& os, const Real& lb,
		unsigned int precision);
std::ostream& outputUb(std::ostream& os, const Real& ub,
		unsigned int precision);
// Prints a non-empty, non-singleton interval as "[lb, ub]" using the
// directed-rounding bound printers.
template<class Impl>
std::ostream& outputBounds(std::ostream& os, const IntervalWrapper<Impl>& i)
{
	assert(not i.isEmpty() and not i.isSingleton());
	const unsigned int prec = IntervalWrapper<Impl>::getOutputPrecision();
	os << "[";
	outputLb(os, i.getLb(), prec);
	os << ", ";
	outputUb(os, i.getUb(), prec);
	os << "]";
	return os;
}
// Prints a non-empty, non-singleton interval as "median (+/- halfwidth)".
template<class Impl>
std::ostream& outputWidth(std::ostream& os, const IntervalWrapper<Impl>& i)
{
	//round_nearest();
	assert(not i.isEmpty() and not i.isSingleton());
	os.precision(IntervalWrapper<Impl>::getOutputPrecision());
	const Real halfWidth = i.getWidth() / 2.0;
	os << i.getMedian() << " (+/- " << halfWidth << ")";
	return os;
}
// Prints a non-empty, non-singleton interval as "common_prefix[ltail, rtail]":
// the digits shared by both bounds are factored out in front of the bracket.
template<class Impl>
std::ostream& outputPrefix(std::ostream& os, const IntervalWrapper<Impl>& i)
{
	assert(not i.isEmpty() and not i.isSingleton());
	std::ostringstream oslb,osub;
	oslb.precision(IntervalWrapper<Impl>::getOutputPrecision());
	osub.precision(IntervalWrapper<Impl>::getOutputPrecision());
	// Format each bound with outward rounding so the printed text is a
	// valid enclosure of the interval.
	Rounding rnd;
	rnd.setDownward();
	oslb << i.getLb();
	std::string slb = oslb.str();
	rnd.setUpward();
	osub << i.getUb();
	std::string sub = osub.str();
	// First position where the two bound strings differ.
	auto mit = std::mismatch(slb.begin(),slb.end(),sub.begin());
	os.precision(IntervalWrapper<Impl>::getOutputPrecision());
	// Shared prefix, then the two diverging tails inside brackets.
	std::copy(slb.begin(),mit.first,std::ostream_iterator<char>(os));
	os << "[";
	std::copy(mit.first,slb.end(),std::ostream_iterator<char>(os));
	os << ", ";
	std::copy(mit.second,sub.end(),std::ostream_iterator<char>(os));
	os << "]";
	return os;
}
// Stream insertion: empty intervals print as "[]", singletons as their single
// value, and everything else according to the configured OutputFormat.
template<class Impl>
std::ostream& operator<<(std::ostream& os, const IntervalWrapper<Impl>& i)
{
	if (i.isEmpty())
		return os << "[]";
	if (i.isSingleton())
		return os << i.getLb();
	switch (IntervalWrapper<Impl>::getOutputFormat())
	{
		case IntervalWrapper<Impl>::bounds:
			return outputBounds(os,i);
		case IntervalWrapper<Impl>::width:
			return outputWidth(os,i);
		case IntervalWrapper<Impl>::prefix:
			return outputPrefix(os,i);
	}
	return os;
}
} // Detail
} // Procure
#if PROCURE_HAVE_GAOL
#include <procure/kernel/interval/interval_gaol.hpp>
#endif
#if PROCURE_HAVE_BOOSTINTERVAL
#include <procure/kernel/interval/interval_boost.hpp>
#endif
#if PROCURE_HAVE_PROFIL
#include <procure/kernel/interval/interval_profil.hpp>
#endif
namespace Procure {
typedef Procure::Detail::IntervalWrapper<
#if defined(PROCURE_INTERVAL_USE_GAOL)
Procure::Detail::GaolInterval
#elif defined(PROCURE_INTERVAL_USE_BOOST)
Procure::Detail::BoostInterval
#elif defined(PROCURE_INTERVAL_USE_PROFIL)
Procure::Detail::ProfilInterval
#endif
> Interval;
} // Procure
#endif /* PROCURE_KERNEL_INTERVAL_HPP_ */
| apache-2.0 |
gstevey/gradle | subprojects/build-cache/src/main/java/org/gradle/caching/internal/controller/BuildCacheController.java | 1100 | /*
* Copyright 2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.caching.internal.controller;
import org.gradle.caching.BuildCacheService;
import javax.annotation.Nullable;
import java.io.Closeable;
/**
* Internal coordinator of build cache operations.
*
* Wraps user {@link BuildCacheService} implementations.
*/
public interface BuildCacheController extends Closeable {

    /**
     * Executes the given load command against the cache.
     *
     * @return the command's load result, or {@code null} — presumably on a
     *         cache miss; confirm against implementations.
     */
    @Nullable
    <T> T load(BuildCacheLoadCommand<T> command);

    /** Executes the given store command against the cache. */
    void store(BuildCacheStoreCommand command);

    /** Releases any resources held by the underlying {@link BuildCacheService}. */
    @Override
    void close();
}
| apache-2.0 |
google-research/rigl | rigl/experimental/jax/models/mnist_cnn_test.py | 2067 | # coding=utf-8
# Copyright 2022 RigL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for weight_symmetry.models.mnist_cnn."""
from absl.testing import absltest
import flax
import jax
import jax.numpy as jnp
from rigl.experimental.jax.models import mnist_cnn
class MNISTCNNTest(absltest.TestCase):
  """Tests the MNISTCNN model."""

  def setUp(self):
    super().setUp()
    # Fixed PRNG seed for reproducible parameter initialization.
    self._rng = jax.random.PRNGKey(42)
    self._num_classes = 10
    self._batch_size = 2
    # (shape, dtype) tuple in the form expected by flax's init_by_shape.
    self._input_shape = ((self._batch_size, 28, 28, 1), jnp.float32)
    # All-zeros dummy batch of MNIST-sized images.
    self._input = jnp.zeros(*self._input_shape)

  def test_output_shapes(self):
    """Tests the output shapes of the model."""
    # Initialize parameters/state from shapes only (no real data needed).
    with flax.deprecated.nn.stateful() as initial_state:
      _, initial_params = mnist_cnn.MNISTCNN.init_by_shape(
          self._rng, (self._input_shape,), num_classes=self._num_classes)
      model = flax.deprecated.nn.Model(mnist_cnn.MNISTCNN, initial_params)

    # Forward pass in eval mode; logits must be (batch, num_classes).
    with flax.deprecated.nn.stateful(initial_state, mutable=False):
      logits = model(self._input, num_classes=self._num_classes, train=False)

    self.assertTupleEqual(logits.shape, (self._batch_size, self._num_classes))

  def test_invalid_depth(self):
    """Tests model mask with the incorrect depth for the given model."""
    # Ten stride-reducing conv layers exceed the 28x28 spatial budget,
    # so initialization must fail with a ValueError.
    with self.assertRaisesRegex(ValueError, 'Input spatial size, '):
      mnist_cnn.MNISTCNN.init_by_shape(
          self._rng, (self._input_shape,),
          num_classes=self._num_classes,
          filters=10 * (32,))


if __name__ == '__main__':
  absltest.main()
| apache-2.0 |
sindicate/solidstack.old | src/solidstack/script/operators/Member.java | 5309 | /*--
* Copyright 2012 René M. de Bloois
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package solidstack.script.operators;
import java.lang.reflect.InvocationTargetException;
import java.util.Map;
import solidstack.lang.Assert;
import solidstack.script.JavaException;
import solidstack.script.Returning;
import solidstack.script.ThreadContext;
import solidstack.script.ThrowException;
import solidstack.script.UndefinedPropertyException;
import solidstack.script.expressions.Expression;
import solidstack.script.expressions.Identifier;
import solidstack.script.java.Java;
import solidstack.script.java.MissingFieldException;
import solidstack.script.objects.Type;
import solidstack.script.objects.Util;
import solidstack.script.scopes.Scope;
import solidstack.script.scopes.ScopeException;
import solidstack.script.scopes.UndefinedException;
import funny.Symbol;
/**
 * The member-access operator ({@code a.b}). Resolves, assigns or invokes the
 * member named by the right-hand {@link Identifier} on the evaluated
 * left-hand expression, dispatching on the runtime type of the target:
 * script {@link Scope}s, {@link Map}s, script {@link Type}s (static Java
 * access) and plain Java objects (reflective access via {@link Java}).
 */
public class Member extends Operator
{
	public Member( String name, Expression left, Expression right )
	{
		super( name, left, right );
	}

	/**
	 * Evaluates {@code left.right} and returns the member's value.
	 * Java-level exceptions are unwrapped and rethrown as script
	 * {@link ThrowException}s carrying the current call stack;
	 * {@link Returning} control-flow throwables are passed through.
	 */
	public Object evaluate( ThreadContext thread )
	{
		try
		{
			Object left = this.left.evaluate( thread );
			// The right-hand side must be a plain identifier naming the member.
			Assert.isInstanceOf( this.right, Identifier.class );
			Symbol right = ( (Identifier)this.right ).getSymbol();
			if( left == null )
				// TODO Use the Java exception hierarchy
				throw new ThrowException( "null reference: member: " + right.toString(), thread.cloneStack( getLocation() ) );
			if( left instanceof Scope ) // TODO This is part of the OO we want
			{
				Scope scope = (Scope)left;
				try
				{
					return scope.get( right );
				}
				catch( UndefinedException e )
				{
					throw new UndefinedPropertyException( right.toString(), thread.cloneStack( getLocation() ) );
				}
			}
			// Maps are addressed by the member name as a plain string key.
			if( left instanceof Map )
				return ( (Map)left ).get( right.toString() );
			try
			{
				// Type targets read static fields; anything else is a
				// reflective instance-field/property read.
				if( left instanceof Type )
					return Java.getStatic( ( (Type)left ).theClass(), right.toString() );
				return Java.get( left, right.toString() );
			}
			catch( InvocationTargetException e )
			{
				// Unwrap the reflective wrapper; let script control flow escape.
				Throwable t = e.getCause();
				if( t instanceof Returning )
					throw (Returning)t;
				throw new JavaException( t, thread.cloneStack( getLocation() ) );
			}
			catch( MissingFieldException e )
			{
				throw new ThrowException( e.getMessage(), thread.cloneStack( getLocation() ) );
			}
		}
		catch( ScopeException e )
		{
			throw new ThrowException( e.getMessage(), thread.cloneStack( getLocation() ) );
		}
	}

	/**
	 * Assigns {@code value} to {@code left.right} and returns the assigned
	 * value. Dispatch mirrors {@link #evaluate(ThreadContext)}: Map put,
	 * Scope set, static or reflective Java field write.
	 */
	public Object assign( ThreadContext thread, Object value )
	{
		Object object = this.left.evaluate( thread );
		Symbol symbol = ( (Identifier)this.right ).getSymbol();
		if( object instanceof Map )
		{
			( (Map)object ).put( symbol.toString(), value );
			return value;
		}
		if( object instanceof Scope )
		{
			( (Scope)object ).set( symbol, value );
			return value;
		}
		try
		{
			if( object instanceof Type )
				Java.setStatic( ( (Type)object ).theClass(), symbol.toString(), value );
			else
				Java.set( object, symbol.toString(), value );
			return value;
		}
		catch( InvocationTargetException e )
		{
			Throwable t = e.getCause();
			if( t instanceof Returning )
				throw (Returning)t;
			throw new JavaException( t, thread.cloneStack( getLocation() ) );
		}
		catch( Returning e )
		{
			throw e;
		}
		catch( Exception e )
		{
			throw new ThrowException( e.getMessage() != null ? e.getMessage() : e.toString(), thread.cloneStack( getLocation() ) );
//			throw new JavaException( e, thread.cloneStack( getLocation() ) ); // TODO Debug flag or something?
		}
	}

	/**
	 * Invokes {@code left.right(pars)} and returns the call's result.
	 * Scope targets are applied directly; all other targets go through
	 * reflective Java method invocation (static for Type targets).
	 */
	public Object apply( ThreadContext thread, Object[] pars )
	{
		Object object = this.left.evaluate( thread );
		Symbol symbol = ( (Identifier)this.right ).getSymbol();
		if( object instanceof Scope ) // TODO And Map?
		{
			try
			{
				return ( (Scope)object ).apply( symbol, pars );
			}
			catch( UndefinedException e )
			{
				throw new UndefinedPropertyException( symbol.toString(), thread.cloneStack() );
			}
		}
		// Convert script values to their Java representations before invoking.
		pars = Util.toJavaParameters( pars );
		try
		{
			if( object instanceof Type )
				return Java.invokeStatic( ( (Type)object ).theClass(), symbol.toString(), pars );
			return Java.invoke( object, symbol.toString(), pars );
		}
		catch( InvocationTargetException e )
		{
			Throwable t = e.getCause();
			if( t instanceof Returning )
				throw (Returning)t;
			throw new JavaException( t, thread.cloneStack( getLocation() ) );
		}
		catch( Returning e )
		{
			throw e;
		}
		catch( Exception e )
		{
			throw new ThrowException( e.getMessage() != null ? e.getMessage() : e.toString(), thread.cloneStack( getLocation() ) );
//			throw new JavaException( e, thread.cloneStack( getLocation() ) ); // TODO Debug flag or something?
		}
	}

	/** Named-argument invocation is not supported for member calls. */
	public Object apply( ThreadContext thread, Map args )
	{
		throw new UnsupportedOperationException();
	}
}
| apache-2.0 |
AlexeyLA0509/PSTTrader | src/ProSecuritiesTrading.MOEX.FIX/Base/Field/CFICode.cs | 1042 | using System;
namespace ProSecuritiesTrading.MOEX.FIX.Base.Field
{
public class CFICode
{
public const int Tag = 461;
public static readonly byte[] TagBytes;
public static readonly byte[] CFICodeMRCXXXWithSOH;
static CFICode()
{
TagBytes = new byte[3];
TagBytes[0] = 52;
TagBytes[1] = 54;
TagBytes[2] = 49;
CFICodeMRCXXXWithSOH = new byte[11];
CFICodeMRCXXXWithSOH[0] = 52; // 4
CFICodeMRCXXXWithSOH[1] = 54; // 6
CFICodeMRCXXXWithSOH[2] = 49; // 1
CFICodeMRCXXXWithSOH[3] = 61; // =
CFICodeMRCXXXWithSOH[4] = 77; // M
CFICodeMRCXXXWithSOH[5] = 82; // R
CFICodeMRCXXXWithSOH[6] = 67; // C
CFICodeMRCXXXWithSOH[7] = 88; // X
CFICodeMRCXXXWithSOH[8] = 88; // X
CFICodeMRCXXXWithSOH[9] = 88; // X
CFICodeMRCXXXWithSOH[10] = ProSecuritiesTrading.MOEX.FIX.Base.Message.Messages.SOH;
}
}
}
| apache-2.0 |
yorkhci/chartex | application/views/chartex/document_vis_with_links.php | 3113 | <?php
////////////////////////////////////////////////////////
//DOCUMENT VISUALISATION
//RECEIVES THE DATA FOR THE DOCUMENT VISUALISATION
//ENCODES IT IN JSON
//PREPARES THE STRUCTURE FOR THE DOCUMENT VISUALISATION
//NOT CURRENTLY USED
////////////////////////////////////////////////////////
echo heading('Transactions Visualisation', 3);
echo '<p>Below is a visualisation of the transactions that are described in this document:</p>';
$current_trans = '';
$current_rel = '';
$current_subject = '';
$current_related = '';
foreach($document_vis as $row) {
//If it is a transaction entity...
if ($row['entity_type_id'] == 12) {
$tree_root['id'] = 'Vis'.$row['document_id'].'~Document~'.$row['document_id'];
$tree_root['name'] = "Document";
$tree_root['children'] = array();
break;
}
}
foreach($document_vis as $row) {
//If it is a transaction...
if ($row['entity_type_id'] == 12) {
//If it is a new transaction...
if ($row['entity_id'] != $current_trans) {
//Create a new transaction branch, hang it off the document root, and reset the relation branch variable
$current_trans = $row['entity_id'];
$trans_array['id'] = 'Vis'.$row['document_id'].'~Transaction~'.$row['dm_entity_id'].'~'.$row['entity_id'];
$trans_array['name'] = $row['object_name'];
$trans_array['children'] = array();
$tree_root['children'][] = $trans_array;
$current_rel = '';
}
//If it is a new relationship type...
if ($row['relation_id'] != $current_rel) {
//Create a new relation branch, hang it off the transaction branch, and reset the subject branch variable
$current_rel = $row['relation_id'];
$rel_array['id'] = 'Vis'.$row['document_id'].'~Relationship~'.$row['relation_id'];
$rel_array['name'] = $row['relation_name'];
$rel_array['children'] = array();
end($tree_root['children']);
$relation_key = key($tree_root['children']);
$tree_root['children'][$relation_key]['children'][] = $rel_array;
$current_subject = '';
}
//If it is a new subject...
if ($row['subject_entity_id'] != $current_subject) {
//Create a new subject branch, hang it off the relation branch, and reset the related branch variable
$current_subject = $row['subject_entity_id'];
$subj_array['id'] = 'Vis'.$row['document_id'].'~'.$row['subject_type_name'].'~'.$row['dm_entity_id'].'~'.$row['subject_entity_id'];
$subj_array['name'] = $row['subject_name'];
$subj_array['children'] = array();
end($tree_root['children'][$relation_key]['children']);
$subj_key = key($tree_root['children'][$relation_key]['children']);
$tree_root['children'][$relation_key]['children'][$subj_key]['children'][] = $subj_array;
$current_related = '';
}
}
}
$document_json = json_encode($tree_root);
echo '<div id="document_log_'.$row['document_id'].'" class="vis_log"></div>';
echo '<div id="document_vis_'.$row['document_id'].'" class="vis_container"></div>';
echo '<script>';
echo 'document_json['.$row['document_id'].'] = '.$document_json.';';
echo '</script>';
?> | apache-2.0 |
aglne/shifu | src/main/java/ml/shifu/shifu/core/dvarsel/AbstractMasterConductor.java | 1500 | /**
* Copyright [2012-2014] PayPal Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ml.shifu.shifu.core.dvarsel;
import ml.shifu.shifu.container.obj.ColumnConfig;
import ml.shifu.shifu.container.obj.ModelConfig;
import ml.shifu.shifu.core.dvarsel.VarSelWorkerResult;
import java.util.List;
/**
* Created on 11/24/2014.
*/
/**
 * Base class for the master-side coordinator of distributed variable
 * selection: proposes candidate seeds, consumes worker results and decides
 * when to stop and which seed wins.
 */
public abstract class AbstractMasterConductor {

    /** Model configuration shared with subclasses. */
    protected ModelConfig modelConfig;
    /** Column configurations shared with subclasses. */
    protected List<ColumnConfig> columnConfigList;

    public AbstractMasterConductor(ModelConfig modelConfig, List<ColumnConfig> columnConfigList) {
        this.modelConfig = modelConfig;
        this.columnConfigList = columnConfigList;
    }

    /** Estimated number of iterations the selection process will need. */
    public abstract int getEstimateIterationCnt();

    /** Whether the iterative selection should terminate now. */
    public abstract boolean isToStop();

    /** Candidate seeds to evaluate in the next iteration. */
    public abstract List<CandidateSeed> getNextWorkingSet();

    /** Folds the results reported by workers into the master's state. */
    public abstract void consumeWorkerResults(Iterable<VarSelWorkerResult> workerResults);

    /** The best seed found, selected after iteration finishes. */
    public abstract CandidateSeed voteBestSeed();
}
| apache-2.0 |
mylovemaliang/newBmAndroid | app/src/main/java/cn/fuyoushuo/fqbb/view/flagment/SilentLoginTbFragment.java | 5504 | package cn.fuyoushuo.fqbb.view.flagment;
import android.app.Activity;
import android.os.Build;
import android.os.Bundle;
import android.text.TextUtils;
import android.util.Log;
import android.webkit.CookieManager;
import android.webkit.WebChromeClient;
import android.webkit.WebSettings;
import android.webkit.WebView;
import com.github.lzyzsd.jsbridge.BridgeUtil;
import com.github.lzyzsd.jsbridge.BridgeWebView;
import com.github.lzyzsd.jsbridge.BridgeWebViewClient;
import com.github.lzyzsd.jsbridge.CallBackFunction;
import com.trello.rxlifecycle.components.support.RxFragment;
import cn.fuyoushuo.fqbb.commonlib.utils.LoginInfoStore;
import cn.fuyoushuo.fqbb.presenter.impl.TaobaoInterPresenter;
/**
* Created by QA on 2016/12/14.
*/
/**
 * Hidden fragment that performs a silent (background) Taobao login inside an
 * off-screen {@link BridgeWebView}: it loads the Taobao login page, injects
 * the local {@code autoLogin.js} bridge script, hands it the stored account
 * credentials, and saves the resulting login cookie once an alimama page
 * confirms the session.
 */
public class SilentLoginTbFragment extends RxFragment{

    BridgeWebView myWebView;

    // True while the fragment is not attached to an Activity; guards autoLogin().
    private boolean isDetched = true;

    public static final String TAOBAOKE_LOGIN_URL = "http://login.taobao.com/member/login.jhtml?style=common&from=alimama&redirectURL=http%3A%2F%2Flogin.taobao.com%2Fmember%2Ftaobaoke%2Flogin.htm%3Fis_login%3d1&full_redirect=true&disableQuickLogin=true&qq-pf-to=pcqq.discussion";

    public static SilentLoginTbFragment newInstance() {
        Bundle args = new Bundle();
        SilentLoginTbFragment fragment = new SilentLoginTbFragment();
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        myWebView = new BridgeWebView(getActivity());
        myWebView.getSettings().setJavaScriptEnabled(true);
        //myWebView.getSettings().setBuiltInZoomControls(true);// whether to show zoom buttons, default false
        myWebView.getSettings().setSupportZoom(true);// whether zooming is allowed, default true
        myWebView.getSettings().setDomStorageEnabled(true);
        myWebView.getSettings().setAllowFileAccess(true);
        myWebView.getSettings().setUseWideViewPort(true);// allow arbitrary zoom ratio (wide viewport mode)
        myWebView.getSettings().setLoadWithOverviewMode(true);// together with setUseWideViewPort(true) makes pages fit the screen
        myWebView.requestFocusFromTouch();
        myWebView.setWebChromeClient(new WebChromeClient());
        // Lollipop+ blocks mixed http/https content by default; allow it for the login flow.
        if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP){
            myWebView.getSettings().setMixedContentMode(WebSettings.MIXED_CONTENT_ALWAYS_ALLOW);
        }
        if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP)
            CookieManager.getInstance().setAcceptThirdPartyCookies(myWebView, true);
        if(Build.VERSION.SDK_INT <= 18){
            myWebView.getSettings().setSavePassword(false);
        }
        myWebView.setWebViewClient(new BridgeWebViewClient(myWebView){
            @Override
            public boolean shouldOverrideUrlLoading(WebView view, String url) {
                // Keep the login URL loading inside this WebView.
                if(!TextUtils.isEmpty(url) && url.equals(TAOBAOKE_LOGIN_URL)){
                    view.loadUrl(url);
                    return true;
                }
                return super.shouldOverrideUrlLoading(view,url);
            }

            @Override
            public void onPageFinished(final WebView view, String url) {
                super.onPageFinished(view,url);
                // Landing on an alimama account page means the session is live.
                if(url.startsWith("http://www.alimama.com/index.htm")
                        || url.startsWith("http://media.alimama.com/account/overview.htm")
                        || url.startsWith("http://media.alimama.com/account/account.htm")){
                    Log.i("autoLogin","aready autoLogin");
                    // already logged in
                    // persist the Taobao login cookie
                    TaobaoInterPresenter.saveLoginCookie(url);
                    view.stopLoading();
                }
                // Inject the JS that fills and submits the login form, then
                // (after a short delay so the script is ready) hand it the
                // stored credentials via the bridge.
                if(!url.equals("https://login.m.taobao.com/login.htm?_input_charset=utf-8")){
                    BridgeUtil.webViewLoadLocalJs(view,"autoLogin.js");
                    myWebView.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            myWebView.callHandler("autoLogin", LoginInfoStore.getIntance().getAliInfoJson(), new CallBackFunction() {
                                @Override
                                public void onCallBack(String data) {

                                }
                            });
                        }
                    },1000);
                }else{
                    Log.i("autoLogin","login failed");
                }
            }});
    }

    // Start the automatic login; no-op unless the fragment is attached.
    public void autoLogin(){
        if(!isDetched && myWebView != null){
            myWebView.loadUrl(TAOBAOKE_LOGIN_URL);
        }
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        isDetched = false;
    }

    @Override
    public void onDetach() {
        super.onDetach();
        isDetched = true;
    }

    @Override
    public void onStart() {
        super.onStart();
    }

    @Override
    public void onResume() {
        super.onResume();
    }

    @Override
    public void onPause() {
        super.onPause();
    }

    @Override
    public void onDestroy() {
        super.onDestroy();
        // Release the WebView to avoid leaking its resources.
        if(myWebView != null){
            myWebView.removeAllViews();
            myWebView.destroy();
        }
    }

    //----------------------------------------bus events--------------------------------------------------------
}
| apache-2.0 |
MRivas-XumaK/slingBuild | testing/resourceresolver-mock/src/main/java/org/apache/sling/testing/resourceresolver/MockResourceResolver.java | 11609 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.testing.resourceresolver;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.http.HttpServletRequest;
import org.apache.sling.api.SlingConstants;
import org.apache.sling.api.resource.LoginException;
import org.apache.sling.api.resource.PersistenceException;
import org.apache.sling.api.resource.Resource;
import org.apache.sling.api.resource.ResourceResolver;
import org.osgi.service.event.Event;
public class MockResourceResolver implements ResourceResolver {
private final Map<String, Map<String, Object>> resources;
private final Map<String, Map<String, Object>> temporaryResources = new LinkedHashMap<String, Map<String,Object>>();
private final Set<String> deletedResources = new HashSet<String>();
private final MockResourceResolverFactoryOptions options;
    /**
     * Creates a resolver over the shared resource map; {@code resources} is
     * shared with the factory, so access to it is synchronized throughout.
     */
    public MockResourceResolver(final MockResourceResolverFactoryOptions options,
            final Map<String, Map<String, Object>> resources) {
        this.options = options;
        this.resources = resources;
    }

    /** Adaptation is not supported by this mock; always {@code null}. */
    @Override
    public <AdapterType> AdapterType adaptTo(Class<AdapterType> type) {
        return null;
    }

    /** Mock resolution: no mapping is applied, delegates to {@link #getResource(String)}. */
    @Override
    public Resource resolve(final HttpServletRequest request, final String absPath) {
        return this.getResource(absPath);
    }

    @Override
    public Resource resolve(final String absPath) {
        return this.getResource(absPath);
    }

    /** Request-based resolution is not supported; always {@code null}. */
    @Override
    @Deprecated
    public Resource resolve(final HttpServletRequest request) {
        return null;
    }

    /** Mapping is the identity in this mock. */
    @Override
    public String map(final String resourcePath) {
        return resourcePath;
    }

    @Override
    public String map(final HttpServletRequest request, final String resourcePath) {
        return resourcePath;
    }
    /**
     * Looks up a resource. Absolute paths consult, in order: the deleted set
     * (pending deletions hide a resource), uncommitted temporary resources,
     * then the shared committed map. Relative paths are retried against each
     * configured search path.
     */
    @Override
    public Resource getResource(final String path) {
        if ( path.startsWith("/") ) {
            if ( this.deletedResources.contains(path) ) {
                return null;
            }
            final Map<String, Object> tempProps = this.temporaryResources.get(path);
            if ( tempProps != null ) {
                final Resource rsrc = new MockResource(path, tempProps, this);
                return rsrc;
            }
            synchronized ( this.resources ) {
                final Map<String, Object> props = this.resources.get(path);
                if ( props != null ) {
                    final Resource rsrc = new MockResource(path, props, this);
                    return rsrc;
                }
            }
        } else {
            for(final String s : this.getSearchPath() ) {
                final Resource rsrc = this.getResource(s + '/' + path);
                if ( rsrc != null ) {
                    return rsrc;
                }
            }
        }
        return null;
    }
@Override
public Resource getResource(Resource base, String path) {
if ( path == null || path.length() == 0 ) {
path = "/";
}
if ( path.startsWith("/") ) {
return getResource(path);
}
if ( base.getPath().equals("/") ) {
return getResource(base.getPath() + path);
}
return getResource(base.getPath() + '/' + path);
}
    /** Search paths come from the factory options. */
    @Override
    public String[] getSearchPath() {
        return this.options.getSearchPaths();
    }

    /**
     * Lists direct children of {@code parent}: paths one level below the
     * parent, gathered from both the committed and temporary maps, excluding
     * pending deletions. Temporary entries override committed ones with the
     * same path (same candidate key).
     */
    @Override
    public Iterator<Resource> listChildren(final Resource parent) {
        final String prefixPath = parent.getPath() + "/";
        final Map<String, Map<String, Object>> candidates = new HashMap<String, Map<String,Object>>();
        synchronized ( this.resources ) {
            for(final Map.Entry<String, Map<String, Object>> e : this.resources.entrySet()) {
                // Direct child: starts with the prefix and has no further '/'.
                if (e.getKey().startsWith(prefixPath) && e.getKey().lastIndexOf('/') < prefixPath.length() ) {
                    if ( !this.deletedResources.contains(e.getKey()) ) {
                        candidates.put(e.getKey(), e.getValue());
                    }
                }
            }
            for(final Map.Entry<String, Map<String, Object>> e : this.temporaryResources.entrySet()) {
                if (e.getKey().startsWith(prefixPath) && e.getKey().lastIndexOf('/') < prefixPath.length() ) {
                    if ( !this.deletedResources.contains(e.getKey()) ) {
                        candidates.put(e.getKey(), e.getValue());
                    }
                }
            }
        }
        final List<Resource> children = new ArrayList<Resource>();
        for(final Map.Entry<String, Map<String, Object>> e : candidates.entrySet()) {
            children.add(new MockResource(e.getKey(), e.getValue(), this));
        }
        return children.iterator();
    }

    /** Iterable view over {@link #listChildren(Resource)}. */
    @Override
    public Iterable<Resource> getChildren(final Resource parent) {
        return new Iterable<Resource>() {

            @Override
            public Iterator<Resource> iterator() {
                return listChildren(parent);
            }
        };
    }
@Override
public Iterator<Resource> findResources(final String query, final String language) {
    // Query support is not implemented by this mock; always report no matches.
    final List<Resource> none = Collections.emptyList();
    return none.iterator();
}
@Override
public Iterator<Map<String, Object>> queryResources(String query,
        String language) {
    // Query support is not implemented by this mock; always report no matches.
    final List<Map<String, Object>> none = Collections.emptyList();
    return none.iterator();
}
@Override
public ResourceResolver clone(Map<String, Object> authenticationInfo)
        throws LoginException {
    // Cloning with different credentials is not supported by this mock.
    // TODO Auto-generated method stub
    return null;
}
@Override
public boolean isLive() {
    // This in-memory mock always reports itself as usable.
    return true;
}
@Override
public void close() {
    // nothing to do - there is no underlying session to release.
}
@Override
public String getUserID() {
    // No authentication is modelled by this mock, so no user id is available.
    // TODO Auto-generated method stub
    return null;
}
@Override
public Iterator<String> getAttributeNames() {
    // This mock exposes no resolver attributes.
    final List<String> none = Collections.emptyList();
    return none.iterator();
}
@Override
public Object getAttribute(final String name) {
    // No attributes are supported; every lookup misses.
    return null;
}
@Override
public void delete(final Resource resource) throws PersistenceException {
    final String path = resource.getPath();
    // Record the deletion; it only takes effect on the committed store at commit().
    this.deletedResources.add(path);
    this.temporaryResources.remove(path);

    final String prefixPath = path + '/';
    synchronized (this.resources) {
        // Mark every committed descendant as deleted as well.
        for (final String candidate : this.resources.keySet()) {
            if (candidate.startsWith(prefixPath)) {
                this.deletedResources.add(candidate);
            }
        }
        // Transient descendants are simply dropped right away.
        final Iterator<Map.Entry<String, Map<String, Object>>> it =
                this.temporaryResources.entrySet().iterator();
        while (it.hasNext()) {
            if (it.next().getKey().startsWith(prefixPath)) {
                it.remove();
            }
        }
    }
}
@Override
public Resource create(Resource parent, String name,
        Map<String, Object> properties) throws PersistenceException {
    final String parentPath = parent.getPath();
    // Avoid a double slash when creating directly under the root.
    final String path = parentPath.equals("/") ? parentPath + name : parentPath + '/' + name;

    // Refuse to create a path that already exists, transient or committed.
    if (this.temporaryResources.containsKey(path)) {
        throw new PersistenceException("Path already exists: " + path);
    }
    synchronized (this.resources) {
        if (this.resources.containsKey(path)) {
            throw new PersistenceException("Path already exists: " + path);
        }
    }

    // Re-creating a path cancels any pending deletion of it.
    this.deletedResources.remove(path);
    final Map<String, Object> props =
            (properties == null) ? new HashMap<String, Object>() : properties;
    this.temporaryResources.put(path, props);
    return new MockResource(path, props, this);
}
@Override
public void revert() {
    // Discard everything that has not been committed yet.
    this.temporaryResources.clear();
    this.deletedResources.clear();
}
@Override
public void commit() throws PersistenceException {
    synchronized ( this.resources ) {
        // First apply the recorded deletions; fire a REMOVED event for every
        // path that actually existed in the committed store.
        for(final String path : this.deletedResources ) {
            if ( this.resources.remove(path) != null && this.options.getEventAdmin() != null ) {
                final Map<String, Object> props = new HashMap<String, Object>();
                props.put(SlingConstants.PROPERTY_PATH, path);
                final Event e = new Event(SlingConstants.TOPIC_RESOURCE_REMOVED, props);
                this.options.getEventAdmin().sendEvent(e);
            }
            // A transient add/change for a deleted path must not survive the commit.
            this.temporaryResources.remove(path);
        }
        // Then promote transient resources into the committed store, firing
        // CHANGED for paths that already existed and ADDED for new ones.
        for(final String path : this.temporaryResources.keySet() ) {
            final boolean changed = this.resources.containsKey(path);
            this.resources.put(path, this.temporaryResources.get(path));
            if ( this.options.getEventAdmin() != null ) {
                final Map<String, Object> props = new HashMap<String, Object>();
                props.put(SlingConstants.PROPERTY_PATH, path);
                if ( this.resources.get(path).get(ResourceResolver.PROPERTY_RESOURCE_TYPE) != null ) {
                    props.put(SlingConstants.PROPERTY_RESOURCE_TYPE, this.resources.get(path).get(ResourceResolver.PROPERTY_RESOURCE_TYPE));
                }
                final Event e = new Event(changed ? SlingConstants.TOPIC_RESOURCE_CHANGED : SlingConstants.TOPIC_RESOURCE_ADDED, props);
                this.options.getEventAdmin().sendEvent(e);
            }
        }
    }
    // Clear both change journals now that everything is applied.
    this.revert();
}
@Override
public boolean hasChanges() {
    // Uncommitted state exists if anything is pending as added/changed or deleted.
    return !this.temporaryResources.isEmpty() || !this.deletedResources.isEmpty();
}
@Override
public String getParentResourceType(Resource resource) {
    // Resource type hierarchies are not modelled by this mock.
    return null;
}
@Override
public String getParentResourceType(String resourceType) {
    // Resource type hierarchies are not modelled by this mock.
    return null;
}
@Override
public boolean isResourceType(Resource resource, String resourceType) {
    // Exact string match only; resource super types are not considered here.
    final String actualType = resource.getResourceType();
    return actualType.equals(resourceType);
}
@Override
public void refresh() {
    // nothing to do - the in-memory state is always current.
}
/**
 * Records a transient (uncommitted) change for the given path. The change
 * becomes visible through getResource/listChildren immediately and is
 * persisted to the committed store by commit().
 */
public void addChanged(final String path, final Map<String, Object> props) {
    this.temporaryResources.put(path, props);
}
@Override
public boolean hasChildren(Resource resource) {
    // A resource has children iff listing them yields at least one entry.
    final Iterator<Resource> children = this.listChildren(resource);
    return children.hasNext();
}
}
| apache-2.0 |
FreemapSlovakia/freemap-v3-nodejs-backend | src/routers/deviceTracking/getAllDevicesHandler.ts | 615 | import Router from '@koa/router';
import { SQL } from 'sql-template-strings';
import { pool } from '../../database';
import { acceptValidator } from '../../requestValidators';
import { authenticator } from '../../authenticator';
export function attachGetAllDevicesHandler(router: Router) {
  router.get(
    '/devices',
    acceptValidator('application/json'),
    authenticator(true),
    async (ctx) => {
      // List every tracking device owned by the authenticated user.
      const devices = await pool.query(SQL`
        SELECT id, name, token, createdAt, maxCount, maxAge, userId
        FROM trackingDevice
        WHERE userId = ${ctx.state.user.id}
      `);

      ctx.body = devices;
    },
  );
}
| apache-2.0 |
daxiong0226/sunny | src/main/java/cn/dx/sunny/dao/RoleMapper.java | 1004 | package cn.dx.sunny.dao;
import cn.dx.sunny.domain.model.Role;
import cn.dx.sunny.domain.vo.FilterParam;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.Map;
@Component("RoleMapper")
public interface RoleMapper {

    /** Deletes the role with the given primary key. */
    int deleteByPrimaryKey(Integer id);

    /** Inserts a complete role record. */
    int insert(Role record);

    /** Inserts a role record, writing only its non-null fields. */
    int insertSelective(Role record);

    /** Loads a role by its primary key. */
    Role selectByPrimaryKey(Integer id);

    /** Updates only the non-null fields of the given role, matched by primary key. */
    int updateByPrimaryKeySelective(Role record);

    /** Updates all fields of the given role, matched by primary key. */
    int updateByPrimaryKey(Role record);

    /**
     * Fetches the roles assigned to the given user.
     *
     * @param userID id of the user whose roles are requested
     * @return the roles belonging to that user
     */
    List<Role> selectRoleListByUserID(int userID);

    /**
     * Queries roles as raw row maps for a Bootstrap-Table listing.
     *
     * @param filter filtering/paging parameters
     * @return one map per matching role row
     */
    List<Map> selectRoleByBootstrapTable(FilterParam filter);

    /**
     * Fetches the roles matching the given role code.
     *
     * @param role_code role code to look up
     * @return the matching roles
     */
    List<Role> selectRoleByRoleCode(String role_code);

    /**
     * Fetches all roles.
     *
     * @return every role in the system
     */
    List<Role> selectAllRole();
} | apache-2.0 |
turbokongen/home-assistant | homeassistant/components/homeassistant/triggers/numeric_state.py | 5879 | """Offer numeric state listening automation rules."""
import logging
import voluptuous as vol
from homeassistant import exceptions
from homeassistant.const import (
CONF_ABOVE,
CONF_ATTRIBUTE,
CONF_BELOW,
CONF_ENTITY_ID,
CONF_FOR,
CONF_PLATFORM,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import CALLBACK_TYPE, HassJob, callback
from homeassistant.helpers import condition, config_validation as cv, template
from homeassistant.helpers.event import (
async_track_same_state,
async_track_state_change_event,
)
# mypy: allow-incomplete-defs, allow-untyped-calls, allow-untyped-defs
# mypy: no-check-untyped-defs
def validate_above_below(value):
    """Validate that the above/below thresholds are mutually satisfiable."""
    above, below = value.get(CONF_ABOVE), value.get(CONF_BELOW)

    # Only cross-check when both thresholds are present as plain numbers;
    # string values (e.g. templates) are resolved later and cannot be compared here.
    thresholds_comparable = (
        above is not None
        and below is not None
        and not isinstance(above, str)
        and not isinstance(below, str)
    )

    if thresholds_comparable and above > below:
        raise vol.Invalid(
            f"A value can never be above {above} and below {below} at the same time. You probably want two different triggers.",
        )

    return value
# Trigger configuration schema: at least one numeric threshold (above/below)
# is required, and the pair is cross-validated by validate_above_below.
TRIGGER_SCHEMA = vol.All(
    vol.Schema(
        {
            vol.Required(CONF_PLATFORM): "numeric_state",
            vol.Required(CONF_ENTITY_ID): cv.entity_ids,
            vol.Optional(CONF_BELOW): cv.NUMERIC_STATE_THRESHOLD_SCHEMA,
            vol.Optional(CONF_ABOVE): cv.NUMERIC_STATE_THRESHOLD_SCHEMA,
            vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
            vol.Optional(CONF_FOR): cv.positive_time_period_template,
            vol.Optional(CONF_ATTRIBUTE): cv.match_all,
        }
    ),
    cv.has_at_least_one_key(CONF_BELOW, CONF_ABOVE),
    validate_above_below,
)

_LOGGER = logging.getLogger(__name__)
async def async_attach_trigger(
    hass, config, action, automation_info, *, platform_type="numeric_state"
) -> CALLBACK_TYPE:
    """Listen for state changes based on configuration.

    Registers a state-change listener for the configured entities and runs
    ``action`` when an entity's numeric state (or attribute) moves into the
    configured above/below range, optionally only after it has stayed there
    for the configured ``for`` period. Returns a callback that detaches all
    listeners again.
    """
    entity_ids = config.get(CONF_ENTITY_ID)
    below = config.get(CONF_BELOW)
    above = config.get(CONF_ABOVE)
    time_delta = config.get(CONF_FOR)
    template.attach(hass, time_delta)
    value_template = config.get(CONF_VALUE_TEMPLATE)
    # Per-entity unsubscribe callbacks for pending "stayed in range" timers.
    unsub_track_same = {}
    # Entities currently inside the range; used to fire only on crossings.
    entities_triggered = set()
    # Per-entity rendered `for` durations.
    period: dict = {}
    attribute = config.get(CONF_ATTRIBUTE)
    job = HassJob(action)

    _variables = {}
    if automation_info:
        _variables = automation_info.get("variables") or {}

    if value_template is not None:
        value_template.hass = hass

    def variables(entity_id):
        """Return a dict with trigger variables."""
        trigger_info = {
            "trigger": {
                "platform": "numeric_state",
                "entity_id": entity_id,
                "below": below,
                "above": above,
                "attribute": attribute,
            }
        }
        # Automation-provided variables first; trigger info takes precedence.
        return {**_variables, **trigger_info}

    @callback
    def check_numeric_state(entity_id, from_s, to_s):
        """Return True if criteria are now met."""
        try:
            return condition.async_numeric_state(
                hass,
                to_s,
                below,
                above,
                value_template,
                variables(entity_id),
                attribute,
            )
        except exceptions.ConditionError as err:
            # Misconfigured/unavailable states are logged, never raised.
            _LOGGER.warning("%s", err)
            return False

    @callback
    def state_automation_listener(event):
        """Listen for state changes and calls action."""
        entity_id = event.data.get("entity_id")
        from_s = event.data.get("old_state")
        to_s = event.data.get("new_state")

        @callback
        def call_action():
            """Call action with right context."""
            hass.async_run_hass_job(
                job,
                {
                    "trigger": {
                        "platform": platform_type,
                        "entity_id": entity_id,
                        "below": below,
                        "above": above,
                        "from_state": from_s,
                        "to_state": to_s,
                        "for": time_delta if not time_delta else period[entity_id],
                        "description": f"numeric state of {entity_id}",
                    }
                },
                to_s.context,
            )

        matching = check_numeric_state(entity_id, from_s, to_s)

        if not matching:
            # Left the range: re-arm so the next entry triggers again.
            entities_triggered.discard(entity_id)
        elif entity_id not in entities_triggered:
            # Entered the range (fires only on the crossing, not while inside).
            entities_triggered.add(entity_id)

            if time_delta:
                try:
                    # `for` may contain templates; render it per entity.
                    period[entity_id] = cv.positive_time_period(
                        template.render_complex(time_delta, variables(entity_id))
                    )
                except (exceptions.TemplateError, vol.Invalid) as ex:
                    _LOGGER.error(
                        "Error rendering '%s' for template: %s",
                        automation_info["name"],
                        ex,
                    )
                    entities_triggered.discard(entity_id)
                    return

                # Only fire after the entity stays in range for the period.
                unsub_track_same[entity_id] = async_track_same_state(
                    hass,
                    period[entity_id],
                    call_action,
                    entity_ids=entity_id,
                    async_check_same_func=check_numeric_state,
                )
            else:
                call_action()

    unsub = async_track_state_change_event(hass, entity_ids, state_automation_listener)

    @callback
    def async_remove():
        """Remove state listeners async."""
        unsub()
        for async_remove in unsub_track_same.values():
            async_remove()
        unsub_track_same.clear()

    return async_remove
| apache-2.0 |
hhj0325/pystock | com/hhj/pystock/snakecoin/block.py | 535 | import hashlib as hasher
class Block:
    """Minimal blockchain block: index, timestamp, payload and previous hash."""

    def __init__(self, index, timestamp, data, previous_hash):
        self.index = index
        self.timestamp = timestamp
        self.data = data
        self.previous_hash = previous_hash
        # The block's own hash is derived from all other fields at creation.
        self.hash = self.hash_block()

    def hash_block(self):
        """Return the SHA-256 hex digest of the concatenated block fields."""
        payload = "{}{}{}{}".format(
            self.index, self.timestamp, self.data, self.previous_hash
        )
        digest = hasher.sha256()
        digest.update(payload.encode("utf-8"))
        return digest.hexdigest()
| apache-2.0 |
read-write-web/react-foaf | js/lib/rsvp-latest.js | 41290 | (function(globals) {
var define, requireModule, require, requirejs;
(function() {
  // Minimal AMD-style loader used by the rsvp build: `registry` holds module
  // factories, `seen` caches instantiated modules (each runs at most once).
  var registry = {}, seen = {};

  // Registers a module factory without executing it.
  define = function(name, deps, callback) {
    registry[name] = { deps: deps, callback: callback };
  };

  requirejs = require = requireModule = function(name) {
    requirejs._eak_seen = registry;

    if (seen[name]) { return seen[name]; }
    seen[name] = {};

    if (!registry[name]) {
      throw new Error("Could not find module " + name);
    }

    var mod = registry[name],
        deps = mod.deps,
        callback = mod.callback,
        reified = [],
        exports;

    // Instantiate dependencies first; the special name 'exports' is a fresh
    // object the factory populates instead of returning a value.
    for (var i=0, l=deps.length; i<l; i++) {
      if (deps[i] === 'exports') {
        reified.push(exports = {});
      } else {
        reified.push(requireModule(resolve(deps[i])));
      }
    }

    var value = callback.apply(this, reified);
    return seen[name] = exports || value;

    // Resolves a relative dependency name ("./x", "../y") against `name`.
    function resolve(child) {
      if (child.charAt(0) !== '.') { return child; }
      var parts = child.split("/");
      var parentBase = name.split("/").slice(0, -1);

      for (var i=0, l=parts.length; i<l; i++) {
        var part = parts[i];

        if (part === '..') { parentBase.pop(); }
        else if (part === '.') { continue; }
        else { parentBase.push(part); }
      }

      return parentBase.join("/");
    }
  };
})();
define("rsvp/all",
["./promise","./utils","exports"],
function(__dependency1__, __dependency2__, __exports__) {
"use strict";
/* global toString */
var Promise = __dependency1__.Promise;
var isArray = __dependency2__.isArray;
var isFunction = __dependency2__.isFunction;
/**
Returns a promise that is fulfilled when all the given promises have been
fulfilled, or rejected if any of them become rejected. The return promise
is fulfilled with an array that gives all the values in the order they were
passed in the `promises` array argument.
Example:
```javascript
var promise1 = RSVP.resolve(1);
var promise2 = RSVP.resolve(2);
var promise3 = RSVP.resolve(3);
var promises = [ promise1, promise2, promise3 ];
RSVP.all(promises).then(function(array){
// The array here would be [ 1, 2, 3 ];
});
```
If any of the `promises` given to `RSVP.all` are rejected, the first promise
that is rejected will be given as an argument to the returned promises's
rejection handler. For example:
Example:
```javascript
var promise1 = RSVP.resolve(1);
var promise2 = RSVP.reject(new Error("2"));
var promise3 = RSVP.reject(new Error("3"));
var promises = [ promise1, promise2, promise3 ];
RSVP.all(promises).then(function(array){
// Code here never runs because there are rejected promises!
}, function(error) {
// error.message === "2"
});
```
@method all
@for RSVP
@param {Array} promises
@param {String} label
@return {Promise} promise that is fulfilled when all `promises` have been
fulfilled, or rejected if any of them become rejected.
*/
/**
 * Fulfills with an array of all fulfillment values (in input order) once
 * every entry has fulfilled; rejects with the first rejection reason.
 * Non-thenable entries are passed through as-is.
 */
function all(promises, label) {
  if (!isArray(promises)) {
    throw new TypeError('You must pass an array to all.');
  }

  return new Promise(function(resolve, reject) {
    var results = [];
    var pending = promises.length;

    if (pending === 0) {
      resolve([]);
    }

    function settle(index, value) {
      results[index] = value;
      pending -= 1;
      if (pending === 0) {
        resolve(results);
      }
    }

    function makeResolver(index) {
      return function(value) {
        settle(index, value);
      };
    }

    for (var i = 0; i < promises.length; i++) {
      var entry = promises[i];

      if (entry && isFunction(entry.then)) {
        entry.then(makeResolver(i), reject, "RSVP: RSVP#all");
      } else {
        // Plain value: counts as already fulfilled.
        settle(i, entry);
      }
    }
  }, label);
}
__exports__.all = all;
});
define("rsvp/asap",
  ["exports"],
  function(__exports__) {
    "use strict";
    // Picks the fastest available mechanism for scheduling an async flush:
    // process.nextTick (node) > MutationObserver (browsers) > setTimeout.
    var browserGlobal = (typeof window !== 'undefined') ? window : {};
    var BrowserMutationObserver = browserGlobal.MutationObserver || browserGlobal.WebKitMutationObserver;
    var local = (typeof global !== 'undefined') ? global : this;

    // node
    function useNextTick() {
      return function() {
        process.nextTick(flush);
      };
    }

    function useMutationObserver() {
      // Mutating an observed attribute queues `flush` as a microtask.
      var observer = new BrowserMutationObserver(flush);
      var element = document.createElement('div');
      observer.observe(element, { attributes: true });

      // Chrome Memory Leak: https://bugs.webkit.org/show_bug.cgi?id=93661
      window.addEventListener('unload', function(){
        observer.disconnect();
        observer = null;
      }, false);

      return function() {
        element.setAttribute('drainQueue', 'drainQueue');
      };
    }

    function useSetTimeout() {
      return function() {
        local.setTimeout(flush, 1);
      };
    }

    var queue = [];
    function flush() {
      // queue.length is re-read each iteration, so callbacks enqueued during
      // the flush are processed in the same pass.
      for (var i = 0; i < queue.length; i++) {
        var tuple = queue[i];
        var callback = tuple[0], arg = tuple[1];
        callback(arg);
      }
      queue = [];
    }

    var scheduleFlush;

    // Decide what async method to use to triggering processing of queued callbacks:
    if (typeof process !== 'undefined' && {}.toString.call(process) === '[object process]') {
      scheduleFlush = useNextTick();
    } else if (BrowserMutationObserver) {
      scheduleFlush = useMutationObserver();
    } else {
      scheduleFlush = useSetTimeout();
    }

    function asap(callback, arg) {
      var length = queue.push([callback, arg]);
      if (length === 1) {
        // If length is 1, that means that we need to schedule an async flush.
        // If additional callbacks are queued before the queue is flushed, they
        // will be processed by this flush that we are scheduling.
        scheduleFlush();
      }
    }

    __exports__.asap = asap;
  });
define("rsvp/cast",
["exports"],
function(__exports__) {
"use strict";
/**
`RSVP.Promise.cast` returns the same promise if that promise shares a constructor
with the promise being casted.
Example:
```javascript
var promise = RSVP.resolve(1);
var casted = RSVP.Promise.cast(promise);
console.log(promise === casted); // true
```
In the case of a promise whose constructor does not match, it is assimilated.
The resulting promise will fulfill or reject based on the outcome of the
promise being casted.
In the case of a non-promise, a promise which will fulfill with that value is
returned.
Example:
```javascript
var value = 1; // could be a number, boolean, string, undefined...
var casted = RSVP.Promise.cast(value);
console.log(value === casted); // false
console.log(casted instanceof RSVP.Promise) // true
casted.then(function(val) {
val === value // => true
});
```
`RSVP.Promise.cast` is similar to `RSVP.resolve`, but `RSVP.Promise.cast` differs in the
following ways:
* `RSVP.Promise.cast` serves as a memory-efficient way of getting a promise, when you
have something that could either be a promise or a value. RSVP.resolve
will have the same effect but will create a new promise wrapper if the
argument is a promise.
* `RSVP.Promise.cast` is a way of casting incoming thenables or promise subclasses to
promises of the exact class specified, so that the resulting object's `then` is
ensured to have the behavior of the constructor you are calling cast on (i.e., RSVP.Promise).
@method cast
@for RSVP
@param {Object} object to be casted
@return {Promise} promise that is fulfilled when all properties of `promises`
have been fulfilled, or rejected if any of them become rejected.
*/
/**
 * Returns `object` unchanged when it is already a promise built by this
 * same constructor; otherwise wraps it (thenable or plain value) in a new
 * promise of this constructor.
 */
function cast(object) {
  /*jshint validthis:true */
  if (object && typeof object === 'object' && object.constructor === this) {
    return object;
  }

  var Constructor = this;
  return new Constructor(function(resolve) {
    resolve(object);
  });
}
__exports__.cast = cast;
});
define("rsvp/config",
["./events","exports"],
function(__dependency1__, __exports__) {
"use strict";
var EventTarget = __dependency1__.EventTarget;
// Global RSVP configuration. Mixing in EventTarget gives the config object
// on/off/trigger, so code can subscribe to config-level events (e.g. 'error').
var config = {
  instrument: false
};

EventTarget.mixin(config);
/**
 * Reads or writes a global RSVP configuration entry. With two arguments it
 * acts as a setter; with one it returns the current value. The legacy name
 * 'onerror' instead registers the value as an 'error' event listener.
 */
function configure(name, value) {
  if (name === 'onerror') {
    // handle for legacy users that expect the actual
    // error to be passed to their function added via
    // `RSVP.configure('onerror', someFunctionHere);`
    config.on('error', value);
    return;
  }

  if (arguments.length !== 2) {
    return config[name];
  }

  config[name] = value;
}
__exports__.config = config;
__exports__.configure = configure;
});
define("rsvp/defer",
["./promise","exports"],
function(__dependency1__, __exports__) {
"use strict";
var Promise = __dependency1__.Promise;
/**
`RSVP.defer` returns an object similar to jQuery's `$.Deferred` objects.
`RSVP.defer` should be used when porting over code reliant on `$.Deferred`'s
interface. New code should use the `RSVP.Promise` constructor instead.
The object returned from `RSVP.defer` is a plain object with three properties:
* promise - an `RSVP.Promise`.
* reject - a function that causes the `promise` property on this object to
become rejected
* resolve - a function that causes the `promise` property on this object to
become fulfilled.
Example:
```javascript
var deferred = RSVP.defer();
deferred.resolve("Success!");
defered.promise.then(function(value){
// value here is "Success!"
});
```
@method defer
@for RSVP
@param {String} -
@return {Object}
*/
/**
 * Returns a jQuery-style deferred: `{ promise, resolve, reject }`, where
 * calling resolve/reject settles the promise. Prefer the Promise
 * constructor in new code.
 */
function defer(label) {
  // Pre-allocate the object shape before filling it in.
  var result = { resolve: undefined, reject: undefined, promise: undefined };

  result.promise = new Promise(function(resolve, reject) {
    result.resolve = resolve;
    result.reject = reject;
  }, label);

  return result;
}
__exports__.defer = defer;
});
define("rsvp/events",
["exports"],
function(__exports__) {
"use strict";
// Linear scan for strict-equality membership; presumably avoids
// Array.prototype.indexOf for very old environments — TODO confirm.
var indexOf = function(callbacks, callback) {
  for (var i=0, l=callbacks.length; i<l; i++) {
    if (callbacks[i] === callback) { return i; }
  }

  return -1;
};
// Lazily creates and returns the per-object event->callbacks registry.
var callbacksFor = function(object) {
  var callbacks = object._promiseCallbacks;

  if (!callbacks) {
    callbacks = object._promiseCallbacks = {};
  }

  return callbacks;
};
/**
//@module RSVP
//@class EventTarget
*/
var EventTarget = {
  /**
   * Extends `object` (an instance or a prototype) with the EventTarget
   * methods `on`, `off` and `trigger`, and resets its callback registry.
   */
  mixin: function(object) {
    object.on = this.on;
    object.off = this.off;
    object.trigger = this.trigger;
    object._promiseCallbacks = undefined;
    return object;
  },

  /**
   * Registers `callback` for `eventName`. The same callback is never
   * registered twice for one event.
   */
  on: function(eventName, callback) {
    var allCallbacks = callbacksFor(this);
    var callbacks = allCallbacks[eventName];

    if (!callbacks) {
      callbacks = allCallbacks[eventName] = [];
    }

    if (indexOf(callbacks, callback) === -1) {
      callbacks.push(callback);
    }
  },

  /**
   * Unregisters `callback` for `eventName`. When no callback is given,
   * every listener for the event is removed.
   */
  off: function(eventName, callback) {
    var allCallbacks = callbacksFor(this);

    if (!callback) {
      allCallbacks[eventName] = [];
      return;
    }

    var callbacks = allCallbacks[eventName];
    var index = indexOf(callbacks, callback);

    if (index !== -1) {
      callbacks.splice(index, 1);
    }
  },

  /**
   * Fires `eventName`, passing `options` to every registered listener in
   * registration order.
   */
  trigger: function(eventName, options) {
    var callbacks = callbacksFor(this)[eventName];

    if (callbacks) {
      // Don't cache callbacks.length: listeners added during dispatch run too.
      for (var i = 0; i < callbacks.length; i++) {
        callbacks[i](options);
      }
    }
  }
};
__exports__.EventTarget = EventTarget;
});
define("rsvp/hash",
["./promise","./utils","exports"],
function(__dependency1__, __dependency2__, __exports__) {
"use strict";
var Promise = __dependency1__.Promise;
var isFunction = __dependency2__.isFunction;
// Object.keys with a fallback for pre-ES5 environments. The fallback filters
// with hasOwnProperty so that, like Object.keys, inherited enumerable
// properties are NOT included (the previous for-in loop also collected them).
var keysOf = Object.keys || function(object) {
  var result = [];

  for (var prop in object) {
    if (Object.prototype.hasOwnProperty.call(object, prop)) {
      result.push(prop);
    }
  }

  return result;
};
/**
`RSVP.hash` is similar to `RSVP.all`, but takes an object instead of an array
for its `promises` argument.
Returns a promise that is fulfilled when all the given promises have been
fulfilled, or rejected if any of them become rejected. The returned promise
is fulfilled with a hash that has the same key names as the `promises` object
argument. If any of the values in the object are not promises, they will
simply be copied over to the fulfilled object.
Example:
```javascript
var promises = {
myPromise: RSVP.resolve(1),
yourPromise: RSVP.resolve(2),
theirPromise: RSVP.resolve(3),
notAPromise: 4
};
RSVP.hash(promises).then(function(hash){
// hash here is an object that looks like:
// {
// myPromise: 1,
// yourPromise: 2,
// theirPromise: 3,
// notAPromise: 4
// }
});
````
If any of the `promises` given to `RSVP.hash` are rejected, the first promise
that is rejected will be given as as the first argument, or as the reason to
the rejection handler. For example:
```javascript
var promises = {
myPromise: RSVP.resolve(1),
rejectedPromise: RSVP.reject(new Error("rejectedPromise")),
anotherRejectedPromise: RSVP.reject(new Error("anotherRejectedPromise")),
};
RSVP.hash(promises).then(function(hash){
// Code here never runs because there are rejected promises!
}, function(reason) {
// reason.message === "rejectedPromise"
});
```
An important note: `RSVP.hash` is intended for plain JavaScript objects that
are just a set of keys and values. `RSVP.hash` will NOT preserve prototype
chains.
Example:
```javascript
function MyConstructor(){
this.example = RSVP.resolve("Example");
}
MyConstructor.prototype = {
protoProperty: RSVP.resolve("Proto Property")
};
var myObject = new MyConstructor();
RSVP.hash(myObject).then(function(hash){
// protoProperty will not be present, instead you will just have an
// object that looks like:
// {
// example: "Example"
// }
//
// hash.hasOwnProperty('protoProperty'); // false
// 'undefined' === typeof hash.protoProperty
});
```
@method hash
@for RSVP
@param {Object} promises
@param {String} label - optional string that describes the promise.
Useful for tooling.
@return {Promise} promise that is fulfilled when all properties of `promises`
have been fulfilled, or rejected if any of them become rejected.
*/
/**
 * Resolves every own-key value of `object`, fulfilling with an object of the
 * same keys; rejects with the first rejection reason. Non-thenable values
 * are copied through unchanged. The optional `label` is now forwarded to the
 * returned promise (previously it was silently dropped), matching all().
 */
function hash(object, label) {
  var results = {},
      keys = keysOf(object),
      remaining = keys.length;

  return new Promise(function(resolve, reject){
    var promise, prop;

    if (remaining === 0) {
      resolve({});
      return;
    }

    var resolver = function(prop) {
      return function(value) {
        resolveAll(prop, value);
      };
    };

    var resolveAll = function(prop, value) {
      results[prop] = value;
      if (--remaining === 0) {
        resolve(results);
      }
    };

    for (var i = 0, l = keys.length; i < l; i ++) {
      prop = keys[i];
      promise = object[prop];

      if (promise && isFunction(promise.then)) {
        promise.then(resolver(prop), reject, "RSVP: RSVP#hash");
      } else {
        resolveAll(prop, promise);
      }
    }
  }, label);
}
__exports__.hash = hash;
});
define("rsvp/instrument",
["./config","./utils","exports"],
function(__dependency1__, __dependency2__, __exports__) {
"use strict";
var config = __dependency1__.config;
var now = __dependency2__.now;
// Emits one instrumentation event ('created'/'chained'/'fulfilled'/'rejected')
// on the global config event bus for the given promise.
function instrument(eventName, promise, child) {
  // instrumentation should not disrupt normal usage.
  try {
    config.trigger(eventName, {
      guid: promise._guidKey + promise._id,
      eventName: eventName,
      detail: promise._detail,
      childGuid: child && promise._guidKey + child._id,
      label: promise._label,
      timeStamp: now()
    });
  } catch(error) {
    // Re-throw listener failures asynchronously so the caller is unaffected.
    setTimeout(function(){
      throw error;
    }, 0);
  }
}
__exports__.instrument = instrument;
});
define("rsvp/node",
["./promise","./all","exports"],
function(__dependency1__, __dependency2__, __exports__) {
"use strict";
var Promise = __dependency1__.Promise;
var all = __dependency2__.all;
var slice = Array.prototype.slice;
// Builds a node-style (err, value, ...) callback that settles a promise:
// an error rejects; one success value fulfills with it; several success
// values fulfill with them as an array.
function makeNodeCallbackFor(resolve, reject) {
  return function (error, value) {
    if (error) {
      reject(error);
    } else if (arguments.length > 2) {
      resolve(slice.call(arguments, 1));
    } else {
      resolve(value);
    }
  };
}
/**
`RSVP.denodeify` takes a "node-style" function and returns a function that
will return an `RSVP.Promise`. You can use `denodeify` in Node.js or the
browser when you'd prefer to use promises over using callbacks. For example,
`denodeify` transforms the following:
```javascript
var fs = require('fs');
fs.readFile('myfile.txt', function(err, data){
if (err) return handleError(err);
handleData(data);
});
```
into:
```javascript
var fs = require('fs');
var readFile = RSVP.denodeify(fs.readFile);
readFile('myfile.txt').then(handleData, handleError);
```
Using `denodeify` makes it easier to compose asynchronous operations instead
of using callbacks. For example, instead of:
```javascript
var fs = require('fs');
var log = require('some-async-logger');
fs.readFile('myfile.txt', function(err, data){
if (err) return handleError(err);
fs.writeFile('myfile2.txt', data, function(err){
if (err) throw err;
log('success', function(err) {
if (err) throw err;
});
});
});
```
You can chain the operations together using `then` from the returned promise:
```javascript
var fs = require('fs');
var denodeify = RSVP.denodeify;
var readFile = denodeify(fs.readFile);
var writeFile = denodeify(fs.writeFile);
var log = denodeify(require('some-async-logger'));
readFile('myfile.txt').then(function(data){
return writeFile('myfile2.txt', data);
}).then(function(){
return log('SUCCESS');
}).then(function(){
// success handler
}, function(reason){
// rejection handler
});
```
@method denodeify
@for RSVP
@param {Function} nodeFunc a "node-style" function that takes a callback as
its last argument. The callback expects an error to be passed as its first
argument (if an error occurred, otherwise null), and the value from the
operation as its second argument ("function(err, value){ }").
@param {Any} binding optional argument for binding the "this" value when
calling the `nodeFunc` function.
@return {Function} a function that wraps `nodeFunc` to return an
`RSVP.Promise`
*/
/**
 * Wraps a node-style `nodeFunc(args..., cb(err, value))` so it returns an
 * RSVP promise instead. Promise arguments are awaited before the call, and
 * `binding` (or the call-time `this`) is used as the receiver.
 * (Removed the unused `resolve, reject` locals that shadowed nothing useful.)
 */
function denodeify(nodeFunc, binding) {
  return function() {
    var nodeArgs = slice.call(arguments);
    var thisArg = this || binding;

    return new Promise(function(resolve, reject) {
      // Let any promise arguments settle, then invoke the node function with
      // a callback that settles this promise.
      all(nodeArgs).then(function(nodeArgs) {
        try {
          nodeArgs.push(makeNodeCallbackFor(resolve, reject));
          nodeFunc.apply(thisArg, nodeArgs);
        } catch(e) {
          reject(e);
        }
      });
    });
  };
}
__exports__.denodeify = denodeify;
});
define("rsvp/promise",
["./config","./events","./cast","./instrument","./utils","exports"],
function(__dependency1__, __dependency2__, __dependency3__, __dependency4__, __dependency5__, __exports__) {
"use strict";
var config = __dependency1__.config;
var EventTarget = __dependency2__.EventTarget;
var cast = __dependency3__.cast;
var instrument = __dependency4__.instrument;
var objectOrFunction = __dependency5__.objectOrFunction;
var isFunction = __dependency5__.isFunction;
var now = __dependency5__.now;
var guidKey = 'rsvp_' + now() + '-';
var counter = 0;
// Promise constructor: `resolver` receives (resolve, reject); `label` is an
// optional name used by the instrumentation hooks.
function Promise(resolver, label) {
  if (!isFunction(resolver)) {
    throw new TypeError('You must pass a resolver function as the first argument to the promise constructor');
  }

  if (!(this instanceof Promise)) {
    throw new TypeError("Failed to construct 'Promise': Please use the 'new' operator, this object constructor cannot be called as a function.");
  }

  this._id = counter++;
  this._label = label;
  // Flat array packed as [child, onFulfillment, onRejection, ...] triples;
  // see subscribe()/publish().
  this._subscribers = [];

  if (config.instrument) {
    instrument('created', this);
  }

  invokeResolver(resolver, this);
}
// Runs the user-supplied resolver; a throwing resolver rejects the promise
// instead of propagating the exception.
function invokeResolver(resolver, promise) {
  function resolvePromise(value) {
    resolve(promise, value);
  }

  function rejectPromise(reason) {
    reject(promise, reason);
  }

  try {
    resolver(resolvePromise, rejectPromise);
  } catch(e) {
    rejectPromise(e);
  }
}
function invokeCallback(settled, promise, callback, detail) {
var hasCallback = isFunction(callback),
value, error, succeeded, failed;
if (hasCallback) {
try {
value = callback(detail);
succeeded = true;
} catch(e) {
failed = true;
error = e;
}
} else {
value = detail;
succeeded = true;
}
if (handleThenable(promise, value)) {
return;
} else if (hasCallback && succeeded) {
resolve(promise, value);
} else if (failed) {
reject(promise, error);
} else if (settled === FULFILLED) {
resolve(promise, value);
} else if (settled === REJECTED) {
reject(promise, value);
}
}
var PENDING = void 0;
var SEALED = 0;
var FULFILLED = 1;
var REJECTED = 2;
function subscribe(parent, child, onFulfillment, onRejection) {
var subscribers = parent._subscribers;
var length = subscribers.length;
subscribers[length] = child;
subscribers[length + FULFILLED] = onFulfillment;
subscribers[length + REJECTED] = onRejection;
}
function publish(promise, settled) {
var child, callback, subscribers = promise._subscribers, detail = promise._detail;
if (config.instrument) {
instrument(settled === FULFILLED ? 'fulfilled' : 'rejected', promise);
}
for (var i = 0; i < subscribers.length; i += 3) {
child = subscribers[i];
callback = subscribers[i + settled];
invokeCallback(settled, child, callback, detail);
}
promise._subscribers = null;
}
Promise.prototype = {
constructor: Promise,
_id: undefined,
_guidKey: guidKey,
_label: undefined,
_state: undefined,
_detail: undefined,
_subscribers: undefined,
_onerror: function (reason) {
config.trigger('error', reason);
},
then: function(onFulfillment, onRejection, label) {
var promise = this;
this._onerror = null;
var thenPromise = new this.constructor(function() {}, label);
if (this._state) {
var callbacks = arguments;
config.async(function invokePromiseCallback() {
invokeCallback(promise._state, thenPromise, callbacks[promise._state - 1], promise._detail);
});
} else {
subscribe(this, thenPromise, onFulfillment, onRejection);
}
if (config.instrument) {
instrument('chained', promise, thenPromise);
}
return thenPromise;
},
'catch': function(onRejection, label) {
return this.then(null, onRejection, label);
},
'finally': function(callback, label) {
var constructor = this.constructor;
return this.then(function(value) {
return constructor.cast(callback()).then(function(){
return value;
});
}, function(reason) {
return constructor.cast(callback()).then(function(){
throw reason;
});
}, label);
}
};
Promise.cast = cast;
function handleThenable(promise, value) {
var then = null,
resolved;
try {
if (promise === value) {
throw new TypeError("A promises callback cannot return that same promise.");
}
if (objectOrFunction(value)) {
then = value.then;
if (isFunction(then)) {
then.call(value, function(val) {
if (resolved) { return true; }
resolved = true;
if (value !== val) {
resolve(promise, val);
} else {
fulfill(promise, val);
}
}, function(val) {
if (resolved) { return true; }
resolved = true;
reject(promise, val);
}, 'Locked onto ' + (promise._label || ' unknown promise'));
return true;
}
}
} catch (error) {
if (resolved) { return true; }
reject(promise, error);
return true;
}
return false;
}
function resolve(promise, value) {
if (promise === value) {
fulfill(promise, value);
} else if (!handleThenable(promise, value)) {
fulfill(promise, value);
}
}
function fulfill(promise, value) {
if (promise._state !== PENDING) { return; }
promise._state = SEALED;
promise._detail = value;
config.async(publishFulfillment, promise);
}
function reject(promise, reason) {
if (promise._state !== PENDING) { return; }
promise._state = SEALED;
promise._detail = reason;
config.async(publishRejection, promise);
}
function publishFulfillment(promise) {
publish(promise, promise._state = FULFILLED);
}
function publishRejection(promise) {
if (promise._onerror) {
promise._onerror(promise._detail);
}
publish(promise, promise._state = REJECTED);
}
__exports__.Promise = Promise;
});
define("rsvp/race",
  ["./promise","./utils","exports"],
  function(__dependency1__, __dependency2__, __exports__) {
    "use strict";
    /* global toString */
    var Promise = __dependency1__.Promise;
    var isArray = __dependency2__.isArray;
    /**
      `RSVP.race` allows you to watch a series of promises and act as soon as the
      first promise given to the `promises` argument fulfills or rejects.
      Example:
      ```javascript
      var promise1 = new RSVP.Promise(function(resolve, reject){
        setTimeout(function(){
          resolve("promise 1");
        }, 200);
      });
      var promise2 = new RSVP.Promise(function(resolve, reject){
        setTimeout(function(){
          resolve("promise 2");
        }, 100);
      });
      RSVP.race([promise1, promise2]).then(function(result){
        // result === "promise 2" because it was resolved before promise1
        // was resolved.
      });
      ```
      `RSVP.race` is deterministic in that only the state of the first completed
      promise matters. For example, even if other promises given to the `promises`
      array argument are resolved, but the first completed promise has become
      rejected before the other promises became fulfilled, the returned promise
      will become rejected:
      ```javascript
      var promise1 = new RSVP.Promise(function(resolve, reject){
        setTimeout(function(){
          resolve("promise 1");
        }, 200);
      });
      var promise2 = new RSVP.Promise(function(resolve, reject){
        setTimeout(function(){
          reject(new Error("promise 2"));
        }, 100);
      });
      RSVP.race([promise1, promise2]).then(function(result){
        // Code here never runs because there are rejected promises!
      }, function(reason){
        // reason.message === "promise 2" because promise 2 became rejected
        // before promise 1 became fulfilled
      });
      ```
      @method race
      @for RSVP
      @param {Array} promises array of promises to observe
      @param {String} label optional string for describing the promise returned.
      Useful for tooling.
      @return {Promise} a promise that becomes fulfilled with the value the first
      completed promises is resolved with if the first completed promise was
      fulfilled, or rejected with the reason that the first completed promise
      was rejected with.
    */
    function race(promises, label) {
      if (!isArray(promises)) {
        throw new TypeError('You must pass an array to race.');
      }
      return new Promise(function(resolve, reject) {
        // Settle with whichever entry settles first; a promise's resolve and
        // reject are one-shot, so later settlements are ignored. Plain
        // (non-thenable) values win immediately.
        var promise;
        for (var i = 0; i < promises.length; i++) {
          promise = promises[i];
          if (promise && typeof promise.then === 'function') {
            promise.then(resolve, reject, "RSVP: RSVP#race");
          } else {
            resolve(promise);
          }
        }
      }, label);
    }
    __exports__.race = race;
  });
define("rsvp/reject",
  ["./promise","exports"],
  function(__dependency1__, __exports__) {
    "use strict";
    var Promise = __dependency1__.Promise;
    /**
      Builds a promise that is immediately rejected with the supplied
      `reason`. Shorthand for constructing a promise whose resolver calls its
      rejection callback synchronously:
      ```javascript
      var promise = new RSVP.Promise(function(resolve, reject){
        reject(new Error('WHOOPS'));
      });
      promise.then(function(value){
        // Code here doesn't run because the promise is rejected!
      }, function(reason){
        // reason.message === 'WHOOPS'
      });
      ```
      With this helper the above collapses to:
      ```javascript
      var promise = RSVP.reject(new Error('WHOOPS'));
      ```
      @method reject
      @for RSVP
      @param {Any} reason value that the returned promise will be rejected with.
      @param {String} label optional string for identifying the returned promise.
      Useful for tooling.
      @return {Promise} a promise that will become rejected with the given
      `reason`.
    */
    function reject(reason, label) {
      return new Promise(function(unusedResolve, rejectPromise) {
        rejectPromise(reason);
      }, label);
    }
    __exports__.reject = reject;
  });
define("rsvp/resolve",
  ["./promise","exports"],
  function(__dependency1__, __exports__) {
    "use strict";
    var Promise = __dependency1__.Promise;
    /**
      Builds a promise that is immediately fulfilled with the supplied
      `value`. Shorthand for constructing a promise whose resolver calls its
      fulfillment callback synchronously:
      ```javascript
      var promise = new RSVP.Promise(function(resolve, reject){
        resolve(1);
      });
      promise.then(function(value){
        // value === 1
      });
      ```
      With this helper the above collapses to:
      ```javascript
      var promise = RSVP.resolve(1);
      ```
      @method resolve
      @for RSVP
      @param {Any} value value that the returned promise will be resolved with
      @param {String} label optional string for identifying the returned promise.
      Useful for tooling.
      @return {Promise} a promise that will become fulfilled with the given
      `value`
    */
    function resolve(value, label) {
      return new Promise(function(resolvePromise) {
        resolvePromise(value);
      }, label);
    }
    __exports__.resolve = resolve;
  });
define("rsvp/rethrow",
  ["exports"],
  function(__exports__) {
    "use strict";
    // NOTE(review): under "use strict" a plain function call leaves `this`
    // undefined, so in a browser (where `global` is undefined) `local` may be
    // undefined here unless the module loader invokes this factory with a
    // receiver — confirm against the loader's `requireModule` implementation.
    var local = (typeof global === "undefined") ? this : global;
    /**
      `RSVP.rethrow` will rethrow an error on the next turn of the JavaScript event
      loop in order to aid debugging.
      Promises A+ specifies that any exceptions that occur with a promise must be
      caught by the promises implementation and bubbled to the last handler. For
      this reason, it is recommended that you always specify a second rejection
      handler function to `then`. However, `RSVP.rethrow` will throw the exception
      outside of the promise, so it bubbles up to your console if in the browser,
      or domain/cause uncaught exception in Node. `rethrow` will throw the error
      again so the error can be handled by the promise.
      ```javascript
      function throws(){
        throw new Error('Whoops!');
      }
      var promise = new RSVP.Promise(function(resolve, reject){
        throws();
      });
      promise.fail(RSVP.rethrow).then(function(){
        // Code here doesn't run because the promise became rejected due to an
        // error!
      }, function (err){
        // handle the error here
      });
      ```
      The 'Whoops' error will be thrown on the next turn of the event loop
      and you can watch for it in your console. You can also handle it using a
      rejection handler given to `.then` or `.fail` on the returned promise.
      @method rethrow
      @for RSVP
      @param {Error} reason reason the promise became rejected.
      @throws Error
    */
    function rethrow(reason) {
      // Throw on a fresh stack so the error surfaces as an uncaught
      // exception (console / domain) outside the promise machinery...
      local.setTimeout(function() {
        throw reason;
      });
      // ...and rethrow synchronously so the promise chain still rejects.
      throw reason;
    }
    __exports__.rethrow = rethrow;
  });
define("rsvp/utils",
  ["exports"],
  function(__exports__) {
    "use strict";
    // Small type-test helpers shared across the RSVP modules.
    function isFunction(x) {
      return typeof x === "function";
    }
    // True for any value that can carry properties — i.e. a potential
    // thenable (non-null object or function).
    function objectOrFunction(x) {
      return isFunction(x) || (typeof x === "object" && x !== null);
    }
    function isArray(x) {
      return Object.prototype.toString.call(x) === "[object Array]";
    }
    // Date.now is not available in browsers < IE9:
    // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/now#Compatibility
    var now = Date.now || function() { return new Date().getTime(); };
    __exports__.objectOrFunction = objectOrFunction;
    __exports__.isFunction = isFunction;
    __exports__.isArray = isArray;
    __exports__.now = now;
  });
define("rsvp",
  ["./rsvp/events","./rsvp/promise","./rsvp/node","./rsvp/all","./rsvp/race","./rsvp/hash","./rsvp/rethrow","./rsvp/defer","./rsvp/config","./rsvp/resolve","./rsvp/reject","./rsvp/asap","exports"],
  function(eventsModule, promiseModule, nodeModule, allModule, raceModule,
           hashModule, rethrowModule, deferModule, configModule,
           resolveModule, rejectModule, asapModule, __exports__) {
    "use strict";
    // Entry-point module: gathers the pieces and re-exports the public
    // RSVP API surface.
    var EventTarget = eventsModule.EventTarget;
    var Promise = promiseModule.Promise;
    var denodeify = nodeModule.denodeify;
    var all = allModule.all;
    var race = raceModule.race;
    var hash = hashModule.hash;
    var rethrow = rethrowModule.rethrow;
    var defer = deferModule.defer;
    var config = configModule.config;
    var configure = configModule.configure;
    var resolve = resolveModule.resolve;
    var reject = rejectModule.reject;
    var asap = asapModule.asap;
    // Schedule promise callbacks with asap unless reconfigured later.
    config.async = asap; // default async is asap;
    function async(callback, arg) {
      config.async(callback, arg);
    }
    function on() {
      config.on.apply(config, arguments);
    }
    function off() {
      config.off.apply(config, arguments);
    }
    __exports__.Promise = Promise;
    __exports__.EventTarget = EventTarget;
    __exports__.all = all;
    __exports__.race = race;
    __exports__.hash = hash;
    __exports__.rethrow = rethrow;
    __exports__.defer = defer;
    __exports__.denodeify = denodeify;
    __exports__.configure = configure;
    __exports__.on = on;
    __exports__.off = off;
    __exports__.resolve = resolve;
    __exports__.reject = reject;
    __exports__.async = async;
  });
window.RSVP = requireModule('rsvp');
}(window)); | apache-2.0 |
annarev/tensorflow | tensorflow/compiler/xla/service/dynamic_dimension_inference.cc | 73525 | /* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/compiler/xla/service/dynamic_dimension_inference.h"
#include <vector>
#include "absl/container/flat_hash_map.h"
#include "absl/strings/match.h"
#include "tensorflow/compiler/xla/literal_util.h"
#include "tensorflow/compiler/xla/service/dfs_hlo_visitor_with_default.h"
#include "tensorflow/compiler/xla/service/dynamic_window_utils.h"
#include "tensorflow/compiler/xla/service/hlo_casting_utils.h"
#include "tensorflow/compiler/xla/service/hlo_computation.h"
#include "tensorflow/compiler/xla/service/hlo_instruction.h"
#include "tensorflow/compiler/xla/service/hlo_instructions.h"
#include "tensorflow/compiler/xla/service/hlo_module.h"
#include "tensorflow/compiler/xla/service/tuple_util.h"
#include "tensorflow/compiler/xla/service/while_util.h"
#include "tensorflow/compiler/xla/shape_tree.h"
#include "tensorflow/compiler/xla/shape_util.h"
#include "tensorflow/compiler/xla/status_macros.h"
#include "tensorflow/compiler/xla/util.h"
#include "tensorflow/compiler/xla/window_util.h"
namespace xla {
namespace {
// Replace `narrow_comp` with a new computation with `wide_shape` as input.
// Returns a computation equivalent to `narrow_comp` whose single parameter
// has the (tuple) shape `wide_shape`; the extra trailing tuple elements are
// accepted and ignored. Returns `narrow_comp` itself when the shapes already
// match. The narrow computation's body is inlined into the new one.
StatusOr<HloComputation*> WidenComputation(HloComputation* narrow_comp,
                                           const Shape& wide_shape) {
  TF_RET_CHECK(wide_shape.IsTuple());
  const Shape& narrow_shape = narrow_comp->parameter_instruction(0)->shape();
  if (Shape::Equal()(wide_shape, narrow_shape)) {
    // No need to widen the computation.
    return narrow_comp;
  }
  // Fresh computation with a single wide parameter.
  HloComputation* wide_comp = [&]() {
    HloComputation::Builder builder(absl::StrCat("wide.", narrow_comp->name()));
    builder.AddInstruction(
        HloInstruction::CreateParameter(0, wide_shape, "wide_param"));
    return narrow_comp->parent()->AddEmbeddedComputation(builder.Build());
  }();
  HloInstruction* wide_parameter = wide_comp->parameter_instruction(0);
  // Keep only the leading tuple elements the narrow computation expects.
  HloInstruction* truncated_parameter = TupleUtil::ExtractPrefix(
      wide_parameter, narrow_shape.tuple_shapes_size());
  HloInstruction* call_narrow_comp = wide_comp->AddInstruction(
      HloInstruction::CreateCall(narrow_comp->root_instruction()->shape(),
                                 {truncated_parameter}, narrow_comp));
  wide_comp->set_root_instruction(call_narrow_comp,
                                  /*accept_different_shape=*/true);
  // Inline so the result is a standalone computation, not a call wrapper.
  TF_RETURN_IF_ERROR(CallInliner::Inline(call_narrow_comp).status());
  return wide_comp;
}
} // namespace
// Visitor that walks one computation and, for each instruction, derives the
// dynamic sizes of its output dimensions from the dynamic sizes of its
// operands, recording the results into the parent DynamicDimensionInference.
class DynamicDimensionInferenceVisitor : public DfsHloVisitorWithDefault {
 public:
  explicit DynamicDimensionInferenceVisitor(
      const DynamicParameterBinding& param_bindings,
      DynamicDimensionInference* parent,
      DynamicDimensionInference::CustomCallInferenceHandler custom_call_handler)
      : param_bindings_(param_bindings),
        parent_(parent),
        custom_call_handler_(std::move(custom_call_handler)) {}

  // Any opcode without a dedicated handler below ends up here.
  Status DefaultAction(HloInstruction* hlo) override;

  // Runs this visitor over every instruction in `computation`, writing the
  // inferred dynamic sizes into `parent`.
  static Status Run(HloComputation* computation,
                    const DynamicParameterBinding& param_bindings,
                    DynamicDimensionInference* parent,
                    DynamicDimensionInference::CustomCallInferenceHandler
                        custom_call_handler = nullptr) {
    DynamicDimensionInferenceVisitor visitor(param_bindings, parent,
                                             std::move(custom_call_handler));
    return computation->Accept(&visitor);
  }

  // Per-opcode propagation rules.
  Status HandleParameter(HloInstruction* hlo) override;
  Status HandleReduce(HloInstruction* hlo) override;
  Status HandleDot(HloInstruction* hlo) override;
  Status HandleTuple(HloInstruction* hlo) override;
  Status HandleTranspose(HloInstruction* hlo) override;
  Status HandleDynamicReshape(HloInstruction* hlo) override;
  Status HandleReshape(HloInstruction* hlo) override;
  Status HandleSort(HloInstruction* hlo) override;
  Status HandlePad(HloInstruction* hlo) override;
  Status HandleCustomCall(HloInstruction* hlo) override;
  Status HandleBroadcast(HloInstruction* hlo) override;
  Status HandleGetDimensionSize(HloInstruction* hlo) override;
  Status HandleSetDimensionSize(HloInstruction* hlo) override;
  Status HandleSelect(HloInstruction* hlo) override;
  Status HandleConvolution(HloInstruction* hlo) override;
  Status HandleConcatenate(HloInstruction* hlo) override;
  Status HandleReduceWindow(HloInstruction* hlo) override;
  Status HandleReverse(HloInstruction* hlo) override;
  Status HandleSelectAndScatter(HloInstruction* hlo) override;
  Status HandleGetTupleElement(HloInstruction* hlo) override;
  Status HandleElementwiseUnary(HloInstruction* hlo) override;
  Status HandleElementwiseBinary(HloInstruction* hlo) override;
  Status HandleClamp(HloInstruction* hlo) override;
  Status HandleConditional(HloInstruction* hlo) override;
  Status HandleWhile(HloInstruction* hlo) override;
  Status HandleSlice(HloInstruction* hlo) override;
  Status HandleDynamicSlice(HloInstruction* hlo) override;
  Status HandleDynamicUpdateSlice(HloInstruction* hlo) override;
  Status HandleGather(HloInstruction* hlo) override;
  Status HandleScatter(HloInstruction* hlo) override;
  Status HandleDomain(HloInstruction* hlo) override;

 private:
  // Callback invoked for each dynamic dimension found on an operand of an
  // instruction: (operand, shape index within operand, dimension number,
  // operand position, the HLO holding the dynamic size).
  using OperandDynamicDimensionFn = std::function<Status(
      HloInstruction* operand, ShapeIndex index, int64 dimension,
      int64 operand_index, HloInstruction* dynamic_size)>;
  // Callback invoked for each dynamic dimension of an instruction itself.
  using DynamicDimensionFn = std::function<Status(
      ShapeIndex index, int64 dimension, HloInstruction* dynamic_size)>;

  // Handlers for the dynamic-shape convolution / windowing custom calls.
  Status HandleDynamicConvolutionForward(HloInstruction* hlo,
                                         int64 operand_index, int64 dimension,
                                         HloInstruction* dynamic_size);
  Status HandleDynamicConvolutionKernelGrad(HloInstruction* hlo,
                                            int64 operand_index,
                                            int64 dimension);
  Status HandleDynamicConvolutionInputGrad(HloInstruction* hlo,
                                           int64 operand_index,
                                           int64 dimension);
  Status HandleDynamicWindowSamePadding(HloInstruction* hlo,
                                        HloInstruction* dynamic_size,
                                        int64 operand_index, int64 dimension);

  // Iteration helpers over recorded dynamic dimensions.
  Status ForEachOperandDynamicDimension(HloInstruction* inst,
                                        const OperandDynamicDimensionFn&);
  Status ForEachDynamicDimensionInOperand(HloInstruction* inst,
                                          int64 operand_index,
                                          const OperandDynamicDimensionFn&);
  Status ForEachDynamicDimension(HloInstruction* inst,
                                 const DynamicDimensionFn& fn);

  // Pass through a dynamic dimension from the input to the output with the
  // same value and index in the shape. This is a helper function to handle
  // trivial instructions like elementwise operations.
  Status PassThroughDynamicDimension(HloInstruction*);

  // The dynamic parameter bindings of this computation.
  const DynamicParameterBinding& param_bindings_;

  // A pointer to DynamicDimensionInference, used to update the dynamic mapping.
  DynamicDimensionInference* parent_;

  // A handler for custom calls.
  DynamicDimensionInference::CustomCallInferenceHandler custom_call_handler_;
};
// Fallback handler: an opcode without a dedicated Handle* override cannot
// propagate dynamic dimensions, so report the first one encountered.
Status DynamicDimensionInferenceVisitor::DefaultAction(HloInstruction* hlo) {
  auto report_unimplemented = [&](HloInstruction* operand, ShapeIndex index,
                                  int64 dimension, int64 operand_index,
                                  HloInstruction* dynamic_size) {
    return UnimplementedStrCat(
        "Asked to propagate a dynamic dimension from hlo ", operand->name(),
        "@", index.ToString(), "@", dimension, " to hlo ", hlo->ToString(),
        ", which is not implemented.");
  };
  return ForEachOperandDynamicDimension(hlo, report_unimplemented);
}
// GTE forwards dynamic dimensions that live inside the selected tuple
// element, with the leading tuple index stripped off.
Status DynamicDimensionInferenceVisitor::HandleGetTupleElement(
    HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* operand, ShapeIndex index, int64 dimension,
               int64 operand_index, HloInstruction* dynamic_size) {
        // Dimensions in other tuple elements are unaffected by this GTE.
        if (hlo->tuple_index() != index[0]) {
          return Status::OK();
        }
        // Drop the leading tuple index; the remainder addresses the element.
        ShapeIndex element_index =
            ShapeIndexView(index).ConsumeFront().ToShapeIndex();
        parent_->SetDynamicSize(hlo, element_index, dimension, dynamic_size);
        return Status::OK();
      });
}
// A tuple wraps each operand, so an operand's dynamic dimension reappears in
// the output with the operand's position prepended to its shape index.
Status DynamicDimensionInferenceVisitor::HandleTuple(HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction*, ShapeIndex index, int64 dimension,
               int64 operand_index, HloInstruction* dynamic_size) {
        ShapeIndex tuple_index = index;
        tuple_index.push_front(operand_index);
        parent_->SetDynamicSize(hlo, tuple_index, dimension, dynamic_size);
        return Status::OK();
      });
}
// Broadcast keeps every operand dimension; dimensions(i) gives the output
// dimension that operand dimension i maps to, so forward the size there.
Status DynamicDimensionInferenceVisitor::HandleBroadcast(HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* operand, ShapeIndex index, int64 dimension,
               int64 operand_index, HloInstruction* dynamic_size) {
        const int64 output_dim = hlo->dimensions(dimension);
        parent_->SetDynamicSize(hlo, {}, output_dim, dynamic_size);
        return Status::OK();
      });
}
// Infers dynamic dimensions through custom calls. A handful of custom-call
// targets have dedicated dynamic-shape semantics; anything else is either
// delegated to `custom_call_handler_` or rejected as unimplemented.
Status DynamicDimensionInferenceVisitor::HandleCustomCall(HloInstruction* hlo) {
  if (hlo->custom_call_target() == "PadToStatic") {
    for (int64 i = 0; i < hlo->operand(0)->shape().rank(); ++i) {
      if (hlo->operand(0)->shape().is_dynamic_dimension(i)) {
        HloInstruction* dynamic_size =
            hlo->parent()->AddInstruction(HloInstruction::CreateGetTupleElement(
                ShapeUtil::MakeScalarShape(S32), hlo, i + 1));
        // PadToStatic converts a dynamic dimension to static dimension. It then
        // returns the padded data output and the dynamic sizes of input
        // dimensions.
        ShapeIndex data_output = {0};
        parent_->SetDynamicSize(hlo, data_output, i, dynamic_size);
      }
    }
    return Status::OK();
  }
  // A user-provided handler, when present, takes over all other targets.
  if (custom_call_handler_) {
    return custom_call_handler_(hlo, parent_);
  }
  if (hlo->custom_call_target() == "DynamicConvolutionForward") {
    // If input feature is dynamic and kernel feature is static, we can infer
    // that input feature is also static.
    // E.g.,:
    //   lhs = [B, X, Y, ?]
    //   rhs = [X, Y, I, O]
    //   dim_labels = b01f_01io
    // We can infer that the dynamic dimension in rhs is static I.
    const ConvolutionDimensionNumbers& dnums =
        hlo->convolution_dimension_numbers();
    HloInstruction* input_feature = parent_->GetDynamicSize(
        hlo->mutable_operand(0), {}, dnums.input_feature_dimension());
    HloInstruction* kernel_feature = parent_->GetDynamicSize(
        hlo->mutable_operand(1), {}, dnums.kernel_input_feature_dimension());
    if (input_feature != nullptr && kernel_feature == nullptr) {
      if (hlo->mutable_operand(0)->shape().dimensions(
              dnums.input_feature_dimension()) ==
          hlo->mutable_operand(1)->shape().dimensions(
              dnums.kernel_input_feature_dimension())) {
        // Clear the recorded dynamic size: the feature dim is provably
        // static. (Braces added: the if body is a multi-line statement.)
        parent_->SetDynamicSize(hlo->mutable_operand(0), {},
                                dnums.input_feature_dimension(), nullptr);
      }
    }
  }
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* operand, ShapeIndex index, int64 dimension,
               int64 operand_index, HloInstruction* dynamic_size) {
        // Resize custom call should propagate dynamic batch (0) and channel (3)
        // dimensions.
        if (hlo->custom_call_target() == "SliceToDynamic" ||
            hlo->custom_call_target() == "Sharding" ||
            (absl::StartsWith(hlo->custom_call_target(), "Resize") &&
             (dimension == 0 || dimension == 3))) {
          parent_->SetDynamicSize(hlo, {}, dimension, dynamic_size);
          return Status::OK();
        }
        if (hlo->custom_call_target() == "DynamicReduceWindowSamePadding") {
          if (hlo->operand_count() > 2) {
            return Unimplemented(
                "DynamicReduceWindowSamePadding doesn't support variadic "
                "reduce window %s",
                hlo->ToString());
          }
          return HandleDynamicWindowSamePadding(hlo, dynamic_size,
                                                operand_index, dimension);
        }
        if (hlo->custom_call_target() == "DynamicSelectAndScatterSamePadding") {
          if (operand_index == 1) {
            // Operand 0 (input) determines dynamic output size. We ignore the
            // dynamic size in the operand 1 (output gradient).
            return Status::OK();
          }
          parent_->SetDynamicSize(hlo, {}, dimension, dynamic_size);
          return Status::OK();
        }
        if (hlo->custom_call_target() == "DynamicConvolutionInputGrad") {
          return HandleDynamicConvolutionInputGrad(hlo, operand_index,
                                                   dimension);
        }
        if (hlo->custom_call_target() == "DynamicConvolutionKernelGrad") {
          return HandleDynamicConvolutionKernelGrad(hlo, operand_index,
                                                    dimension);
        }
        if (hlo->custom_call_target() == "DynamicConvolutionForward") {
          return HandleDynamicConvolutionForward(hlo, operand_index, dimension,
                                                 dynamic_size);
        }
        return Unimplemented(
            "CustomCall \"%s\" is not supported to have a dynamic dimension",
            hlo->custom_call_target());
      });
}
// Sort preserves every dimension. A key-only sort produces an array; a
// key/value sort produces a tuple whose elements mirror the operands, so the
// dynamic size lands on the matching tuple element.
Status DynamicDimensionInferenceVisitor::HandleSort(HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo,
      [&](HloInstruction* operand, ShapeIndex index, int64 dynamic_dimension,
          int64 operand_index, HloInstruction* dynamic_size) {
        HloSortInstruction* sort = Cast<HloSortInstruction>(hlo);
        if (sort->values_count() == 0) {
          parent_->SetDynamicSize(hlo, {}, dynamic_dimension, dynamic_size);
        } else {
          parent_->SetDynamicSize(hlo, {operand_index}, dynamic_dimension,
                                  dynamic_size);
        }
        return Status::OK();
      });
}
// Pad grows a dynamic dimension by the configured interior and edge padding.
// The new dynamic size is computed with scalar S32 HLOs so it is available
// at runtime.
Status DynamicDimensionInferenceVisitor::HandlePad(HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* operand, ShapeIndex index, int64 dimension,
               int64 operand_index, HloInstruction* dynamic_size) {
        if (operand_index != 0) {
          // operand 1 is the scalar padding value; it carries no dimensions.
          return Unimplemented(
              "Dynamic dimension on padding value is not supported");
        }
        const PaddingConfig_PaddingConfigDimension& padding_config =
            hlo->padding_config().dimensions(dimension);
        HloInstruction* dynamic_size_adjusted = dynamic_size;
        if (padding_config.interior_padding() != 0) {
          // Adjust for interior padding :
          // Size' = max((Size - 1), 0) * interior_padding + Size
          HloInstruction* one = hlo->parent()->AddInstruction(
              HloInstruction::CreateConstant(LiteralUtil::CreateR0<int32>(1)));
          HloInstruction* zero = hlo->parent()->AddInstruction(
              HloInstruction::CreateConstant(LiteralUtil::CreateR0<int32>(0)));
          HloInstruction* interior_padding = hlo->parent()->AddInstruction(
              HloInstruction::CreateConstant(LiteralUtil::CreateR0<int32>(
                  padding_config.interior_padding())));
          // Size - 1
          dynamic_size_adjusted =
              hlo->parent()->AddInstruction(HloInstruction::CreateBinary(
                  dynamic_size_adjusted->shape(), HloOpcode::kSubtract,
                  dynamic_size_adjusted, one));
          // max(Size - 1, 0) — guards against a zero-sized dimension.
          dynamic_size_adjusted =
              hlo->parent()->AddInstruction(HloInstruction::CreateBinary(
                  dynamic_size_adjusted->shape(), HloOpcode::kMaximum,
                  dynamic_size_adjusted, zero));
          // * interior_padding
          dynamic_size_adjusted =
              hlo->parent()->AddInstruction(HloInstruction::CreateBinary(
                  dynamic_size_adjusted->shape(), HloOpcode::kMultiply,
                  dynamic_size_adjusted, interior_padding));
          // + Size
          dynamic_size_adjusted =
              hlo->parent()->AddInstruction(HloInstruction::CreateBinary(
                  dynamic_size_adjusted->shape(), HloOpcode::kAdd,
                  dynamic_size_adjusted, dynamic_size));
        }
        // Edge padding (low + high) is a static additive adjustment.
        HloInstruction* adjustment = hlo->parent()->AddInstruction(
            HloInstruction::CreateConstant(LiteralUtil::CreateR0<int32>(
                padding_config.edge_padding_low() +
                padding_config.edge_padding_high())));
        dynamic_size_adjusted =
            hlo->parent()->AddInstruction(HloInstruction::CreateBinary(
                dynamic_size_adjusted->shape(), HloOpcode::kAdd,
                dynamic_size_adjusted, adjustment));
        parent_->SetDynamicSize(hlo, {}, dimension, dynamic_size_adjusted);
        return Status::OK();
      });
}
// Propagates a dynamic dimension of a reduce input to the reduce output.
// Reduced dimensions disappear; a dynamic non-reduced dimension keeps its
// dynamic size at its new (post-reduction) position.
Status DynamicDimensionInferenceVisitor::HandleReduce(HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* operand, ShapeIndex index, int64 dimension,
               int64 operand_index, HloInstruction* dynamic_size) {
        HloInstruction* reduce = hlo;
        int64 operand_count = reduce->operand_count();
        // Operands are laid out as [inputs..., init_values...].
        bool is_variadic_reduce = operand_count > 2;
        CHECK_EQ(operand_count % 2, 0);
        if (operand_index >= operand_count / 2) {
          // Init values doesn't have dynamic size.
          return Status::OK();
        }
        if ((absl::c_count(reduce->dimensions(), dimension) != 0)) {
          // Dimension is to be reduced, stop tracing.
          return Status::OK();
        }
        // Find out the new dynamic dimension after reduce by counting how
        // many lower-numbered dimensions survive the reduction.
        int64 dimensions_not_reduced_count = 0;
        for (int64 i = 0; i < operand->shape().rank(); ++i) {
          if (dimension == i) {
            if (is_variadic_reduce) {
              // The dimensions of all data operands of a variadic reduce have
              // to be the same. This means that if one operand of variadic
              // reduce has a dynamic dimension, we set all outputs to use the
              // same dynamic size in corresponding dimensions.
              // (Renamed loop variable: it used to shadow the outer `i`.)
              for (int64 result_id = 0; result_id < operand_count / 2;
                   ++result_id) {
                parent_->SetDynamicSize(reduce, {result_id},
                                        dimensions_not_reduced_count,
                                        dynamic_size);
              }
            } else {
              parent_->SetDynamicSize(reduce, {}, dimensions_not_reduced_count,
                                      dynamic_size);
            }
            return Status::OK();
          }
          if (absl::c_count(reduce->dimensions(), i) == 0) {
            dimensions_not_reduced_count++;
          }
        }
        return Status::OK();
      });
}
// Maps a dynamic dimension of a dot operand onto the corresponding output
// dimension. Contracting dimensions vanish; batch and free dimensions are
// relocated per the dot's output layout convention.
Status DynamicDimensionInferenceVisitor::HandleDot(HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* operand, ShapeIndex operand_shape_index,
               int64 operand_dimension, int64 operand_index,
               HloInstruction* dynamic_size) {
        // There are three types of dimensions in a dot:
        // A. batch dims
        // B. contracting dims
        // C. non-batch non-contracting dims.
        // The output dimensions of a dot has three parts with the following
        // order:
        // [(type A), (lhs type C), (rhs type C)]
        //
        // Note that both lhs and rhs have the same dimension sizes for batch,
        // but the dimension index could be different.
        //
        // Given one dynamic input dimension, either lhs or rhs, we use a
        // mapping to find the corresponding output dimension.
        HloInstruction* dot = hlo;
        const DotDimensionNumbers& dimension_numbers =
            dot->dot_dimension_numbers();
        // A map from the operand dimensions to result dimension.
        absl::flat_hash_map<int64, int64> result_dim_mapping;
        int64 current_result_dims = 0;
        bool lhs = operand_index == 0;
        // The first loop keep tracks of batch dimension. RHS and LHS could have
        // different batch dimension numbers.
        if (lhs) {
          for (int64 i : dimension_numbers.lhs_batch_dimensions()) {
            result_dim_mapping[i] = current_result_dims++;
          }
        } else {
          for (int64 i : dimension_numbers.rhs_batch_dimensions()) {
            result_dim_mapping[i] = current_result_dims++;
          }
        }
        // Handle dimensions in the lhs.
        for (int64 i = 0; i < dot->operand(0)->shape().rank(); i++) {
          // Look for non-contracting and non-batching dimension.
          if (absl::c_linear_search(
                  dimension_numbers.lhs_contracting_dimensions(), i)) {
            continue;
          }
          if (absl::c_linear_search(dimension_numbers.lhs_batch_dimensions(),
                                    i)) {
            continue;
          }
          // Only record the mapping when tracing an lhs dimension, but always
          // advance the output position: lhs free dims occupy it either way.
          if (lhs) {
            result_dim_mapping[i] = current_result_dims;
          }
          current_result_dims++;
        }
        // Handle dimensions in the rhs.
        for (int64 i = 0; i < dot->operand(1)->shape().rank(); i++) {
          // Look for non-contracting and non-batching dimension.
          if (absl::c_linear_search(
                  dimension_numbers.rhs_contracting_dimensions(), i)) {
            continue;
          }
          if (absl::c_linear_search(dimension_numbers.rhs_batch_dimensions(),
                                    i)) {
            continue;
          }
          if (!lhs) {
            result_dim_mapping[i] = current_result_dims;
          }
          current_result_dims++;
        }
        // Check if the operand dim is in the result shape. If so, add another
        // work item to trace that dimension.
        auto iter = result_dim_mapping.find(operand_dimension);
        if (iter != result_dim_mapping.end()) {
          parent_->SetDynamicSize(dot, {}, iter->second, dynamic_size);
        }
        return Status::OK();
      });
}
// Transpose relocates dimensions: dimensions()[i] names the operand dimension
// that output dimension i draws from, so invert that mapping to find where
// the dynamic dimension ends up.
Status DynamicDimensionInferenceVisitor::HandleTranspose(HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo,
      [&](HloInstruction* operand, ShapeIndex index, int64 dimension,
          int64 operand_index, HloInstruction* dynamic_size) -> Status {
        int64 output_dim = -1;
        for (int64 i = 0; i < hlo->dimensions().size(); ++i) {
          if (hlo->dimensions()[i] != dimension) {
            continue;
          }
          // A valid permutation maps each operand dimension exactly once.
          TF_RET_CHECK(output_dim == -1);
          output_dim = i;
        }
        parent_->SetDynamicSize(hlo, {}, output_dim, dynamic_size);
        return Status::OK();
      });
}
// Static-window convolution: only a dynamic batch dimension can be forwarded
// (batch passes straight through). Dynamic feature dimensions contract away;
// dynamic spatial dimensions are unsupported here (see the
// DynamicConvolution* custom calls for that case).
Status DynamicDimensionInferenceVisitor::HandleConvolution(
    HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* operand, ShapeIndex index, int64 dimension,
               int64 operand_index, HloInstruction* dynamic_size) {
        HloInstruction* conv = hlo;
        const ConvolutionDimensionNumbers& dimension_numbers =
            conv->convolution_dimension_numbers();
        if (operand_index == 0) {
          if (dimension == dimension_numbers.input_batch_dimension()) {
            // Batch is unchanged by convolution; forward the dynamic size.
            parent_->SetDynamicSize(conv, {},
                                    dimension_numbers.output_batch_dimension(),
                                    dynamic_size);
            return Status::OK();
          }
          if (dimension == dimension_numbers.input_feature_dimension()) {
            // Input feature is contracted; nothing to propagate.
            return Status::OK();
          }
        } else {
          if (dimension == dimension_numbers.kernel_input_feature_dimension()) {
            // Kernel input feature is contracted; nothing to propagate.
            return Status::OK();
          }
        }
        return Unimplemented("Dynamic Spatial Convolution is not supported: %s",
                             conv->ToString());
      });
}
// Computes the dynamic size of a concatenate along its concat dimension by
// summing the static operand sizes at compile time and emitting adds for the
// dynamic ones; all other dynamic dimensions pass through unchanged.
Status DynamicDimensionInferenceVisitor::HandleConcatenate(
    HloInstruction* hlo) {
  const int64 concat_dim = hlo->concatenate_dimension();
  // Partition the operands' concat-dim sizes into a compile-time sum of the
  // static ones and a list of run-time sizes for the dynamic ones.
  int64 static_size = 0;
  std::vector<HloInstruction*> dynamic_sizes;
  for (int64 i = 0; i < hlo->operand_count(); ++i) {
    HloInstruction* operand_dynamic_size =
        parent_->GetDynamicSize(hlo->mutable_operand(i), {}, concat_dim);
    if (operand_dynamic_size == nullptr) {
      static_size += hlo->operand(i)->shape().dimensions(concat_dim);
    } else {
      dynamic_sizes.push_back(operand_dynamic_size);
    }
  }
  // If any operand is dynamic along the concat dimension, the output is too:
  // its size is the static sum plus every dynamic contribution.
  if (!dynamic_sizes.empty()) {
    HloInstruction* total =
        hlo->parent()->AddInstruction(HloInstruction::CreateConstant(
            LiteralUtil::CreateR0<int32>(static_size)));
    for (HloInstruction* dynamic_dim : dynamic_sizes) {
      total = hlo->parent()->AddInstruction(HloInstruction::CreateBinary(
          total->shape(), HloOpcode::kAdd, total, dynamic_dim));
    }
    parent_->SetDynamicSize(hlo, {}, concat_dim, total);
  }
  // Dimensions other than the concat dimension simply pass through.
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* operand, ShapeIndex index, int64 dimension,
               int64 operand_index, HloInstruction* dynamic_size) {
        if (dimension == hlo->concatenate_dimension()) {
          return Status::OK();
        }
        parent_->SetDynamicSize(hlo, index, dimension, dynamic_size);
        return Status::OK();
      });
}
// GetDimensionSize never produces a dynamic output, so there is nothing to
// record for it.
Status DynamicDimensionInferenceVisitor::HandleGetDimensionSize(
    HloInstruction*) {
  // Dynamic dimension doesn't propagate through GetDimensionSize:
  //
  //   Input: F32[x, y, z]
  //     |
  //   GetDimensionSize(1): S32[]
  //
  // The returned value is a scalar, which doesn't have any dynamic dimension in
  // the shape (although the value contains the real size of the dynamic
  // dimension of the input).
  return Status::OK();
}
// Records the dynamic size introduced by a set-dimension-size instruction,
// unless the size operand is a constant equal to the dimension's static bound
// (in which case the dimension becomes static again). Other dynamic
// dimensions pass through from the operand.
Status DynamicDimensionInferenceVisitor::HandleSetDimensionSize(
    HloInstruction* hlo) {
  // Detect the "set to static size" case, e.g.:
  //
  //   size = s32[] constant(5)
  //   s32[2, 5] = set-dimension-size(s32[2,<=5]{1,0} %param, s32[] %size),
  //                                  dimensions={1}
  //
  // Here the result shape has no dynamic dimension.
  bool sets_static_size = false;
  const HloInstruction* size = hlo->operand(1);
  if (size->opcode() == HloOpcode::kConstant) {
    TF_RET_CHECK(size->shape().rank() == 0);
    if (size->literal().Get<int32>({}) ==
        hlo->shape().dimensions(hlo->dimension())) {
      sets_static_size = true;
    }
  }
  if (!sets_static_size) {
    // Record the dynamic size this instruction introduces on its target
    // dimension.
    parent_->SetDynamicSize(hlo, {}, hlo->dimension(), hlo->mutable_operand(1));
  }
  // Dynamic dimensions other than the one being set pass through unchanged.
  TF_RETURN_IF_ERROR(ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* operand, ShapeIndex index, int64 dimension,
               int64 operand_index, HloInstruction* dynamic_size) {
        if (dimension != hlo->dimension()) {
          parent_->SetDynamicSize(hlo, index, dimension, dynamic_size);
        }
        return Status::OK();
      }));
  return Status::OK();
}
// Propagates a dynamic dimension of the activations (operand 0) through a
// dynamic forward convolution: batch passes through, spatial dimensions get a
// windowed output size, and the input feature dimension disappears.
Status DynamicDimensionInferenceVisitor::HandleDynamicConvolutionForward(
    HloInstruction* hlo, int64 operand_index, int64 dimension,
    HloInstruction* dynamic_size) {
  TF_RET_CHECK(operand_index == 0);
  const ConvolutionDimensionNumbers& dnums =
      hlo->convolution_dimension_numbers();
  if (dimension == dnums.input_batch_dimension()) {
    // Batch dimension is propagated without any changes.
    parent_->SetDynamicSize(hlo, {}, dnums.output_batch_dimension(),
                            dynamic_size);
    return Status::OK();
  }
  for (int64 spatial_dim_index = 0;
       spatial_dim_index < dnums.input_spatial_dimensions_size();
       ++spatial_dim_index) {
    if (dimension != dnums.input_spatial_dimensions(spatial_dim_index)) {
      continue;
    }
    // Dynamic spatial dimension: compute the corresponding windowed output
    // size from the dynamic input size and the window configuration.
    WindowDimension window_dim = hlo->window().dimensions(spatial_dim_index);
    DynamicWindowDims dynamic_window_dims = GetWindowedOutputSize(
        dynamic_size, window_dim.size(), window_dim.window_dilation(),
        window_dim.stride(), hlo->padding_type());
    TF_RET_CHECK(window_dim.base_dilation() == 1);
    parent_->SetDynamicSize(hlo, {},
                            dnums.output_spatial_dimensions(spatial_dim_index),
                            dynamic_window_dims.output_size);
    return Status::OK();
  }
  // Input Feature dim disappears after convolution.
  return Status::OK();
}
// Propagates a dynamic dimension through a windowed op under SAME padding:
// a trivial window keeps the size unchanged, otherwise the windowed output
// size is computed from the dynamic input size.
Status DynamicDimensionInferenceVisitor::HandleDynamicWindowSamePadding(
    HloInstruction* hlo, HloInstruction* dynamic_size, int64 operand_index,
    int64 dimension) {
  const WindowDimension& window_dim = hlo->window().dimensions(dimension);
  if (window_util::IsTrivialWindowDimension(window_dim)) {
    // Trivial window: the dynamic size passes straight through.
    parent_->SetDynamicSize(hlo, {}, dimension, dynamic_size);
    return Status::OK();
  }
  DynamicWindowDims dynamic_window_dims = GetWindowedOutputSize(
      dynamic_size, window_dim.size(), window_dim.window_dilation(),
      window_dim.stride(), PaddingType::PADDING_SAME);
  parent_->SetDynamicSize(hlo, {}, dimension, dynamic_window_dims.output_size);
  return Status::OK();
}
// The dynamic output size of a convolution input-grad along `dimension` is
// the corresponding entry of the input_sizes operand (operand 0): slice it
// out and reshape it to a scalar.
Status DynamicDimensionInferenceVisitor::HandleDynamicConvolutionInputGrad(
    HloInstruction* hlo, int64 operand_index, int64 dimension) {
  HloComputation* comp = hlo->parent();
  HloInstruction* input_sizes = hlo->mutable_operand(0);
  // input_sizes must be a rank-1 S32 vector with one entry per output dim.
  TF_RET_CHECK(input_sizes->shape().rank() == 1) << hlo->ToString();
  TF_RET_CHECK(input_sizes->shape().element_type() == S32) << hlo->ToString();
  TF_RET_CHECK(input_sizes->shape().dimensions(0) ==
               hlo->shape().dimensions_size())
      << hlo->ToString();
  // Slice out element `dimension` and turn it into a scalar size.
  HloInstruction* size_slice = comp->AddInstruction(
      HloInstruction::CreateSlice(ShapeUtil::MakeShape(S32, {1}), input_sizes,
                                  {dimension}, {dimension + 1}, {1}));
  HloInstruction* scalar_size = comp->AddInstruction(
      HloInstruction::CreateReshape(ShapeUtil::MakeScalarShape(S32),
                                    size_slice));
  parent_->SetDynamicSize(hlo, {}, dimension, scalar_size);
  return Status::OK();
}
// The kernel gradient's shape is fully determined by the (static) kernel
// shape, so no dynamic dimension needs to be recorded.
Status DynamicDimensionInferenceVisitor::HandleDynamicConvolutionKernelGrad(
    HloInstruction* hlo, int64 operand_index, int64 dimension) {
  // Dynamic convolution kernel grad produces static shape outputs.
  return Status::OK();
}
// Copies every dynamic dimension of every operand onto the same position of
// the output, unmodified. Used for shape-preserving (e.g. elementwise) ops.
Status DynamicDimensionInferenceVisitor::PassThroughDynamicDimension(
    HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* /*operand*/, ShapeIndex shape_index, int64 dim,
               int64 /*operand_index*/, HloInstruction* dim_size) {
        parent_->SetDynamicSize(hlo, shape_index, dim, dim_size);
        return Status::OK();
      });
}
// kDomain preserves its operand's shape; dynamic dimensions pass through.
Status DynamicDimensionInferenceVisitor::HandleDomain(HloInstruction* hlo) {
  return PassThroughDynamicDimension(hlo);
}
// Elementwise unary ops preserve shape; dynamic dimensions pass through.
Status DynamicDimensionInferenceVisitor::HandleElementwiseUnary(
    HloInstruction* hlo) {
  return PassThroughDynamicDimension(hlo);
}
// kSelect preserves its operands' shape; dynamic dimensions pass through.
Status DynamicDimensionInferenceVisitor::HandleSelect(HloInstruction* hlo) {
  return PassThroughDynamicDimension(hlo);
}
// Elementwise binary ops preserve shape; dynamic dimensions pass through.
Status DynamicDimensionInferenceVisitor::HandleElementwiseBinary(
    HloInstruction* hlo) {
  return PassThroughDynamicDimension(hlo);
}
// kClamp preserves its operands' shape; dynamic dimensions pass through.
Status DynamicDimensionInferenceVisitor::HandleClamp(HloInstruction* hlo) {
  return PassThroughDynamicDimension(hlo);
}
// kDynamicReshape carries its output dimension sizes as explicit operands;
// record the size operand backing each dynamic output dimension.
Status DynamicDimensionInferenceVisitor::HandleDynamicReshape(
    HloInstruction* hlo) {
  auto* dynamic_reshape = Cast<HloDynamicReshapeInstruction>(hlo);
  const Shape& result_shape = hlo->shape();
  for (int64 dim = 0; dim < result_shape.rank(); ++dim) {
    if (result_shape.is_dynamic_dimension(dim)) {
      parent_->SetDynamicSize(hlo, {}, dim, dynamic_reshape->dim_sizes(dim));
    }
  }
  return Status::OK();
}
// Propagates dynamic dimensions through a static reshape. Input and output
// dimensions are matched in groups via CommonFactors; the dynamic size is
// forwarded unchanged when the group maps 1:1, divided when the input
// dimension is split, and scaled when it is combined with other dimensions.
Status DynamicDimensionInferenceVisitor::HandleReshape(HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo,
      [&](HloInstruction* operand, ShapeIndex index,
          int64 input_dynamic_dimension, int64 operand_index,
          HloInstruction* operand_dynamic_size) -> Status {
        HloInstruction* reshape = hlo;
        if (reshape->shape().rank() == 0) {
          // NOTE(review): VLOG(0) is effectively always-on logging; a higher
          // verbosity level may be more appropriate — confirm intent.
          VLOG(0) << "Reshaping a dynamic dimension into a scalar, which has "
                     "undefined behavior when input size is 0. The offending "
                     "instruction is: "
                  << reshape->ToString();
          return Status::OK();
        }
        auto common_factors = CommonFactors(operand->shape().dimensions(),
                                            reshape->shape().dimensions());
        int64 input_dim_start = -1;
        int64 input_dim_end = -1;
        int64 output_dim_start = -1;
        int64 output_dim_end = -1;
        // Find common_factors that the input belongs to.
        // common_factors is a list of (input_dim, output_dim) split points;
        // consecutive entries delimit a group of input dims that map onto a
        // group of output dims.
        for (int64 i = 0; i < common_factors.size() - 1; ++i) {
          auto start = common_factors[i];
          auto end = common_factors[i + 1];
          if (input_dynamic_dimension >= start.first &&
              input_dynamic_dimension < end.first) {
            // Found the common_factor group that the input_dim belongs to.
            input_dim_start = start.first;
            input_dim_end = end.first;
            output_dim_start = start.second;
            output_dim_end = end.second;
          }
        }
        VLOG(2) << "Input dim start: " << input_dim_start
                << " Input dim end: " << input_dim_end
                << " output dim start: " << output_dim_start
                << " output dim end: " << output_dim_end;
        if ((input_dim_end - input_dim_start) > 1 &&
            (output_dim_end - output_dim_start) > 1) {
          // We don't support the case when a dynamic dimension is both combined
          // with and split into other dimensions:
          //
          //  [x, yz]
          //     | Reshape
          //  [xy, z]
          //
          // TODO(yunxing): This can be supported by canonicalizing
          // the offending reshape into two reshapes:
          //
          //  [x,yz]
          //     | Reshape
          //  [x, y, z]
          //     | Reshape
          //  [xy, z]
          //
          return Unimplemented(
              "Dynamic input dimension to reshape that is both splitted and "
              "combined is not supported %s",
              hlo->ToString());
        }
        for (auto common_factor : common_factors) {
          // Expand common factor to include degenerated output dimensions.
          if (common_factor.first == input_dim_start) {
            output_dim_start = std::min(output_dim_start, common_factor.second);
          }
          if (common_factor.first == input_dim_end) {
            output_dim_end = std::max(output_dim_end, common_factor.second);
          }
        }
        int64 output_dynamic_dimension = -1;
        if (operand->shape().dimensions(input_dynamic_dimension) == 1) {
          // If dynamic dimension is 1, it can only be most-major or
          // most-minor.
          if (input_dynamic_dimension == 0) {
            output_dynamic_dimension = 0;
          }
          if (input_dynamic_dimension == operand->shape().rank() - 1) {
            output_dynamic_dimension = reshape->shape().rank() - 1;
          }
          if (output_dynamic_dimension == -1) {
            return Unimplemented(
                "Dynamic degenerated dimension that's not most-minor nor "
                "most-major is not supported %s",
                reshape->ToString());
          }
        }
        if (output_dynamic_dimension == -1 &&
            output_dim_end - output_dim_start == 1) {
          // Only one possible output dimension.
          output_dynamic_dimension = output_dim_start;
        }
        if (output_dynamic_dimension == -1 &&
            output_dim_end - output_dim_start > 1) {
          // One input dimension is split into multiple output dimensions.
          // Output dimension is decomposed from input most major dimension.
          // In this case, we don't know which one is dynamic, e.g., when we
          // have:
          //
          //  [<=a/c, c, b]
          //     | Reshape
          //  [<=a, b] // a is dynamic, has to be multiple of c.
          //     |  Reshape
          // [1, 1, ... , a/c, c, b]
          //
          // Any dimension from the first '1' to 'a/c' can be dynamic.
          //
          // We use the following logics to disambiguate:
          // 1. If the user sets "inferred_dimension", then use that as
          // dynamic dimension.
          // 2. If the one dimension in the reshape is dynamic, use that as
          // dynamic dimension.
          // E.g.:
          //     [<=4]
          //      |
          //   reshape
          //      |
          //   [1, <=2, 2]
          // We use second dim as dynamic dimension.
          //
          // 3. If all logics above cannot disambiguate, e.g.,:
          //
          //     [<=1]
          //      |
          //   reshape
          //      |
          //   [1, 1, 1]
          //
          //   We bail out and return an error.
          // TODO(yunxing): Further simplify this, remove 1. and fully rely
          // on 2.
          output_dynamic_dimension = reshape->inferred_dimension();
          if (output_dynamic_dimension == -1) {
            // Try find dynamic dimension from the result shape.
            for (int64 i = output_dim_start; i < output_dim_end; ++i) {
              if (reshape->shape().is_dynamic_dimension(i)) {
                output_dynamic_dimension = i;
              }
            }
          }
          if (output_dynamic_dimension == -1) {
            // Last resort: if exactly one output dim in the group is not of
            // size 1, it must be the dynamic one.
            std::vector<int64> output_non_degenerated;
            for (int64 i = output_dim_start; i < output_dim_end; ++i) {
              if (reshape->shape().dimensions(i) != 1) {
                output_non_degenerated.push_back(i);
              }
            }
            if (output_non_degenerated.size() == 1) {
              output_dynamic_dimension = output_non_degenerated[0];
            }
          }
          if (output_dynamic_dimension == -1) {
            return InvalidArgument(
                "Reshape's input dynamic dimension is decomposed into "
                "multiple output dynamic dimensions, but the constraint is "
                "ambiguous and XLA can't infer the output dimension %s. ",
                hlo->ToString());
          }
        }
        CHECK_NE(output_dynamic_dimension, -1);
        const int64 input_dim_size =
            operand->shape().dimensions(input_dynamic_dimension);
        const int64 output_dim_size =
            reshape->shape().dimensions(output_dynamic_dimension);
        VLOG(2) << "input_dim_size: " << input_dim_size
                << " output_dim_size: " << output_dim_size;
        if (input_dim_size == output_dim_size) {
          // Simply forward dynamic dimension.
          parent_->SetDynamicSize(reshape, {}, output_dynamic_dimension,
                                  operand_dynamic_size);
        }
        if (input_dim_size > output_dim_size) {
          // The input dimension is split: the dynamic size shrinks by the
          // (static) split factor.
          TF_RET_CHECK(input_dim_size % output_dim_size == 0)
              << reshape->ToString();
          const int64 divisor = input_dim_size / output_dim_size;
          HloInstruction* divisor_hlo =
              hlo->parent()->AddInstruction(HloInstruction::CreateConstant(
                  LiteralUtil::CreateR0<int32>(divisor)));
          HloInstruction* new_dynamic_size =
              hlo->parent()->AddInstruction(HloInstruction::CreateBinary(
                  operand_dynamic_size->shape(), HloOpcode::kDivide,
                  operand_dynamic_size, divisor_hlo));
          parent_->SetDynamicSize(reshape, {}, output_dynamic_dimension,
                                  new_dynamic_size);
        }
        if (input_dim_size < output_dim_size) {
          // Input dimension is combined with other input dimensions.
          //
          // Adjust the output size by the ratio of dynamic_input_dim /
          // static_input_dim.
          //
          // For example if we have  [<=3, 3] -> [9], if the dynamic size is 2,
          // the new output dynamic size is 9 / 3 * 2 = 6.
          //
          // If it turns out the second dimension is also dynamic:
          // [<=3, <=3] -> [9], and the dynamic size is also 2, the new output
          // dynamic size is 6 / 3 * 2 = 4.
          //
          //
          HloInstruction* output_dynamic_size =
              parent_->GetDynamicSize(reshape, {}, output_dynamic_dimension);
          if (output_dynamic_size == nullptr) {
            output_dynamic_size =
                hlo->parent()->AddInstruction(HloInstruction::CreateConstant(
                    LiteralUtil::CreateR0<int32>(output_dim_size)));
          }
          HloInstruction* divisor_hlo = hlo->parent()->AddInstruction(
              HloInstruction::CreateConstant(LiteralUtil::CreateR0<int32>(
                  operand->shape().dimensions(input_dynamic_dimension))));
          HloInstruction* new_dynamic_size =
              hlo->parent()->AddInstruction(HloInstruction::CreateBinary(
                  output_dynamic_size->shape(), HloOpcode::kDivide,
                  output_dynamic_size, divisor_hlo));
          new_dynamic_size =
              hlo->parent()->AddInstruction(HloInstruction::CreateBinary(
                  output_dynamic_size->shape(), HloOpcode::kMultiply,
                  new_dynamic_size, operand_dynamic_size));
          parent_->SetDynamicSize(reshape, {}, output_dynamic_dimension,
                                  new_dynamic_size);
        }
        return Status::OK();
      });
}
// Propagates dynamic dimensions through reduce-window: a trivial window
// keeps the size, otherwise the windowed output size (VALID padding) is
// computed from the dynamic input size.
Status DynamicDimensionInferenceVisitor::HandleReduceWindow(
    HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* operand, ShapeIndex index, int64 dimension,
               int64 operand_index, HloInstruction* dynamic_size) {
        const WindowDimension& window_dim = hlo->window().dimensions(dimension);
        if (window_util::IsTrivialWindowDimension(window_dim)) {
          // Trivial window: the dimension's size is unchanged.
          parent_->SetDynamicSize(hlo, {}, dimension, dynamic_size);
          return Status::OK();
        }
        DynamicWindowDims dynamic_window_dims = GetWindowedOutputSize(
            dynamic_size, window_dim.size(), window_dim.window_dilation(),
            window_dim.stride(), PaddingType::PADDING_VALID);
        parent_->SetDynamicSize(hlo, {}, dimension,
                                dynamic_window_dims.output_size);
        return Status::OK();
      });
}
// Select-and-scatter's output shape is determined by operand 0 (the input);
// dynamic dimensions on operand 1 (the output gradient) are ignored.
Status DynamicDimensionInferenceVisitor::HandleSelectAndScatter(
    HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* /*operand*/, ShapeIndex /*index*/, int64 dim,
               int64 operand_index, HloInstruction* dynamic_size) {
        if (operand_index != 1) {
          parent_->SetDynamicSize(hlo, {}, dim, dynamic_size);
        }
        return Status::OK();
      });
}
// A dynamic dimension survives a static slice only when the slice covers the
// entire dimension with unit stride; a partial slice yields a static size.
Status DynamicDimensionInferenceVisitor::HandleSlice(HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* operand, ShapeIndex /*index*/, int64 dimension,
               int64 /*operand_index*/, HloInstruction* dynamic_size) {
        const bool full_dimension_slice =
            hlo->slice_starts(dimension) == 0 &&
            hlo->slice_strides(dimension) == 1 &&
            hlo->slice_limits(dimension) ==
                operand->shape().dimensions(dimension);
        if (full_dimension_slice) {
          parent_->SetDynamicSize(hlo, {}, dimension, dynamic_size);
        }
        return Status::OK();
      });
}
// Dynamic-slice: full-dimension selection passes the dynamic size through,
// selecting a single element kills it, and anything in between is
// unsupported.
Status DynamicDimensionInferenceVisitor::HandleDynamicSlice(
    HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction*, ShapeIndex /*index*/, int64 dimension,
               int64 /*operand_index*/, HloInstruction* dynamic_size) {
        const int64 output_size = hlo->shape().dimensions(dimension);
        if (output_size == hlo->operand(0)->shape().dimensions(dimension)) {
          // Whole dimension selected: dynamicity propagates to the output.
          parent_->SetDynamicSize(hlo, {}, dimension, dynamic_size);
          return Status::OK();
        }
        if (output_size == 1) {
          // Slicing a single element out kills the dynamic dimension.
          return Status::OK();
        }
        return Unimplemented(
            "Dynamic dimension propagation on DynamicSlice where a partial "
            "dimension is selected %s",
            hlo->ToString());
      });
}
// Dynamic-update-slice: the output inherits the input's dynamic dimensions;
// a narrower dynamic update (partial update) does not affect the output
// shape, and a partially-selected dimension is unsupported.
Status DynamicDimensionInferenceVisitor::HandleDynamicUpdateSlice(
    HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo,
      [&](HloInstruction* /*operand*/, ShapeIndex /*index*/, int64 dimension,
          int64 operand_index, HloInstruction* dynamic_size) {
        const int64 input_dim_size =
            hlo->operand(0)->shape().dimensions(dimension);
        if (hlo->shape().dimensions(dimension) != input_dim_size) {
          return Unimplemented(
              "Dynamic dimension propagation on DynamicUpdateSlice where a "
              "partial dimension is selected %s",
              hlo->ToString());
        }
        // DUS(input=[A], update=[<=B]): if the update dim is smaller than the
        // input dim (B < A), this is a partial update and the update's
        // dynamic shape doesn't change the output's dynamic shape.
        const bool partial_update =
            operand_index == 1 &&
            hlo->operand(1)->shape().dimensions(dimension) < input_dim_size;
        if (!partial_update) {
          parent_->SetDynamicSize(hlo, {}, dimension, dynamic_size);
        }
        return Status::OK();
      });
}
// Reverse: dynamic sizes pass through on dimensions that are not reversed;
// reversing a dynamic dimension is unsupported.
Status DynamicDimensionInferenceVisitor::HandleReverse(HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo,
      [&](HloInstruction* /*operand*/, ShapeIndex /*index*/, int64 dimension,
          int64 /*operand_index*/, HloInstruction* dynamic_size) {
        const bool dimension_is_reversed =
            absl::c_linear_search(hlo->dimensions(), dimension);
        if (dimension_is_reversed) {
          return Unimplemented(
              "Dynamic dimension propagation on reversed dimension is not "
              "supported %s",
              hlo->ToString());
        }
        parent_->SetDynamicSize(hlo, {}, dimension, dynamic_size);
        return Status::OK();
      });
}
// Propagates dynamic dimensions through gather. On the data operand, only
// size-1 (dropped) or full-size (forwarded) slices are supported; on the
// indices operand, only batch dimensions are supported.
Status DynamicDimensionInferenceVisitor::HandleGather(HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction* operand, ShapeIndex /*index*/,
               int64 input_dynamic_dimension, int64 operand_index,
               HloInstruction* dynamic_size) {
        const GatherDimensionNumbers& gather_dims =
            hlo->gather_dimension_numbers();
        if (operand_index != 1) {
          // Dynamic dimension on the data (operand 0).
          if (hlo->gather_slice_sizes()[input_dynamic_dimension] == 1) {
            // Gathering a size 1 dimension out of a dynamic dimension removes
            // the dynamicity.
            return Status::OK();
          }
          if (hlo->gather_slice_sizes()[input_dynamic_dimension] ==
              operand->shape().dimensions(input_dynamic_dimension)) {
            // Gathering a full-sized dimension out of a dynamic dimension
            // propagates the dynamicity to output.
            // Adjust for slice dims collapsed before this dimension so the
            // output dimension index lines up.
            int64 output_dimension = input_dynamic_dimension;
            for (int64 collapsed_dim : gather_dims.collapsed_slice_dims()) {
              if (collapsed_dim < input_dynamic_dimension) {
                // This output dimension is collapsed.
                output_dimension--;
              }
            }
            parent_->SetDynamicSize(hlo, {}, output_dimension, dynamic_size);
            return Status::OK();
          }
          return Unimplemented(
              "Detects a dynamic dimension on the data input of gather, which "
              "is not supported: %s, %lld",
              hlo->ToString(), input_dynamic_dimension);
        }
        // Dynamic dimension on the indices (operand 1).
        // A mapping from output to input batch dim number. -1 means not a batch
        // dimension.
        int64 indices_rank = hlo->operand(1)->shape().rank();
        int64 output_rank = hlo->shape().rank();
        // indices_dim is an iterator over indices dimensions.
        int64 indices_dim = 0;
        // Find the corresponding batch dimension in the output.
        for (int64 output_dim = 0; output_dim < output_rank; ++output_dim) {
          if (!absl::c_linear_search(gather_dims.offset_dims(), output_dim)) {
            // Skips index vector dimension.
            if (indices_dim == gather_dims.index_vector_dim()) {
              indices_dim++;
            }
            if (indices_dim++ == input_dynamic_dimension) {
              parent_->SetDynamicSize(hlo, {}, output_dim, dynamic_size);
              return Status::OK();
            }
          }
        }
        CHECK(indices_dim == indices_rank);
        return Unimplemented(
            "Detects a non-batch dynamic dimension of gather, "
            "which is not supported: %s",
            hlo->ToString());
      });
}
// Rewrites a conditional so dynamic dimension sizes flow through it: sizes
// are appended as extra tuple inputs to each branch and as extra root tuple
// elements, then the conditional is rebuilt with the widened computations and
// the original users are redirected to a prefix-extracted result.
Status DynamicDimensionInferenceVisitor::HandleConditional(
    HloInstruction* hlo) {
  // Conditionals are handled by producing additional inputs and outputs of
  // the conditional instruction.
  std::vector<HloComputation*> new_branch_computations;
  std::vector<HloInstruction*> new_operands;
  // If the output of the conditional contains dynamic dimension. We send
  // dynamic dimension size out by adding additional root element. A mapping
  // from the root instruction's dynamic dimension index (represented by a shape
  // index as output index and a int64 dimension number) to output index
  // (represented by an int64) is tracked for the conditional instruction (all
  // branches should have the same mapping).
  ShapeTree<absl::flat_hash_map<int64, int64>> dynamic_output_mapping(
      hlo->shape());
  bool need_rewrite = false;
  for (int64 branch_index = 0; branch_index < hlo->branch_count();
       ++branch_index) {
    std::vector<HloInstruction*> operands_to_add;
    absl::flat_hash_map<HloInstruction*, int64>
        dynamic_size_to_operand_id_index_map;
    // Only look at branch_index + 1, the correct operand index for a
    // given branch.
    const int64 operand_index = branch_index + 1;
    int64 operand_count =
        hlo->operand(operand_index)->shape().tuple_shapes_size();
    // Prepare to pass dynamic dimension into the new computation and add
    // dynamic dimension sizes as parameters to the new tuple.
    TF_RETURN_IF_ERROR(ForEachDynamicDimensionInOperand(
        hlo, operand_index,
        [&](HloInstruction*, ShapeIndex, int64, int64,
            HloInstruction* dynamic_size) -> Status {
          TF_RET_CHECK(hlo->operand(operand_index)->shape().IsTuple())
              << "Only tuple typed inputs can have dynamic dimension. Please "
                 "file a bug against XLA team.";
          const HloInstruction* tuple_operand = hlo->operand(operand_index);
          for (int64 i = 0; i < tuple_operand->operand_count(); ++i) {
            // If the dynamic size is already an operand to the computation,
            // skip adding it to the computation input again.
            if (dynamic_size == tuple_operand->operand(i)) {
              dynamic_size_to_operand_id_index_map[dynamic_size] = i;
              return Status::OK();
            }
          }
          auto iter = dynamic_size_to_operand_id_index_map.find(dynamic_size);
          if (iter == dynamic_size_to_operand_id_index_map.end()) {
            operands_to_add.push_back(dynamic_size);
            dynamic_size_to_operand_id_index_map[dynamic_size] =
                operand_count++;
          }
          return Status::OK();
        }));
    HloInstruction* original_input = hlo->mutable_operand(operand_index);
    HloComputation* branch_computation = hlo->branch_computation(branch_index);
    HloComputation* new_computation = branch_computation;
    HloInstruction* new_operand = hlo->mutable_operand(operand_index);
    if (!operands_to_add.empty()) {
      // Widen the branch input tuple and the branch computation to accept the
      // appended dynamic sizes.
      TF_RET_CHECK(original_input->shape().IsTuple());
      need_rewrite = true;
      new_operand = TupleUtil::AppendSuffix(original_input, operands_to_add);
      TF_ASSIGN_OR_RETURN(
          new_computation,
          WidenComputation(branch_computation, new_operand->shape()));
    }
    // Set the dynamic dimensions for the newly created branch computation's
    // parameters so that the hlos inside the computation can see dynamic
    // dimensions.
    DynamicParameterBinding dynamic_parameter_binding;
    TF_RETURN_IF_ERROR(ForEachDynamicDimensionInOperand(
        hlo, operand_index,
        [&](HloInstruction*, ShapeIndex index, int64 dimension,
            int64 operand_index, HloInstruction* dynamic_size) {
          DynamicParameterBinding::DynamicParameter dynamic_parameter{
              0, {dynamic_size_to_operand_id_index_map[dynamic_size]}};
          DynamicParameterBinding::DynamicDimension dynamic_dimension{
              0, {index}, dimension};
          TF_RETURN_IF_ERROR(dynamic_parameter_binding.Bind(dynamic_parameter,
                                                            dynamic_dimension));
          return Status::OK();
        }));
    VLOG(2) << "dynamic_parameter_binding for conditional branch"
            << dynamic_parameter_binding;
    // Run inference inside the (possibly widened) branch computation.
    TF_RETURN_IF_ERROR(DynamicDimensionInferenceVisitor::Run(
        new_computation, dynamic_parameter_binding, parent_));
    std::vector<HloInstruction*> hlos_to_add_in_root;
    int64 original_tuple_count = hlo->shape().tuple_shapes_size();
    // There may be some dynamic dimensions coming out of the computation, wire
    // that into the root instruction as additional tuple elements.
    TF_RETURN_IF_ERROR(ForEachDynamicDimension(
        new_computation->root_instruction(),
        [&](ShapeIndex index, int64 dim,
            HloInstruction* dynamic_size) -> Status {
          TF_RET_CHECK(hlo->shape().IsTuple())
              << "Only tuple typed conditionals can have dynamic dimension. "
                 "Please file a bug against XLA team.";
          dynamic_output_mapping.mutable_element(index)->emplace(
              dim, original_tuple_count++);
          hlos_to_add_in_root.push_back(dynamic_size);
          return Status::OK();
        }));
    VLOG(2) << "hlos_to_add_in_root:" << hlos_to_add_in_root.size();
    if (!hlos_to_add_in_root.empty()) {
      need_rewrite = true;
      HloInstruction* new_branch_root = TupleUtil::AppendSuffix(
          new_computation->root_instruction(), hlos_to_add_in_root);
      new_computation->set_root_instruction(new_branch_root,
                                            /*accept_different_shape=*/true);
    }
    new_branch_computations.push_back(new_computation);
    new_operands.push_back(new_operand);
  }
  if (!need_rewrite) {
    return Status::OK();
  }
  // Create a new conditional with the new operations and computations.
  HloInstruction* new_conditional =
      hlo->parent()->AddInstruction(HloInstruction::CreateConditional(
          new_branch_computations[0]->root_instruction()->shape(),
          hlo->mutable_operand(0), new_branch_computations, new_operands));
  // Users of the original conditional see only the original tuple prefix.
  HloInstruction* new_conditional_extracted = TupleUtil::ExtractPrefix(
      new_conditional, hlo->shape().tuple_shapes_size());
  // Now set the dynamic dimensions of the newly created conditional.
  dynamic_output_mapping.ForEachElement(
      [&](const ShapeIndex& index,
          const absl::flat_hash_map<int64, int64>& dim_to_output) {
        for (auto iter : dim_to_output) {
          int64 dim = iter.first;
          int64 output_index = iter.second;
          HloInstruction* dynamic_size = hlo->parent()->AddInstruction(
              HloInstruction::CreateGetTupleElement(
                  ShapeUtil::MakeScalarShape(S32), new_conditional,
                  output_index));
          parent_->SetDynamicSize(new_conditional, index, dim, dynamic_size);
          parent_->SetDynamicSize(new_conditional_extracted, index, dim,
                                  dynamic_size);
        }
      });
  TF_RETURN_IF_ERROR(hlo->ReplaceAllUsesWith(new_conditional_extracted));
  // Remove the original instruction even if has side-effects.
  TF_RETURN_IF_ERROR(hlo->parent()->RemoveInstruction(hlo));
  SetVisited(*new_conditional);
  SetVisited(*new_conditional_extracted);
  return Status::OK();
}
// Scatter: the input operand (0) determines the output, so its dynamic
// dimensions pass through; dynamic update-window dimensions on the updates
// operand (2) are unsupported; everything else is collapsed away.
Status DynamicDimensionInferenceVisitor::HandleScatter(HloInstruction* hlo) {
  return ForEachOperandDynamicDimension(
      hlo,
      [&](HloInstruction* /*operand*/, ShapeIndex /*index*/, int64 dimension,
          int64 operand_index, HloInstruction* operand_dynamic_size) {
        if (operand_index == 0) {
          parent_->SetDynamicSize(hlo, {}, dimension, operand_dynamic_size);
          return Status::OK();
        }
        const ScatterDimensionNumbers& dnums =
            hlo->scatter_dimension_numbers();
        if (operand_index == 2 &&
            absl::c_linear_search(dnums.update_window_dims(), dimension)) {
          return Unimplemented(
              "Dynamic dimension of update window dims is not supported: %s",
              hlo->ToString());
        }
        // The dynamic dimension is collapsed and won't show up in the output.
        // Do nothing here.
        return Status::OK();
      });
}
// Rewrites a while loop so dynamic dimension sizes are carried through it:
// the sizes are threaded in as extra loop-carried tuple elements (via
// WhileUtil), inference is run on body and condition, and the body root is
// rebuilt so updated sizes flow back out of the loop.
Status DynamicDimensionInferenceVisitor::HandleWhile(HloInstruction* hlo) {
  // If the output of the kWhile contains dynamic dimension, we send
  // dynamic dimension size into the while body by adding additional root/body
  // element. A mapping from the root instruction's dynamic dimension index
  // (represented by a shape index as output index and an int64 dimension
  // number) to output index (represented by an int64) is tracked for the
  // conditional instruction.
  ShapeTree<absl::flat_hash_map<int64, int64>> dynamic_output_mapping(
      hlo->shape());
  std::vector<HloInstruction*> operands_to_add;
  const int64 original_tuple_count = hlo->shape().tuple_shapes_size();
  int64 operand_count = original_tuple_count;
  // Collect every dynamic size that must become a loop-carried value, and
  // remember which appended tuple index each (shape index, dim) maps to.
  TF_RETURN_IF_ERROR(ForEachOperandDynamicDimension(
      hlo, [&](HloInstruction*, ShapeIndex index, int64 dim, int64,
               HloInstruction* dynamic_size) {
        operands_to_add.push_back(dynamic_size);
        dynamic_output_mapping.mutable_element(index)->emplace(dim,
                                                               operand_count++);
        return Status::OK();
      }));
  DynamicParameterBinding binding_for_while;
  if (!operands_to_add.empty()) {
    // Only replace the while loop if there are new parameters to add.
    HloInstruction* old_tuple_operand = hlo->mutable_operand(0);
    TF_ASSIGN_OR_RETURN(
        WhileUtil::MakeInstructionsLiveInResult result,
        WhileUtil::MakeInstructionsLiveIn(hlo, operands_to_add));
    // WhileUtil creates a new while hlo and tuple. Update the dynamic size
    // mapping for the newly created tuple.
    HloInstruction* new_tuple_operand =
        result.new_while_instr->mutable_operand(0);
    parent_->CopyMapping(/*from=*/old_tuple_operand,
                         /*to=*/new_tuple_operand);
    hlo = result.new_while_instr;
    // We have replaced the while loop, now set the dynamic dimensions for the
    // newly created while loop so that the hlos that consumes the while loop
    // can see the dynamic dimensions. Also sets the dynamic parameter binding
    // for running inference in the while loop.
    TF_RETURN_IF_ERROR(ForEachOperandDynamicDimension(
        hlo,
        [&](HloInstruction*, ShapeIndex index, int64 dimension,
            int64 operand_index, HloInstruction* dynamic_size) -> Status {
          TF_RET_CHECK(!operands_to_add.empty());
          const int64 output_dynamic_size_index =
              dynamic_output_mapping.element(index).at(dimension);
          DynamicParameterBinding::DynamicParameter dynamic_parameter{
              operand_index, {output_dynamic_size_index}};
          DynamicParameterBinding::DynamicDimension dynamic_dimension{
              operand_index, index, dimension};
          TF_RETURN_IF_ERROR(
              binding_for_while.Bind(dynamic_parameter, dynamic_dimension));
          // This is the updated output dynamic size coming out of hlo while
          // loop.
          HloInstruction* output_dynamic_size = hlo->parent()->AddInstruction(
              HloInstruction::CreateGetTupleElement(
                  ShapeUtil::MakeScalarShape(S32), hlo,
                  output_dynamic_size_index));
          parent_->SetDynamicSize(result.replacement_instr, index, dimension,
                                  output_dynamic_size);
          return Status::OK();
        }));
    // Set the replacement instruction as visited to avoid visiting it again.
    SetVisited(*result.replacement_instr);
  }
  // Run inference in while body and condition.
  TF_RETURN_IF_ERROR(DynamicDimensionInferenceVisitor::Run(
      hlo->while_body(), binding_for_while, parent_));
  TF_RETURN_IF_ERROR(DynamicDimensionInferenceVisitor::Run(
      hlo->while_condition(), binding_for_while, parent_));
  if (operands_to_add.empty()) {
    // No dynamic dimension in the inputs and outputs.
    return Status::OK();
  }
  // The dynamic dimension size could have been changed in the loop body (e.g, A
  // loop that inserts items in a stack, the stack size increases with each
  // iteration). Rewrite the dynamic dimension size at the root.
  HloInstruction* body_root = hlo->while_body()->root_instruction();
  std::vector<HloInstruction*> new_root_operands(body_root->operand_count(),
                                                 nullptr);
  // Original non-dynamic-dim operands of root are pass-through.
  for (int64 i = 0; i < original_tuple_count; ++i) {
    new_root_operands[i] =
        hlo->while_body()->AddInstruction(HloInstruction::CreateGetTupleElement(
            body_root->shape().tuple_shapes(i), body_root, i));
  }
  // Add dynamic dimension size as new parameters.
  TF_RETURN_IF_ERROR(ForEachDynamicDimension(
      hlo->while_body()->root_instruction(),
      [&](ShapeIndex index, int64 dim, HloInstruction* dynamic_size) -> Status {
        const int64 output_index =
            dynamic_output_mapping.element(index).at(dim);
        new_root_operands[output_index] = dynamic_size;
        return Status::OK();
      }));
  // Every slot of the rebuilt root must be populated.
  for (auto operand : new_root_operands) {
    TF_RET_CHECK(operand != nullptr);
  }
  HloInstruction* new_body_root = hlo->while_body()->AddInstruction(
      HloInstruction::CreateTuple(new_root_operands));
  hlo->while_body()->set_root_instruction(new_body_root);
  return Status::OK();
}
Status DynamicDimensionInferenceVisitor::HandleParameter(HloInstruction* hlo) {
  // For each dynamic-dimension binding that targets this parameter, locate
  // the scalar size value inside the bound size parameter and record it for
  // the target (parameter, shape-index, dimension) triple.
  return param_bindings_.ForEachBinding(
      [&](const DynamicParameterBinding::DynamicParameter& dynamic_parameter,
          const DynamicParameterBinding::DynamicDimension& dynamic_dimension) {
        // Bindings for other parameters are handled when those parameters
        // are visited.
        if (dynamic_dimension.parameter_num != hlo->parameter_number()) {
          return Status::OK();
        }
        HloComputation* computation = hlo->parent();
        HloInstruction* target_parameter =
            computation->parameter_instruction(dynamic_dimension.parameter_num);
        HloInstruction* dynamic_size =
            computation->parameter_instruction(dynamic_parameter.parameter_num);
        // Drill into the size-carrying parameter with get-tuple-element ops,
        // one per index component, until the scalar size is reached.
        for (int64 i : dynamic_parameter.parameter_index) {
          dynamic_size =
              computation->AddInstruction(HloInstruction::CreateGetTupleElement(
                  ShapeUtil::GetSubshape(dynamic_size->shape(), {i}),
                  dynamic_size, i));
        }
        parent_->SetDynamicSize(target_parameter,
                                dynamic_dimension.parameter_index,
                                dynamic_dimension.dimension, dynamic_size);
        return Status::OK();
      });
}
Status DynamicDimensionInferenceVisitor::ForEachDynamicDimension(
    HloInstruction* inst, const DynamicDimensionFn& fn) {
  // Invokes `fn` once per dynamic dimension recorded for `inst`, passing the
  // shape index, the dimension number, and the HLO carrying its runtime size.
  const auto dims_it = parent_->per_hlo_dynamic_dimensions_.find(inst);
  if (dims_it == parent_->per_hlo_dynamic_dimensions_.end()) {
    // No dynamic dimensions recorded for this instruction.
    return Status::OK();
  }
  for (const auto& dd : dims_it->second) {
    HloInstruction* size_hlo =
        parent_->GetDynamicSize(dd.inst, dd.index, dd.dim);
    TF_RETURN_IF_ERROR(fn(dd.index, dd.dim, size_hlo));
  }
  return Status::OK();
}
Status DynamicDimensionInferenceVisitor::ForEachDynamicDimensionInOperand(
    HloInstruction* inst, int64 operand_index,
    const OperandDynamicDimensionFn& fn) {
  // Visits every dynamic dimension recorded for one operand of `inst`;
  // `fn` additionally receives the operand's position within `inst`.
  const auto dims_it =
      parent_->per_hlo_dynamic_dimensions_.find(inst->operand(operand_index));
  if (dims_it == parent_->per_hlo_dynamic_dimensions_.end()) {
    // The operand has no dynamic dimensions.
    return Status::OK();
  }
  for (const auto& dd : dims_it->second) {
    HloInstruction* size_hlo =
        parent_->GetDynamicSize(dd.inst, dd.index, dd.dim);
    TF_RETURN_IF_ERROR(fn(dd.inst, dd.index, dd.dim, operand_index, size_hlo));
  }
  return Status::OK();
}
Status DynamicDimensionInferenceVisitor::ForEachOperandDynamicDimension(
    HloInstruction* inst, const OperandDynamicDimensionFn& fn) {
  // Applies `fn` to every dynamic dimension appearing in any operand of
  // `inst`, visiting operands in order.
  const int64 num_operands = inst->operand_count();
  for (int64 operand_index = 0; operand_index != num_operands;
       ++operand_index) {
    TF_RETURN_IF_ERROR(
        ForEachDynamicDimensionInOperand(inst, operand_index, fn));
  }
  return Status::OK();
}
void DynamicDimensionInference::SetDynamicSize(HloInstruction* inst,
                                               const ShapeIndex& index,
                                               int64 dim,
                                               HloInstruction* size) {
  // Records that dimension `dim` of `inst`'s subshape at `index` is dynamic,
  // with its runtime size carried by the HLO `size`.
  VLOG(1) << "Set dimension inst " << inst->ToString() << " index "
          << index.ToString() << "@" << dim << " to " << size->ToShortString();
  Shape subshape = ShapeUtil::GetSubshape(inst->shape(), index);
  // Only array (leaf) subshapes have dimensions; validate the request.
  CHECK(!subshape.IsTuple()) << "Can't set a tuple shape to dynamic dimension";
  CHECK(dim < subshape.rank() && dim >= 0)
      << "Asked to set invalid dynamic dimension. Shape: "
      << subshape.ToString() << ", Dimension: " << dim;
  DynamicDimension dynamic_dimension{inst, index, dim};
  // Updating a dynamic dimension twice overwrites the previous one.
  dynamic_mapping_[dynamic_dimension] = size;
  // Also index the dimension by instruction for fast per-HLO iteration.
  auto iter = per_hlo_dynamic_dimensions_.try_emplace(inst);
  iter.first->second.emplace(dynamic_dimension);
}
void DynamicDimensionInference::CopyMapping(HloInstruction* from,
                                            HloInstruction* to) {
  // Re-registers every dynamic dimension known for `from` under `to`, reusing
  // the same size-carrying instructions.
  const auto mapping_it = per_hlo_dynamic_dimensions_.find(from);
  if (mapping_it == per_hlo_dynamic_dimensions_.end()) {
    return;  // Nothing dynamic to carry over.
  }
  for (const auto& dd : mapping_it->second) {
    SetDynamicSize(to, dd.index, dd.dim,
                   GetDynamicSize(dd.inst, dd.index, dd.dim));
  }
}
/* static */
StatusOr<DynamicDimensionInference> DynamicDimensionInference::Run(
    HloModule* module, CustomCallInferenceHandler custom_call_handler) {
  // Entry point: constructs an inference object for `module` and runs the
  // full analysis, returning the populated result (or the first error).
  VLOG(2) << "Param Config " << module->dynamic_parameter_binding().ToString();
  DynamicDimensionInference inference(module, std::move(custom_call_handler));
  TF_RETURN_IF_ERROR(inference.AnalyzeDynamicDimensions());
  return inference;
}
string DynamicDimensionInference::ToString() const {
  // Debug dump: one line per recorded dynamic dimension mapping.
  std::vector<string> pieces;
  pieces.push_back("DynamicDimensionInference: ");
  for (const auto& mapping : dynamic_mapping_) {
    const DynamicDimension& dynamic_dimension = mapping.first;
    pieces.push_back(absl::StrFormat(
        " -- instruction %s at %s has dim %lld as dynamic"
        " dimension, which is represented by instruction %s",
        dynamic_dimension.inst->ToString(), dynamic_dimension.index.ToString(),
        dynamic_dimension.dim, mapping.second->ToString()));
  }
  return absl::StrJoin(pieces, "\n");
}
// Constructor: stores the module under analysis and an optional handler used
// to infer dynamic dimensions through custom-call instructions.
DynamicDimensionInference::DynamicDimensionInference(
    HloModule* module, CustomCallInferenceHandler custom_call_handler)
    : module_(module), custom_call_handler_(std::move(custom_call_handler)) {}
Status DynamicDimensionInference::AnalyzeDynamicDimensions() {
  // Runs the visitor over the entry computation, seeded with the module's
  // dynamic parameter bindings; nested computations are visited as the
  // visitor encounters their callers.
  return DynamicDimensionInferenceVisitor::Run(
      module_->entry_computation(), module_->dynamic_parameter_binding(), this,
      custom_call_handler_);
}
void DynamicDimensionInference::ReplaceAllDynamicDimensionUsesWith(
    HloInstruction* replace, HloInstruction* with) {
  // Both the old and the new size-carrying instruction must be scalar S32,
  // the canonical representation of a dynamic dimension size.
  CHECK(Shape::Equal().IgnoreLayout()(replace->shape(),
                                      ShapeUtil::MakeScalarShape(S32)));
  CHECK(Shape::Equal().IgnoreLayout()(with->shape(),
                                      ShapeUtil::MakeScalarShape(S32)));
  // Rewrite every mapping entry that currently points at `replace`.
  for (auto& entry : dynamic_mapping_) {
    if (entry.second == replace) {
      entry.second = with;
    }
  }
}
Status DynamicDimensionInference::ForwardDynamicSize(HloInstruction* inst,
                                                     HloInstruction* new_inst,
                                                     const ShapeIndex& index) {
  // Copies the dynamic-size mapping of `inst` at `index` onto `new_inst`, so
  // a replacement instruction inherits the dynamic dimensions of the
  // instruction it replaces. The two shapes must match exactly.
  CHECK(Shape::Equal()(inst->shape(), new_inst->shape()));

  for (int64 dim = 0; dim < inst->shape().rank(); ++dim) {
    DynamicDimension dynamic_dimension_new{new_inst, index, dim};
    DynamicDimension dynamic_dimension{inst, index, dim};
    auto iter = dynamic_mapping_.find(dynamic_dimension);
    if (iter != dynamic_mapping_.end()) {
      dynamic_mapping_.insert({dynamic_dimension_new, iter->second});
      // FIX: this iterator previously reused the name `iter`, shadowing the
      // map lookup above within the same scope; renamed for clarity.
      auto per_hlo_iter = per_hlo_dynamic_dimensions_.try_emplace(new_inst);
      per_hlo_iter.first->second.emplace(dynamic_dimension_new);
    }
  }
  return Status::OK();
}
bool DynamicDimensionInference::HasDynamicDimension(
    HloInstruction* inst) const {
  // Scans every leaf subshape of `inst` for a recorded dynamic size.
  bool found = false;
  ShapeUtil::ForEachSubshape(
      inst->shape(), [&](const Shape& subshape, const ShapeIndex& index) {
        if (subshape.IsTuple()) {
          return;  // Only array (leaf) shapes carry dimensions.
        }
        for (int64 dim = 0; dim < subshape.dimensions_size(); ++dim) {
          if (GetDynamicSize(inst, index, dim) != nullptr) {
            found = true;
          }
        }
      });
  return found;
}
HloInstruction* DynamicDimensionInference::GetDynamicSize(
    HloInstruction* inst, const ShapeIndex& index, int64 dim) const {
  // Returns the HLO holding the runtime size of `dim` at `index`, or nullptr
  // when the dimension is static (no mapping recorded).
  const auto it = dynamic_mapping_.find(DynamicDimension{inst, index, dim});
  return it == dynamic_mapping_.end() ? nullptr : it->second;
}
std::vector<HloInstruction*> DynamicDimensionInference::GetDynamicSizes(
    HloInstruction* inst, const ShapeIndex& index) const {
  // Returns one entry per dimension of the subshape at `index`; entries are
  // nullptr for static dimensions and the size-carrying HLO otherwise.
  CHECK(ShapeUtil::IndexIsValid(inst->shape(), index));
  const int64 rank = ShapeUtil::GetSubshape(inst->shape(), index).rank();
  std::vector<HloInstruction*> result(rank, nullptr);
  for (int64 i = 0; i < rank; ++i) {
    // BUG FIX: previously this called GetDynamicSize(inst, {}, i), ignoring
    // the `index` argument and always reading the root shape's dimensions.
    // Query the requested subshape instead.
    result[i] = GetDynamicSize(inst, index, i);
  }
  return result;
}
} // namespace xla
| apache-2.0 |
czyzby/gdx-lml | websocket/src/main/java/com/github/czyzby/websocket/WebSockets.java | 4517 | package com.github.czyzby.websocket;
import com.badlogic.gdx.Gdx;
import com.github.czyzby.websocket.data.WebSocketException;
import com.github.czyzby.websocket.serialization.Serializer;
import com.github.czyzby.websocket.serialization.impl.JsonSerializer;
/** Utilities for web sockets.
 *
 * @author MJ */
public class WebSockets {
    /** ws:// */
    public static final String WEB_SOCKET_ADDRESS_PREFIX = "ws://";
    /** wss:// */
    public static final String SECURE_WEB_SOCKET_ADDRESS_PREFIX = "wss://";

    /** Platform-specific provider of {@link WebSocket} instances. Must be assigned by a platform initializer before
     * {@link #newSocket(String)} is invoked. */
    protected static WebSocketFactory FACTORY;

    /** Assigned as the initial {@link Serializer} to new {@link WebSocket} instances. Handles serializing of objects to
     * strings or byte arrays when using {@link WebSocket#send(Object)} method. By default, serializes objects to JSON
     * format using {@link JsonSerializer}. */
    public static Serializer DEFAULT_SERIALIZER = new JsonSerializer();

    private WebSockets() {
        // Utility class: not meant to be instantiated.
    }

    /** @param url a valid URL.
     * @return {@link WebSocket} instance, allowing to connect with the passed URL.
     * @throws WebSocketException if no platform-specific factory was registered.
     * @see #toWebSocketUrl(String, int) */
    public static WebSocket newSocket(final String url) {
        if (FACTORY == null) {
            throw new WebSocketException("Web sockets are not initiated.");
        }
        return FACTORY.newWebSocket(url);
    }

    /** @param host IP or domain name of the server.
     * @param port port of the application. Will be validated.
     * @return web socket URL.
     * @throws WebSocketException if port is invalid. */
    public static String toWebSocketUrl(final String host, final int port) {
        return toWebSocketUrl(host, port, null);
    }

    /** @param host IP or domain name of the server.
     * @param port port of the application. Will be validated.
     * @param contentPath path at which the connection is open. Optional.
     * @return web socket URL.
     * @throws WebSocketException if port is invalid. */
    public static String toWebSocketUrl(final String host, final int port, final String contentPath) {
        return toUrl(WEB_SOCKET_ADDRESS_PREFIX, host, port, contentPath);
    }

    /** @param host IP or domain name of the server.
     * @param port port of the application. Will be validated.
     * @return secure web socket URL.
     * @throws WebSocketException if port is invalid. */
    public static String toSecureWebSocketUrl(final String host, final int port) {
        return toSecureWebSocketUrl(host, port, null);
    }

    /** @param host IP or domain name of the server.
     * @param port port of the application. Will be validated.
     * @param contentPath path at which the connection is open. Optional.
     * @return secure web socket URL.
     * @throws WebSocketException if port is invalid. */
    public static String toSecureWebSocketUrl(final String host, final int port, final String contentPath) {
        return toUrl(SECURE_WEB_SOCKET_ADDRESS_PREFIX, host, port, contentPath);
    }

    private static String toUrl(final String prefix, final String host, final int port, final String contentPath) {
        // Guard clause: fail fast on an out-of-range port before building the URL.
        if (!isPortValid(port)) {
            throw new WebSocketException("Invalid port: " + port);
        }
        return prefix + host + ":" + port + "/" + (contentPath == null ? "" : contentPath);
    }

    /** @param port will be validated.
     * @return true if the port is within the valid TCP range (1-65535). */
    public static boolean isPortValid(final int port) {
        return port > 0 && port <= 65535;
    }

    /** @param webSocket can be null. Will be closed using default {@link WebSocket#close()} method. Any exception that
     *            occurs during web socket closing will be caught and logged as debug log using current LibGDX
     *            application logging mechanism. (Basically, exception's message will be logged in the console if debug
     *            logs are on.) */
    public static void closeGracefully(final WebSocket webSocket) {
        if (webSocket != null) {
            try {
                webSocket.close();
            } catch (final Exception exception) {
                Gdx.app.debug("WebSocket", exception.getMessage());
            }
        }
    }

    /** Provides web socket instances.
     *
     * @author MJ */
    protected interface WebSocketFactory { // member interfaces are implicitly static
        /** @param url URL to connect with. Factory can assume that the URL is not null and valid.
         * @return platform-specific {@link WebSocket} instance. */
        WebSocket newWebSocket(String url);
    }
}
| apache-2.0 |
watson-developer-cloud/java-sdk | discovery/src/main/java/com/ibm/watson/discovery/v1/model/ListConfigurationsOptions.java | 2915 | /*
* (C) Copyright IBM Corp. 2017, 2020.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package com.ibm.watson.discovery.v1.model;
import com.ibm.cloud.sdk.core.service.model.GenericModel;
/** The listConfigurations options. */
public class ListConfigurationsOptions extends GenericModel {

  protected String environmentId;
  protected String name;

  /** Builder for {@link ListConfigurationsOptions}. */
  public static class Builder {
    private String environmentId;
    private String name;

    /** Instantiates a new builder. */
    public Builder() {}

    /**
     * Instantiates a new builder with required properties.
     *
     * @param environmentId the environmentId
     */
    public Builder(String environmentId) {
      this.environmentId = environmentId;
    }

    private Builder(ListConfigurationsOptions options) {
      this.environmentId = options.environmentId;
      this.name = options.name;
    }

    /**
     * Constructs a ListConfigurationsOptions from the values collected so far.
     *
     * @return the new ListConfigurationsOptions instance
     */
    public ListConfigurationsOptions build() {
      return new ListConfigurationsOptions(this);
    }

    /**
     * Sets the environment ID.
     *
     * @param environmentId the environmentId
     * @return this builder, for chaining
     */
    public Builder environmentId(String environmentId) {
      this.environmentId = environmentId;
      return this;
    }

    /**
     * Sets the configuration-name filter.
     *
     * @param name the name
     * @return this builder, for chaining
     */
    public Builder name(String name) {
      this.name = name;
      return this;
    }
  }

  protected ListConfigurationsOptions(Builder builder) {
    com.ibm.cloud.sdk.core.util.Validator.notEmpty(
        builder.environmentId, "environmentId cannot be empty");
    this.environmentId = builder.environmentId;
    this.name = builder.name;
  }

  /**
   * Creates a builder pre-populated from this instance.
   *
   * @return a ListConfigurationsOptions builder
   */
  public Builder newBuilder() {
    return new Builder(this);
  }

  /**
   * Gets the environmentId.
   *
   * <p>The ID of the environment.
   *
   * @return the environmentId
   */
  public String environmentId() {
    return environmentId;
  }

  /**
   * Gets the name.
   *
   * <p>Find configurations with the given name.
   *
   * @return the name
   */
  public String name() {
    return name;
  }
}
| apache-2.0 |
imai-lm/nnvm | tests/python/frontend/onnx/test_graph.py | 1175 | """Test graph equality of onnx models."""
import nnvm
import onnx
from nnvm.compiler import graph_util, graph_attr
from model_zoo import super_resolution
def compare_graph(onnx_file, nnvm_sym, ishape):
    """Load an ONNX model and assert graph equality against an NNVM symbol.

    Parameters
    ----------
    onnx_file : str
        Path to the serialized ONNX model file.
    nnvm_sym : nnvm.symbol.Symbol
        Reference NNVM symbol the imported graph must match.
    ishape : tuple of int
        Shape bound to the model input named ``input_0``.
    """
    def _version_components(version):
        # Parse the leading integer components of a version string.
        # FIX: the previous bare int() call crashed on PEP 440 suffixes such
        # as "1.2.0rc1"; stop at the first non-numeric component instead.
        components = []
        for piece in version.split('.'):
            digits = ''
            for ch in piece:
                if not ch.isdigit():
                    break
                digits += ch
            if not digits:
                break
            components.append(int(digits))
        return components

    onnx_vars = _version_components(onnx.__version__) if hasattr(onnx, "__version__") else []
    if len(onnx_vars) >= 2 and (onnx_vars[0] > 0 or onnx_vars[1] >= 2):  # version >= 0.2
        # Modern onnx: load() returns a ModelProto wrapping the graph.
        onnx_model = onnx.load(onnx_file)
        onnx_sym, params = nnvm.frontend.from_onnx(onnx_model.graph)
    else:
        # Legacy onnx (< 0.2): load() returned the graph directly.
        onnx_graph = onnx.load(onnx_file)
        onnx_sym, params = nnvm.frontend.from_onnx(onnx_graph)
    g1 = nnvm.graph.create(onnx_sym)
    g2 = nnvm.graph.create(nnvm_sym)
    ishapes = {'input_0': ishape}
    graph_attr.set_shape_inputs(g1, ishapes)
    graph_attr.set_shape_inputs(g2, ishapes)
    g1 = g1.apply("InferShape").apply("SimplifyInference")
    g2 = g2.apply("InferShape").apply("SimplifyInference")
    graph_util.check_graph_equal(g1, g2)
def test_super_resolution_example():
    """End-to-end graph-equality check for the super-resolution model."""
    model_file, reference_symbol = super_resolution
    compare_graph(model_file, reference_symbol, ishape=(1, 1, 224, 224))
if __name__ == '__main__':
    # Allow running this test module directly, outside the test harness.
    test_super_resolution_example()
| apache-2.0 |
applicaster/scaler | app/controllers/application_controller.rb | 543 | class ApplicationController < ActionController::Base
# Prevent CSRF attacks by raising an exception.
# For APIs, you may want to use :null_session instead.
protect_from_forgery with: :exception
around_action :set_current_timezone
http_basic_authenticate_with(
name: ENV.fetch("SCALER_USER"),
password: ENV.fetch("SCALER_PASS"),
)
decent_configuration do
strategy DecentExposure::StrongParametersStrategy
end
protected
def set_current_timezone(&block)
Time.use_zone(Settings.timezone, &block)
end
end
| apache-2.0 |
discophp/framework | test/unit/classes/FormTest.php | 5202 | <?php
Class FormTest extends PHPUnit_Framework_TestCase {
    // Grab the shared Form service instance before each test.
    public function setUp(){
        $this->Form = \Form::instance();
    }//setUp
    // Verifies basic form generation: form/input attributes, per-field forced
    // renderers, wrapping templates and custom submit buttons.
    public function testBasic(){
        $form = $this->Form
            ->formProps(Array('method'=>'POST'))
            ->props(Array('class'=>'test'))
            ->make(Array('email'=>'test@email.com'));
        $actual = '<form method="POST"><label>email<input name="email" value="test@email.com" type="text" class="test"/></label><input type="submit" value="send"/></form>';
        $this->assertEquals($actual,$form);
        $form = $this->Form
            ->force(function($k,$v){
                return "$k $v";
            })
            ->force('name',function($k,$v){
                return "<div>$k $v</div>";
            })
            ->wrap("<div>%1\$s %2\$s</div>")
            ->submitButton('<div>Submit</div>')
            ->make(Array('email'=>'test@email.com','name'=>'Test Name'));
        $actual = '<form><div>email email test@email.com</div><div>name <div>name Test Name</div></div><div>Submit</div></form>';
        $this->assertEquals($actual,$form);
    }//testBasic
    // Forms built with withToken() must embed a CSRF token that validates.
    public function testToken(){
        $form = $this->Form
            ->withToken()
            ->make(Array('name'=>'Test'));
        $this->assertContains(Form::token(),$form);
        $this->assertTrue(Form::validToken(Form::token()));
    }//testToken
    // Exercises model-backed forms end to end: generation from a DB row,
    // column filtering, blank forms, insert/update via post(), and the
    // select-menu / radio-button helpers (array and query-result sources).
    public function testFromModel(){
        $dbTest = new DBTest;
        $dbTest->setUp();
        $form = $this->Form->from('PersonModelTest')
            ->where(Array('person_id'=>1))
            ->make();
        $actual = '<form><input name="person_id" value="1" type="hidden"/><label>name<input name="name" value="Person One" type="text"/></label><label>age<input name="age" value="30" type="number"/></label><input type="submit" value="send"/></form>';
        $this->assertEquals($actual,$form);
        $form = $this->Form->from('PersonModelTest')
            ->where(Array('person_id'=>1))
            ->with(Array('name'))
            ->make();
        $actual = '<form><label>name<input name="name" value="Person One" type="text"/></label><input type="submit" value="send"/></form>';
        $this->assertEquals($actual,$form);
        $form = $this->Form->from('PersonModelTest')
            ->where(Array('person_id'=>1))
            ->without(Array('age'))
            ->make();
        $actual = '<form><input name="person_id" value="1" type="hidden"/><label>name<input name="name" value="Person One" type="text"/></label><input type="submit" value="send"/></form>';
        $this->assertEquals($actual,$form);
        $form = $this->Form->from('PersonModelTest')
            ->blank()
            ->make();
        $actual = '<form><label>person_id<input name="person_id" value="" type="text"/></label><label>name<input name="name" value="" type="text"/></label><label>age<input name="age" value="" type="text"/></label><input type="submit" value="send"/></form>';
        $this->assertEquals($actual,$form);
        //TEST INSERT
        $_POST['name'] = 'Test Person';
        $_POST['age'] = 15;
        $id = $this->Form->from('PersonModelTest')->post();
        $result = $dbTest->DB->query('SELECT name,age FROM discophp_test_person WHERE person_id=?',$id);
        $this->assertEquals(1,$result->rowCount());
        //TEST UPDATE
        $_POST['person_id'] = $id;
        $_POST['name'] = 'Test Person1';
        $this->Form->from('PersonModelTest')->post();
        $row = $dbTest->DB->query('SELECT name FROM discophp_test_person WHERE person_id=?',$id)->fetch();
        $this->assertEquals('Test Person1',$row['name']);
        //TEST SELECT MENU
        $data = Array('r'=>'red','g'=>'green');
        $select = $this->Form->selectMenu($data,'color','g');
        $actual = '<select name="color"><option value="r">red</option><option value="g" selected="selected">green</option></select>';
        $this->assertEquals($actual,$select);
        $data = $dbTest->DB->query('SELECT person_id AS option_value,name AS option_text FROM discophp_test_person ORDER BY person_id LIMIT 2');
        $select = $this->Form->selectMenu($data,'names');
        $actual = '<select name="names"><option value="1">Person One</option><option value="2">Person Two</option></select>';
        $this->assertEquals($actual,$select);
        //TEST RADIO BUTTONS
        $data = Array('r'=>'red','g'=>'green');
        $radio = $this->Form->radioButtons($data,'color','g');
        $actual = '<label>red<input name="color" value="r" type="radio"/></label><label>green<input name="color" value="g" type="radio" checked="checked"/></label>';
        $this->assertEquals($actual,$radio);
        $data = $dbTest->DB->query('SELECT person_id AS button_value,name AS button_text FROM discophp_test_person ORDER BY person_id LIMIT 2');
        $radio = $this->Form->radioButtons($data,'names');
        $actual = '<label>Person One<input name="names" value="1" type="radio"/></label><label>Person Two<input name="names" value="2" type="radio"/></label>';
        $this->assertEquals($actual,$radio);
        $dbTest->tearDown();
    }//testFromModel
}//CacheTest
| apache-2.0 |
handydannu/WEB_PROJECT | RPL2-master/includes/panel_menu.php | 908 | <?php
// Renders the right-hand "Panel Menu" sidebar linking to each role-specific
// panel (admin, nurse, pharmacist, health department). Markup follows the
// templatemo theme's section structure. The echoed string is emitted verbatim.
echo '
        <div id="templatemo_content_right">
            <div class="templatemo_section">
                <div class="templatemo_section_2_top">
                    Panel Menu
                </div>
                <div class="templatemo_section_2_mid">
                <ul>
                    <li><a href="panel_admin.php">Panel Administrasi</a> </li>
                    <li><a href="panel_perawat.php">Panel Perawat</a> </li>
                    <li><a href="panel_apoteker.php">Panel Apoteker</a> </li>
                    <li><a href="panel_menkes.php">Panel Dinas Kesehatan</a> </li>
                </ul>
                </div>
                <div class="templatemo_section_2_bottom">
                </div>
            </div>
    </div>';
?> | apache-2.0 |
andyvesel/manageiq | spec/lib/ansible/runner_spec.rb | 10150 | describe Ansible::Runner do
let(:uuid) { "201ac780-7bf4-0136-3b9e-54e1ad8b3cf4" }
let(:env_vars) { {"ENV1" => "VAL1", "ENV2" => "VAL2"} }
let(:extra_vars) { {"id" => uuid} }
let(:tags) { "tag" }
let(:result) { AwesomeSpawn::CommandResult.new("ansible-runner", "output", "", "0") }
describe ".run" do
let(:playbook) { "/path/to/my/playbook" }
before do
allow(File).to receive(:exist?).and_call_original
allow(File).to receive(:exist?).with(playbook).and_return(true)
end
it "calls run and writes the required files" do
expect(AwesomeSpawn).to receive(:run) do |command, options|
expect(command).to eq("ansible-runner")
expect(options[:env]).to eq(env_vars)
method, dir, json, args = options[:params]
expect(method).to eq("run")
expect(json).to eq(:json)
expect(args).to eq(:ident => "result", :playbook => "playbook", :project_dir => "/path/to/my")
hosts = File.read(File.join(dir, "inventory", "hosts"))
expect(hosts).to eq("localhost")
extravars = JSON.parse(File.read(File.join(dir, "env", "extravars")))
expect(extravars).to eq("id" => uuid, "ansible_connection" => "local")
expect(File.exist?(File.join(dir, "env", "cmdline"))).to be_falsey
end.and_return(result)
expect_galaxy_roles_fetched
described_class.run(env_vars, extra_vars, playbook)
end
it "calls launch with expected tag" do
expect(AwesomeSpawn).to receive(:run) do |command, options|
expect(command).to eq("ansible-runner")
expect(options[:env]).to eq(env_vars)
method, dir, json, args = options[:params]
expect(method).to eq("run")
expect(json).to eq(:json)
expect(args).to eq(:ident => "result", :playbook => "playbook", :project_dir => "/path/to/my")
hosts = File.read(File.join(dir, "inventory", "hosts"))
expect(hosts).to eq("localhost")
extravars = JSON.parse(File.read(File.join(dir, "env", "extravars")))
expect(extravars).to eq("id" => uuid, "ansible_connection" => "local")
cmdline = File.read(File.join(dir, "env", "cmdline"))
expect(cmdline).to eq("--tags #{tags}")
end.and_return(result)
expect_galaxy_roles_fetched
described_class.run(env_vars, extra_vars, playbook, :tags => tags)
end
it "calls run with the correct verbosity" do
expect(AwesomeSpawn).to receive(:run) do |command, options|
expect(command).to eq("ansible-runner")
_method, _dir, _json, args = options[:params]
expect(args).to eq(:ident => "result", :playbook => "playbook", :project_dir => "/path/to/my", "-vvvvv" => nil)
end.and_return(result)
described_class.run(env_vars, extra_vars, playbook, :verbosity => 6)
end
it "calls run with become options" do
expect(AwesomeSpawn).to receive(:run) do |command, options|
expect(command).to eq("ansible-runner")
_method, dir, _json, _args = options[:params]
cmdline = File.read(File.join(dir, "env", "cmdline"))
expect(cmdline).to eq("--become --ask-become-pass")
end.and_return(result)
described_class.run(env_vars, extra_vars, playbook, :become_enabled => true)
end
context "with special characters" do
let(:env_vars) { {"ENV1" => "pa$%w0rd!'"} }
let(:extra_vars) { {"name" => "john's server"} }
it "calls launch with expected arguments" do
expect(AwesomeSpawn).to receive(:run) do |command, options|
expect(command).to eq("ansible-runner")
expect(options[:env]).to eq(env_vars)
method, dir, json, args = options[:params]
expect(method).to eq("run")
expect(json).to eq(:json)
expect(args).to eq(:ident => "result", :playbook => "playbook", :project_dir => "/path/to/my")
hosts = File.read(File.join(dir, "inventory", "hosts"))
expect(hosts).to eq("localhost")
extravars = JSON.parse(File.read(File.join(dir, "env", "extravars")))
expect(extravars).to eq("name" => "john's server", "ansible_connection" => "local")
end.and_return(result)
expect_galaxy_roles_fetched
described_class.run(env_vars, extra_vars, playbook)
end
end
end
describe ".run_async" do
let(:playbook) { "/path/to/my/playbook" }
before do
allow(File).to receive(:exist?).and_call_original
allow(File).to receive(:exist?).with(playbook).and_return(true)
end
it "calls ansible-runner with start" do
expect(AwesomeSpawn).to receive(:run) do |command, options|
expect(command).to eq("ansible-runner")
expect(options[:env]).to eq(env_vars)
method, dir, json, args = options[:params]
expect(method).to eq("start")
expect(json).to eq(:json)
expect(args).to eq(:ident => "result", :playbook => "playbook", :project_dir => "/path/to/my")
hosts = File.read(File.join(dir, "inventory", "hosts"))
expect(hosts).to eq("localhost")
extravars = JSON.parse(File.read(File.join(dir, "env", "extravars")))
expect(extravars).to eq("id" => uuid, "ansible_connection" => "local")
expect(File.exist?(File.join(dir, "env", "cmdline"))).to be_falsey
end.and_return(result)
expect_galaxy_roles_fetched
runner_result = described_class.run_async(env_vars, extra_vars, playbook)
expect(runner_result).kind_of?(Ansible::Runner::ResponseAsync)
end
end
describe ".run_queue" do
let(:playbook) { "/path/to/my/playbook" }
let(:zone) { FactoryBot.create(:zone) }
let(:user) { FactoryBot.create(:user) }
it "queues Ansible::Runner.run in the right zone" do
described_class.run_queue(env_vars, extra_vars, playbook, user.name, :zone => zone.name)
expect(MiqQueue.count).to eq(1)
expect(MiqQueue.first.zone).to eq(zone.name)
end
end
describe ".run_role" do
let(:role_name) { "my-custom-role" }
let(:role_path) { "/path/to/my/roles" }
before do
allow(File).to receive(:exist?).and_call_original
allow(File).to receive(:exist?).with(role_path).and_return(true)
end
it "runs ansible-runner with the role" do
expect(AwesomeSpawn).to receive(:run) do |command, options|
expect(command).to eq("ansible-runner")
expect(options[:env]).to eq(env_vars)
method, dir, json, args = options[:params]
expect(method).to eq("run")
expect(json).to eq(:json)
expect(args).to eq(:ident => "result", :role => role_name, :roles_path => role_path, :role_skip_facts => nil)
hosts = File.read(File.join(dir, "inventory", "hosts"))
expect(hosts).to eq("localhost")
extravars = JSON.parse(File.read(File.join(dir, "env", "extravars")))
expect(extravars).to eq("id" => uuid, "ansible_connection" => "local")
expect(File.exist?(File.join(dir, "env", "cmdline"))).to be_falsey
end.and_return(result)
described_class.run_role(env_vars, extra_vars, role_name, :roles_path => role_path)
end
it "runs ansible-runner with role and tag" do
expect(AwesomeSpawn).to receive(:run) do |command, options|
expect(command).to eq("ansible-runner")
expect(options[:env]).to eq(env_vars)
method, dir, json, args = options[:params]
expect(method).to eq("run")
expect(json).to eq(:json)
expect(args).to eq(:ident => "result", :role => role_name, :roles_path => role_path, :role_skip_facts => nil)
hosts = File.read(File.join(dir, "inventory", "hosts"))
expect(hosts).to eq("localhost")
extravars = JSON.parse(File.read(File.join(dir, "env", "extravars")))
expect(extravars).to eq("id" => uuid, "ansible_connection" => "local")
cmdline = File.read(File.join(dir, "env", "cmdline"))
expect(cmdline).to eq("--tags #{tags}")
end.and_return(result)
described_class.run_role(env_vars, extra_vars, role_name, :roles_path => role_path, :tags => tags)
end
end
describe ".run_role_async" do
let(:role_name) { "my-custom-role" }
let(:role_path) { "/path/to/my/roles" }
before do
allow(File).to receive(:exist?).and_call_original
allow(File).to receive(:exist?).with(role_path).and_return(true)
end
it "runs ansible-runner with the role" do
expect(AwesomeSpawn).to receive(:run) do |command, options|
expect(command).to eq("ansible-runner")
expect(options[:env]).to eq(env_vars)
method, dir, json, args = options[:params]
expect(method).to eq("start")
expect(json).to eq(:json)
expect(args).to eq(:ident => "result", :role => role_name, :roles_path => role_path, :role_skip_facts => nil)
hosts = File.read(File.join(dir, "inventory", "hosts"))
expect(hosts).to eq("localhost")
extravars = JSON.parse(File.read(File.join(dir, "env", "extravars")))
expect(extravars).to eq("id" => uuid, "ansible_connection" => "local")
expect(File.exist?(File.join(dir, "env", "cmdline"))).to be_falsey
end.and_return(result)
described_class.run_role_async(env_vars, extra_vars, role_name, :roles_path => role_path)
end
end
describe ".run_role_queue" do
let(:role_name) { "my-custom-role" }
let(:role_path) { "/path/to/my/roles" }
let(:zone) { FactoryBot.create(:zone) }
let(:user) { FactoryBot.create(:user) }
it "queues Ansible::Runner.run in the right zone" do
queue_args = {:zone => zone.name}
described_class.run_role_queue(env_vars, extra_vars, role_name, user.name, queue_args, :roles_path => role_path)
expect(MiqQueue.count).to eq(1)
expect(MiqQueue.first.zone).to eq(zone.name)
end
end
def expect_galaxy_roles_fetched
content_double = instance_double(Ansible::Content)
expect(Ansible::Content).to receive(:new).with("/path/to/my").and_return(content_double)
expect(content_double).to receive(:fetch_galaxy_roles)
end
end
| apache-2.0 |
kenzanlabs/deck | app/scripts/modules/dcos/loadBalancer/configure/wizard/upsert.controller.js | 5252 | 'use strict';
const angular = require('angular');
import { ACCOUNT_SERVICE, LOAD_BALANCER_READ_SERVICE, LOAD_BALANCER_WRITE_SERVICE, V2_MODAL_WIZARD_SERVICE, TASK_MONITOR_BUILDER, SEARCH_SERVICE } from '@spinnaker/core';
module.exports = angular.module('spinnaker.dcos.loadBalancer.create.controller', [
LOAD_BALANCER_WRITE_SERVICE,
LOAD_BALANCER_READ_SERVICE,
ACCOUNT_SERVICE,
V2_MODAL_WIZARD_SERVICE,
TASK_MONITOR_BUILDER,
SEARCH_SERVICE,
require('../../transformer.js'),
])
.controller('dcosUpsertLoadBalancerController', function($scope, $uibModalInstance, $state,
application, loadBalancer, isNew, loadBalancerReader,
accountService, dcosLoadBalancerTransformer,
searchService, v2modalWizardService, loadBalancerWriter, taskMonitorBuilder) {
var ctrl = this;
$scope.isNew = isNew;
$scope.pages = {
basicSettings: require('./basicSettings.html'),
resources: require('./resources.html'),
ports: require('./ports.html')
};
$scope.state = {
accountsLoaded: false,
submitting: false
};
// After the upsert task completes and the application's load balancers have
// refreshed, close the wizard and navigate to the details view of the
// created/updated load balancer.
function onApplicationRefresh() {
  // If the user has already closed the modal, do not navigate to the new details view
  if ($scope.$$destroyed) {
    return;
  }
  $uibModalInstance.close();
  var newStateParams = {
    name: $scope.loadBalancer.name,
    accountId: $scope.loadBalancer.account,
    region: $scope.loadBalancer.region,
    provider: 'dcos',
  };
  // Navigate relative to the current ui-router state: descend into the details
  // state when not already in one, otherwise replace the sibling details state.
  if (!$state.includes('**.loadBalancerDetails')) {
    $state.go('.loadBalancerDetails', newStateParams);
  } else {
    $state.go('^.loadBalancerDetails', newStateParams);
  }
}
function onTaskComplete() {
application.loadBalancers.refresh();
application.loadBalancers.onNextRefresh($scope, onApplicationRefresh);
}
$scope.taskMonitor = taskMonitorBuilder.buildTaskMonitor({
application: application,
title: (isNew ? 'Creating ' : 'Updating ') + 'your load balancer',
modalInstance: $uibModalInstance,
onTaskComplete: onTaskComplete,
});
function initializeEditMode() {
$scope.state.accountsLoaded = true;
}
function initializeCreateMode() {
accountService.listAccounts('dcos').then(function (accounts) {
$scope.accounts = accounts;
$scope.state.accountsLoaded = true;
var accountNames = _.map($scope.accounts, 'name');
if (accountNames.length && !accountNames.includes($scope.loadBalancer.account)) {
$scope.loadBalancer.account = accountNames[0];
}
ctrl.accountUpdated();
});
}
// Collects the names of the existing load balancers for the selected account,
// grouped by region, and exposes the current region's names on
// $scope.existingLoadBalancerNames (used for duplicate-name validation).
function updateLoadBalancerNames() {
  // NOTE(review): this reads loadBalancer.credentials while the rest of the
  // controller uses loadBalancer.account — confirm which property is actually
  // populated; if it is always undefined the filter below matches nothing.
  var account = $scope.loadBalancer.credentials,
      region = $scope.loadBalancer.region;
  const accountLoadBalancersByRegion = {};
  // Force-refresh the data source so the collected name list is current.
  application.getDataSource('loadBalancers').refresh(true).then(() => {
    application.getDataSource('loadBalancers').data.forEach((loadBalancer) => {
      if (loadBalancer.account === account) {
        accountLoadBalancersByRegion[loadBalancer.region] = accountLoadBalancersByRegion[loadBalancer.region] || [];
        accountLoadBalancersByRegion[loadBalancer.region].push(loadBalancer.name);
      }
    });
    $scope.existingLoadBalancerNames = accountLoadBalancersByRegion[region] || [];
  });
}
// initialize controller
if (loadBalancer) {
$scope.loadBalancer = dcosLoadBalancerTransformer.convertLoadBalancerForEditing(loadBalancer);
initializeEditMode();
initializeCreateMode();
} else {
$scope.loadBalancer = dcosLoadBalancerTransformer.constructNewLoadBalancerTemplate();
updateLoadBalancerNames();
initializeCreateMode();
}
// Controller API
this.updateName = function() {
$scope.loadBalancer.name = this.getName();
};
// Builds the load balancer name as "<application>-<stack>-<detail>", then
// strips any trailing dashes left by an empty stack and/or detail.
this.getName = function() {
  var lb = $scope.loadBalancer;
  var joined = [application.name, lb.stack || '', lb.detail || ''].join('-');
  return _.trimEnd(joined, '-');
};
this.accountUpdated = function() {
accountService.getAccountDetails($scope.loadBalancer.account).then(function(details) {
$scope.dcosClusters = details.dcosClusters;
ctrl.dcosClusterUpdated();
});
};
this.dcosClusterUpdated = function() {
updateLoadBalancerNames();
ctrl.updateName();
};
this.submit = function () {
var descriptor = isNew ? 'Create' : 'Update';
this.updateName();
$scope.taskMonitor.submit(
function() {
var zones = {};
zones[$scope.loadBalancer.region] = [$scope.loadBalancer.region];
let params = {
cloudProvider: 'dcos',
availabilityZones: zones
};
return loadBalancerWriter.upsertLoadBalancer($scope.loadBalancer, application, descriptor, params);
}
);
};
this.cancel = function () {
$uibModalInstance.dismiss();
};
});
| apache-2.0 |
googleapis/nodejs-recommender | .jsdoc.js | 1485 | // Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ** This file is automatically generated by gapic-generator-typescript. **
// ** https://github.com/googleapis/gapic-generator-typescript **
// ** All changes to this file may be overwritten. **
'use strict';

// JSDoc configuration. This file is emitted by gapic-generator-typescript
// (see header above); manual edits will be overwritten on regeneration.
module.exports = {
  opts: {
    readme: './README.md',
    package: './package.json',
    template: './node_modules/jsdoc-fresh',
    recurse: true,
    verbose: true,
    destination: './docs/'
  },
  plugins: [
    'plugins/markdown',
    'jsdoc-region-tag'
  ],
  source: {
    // Skip dotfiles and underscore-prefixed paths.
    excludePattern: '(^|\\/|\\\\)[._]',
    include: [
      'build/src',
      'protos'
    ],
    includePattern: '\\.js$'
  },
  templates: {
    copyright: 'Copyright 2021 Google LLC',
    includeDate: false,
    sourceFiles: false,
    systemName: '@google-cloud/recommender',
    theme: 'lumen',
    default: {
      outputSourceFiles: false
    }
  },
  markdown: {
    idInHeadings: true
  }
};
| apache-2.0 |
hortonworks/cloudbreak | freeipa/src/main/java/com/sequenceiq/freeipa/flow/freeipa/binduser/create/handler/LdapBindUserCreationHandler.java | 3691 | package com.sequenceiq.freeipa.flow.freeipa.binduser.create.handler;
import java.util.Optional;
import javax.inject.Inject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.sequenceiq.cloudbreak.common.event.Selectable;
import com.sequenceiq.cloudbreak.logger.MDCBuilder;
import com.sequenceiq.flow.event.EventSelectorUtil;
import com.sequenceiq.flow.reactor.api.handler.ExceptionCatcherEventHandler;
import com.sequenceiq.flow.reactor.api.handler.HandlerEvent;
import com.sequenceiq.freeipa.client.FreeIpaClientException;
import com.sequenceiq.freeipa.entity.Stack;
import com.sequenceiq.freeipa.flow.freeipa.binduser.create.event.CreateBindUserEvent;
import com.sequenceiq.freeipa.flow.freeipa.binduser.create.event.CreateBindUserFailureEvent;
import com.sequenceiq.freeipa.flow.freeipa.binduser.create.event.CreateBindUserFlowEvent;
import com.sequenceiq.freeipa.flow.freeipa.binduser.create.event.CreateLdapBindUserEvent;
import com.sequenceiq.freeipa.ldap.LdapConfig;
import com.sequenceiq.freeipa.ldap.LdapConfigService;
import com.sequenceiq.freeipa.ldap.v1.LdapConfigV1Service;
import com.sequenceiq.freeipa.service.stack.StackService;
import reactor.bus.Event;
@Component
public class LdapBindUserCreationHandler extends ExceptionCatcherEventHandler<CreateLdapBindUserEvent> {

    private static final Logger LOGGER = LoggerFactory.getLogger(LdapBindUserCreationHandler.class);

    private static final String SELECTOR = EventSelectorUtil.selector(CreateLdapBindUserEvent.class);

    @Inject
    private StackService stackService;

    @Inject
    private LdapConfigV1Service ldapConfigV1Service;

    @Inject
    private LdapConfigService ldapConfigService;

    @Override
    public String selector() {
        return SELECTOR;
    }

    /**
     * Wraps any unexpected exception into a bind-user-creation failure event so the
     * flow transitions to its failure state instead of stalling.
     */
    @Override
    protected Selectable defaultFailureEvent(Long resourceId, Exception e, Event<CreateLdapBindUserEvent> event) {
        CreateBindUserEvent eventData = event.getData();
        String failureMsg = String.format("LDAP bind user creation failed for %s with %s", eventData.getSuffix(), e.getMessage());
        return new CreateBindUserFailureEvent(CreateBindUserFlowEvent.CREATE_BIND_USER_FAILED_EVENT.event(), eventData, failureMsg, e);
    }

    /**
     * Creates the LDAP bind user for the event's environment. If an LDAP config
     * with the same suffix already exists, the step is reported as finished,
     * which keeps this handler idempotent on retries.
     */
    @Override
    protected Selectable doAccept(HandlerEvent<CreateLdapBindUserEvent> event) {
        CreateBindUserEvent data = event.getData();
        Optional<LdapConfig> ldapConfig = ldapConfigService.find(data.getEnvironmentCrn(), data.getAccountId(), data.getSuffix());
        if (ldapConfig.isPresent()) {
            LOGGER.info("LDAP configuration already exists: {}", ldapConfig.get());
            return new CreateBindUserEvent(CreateBindUserFlowEvent.CREATE_LDAP_BIND_USER_FINISHED_EVENT.event(), data);
        } else {
            return createLdapBindUser(event.getEvent(), data);
        }
    }

    // Creates a new LDAP config (and bind user) through the FreeIPA client and
    // converts client failures into the flow's failure event.
    // (Renamed from createLdapBindUSer to fix the capitalization typo; the
    // method is private, so no external callers are affected.)
    private Selectable createLdapBindUser(Event<CreateLdapBindUserEvent> event, CreateBindUserEvent data) {
        Stack stack = stackService.getByEnvironmentCrnAndAccountId(data.getEnvironmentCrn(), data.getAccountId());
        MDCBuilder.buildMdcContext(stack);
        LOGGER.info("Create LDAP bind user for [{}]", data.getSuffix());
        try {
            ldapConfigV1Service.createNewLdapConfig(data.getEnvironmentCrn(), data.getSuffix(), stack, true);
            return new CreateBindUserEvent(CreateBindUserFlowEvent.CREATE_LDAP_BIND_USER_FINISHED_EVENT.event(), data);
        } catch (FreeIpaClientException e) {
            LOGGER.error("Couldn't create LDAP bind user: {}", data, e);
            return defaultFailureEvent(data.getResourceId(), e, event);
        }
    }
}
| apache-2.0 |
ankyra/escape-core | script/type.go | 1405 | /*
Copyright 2017, 2018 Ankyra
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package script
type ValueType interface {
Name() string
IsFunc() bool
IsInteger() bool
IsList() bool
IsBool() bool
IsMap() bool
IsString() bool
IsLambda() bool
}
/*
Expression types
*/
type valueType struct {
Type string
}
func NewType(typ string) ValueType {
return &valueType{Type: typ}
}
func (typ *valueType) Name() string {
return typ.Type
}
func (typ *valueType) IsFunc() bool {
return typ.Type == "func"
}
func (typ *valueType) IsMap() bool {
return typ.Type == "map"
}
func (typ *valueType) IsString() bool {
return typ.Type == "string"
}
func (typ *valueType) IsInteger() bool {
return typ.Type == "integer"
}
func (typ *valueType) IsBool() bool {
return typ.Type == "bool"
}
func (typ *valueType) IsList() bool {
return typ.Type == "list"
}
func (typ *valueType) IsLambda() bool {
return typ.Type == "lambda"
}
| apache-2.0 |
ferluchin/Tienda-Online | index.php | 19673 | <!DOCTYPE html>
<?php
// Bootstrap: start the session and open the site-wide MySQL connection.
session_start();
// $db_name/$db_host/$db_user/$db_password are expected to come from site_config.php.
include("static/site_config.php");
include("static/clase_mysql.php");
// NOTE(review): later code in this page calls mysql_query/mysql_fetch_assoc
// directly, so clase_mysql appears to wrap the legacy mysql_* extension,
// which was removed in PHP 7 — consider migrating to mysqli or PDO.
$miconexion = new clase_mysql;
$miconexion->conectar($db_name,$db_host, $db_user,$db_password);
?>
<html lang="en">
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<meta name="description" content="">
<meta name="author" content="">
<link rel="stylesheet" href="cssmenu/styles.css">
<link rel="stylesheet" href="css/style.css"/>
<link rel="stylesheet" href="css/style2.css"/>
<script type="text/javascript" src="css/jquery.min.js"></script>
<script src="cssmenu/script.js"></script>
<title>Compras en linea</title>
<!-- Bootstrap Core CSS -->
<link href="css/bootstrap.min.css" rel="stylesheet">
<!-- Custom CSS -->
<link href="css/shop-homepage.css" rel="stylesheet">
<!-- HTML5 Shim and Respond.js IE8 support of HTML5 elements and media queries -->
<!-- WARNING: Respond.js doesn't work if you view the page via file:// -->
<!--[if lt IE 9]>
<script src="https://oss.maxcdn.com/libs/html5shiv/3.7.0/html5shiv.js"></script>
<script src="https://oss.maxcdn.com/libs/respond.js/1.4.2/respond.min.js"></script>
<![endif]-->
<script type="text/javascript">
$(document).ready(function() {
$('a.login-window').click(function() {
// Getting the variable's value from a link
var loginBox = $(this).attr('href');
//Fade in the Popup and add close button
$(loginBox).fadeIn(600);
//Set the center alignment padding + border
var popMargTop = ($(loginBox).height() + 24) / 2;
var popMargLeft = ($(loginBox).width() + 24) / 2;
$(loginBox).css({
'margin-top' : -popMargTop,
'margin-left' : -popMargLeft
});
// Add the mask to body
$('body').append('<div id="mask"></div>');
$('#mask').fadeIn(300);
return false;
});
// When clicking on the button close or the mask layer the popup closed
$('a.close, #mask').live('click', function() {
$('#mask , .login-popup').fadeOut(300 , function() {
$('#mask').remove();
});
return false;
});
});
</script>
</head>
<body>
<!-- Navigation -->
<nav class="navbar navbar-inverse navbar-fixed-top" role="navigation">
<!-- Brand and toggle get grouped for better mobile display -->
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#bs-example-navbar-collapse-1">
<span class="sr-only">Toggle navigation</span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="#">Start Bootstrap</a>
</div>
<!-- Collect the nav links, forms, and other content for toggling -->
<div id='cssmenu'>
<ul>
<?php
// Render one top-level menu entry per distinct product category.
$query = "SELECT distinct descripcion FROM categorias;";
// NOTE(review): the query is executed twice — once through the wrapper and
// once directly via mysql_query; one of the two calls is likely redundant.
$miconexion->consulta($query);
$result = mysql_query($query) or die("error". mysql_error());
while ($row = mysql_fetch_assoc($result)) {
    // NOTE(review): each iteration opens an <li> that is never closed here;
    // only the last one is closed by the static </li> further down, so the
    // generated menu HTML is malformed when there are multiple categories.
    echo "<li class='active has-sub'><a href='#'>" . $row['descripcion'] . "</a>";
}
?>
<ul>
<li class='last'>
<table class="table">
<tbody background="img/fonttabla.jpg">
<tr>
<a href=""><td style="cursor: pointer">Producto de ventas</td></a>
<td>Producto de ventas</td>
</tr>
<tr>
<td>Producto de ventas</td>
<td>Producto de ventas</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
<?php
// Show a login link for anonymous visitors; otherwise show the user's name
// and a logout link.
// empty() replaces the previous @!$_SESSION['user'] error-suppression hack:
// it is true for both an unset key and a falsy value, without the notice.
if (empty($_SESSION['user'])) {
    echo "<li class='nav pull-right'>
<a href='#login-box' class='login-window'>Login / Sign In</a>
</li>";
} else {
    // "Cerrar Sesión" — fixed typo (was "Cesión").
    echo "<li class='nav pull-right'>
<a href='static/desconectar_usuario.php'>Cerrar Sesión</a></li>";
    // Escape the session value: the stored user name is rendered into markup.
    echo "<li class='nav pull-right'>
<a href='#'>" . htmlspecialchars($_SESSION['user']) . "</a></li>";
}
?>
</ul>
</div>
</nav>
<!-- /.navbar-collapse -->
<div class="container">
<div id="content">
<div id="login-box" class="login-popup">
<a href="index.php" class="close"><img src="close_pop.png" class="btn_close" title="Close Window" alt="Close" /></a>
<section class="login">
<div class="titulo">Iniciar Sesión</div>
<form action="validar.php" method="post" enctype="application/x-www-form-urlencoded">
<input id="username" name="user" type="text" required title="Username required" placeholder="Correo" data-icon="U">
<input id="password" name="pass" type="password" required title="Password required" placeholder="Password" data-icon="x">
<div class="olvido">
<div class="col"><a href="#" title="Ver Caracteres">Registrarse</a></div>
</div>
<div>
<center><input class="enviar" type="submit" value="Aceptar"></center>
</div>
</form>
</section>
</div>
</div>
</div>
<!-- Page Content -->
<p class="lead" align="center">PROMOCIONES DE LA SEMANA</p>
<div class="container">
<div class="row">
<div class="col-md-3">
<div class="list-group">
<table class="table table-hover" >
<tr>
<td cursor="pointer" align="center" data-target="#carousel-example-generic" data-slide-to="0"> COMBO ORDENADOR</a></td>
</tr>
<tr>
<td align="center" data-target="#carousel-example-generic" data-slide-to="1"> COMBO LINTERNA 2</a></td>
</tr>
<tr>
<td align="center" data-target="#carousel-example-generic" data-slide-to="2"> PROMOCION 3</a></td>
</tr>
<tr>
<td align="center" data-target="#carousel-example-generic" data-slide-to="3"> PROMOCION 4</a></td>
</tr>
<tr>
<td align="center" data-target="#carousel-example-generic" data-slide-to="4"> PROMOCION 5</a></td>
</tr>
<tr>
<td align="center" data-target="#carousel-example-generic" data-slide-to="5"> PROMOCION 6</a></td>
</tr>
<tr>
<td align="center" data-target="#carousel-example-generic" data-slide-to="6"> PROMOCION 7</a></td>
</tr>
<tr>
<td align="center" data-target="#carousel-example-generic" data-slide-to="7"> PROMOCION 8</a></td>
</tr>
<tr>
<td align="center" data-target="#carousel-example-generic" data-slide-to="8"> PROMOCION 9</a></td>
</tr>
<tr>
<td align="center" data-target="#carousel-example-generic" data-slide-to="9"> PROMOCION 10</a></td>
</tr>
</table>
</div>
</div>
<div class="col-md-9">
<div class="row carousel-holder">
<div class="col-md-12">
<div id="carousel-example-generic" class="carousel slide" data-ride="carousel">
<ol class="carousel-indicators">
<li data-target="#carousel-example-generic" data-slide-to="0" class="active"></li>
<li data-target="#carousel-example-generic" data-slide-to="1"></li>
<li data-target="#carousel-example-generic" data-slide-to="2"></li>
<li data-target="#carousel-example-generic" data-slide-to="3"></li>
<li data-target="#carousel-example-generic" data-slide-to="4"></li>
<li data-target="#carousel-example-generic" data-slide-to="5"></li>
<li data-target="#carousel-example-generic" data-slide-to="6"></li>
<li data-target="#carousel-example-generic" data-slide-to="7"></li>
<li data-target="#carousel-example-generic" data-slide-to="8"></li>
<li data-target="#carousel-example-generic" data-slide-to="8"></li>
</ol>
<div class="carousel-inner">
<div class="item active">
<div class="row">
<div class="col-md-6" align="center">POR LA COMPRA DE <br><img src="img/Computer.png" width="350" height="350"></div>
<div class="col-md-6"><center>RECIBE GRATIS <br><img src="img/07.jpg" width="350" height="350"> </center></div>
</div>
</div>
<div class="item">
<div class="row">
<div class="col-md-6" align="center">POR LA COMPRA DE <br><img src="img/linterna.jpg" width="350" height="350"></div>
<div class="col-md-6"><center>RECIBE GRATIS <br><img src="img/PILAS.jpg" width="350" height="350"> </center></div>
</div>
</div>
<div class="item">
<div class="row">
<div class="col-md-6" align="center">POR LA COMPRA DE <br><img src="img/oferta1.jpg" width="350" height="350"></div>
<div class="col-md-6"><center>RECIBE GRATIS <br><img src="img/regalo1.jpg" width="350" height="350"> </center></div>
</div>
</div>
<div class="item">
<div class="row">
<div class="col-md-6" align="center">POR LA COMPRA DE <br><img src="img/oferta2.jpg" width="350" height="350"></div>
<div class="col-md-6"><center>RECIBE GRATIS <br><img src="img/regalo2.jpg" width="350" height="350"> </center></div>
</div>
</div>
<div class="item">
<div class="row">
<div class="col-md-6" align="center">POR LA COMPRA DE <br><img src="img/oferta3.jpg" width="350" height="350"></div>
<div class="col-md-6"><center>RECIBE GRATIS <br><img src="img/regalo3.jpg" width="350" height="350"> </center></div>
</div>
</div>
<div class="item">
<div class="row">
<div class="col-md-6" align="center">POR LA COMPRA DE <br><img src="img/oferta4.jpg" width="350" height="350"></div>
<div class="col-md-6"><center>RECIBE GRATIS <br><img src="img/regalo4.jpg" width="350" height="350"> </center></div>
</div>
</div>
<div class="item">
<div class="row">
<div class="col-md-6" align="center">POR LA COMPRA DE <br><img src="img/oferta5.jpg" width="350" height="350"></div>
<div class="col-md-6"><center>RECIBE GRATIS <br><img src="img/regalo5.jpg" width="350" height="350"> </center></div>
</div>
</div>
<div class="item">
<img class="slide-image" src="text.png" alt="">
</div>
<div class="item">
<img class="slide-image" src="text.png" alt="">
</div>
<div class="item">
<img class="slide-image" src="text.png" alt="">
</div>
</div>
<a class="left carousel-control" href="#carousel-example-generic" data-slide="prev">
<span class="glyphicon glyphicon-chevron-left"></span>
</a>
<a class="right carousel-control" href="#carousel-example-generic" data-slide="next">
<span class="glyphicon glyphicon-chevron-right"></span>
</a>
</div>
</div>
</div>
</div>
<div class="col-md-12">
<div class="row">
<div class="col-sm-4 col-lg-4 col-md-4">
<div class="thumbnail">
<img src="img/pru.jpg" >
<div class="caption">
<h4 class="pull-right">$precio</h4>
<h4><a href="#">NOMBRE PRODUCTO</a>
</h4>
<p>descripcion del producto </p>
<input type="submit" class="btn btn-success" value="Añadir al carrito"></input>
</div>
<div class="ratings">
<p class="pull-right">15 vistas</p>
</div>
</div>
</div>
<div class="col-sm-4 col-lg-4 col-md-4">
<div class="thumbnail">
<img src="img/pru.jpg" >
<div class="caption">
<h4 class="pull-right">$precio</h4>
<h4><a href="#">NOMBRE PRODUCTO</a>
</h4>
<p>descripcion del producto </p>
<input type="submit" class="btn btn-success" value="Añadir al carrito"></input>
</div>
<div class="ratings">
<p class="pull-right">15 vistas</p>
</div>
</div>
</div>
<div class="col-sm-4 col-lg-4 col-md-4">
<div class="thumbnail">
<img src="img/pru.jpg" >
<div class="caption">
<h4 class="pull-right">$precio</h4>
<h4><a href="#">NOMBRE PRODUCTO</a>
</h4>
<p>descripcion del producto </p>
<input type="submit" class="btn btn-success" value="Añadir al carrito"></input>
</div>
<div class="ratings">
<p class="pull-right">15 vistas</p>
</div>
</div>
</div>
<div class="col-sm-4 col-lg-4 col-md-4">
<div class="thumbnail">
<img src="img/pru.jpg" >
<div class="caption">
<h4 class="pull-right">$precio</h4>
<h4><a href="#">NOMBRE PRODUCTO</a>
</h4>
<p>descripcion del producto </p>
<input type="submit" class="btn btn-success" value="Añadir al carrito"></input>
</div>
<div class="ratings">
<p class="pull-right">15 vistas</p>
</div>
</div>
</div>
<div class="col-sm-4 col-lg-4 col-md-4">
<div class="thumbnail">
<img src="img/pru.jpg" >
<div class="caption">
<h4 class="pull-right">$precio</h4>
<h4><a href="#">NOMBRE PRODUCTO</a>
</h4>
<p>descripcion del producto </p>
<input type="submit" class="btn btn-success" value="Añadir al carrito"></input>
</div>
<div class="ratings">
<p class="pull-right">15 vistas</p>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- /.container -->
<div class="container">
<hr>
<!-- Footer -->
<footer>
<div class="row">
<div class="col-lg-12">
<p>Copyright © Your Website 2014</p>
</div>
</div>
</footer>
</div>
<!-- /.container -->
<!-- jQuery -->
<script src="js/jquery.js"></script>
<!-- Bootstrap Core JavaScript -->
<script src="js/bootstrap.min.js"></script>
</body>
</html>
| apache-2.0 |
twitter/twitter-kit-android | twitter-core/src/main/java/com/twitter/sdk/android/core/services/AccountService.java | 1921 | /*
* Copyright (C) 2015 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.sdk.android.core.services;
import com.twitter.sdk.android.core.models.User;
import retrofit2.Call;
import retrofit2.http.GET;
import retrofit2.http.Query;
/**
 * Retrofit service definition for the Twitter account REST endpoints.
 */
public interface AccountService {

    /**
     * Returns an HTTP 200 OK response code and a representation of the requesting user if
     * authentication was successful; returns a 401 status code and an error message if not. Use
     * this method to test if supplied user credentials are valid.
     *
     * @param includeEntities (optional) The entities node will not be included when set to false.
     * @param skipStatus (optional) When set to true, statuses will not be included in
     *                   the returned user objects.
     * @param includeEmail (optional) When set to true email will be returned in the user object as
     *                     a string. If the user does not have an email address on their account, or
     *                     if the email address is not verified, null will be returned.
     * @return a {@link retrofit2.Call} yielding the authenticated {@link User} when executed
     */
    @GET("/1.1/account/verify_credentials.json")
    Call<User> verifyCredentials(@Query("include_entities") Boolean includeEntities,
                                 @Query("skip_status") Boolean skipStatus,
                                 @Query("include_email") Boolean includeEmail);
}
| apache-2.0 |
cexbrayat/camel | components/camel-apns/src/test/java/org/apache/camel/component/apns/ApnsProducerTest.java | 3379 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.apns;
import com.notnoop.apns.APNS;
import com.notnoop.apns.ApnsService;
import com.notnoop.apns.EnhancedApnsNotification;
import com.notnoop.apns.utils.ApnsServerStub;
import com.notnoop.apns.utils.FixedCertificates;
import org.apache.camel.CamelContext;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.apns.factory.ApnsServiceFactory;
import org.apache.camel.component.apns.model.ApnsConstants;
import org.apache.camel.component.apns.util.ApnsUtils;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* Unit test that we can produce JMS message from files
*/
public class ApnsProducerTest extends CamelTestSupport {
private static final String FAKE_TOKEN = "19308314834701ACD8313AEBD92AEFDE192120371FE13982392831701318B943";
private ApnsServerStub server;
public ApnsProducerTest() {
super();
}
@Before
public void startup() {
server = ApnsUtils.prepareAndStartServer(FixedCertificates.TEST_GATEWAY_PORT, FixedCertificates.TEST_FEEDBACK_PORT);
}
@After
public void stop() {
server.stop();
}
@Test(timeout = 5000)
public void testProducer() throws Exception {
String message = "Hello World";
String messagePayload = APNS.newPayload().alertBody(message).build();
EnhancedApnsNotification apnsNotification = new EnhancedApnsNotification(1, EnhancedApnsNotification.MAXIMUM_EXPIRY, FAKE_TOKEN, messagePayload);
server.stopAt(apnsNotification.length());
template.sendBody("direct:test", message);
server.messages.acquire();
assertArrayEquals(apnsNotification.marshall(), server.received.toByteArray());
}
protected CamelContext createCamelContext() throws Exception {
CamelContext camelContext = super.createCamelContext();
ApnsServiceFactory apnsServiceFactory = ApnsUtils.createDefaultTestConfiguration(camelContext);
ApnsService apnsService = apnsServiceFactory.getApnsService();
ApnsComponent apnsComponent = new ApnsComponent(apnsService);
camelContext.addComponent("apns", apnsComponent);
return camelContext;
}
protected RouteBuilder createRouteBuilder() throws Exception {
return new RouteBuilder() {
public void configure() throws Exception {
from("direct:test").setHeader(ApnsConstants.HEADER_TOKENS, constant(FAKE_TOKEN)).to("apns:notify");
}
};
}
}
| apache-2.0 |
Appudo/Appudo.github.io | cli/source/class/appudo_cli_creator/view/desktop/I18nLangNode.js | 2814 | /* ************************************************************************
APPUDO CLI Creator
https://www.appudo.com
Copyright: 2018 Appudo UG (haftungsbeschränkt), https://www.appudo.com
License: MIT License, https://opensource.org/licenses/MIT
Authors: source@appudo.com
************************************************************************ */
/**
 * Per-language store for i18n data nodes. Internally maps a language name to
 * an object whose keys are "/"-joined paths and whose values are
 * I18nDataNode instances.
 */
qx.Class.define("appudo_cli_creator.view.desktop.I18nLangNode",
{
  extend : qx.core.Object,

  members : {
    // { lang : { "joined/path/key" : I18nDataNode } }
    __map : null,

    /**
     * Creates a data node for lang/path if none exists and returns it.
     * When an entry already exists, returns it only if returnExist is truthy;
     * otherwise (or for an unknown language / missing path) returns null.
     */
    addData : function(lang, path, returnExist) {
      if(path) {
        var l = this.getMap()[lang];
        if(l) {
          var key = path.join("/");
          if(l[key] === undefined) {
            var d = new appudo_cli_creator.view.desktop.I18nDataNode();
            l[key] = d
            return d;
          }
          if(returnExist) {
            return l[key];
          }
        }
      }
      return null;
    },

    /** Returns the data node stored for lang/path, or undefined/null if absent. */
    getData : function(lang, path) {
      if(path) {
        var l = this.getMap()[lang];
        if(l) {
          var key = path.join("/");
          return l[key];
        }
      }
      return null;
    },

    /**
     * Removes and returns all data nodes of a language whose joined path
     * starts with the given path prefix.
     */
    removePrefix : function(lang, path) {
      var r = [];
      var l = this.getMap()[lang];
      if(l) {
        var key = path.join("/");
        var pn = Object.getOwnPropertyNames(l);
        for(var i = 0; i < pn.length; i++) {
          var n = pn[i];
          if(n.startsWith(key)) {
            r.push(l[n]);
            delete l[n];
          }
        }
      }
      return r;
    },

    /** Removes and returns the data node stored for lang/path, or null if absent. */
    removeData : function(lang, path) {
      var l = this.getMap()[lang];
      if(l) {
        var key = path.join("/");
        if(l[key] === undefined) {
          return null;
        }
        var d = l[key];
        delete l[key];
        return d;
      }
      return null;
    },

    /** Registers a language with an empty data map (no-op if already present). */
    addLang : function(name) {
      if(!this.getMap()[name]) {
        this.getMap()[name] = {};
      }
    },

    /** Drops a language together with all of its data nodes. */
    removeLang : function(name) {
      delete this.getMap()[name];
    },

    /** Replaces the backing map; silently ignores non-object arguments. */
    setMap : function(m) {
      if(typeof m == 'object') {
        this.__map = m;
      }
    },

    /** Returns the backing map ({ lang : { key : I18nDataNode } }). */
    getMap : function() {
      return this.__map;
    }
  },

  construct : function()
  {
    this.base(arguments);
    // Start with an empty language map.
    this.setMap({});
  }
}); | apache-2.0 |
whuwxl/etcd | etcdserver/v3_server.go | 21481 | // Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package etcdserver
import (
"bytes"
"context"
"encoding/binary"
"time"
pb "github.com/coreos/etcd/etcdserver/etcdserverpb"
"github.com/coreos/etcd/etcdserver/membership"
"github.com/coreos/etcd/internal/auth"
"github.com/coreos/etcd/internal/lease"
"github.com/coreos/etcd/internal/lease/leasehttp"
"github.com/coreos/etcd/internal/mvcc"
"github.com/coreos/etcd/raft"
"github.com/gogo/protobuf/proto"
)
const (
// In the health case, there might be a small gap (10s of entries) between
// the applied index and committed index.
// However, if the committed entries are very heavy to apply, the gap might grow.
// We should stop accepting new proposals if the gap growing to a certain point.
maxGapBetweenApplyAndCommitIndex = 5000
)
// RaftKV exposes the v3 key-value operations. Writes (Put, DeleteRange, Txn
// with writes, Compact) are proposed through raft and applied once committed;
// reads may be served locally depending on their consistency mode (see Range).
type RaftKV interface {
	Range(ctx context.Context, r *pb.RangeRequest) (*pb.RangeResponse, error)
	Put(ctx context.Context, r *pb.PutRequest) (*pb.PutResponse, error)
	DeleteRange(ctx context.Context, r *pb.DeleteRangeRequest) (*pb.DeleteRangeResponse, error)
	Txn(ctx context.Context, r *pb.TxnRequest) (*pb.TxnResponse, error)
	Compact(ctx context.Context, r *pb.CompactionRequest) (*pb.CompactionResponse, error)
}
type Lessor interface {
// LeaseGrant sends LeaseGrant request to raft and apply it after committed.
LeaseGrant(ctx context.Context, r *pb.LeaseGrantRequest) (*pb.LeaseGrantResponse, error)
// LeaseRevoke sends LeaseRevoke request to raft and apply it after committed.
LeaseRevoke(ctx context.Context, r *pb.LeaseRevokeRequest) (*pb.LeaseRevokeResponse, error)
// LeaseRenew renews the lease with given ID. The renewed TTL is returned. Or an error
// is returned.
LeaseRenew(ctx context.Context, id lease.LeaseID) (int64, error)
// LeaseTimeToLive retrieves lease information.
LeaseTimeToLive(ctx context.Context, r *pb.LeaseTimeToLiveRequest) (*pb.LeaseTimeToLiveResponse, error)
// LeaseLeases lists all leases.
LeaseLeases(ctx context.Context, r *pb.LeaseLeasesRequest) (*pb.LeaseLeasesResponse, error)
}
// Authenticator exposes the v3 auth operations: enabling/disabling auth,
// authenticating, and managing users, roles, and their permissions.
type Authenticator interface {
	AuthEnable(ctx context.Context, r *pb.AuthEnableRequest) (*pb.AuthEnableResponse, error)
	AuthDisable(ctx context.Context, r *pb.AuthDisableRequest) (*pb.AuthDisableResponse, error)
	Authenticate(ctx context.Context, r *pb.AuthenticateRequest) (*pb.AuthenticateResponse, error)
	UserAdd(ctx context.Context, r *pb.AuthUserAddRequest) (*pb.AuthUserAddResponse, error)
	UserDelete(ctx context.Context, r *pb.AuthUserDeleteRequest) (*pb.AuthUserDeleteResponse, error)
	UserChangePassword(ctx context.Context, r *pb.AuthUserChangePasswordRequest) (*pb.AuthUserChangePasswordResponse, error)
	UserGrantRole(ctx context.Context, r *pb.AuthUserGrantRoleRequest) (*pb.AuthUserGrantRoleResponse, error)
	UserGet(ctx context.Context, r *pb.AuthUserGetRequest) (*pb.AuthUserGetResponse, error)
	UserRevokeRole(ctx context.Context, r *pb.AuthUserRevokeRoleRequest) (*pb.AuthUserRevokeRoleResponse, error)
	RoleAdd(ctx context.Context, r *pb.AuthRoleAddRequest) (*pb.AuthRoleAddResponse, error)
	RoleGrantPermission(ctx context.Context, r *pb.AuthRoleGrantPermissionRequest) (*pb.AuthRoleGrantPermissionResponse, error)
	RoleGet(ctx context.Context, r *pb.AuthRoleGetRequest) (*pb.AuthRoleGetResponse, error)
	RoleRevokePermission(ctx context.Context, r *pb.AuthRoleRevokePermissionRequest) (*pb.AuthRoleRevokePermissionResponse, error)
	RoleDelete(ctx context.Context, r *pb.AuthRoleDeleteRequest) (*pb.AuthRoleDeleteResponse, error)
	UserList(ctx context.Context, r *pb.AuthUserListRequest) (*pb.AuthUserListResponse, error)
	RoleList(ctx context.Context, r *pb.AuthRoleListRequest) (*pb.AuthRoleListResponse, error)
}
// Range serves a key-range read. Linearizable reads (the default) first
// block on the read-index notification so the local store reflects every
// entry committed before the request; serializable reads answer from the
// local store immediately. The permission check and the read itself run
// through doSerialize so they are retried together if the auth revision
// changes in between.
func (s *EtcdServer) Range(ctx context.Context, r *pb.RangeRequest) (*pb.RangeResponse, error) {
defer warnOfExpensiveReadOnlyRangeRequest(time.Now(), r)
if !r.Serializable {
// Wait until this member has applied at least the leader's commit index.
err := s.linearizableReadNotify(ctx)
if err != nil {
return nil, err
}
}
var resp *pb.RangeResponse
var err error
// chk verifies the caller may read [Key, RangeEnd).
chk := func(ai *auth.AuthInfo) error {
return s.authStore.IsRangePermitted(ai, r.Key, r.RangeEnd)
}
// get performs the actual read; resp/err are captured by closure.
get := func() { resp, err = s.applyV3Base.Range(nil, r) }
if serr := s.doSerialize(ctx, chk, get); serr != nil {
return nil, serr
}
return resp, err
}
// Put proposes a single key write through raft and returns the applied
// response once the entry commits.
func (s *EtcdServer) Put(ctx context.Context, r *pb.PutRequest) (*pb.PutResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{Put: r})
if err != nil {
return nil, err
}
return resp.(*pb.PutResponse), nil
}
// DeleteRange proposes a range deletion through raft and returns the
// applied response once the entry commits.
func (s *EtcdServer) DeleteRange(ctx context.Context, r *pb.DeleteRangeRequest) (*pb.DeleteRangeResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{DeleteRange: r})
if err != nil {
return nil, err
}
return resp.(*pb.DeleteRangeResponse), nil
}
// Txn executes a transaction. Read-only transactions are answered locally
// (after a linearizable-read wait unless every member range is marked
// serializable); any transaction containing writes is proposed through raft.
func (s *EtcdServer) Txn(ctx context.Context, r *pb.TxnRequest) (*pb.TxnResponse, error) {
if isTxnReadonly(r) {
if !isTxnSerializable(r) {
// At least one read requires linearizability; wait for the read index.
err := s.linearizableReadNotify(ctx)
if err != nil {
return nil, err
}
}
var resp *pb.TxnResponse
var err error
// Authorize every range touched by the transaction as one unit.
chk := func(ai *auth.AuthInfo) error {
return checkTxnAuth(s.authStore, ai, r)
}
defer warnOfExpensiveReadOnlyRangeRequest(time.Now(), r)
get := func() { resp, err = s.applyV3Base.Txn(r) }
if serr := s.doSerialize(ctx, chk, get); serr != nil {
return nil, serr
}
return resp, err
}
// Write transaction: go through raft consensus.
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{Txn: r})
if err != nil {
return nil, err
}
return resp.(*pb.TxnResponse), nil
}
// isTxnSerializable reports whether every operation in both branches of the
// transaction is a range request explicitly marked serializable; only then
// can the whole transaction skip the linearizable-read barrier.
func isTxnSerializable(r *pb.TxnRequest) bool {
	for _, op := range r.Success {
		if rr := op.GetRequestRange(); rr == nil || !rr.Serializable {
			return false
		}
	}
	for _, op := range r.Failure {
		if rr := op.GetRequestRange(); rr == nil || !rr.Serializable {
			return false
		}
	}
	return true
}
// isTxnReadonly reports whether the transaction consists solely of range
// (read) operations in both its success and failure branches; such a
// transaction can be served locally without going through raft.
func isTxnReadonly(r *pb.TxnRequest) bool {
	for _, op := range r.Success {
		if op.GetRequestRange() == nil {
			return false
		}
	}
	for _, op := range r.Failure {
		if op.GetRequestRange() == nil {
			return false
		}
	}
	return true
}
// Compact proposes a key-space compaction through raft. When r.Physical is
// set it additionally blocks until the backend has finished deleting keys
// and then forces a backend commit (see inline comment for why).
func (s *EtcdServer) Compact(ctx context.Context, r *pb.CompactionRequest) (*pb.CompactionResponse, error) {
result, err := s.processInternalRaftRequestOnce(ctx, pb.InternalRaftRequest{Compaction: r})
if r.Physical && result != nil && result.physc != nil {
// physc is closed when physical deletion finishes.
<-result.physc
// The compaction is done deleting keys; the hash is now settled
// but the data is not necessarily committed. If there's a crash,
// the hash may revert to a hash prior to compaction completing
// if the compaction resumes. Force the finished compaction to
// commit so it won't resume following a crash.
s.be.ForceCommit()
}
if err != nil {
return nil, err
}
if result.err != nil {
return nil, result.err
}
resp := result.resp.(*pb.CompactionResponse)
if resp == nil {
resp = &pb.CompactionResponse{}
}
if resp.Header == nil {
resp.Header = &pb.ResponseHeader{}
}
// Report the store revision as of compaction completion.
resp.Header.Revision = s.kv.Rev()
return resp, nil
}
// LeaseGrant creates a lease through raft. If the client did not choose an
// ID, a cluster-unique positive ID is generated here before proposing.
func (s *EtcdServer) LeaseGrant(ctx context.Context, r *pb.LeaseGrantRequest) (*pb.LeaseGrantResponse, error) {
// no id given? choose one
for r.ID == int64(lease.NoLease) {
// only use positive int64 id's
r.ID = int64(s.reqIDGen.Next() & ((1 << 63) - 1))
}
resp, err := s.raftRequestOnce(ctx, pb.InternalRaftRequest{LeaseGrant: r})
if err != nil {
return nil, err
}
return resp.(*pb.LeaseGrantResponse), nil
}
// LeaseRevoke revokes a lease (and its attached keys) through raft.
func (s *EtcdServer) LeaseRevoke(ctx context.Context, r *pb.LeaseRevokeRequest) (*pb.LeaseRevokeResponse, error) {
resp, err := s.raftRequestOnce(ctx, pb.InternalRaftRequest{LeaseRevoke: r})
if err != nil {
return nil, err
}
return resp.(*pb.LeaseRevokeResponse), nil
}
// LeaseRenew extends a lease's TTL. Only the primary lessor (the leader)
// can renew, so a follower forwards the request to the leader over the
// peer HTTP endpoint instead of going through raft.
func (s *EtcdServer) LeaseRenew(ctx context.Context, id lease.LeaseID) (int64, error) {
ttl, err := s.lessor.Renew(id)
if err == nil { // already requested to primary lessor(leader)
return ttl, nil
}
// Any error other than "not primary" is a real failure (e.g. not found).
if err != lease.ErrNotPrimary {
return -1, err
}
cctx, cancel := context.WithTimeout(ctx, s.Cfg.ReqTimeout())
defer cancel()
// renewals don't go through raft; forward to leader manually
for cctx.Err() == nil && err != nil {
leader, lerr := s.waitLeader(cctx)
if lerr != nil {
return -1, lerr
}
// Try each advertised peer URL of the leader until one answers.
for _, url := range leader.PeerURLs {
lurl := url + leasehttp.LeasePrefix
ttl, err = leasehttp.RenewHTTP(cctx, id, lurl, s.peerRt)
if err == nil || err == lease.ErrLeaseNotFound {
return ttl, err
}
}
}
return -1, ErrTimeout
}
// LeaseTimeToLive returns remaining/granted TTL (and optionally attached
// keys) for a lease. The leader answers from its local lessor; a follower
// forwards the query to the leader over the peer HTTP endpoint.
func (s *EtcdServer) LeaseTimeToLive(ctx context.Context, r *pb.LeaseTimeToLiveRequest) (*pb.LeaseTimeToLiveResponse, error) {
if s.Leader() == s.ID() {
// primary; timetolive directly from leader
le := s.lessor.Lookup(lease.LeaseID(r.ID))
if le == nil {
return nil, lease.ErrLeaseNotFound
}
// TODO: fill out ResponseHeader
resp := &pb.LeaseTimeToLiveResponse{Header: &pb.ResponseHeader{}, ID: r.ID, TTL: int64(le.Remaining().Seconds()), GrantedTTL: le.TTL()}
if r.Keys {
// Copy the attached keys into the response as raw bytes.
ks := le.Keys()
kbs := make([][]byte, len(ks))
for i := range ks {
kbs[i] = []byte(ks[i])
}
resp.Keys = kbs
}
return resp, nil
}
cctx, cancel := context.WithTimeout(ctx, s.Cfg.ReqTimeout())
defer cancel()
// forward to leader
for cctx.Err() == nil {
leader, err := s.waitLeader(cctx)
if err != nil {
return nil, err
}
// Try each of the leader's peer URLs until one responds.
for _, url := range leader.PeerURLs {
lurl := url + leasehttp.LeaseInternalPrefix
resp, err := leasehttp.TimeToLiveHTTP(cctx, lease.LeaseID(r.ID), r.Keys, lurl, s.peerRt)
if err == nil {
return resp.LeaseTimeToLiveResponse, nil
}
if err == lease.ErrLeaseNotFound {
return nil, err
}
}
}
return nil, ErrTimeout
}
// LeaseLeases lists the IDs of all leases currently known to this member's
// lessor, wrapped in per-lease status entries.
func (s *EtcdServer) LeaseLeases(ctx context.Context, r *pb.LeaseLeasesRequest) (*pb.LeaseLeasesResponse, error) {
	leases := s.lessor.Leases()
	statuses := make([]*pb.LeaseStatus, 0, len(leases))
	for _, l := range leases {
		statuses = append(statuses, &pb.LeaseStatus{ID: int64(l.ID)})
	}
	return &pb.LeaseLeasesResponse{Header: newHeader(s), Leases: statuses}, nil
}
// waitLeader blocks until the cluster has a known leader and returns its
// membership entry. It re-checks once per election interval and gives up
// when the context is done or the server is stopping.
func (s *EtcdServer) waitLeader(ctx context.Context) (*membership.Member, error) {
leader := s.cluster.Member(s.Leader())
for leader == nil {
// wait an election
dur := time.Duration(s.Cfg.ElectionTicks) * time.Duration(s.Cfg.TickMs) * time.Millisecond
select {
case <-time.After(dur):
leader = s.cluster.Member(s.Leader())
case <-s.stopping:
return nil, ErrStopped
case <-ctx.Done():
return nil, ErrNoLeader
}
}
// A leader without advertised peer URLs cannot be forwarded to.
if leader == nil || len(leader.PeerURLs) == 0 {
return nil, ErrNoLeader
}
return leader, nil
}
// Alarm activates/deactivates/lists cluster alarms via a raft proposal.
func (s *EtcdServer) Alarm(ctx context.Context, r *pb.AlarmRequest) (*pb.AlarmResponse, error) {
resp, err := s.raftRequestOnce(ctx, pb.InternalRaftRequest{Alarm: r})
if err != nil {
return nil, err
}
return resp.(*pb.AlarmResponse), nil
}
// AuthEnable turns authentication on cluster-wide via a raft proposal.
// It uses raftRequestOnce (no retry) because the request itself changes
// the auth revision.
func (s *EtcdServer) AuthEnable(ctx context.Context, r *pb.AuthEnableRequest) (*pb.AuthEnableResponse, error) {
resp, err := s.raftRequestOnce(ctx, pb.InternalRaftRequest{AuthEnable: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthEnableResponse), nil
}
// AuthDisable turns authentication off cluster-wide via a raft proposal.
func (s *EtcdServer) AuthDisable(ctx context.Context, r *pb.AuthDisableRequest) (*pb.AuthDisableResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthDisable: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthDisableResponse), nil
}
// Authenticate checks the user's password and, on success, proposes an
// internal authenticate request through raft to issue a token. If the auth
// store's revision changes between the password check and the proposal
// being applied, the whole sequence is retried to avoid issuing a token
// based on stale auth state.
func (s *EtcdServer) Authenticate(ctx context.Context, r *pb.AuthenticateRequest) (*pb.AuthenticateResponse, error) {
// Ensure the password check below sees all committed auth updates.
if err := s.linearizableReadNotify(ctx); err != nil {
return nil, err
}
var resp proto.Message
for {
checkedRevision, err := s.AuthStore().CheckPassword(r.Name, r.Password)
if err != nil {
if err != auth.ErrAuthNotEnabled {
plog.Errorf("invalid authentication request to user %s was issued", r.Name)
}
return nil, err
}
// Pre-generate the simple-token prefix so the applied entry is deterministic.
st, err := s.AuthStore().GenTokenPrefix()
if err != nil {
return nil, err
}
internalReq := &pb.InternalAuthenticateRequest{
Name: r.Name,
Password: r.Password,
SimpleToken: st,
}
resp, err = s.raftRequestOnce(ctx, pb.InternalRaftRequest{Authenticate: internalReq})
if err != nil {
return nil, err
}
// Retry if auth state changed while the proposal was in flight.
if checkedRevision == s.AuthStore().Revision() {
break
}
plog.Infof("revision when password checked is obsolete, retrying")
}
return resp.(*pb.AuthenticateResponse), nil
}
// The following user/role management RPCs are uniform thin wrappers: each
// proposes its request through raft (raftRequest retries on stale auth
// revision) and type-asserts the applied response.
// UserAdd creates a new user.
func (s *EtcdServer) UserAdd(ctx context.Context, r *pb.AuthUserAddRequest) (*pb.AuthUserAddResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthUserAdd: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthUserAddResponse), nil
}
// UserDelete removes a user.
func (s *EtcdServer) UserDelete(ctx context.Context, r *pb.AuthUserDeleteRequest) (*pb.AuthUserDeleteResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthUserDelete: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthUserDeleteResponse), nil
}
// UserChangePassword updates a user's password.
func (s *EtcdServer) UserChangePassword(ctx context.Context, r *pb.AuthUserChangePasswordRequest) (*pb.AuthUserChangePasswordResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthUserChangePassword: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthUserChangePasswordResponse), nil
}
// UserGrantRole grants a role to a user.
func (s *EtcdServer) UserGrantRole(ctx context.Context, r *pb.AuthUserGrantRoleRequest) (*pb.AuthUserGrantRoleResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthUserGrantRole: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthUserGrantRoleResponse), nil
}
// UserGet retrieves a user's details.
func (s *EtcdServer) UserGet(ctx context.Context, r *pb.AuthUserGetRequest) (*pb.AuthUserGetResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthUserGet: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthUserGetResponse), nil
}
// UserList lists all users.
func (s *EtcdServer) UserList(ctx context.Context, r *pb.AuthUserListRequest) (*pb.AuthUserListResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthUserList: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthUserListResponse), nil
}
// UserRevokeRole revokes a role from a user.
func (s *EtcdServer) UserRevokeRole(ctx context.Context, r *pb.AuthUserRevokeRoleRequest) (*pb.AuthUserRevokeRoleResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthUserRevokeRole: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthUserRevokeRoleResponse), nil
}
// RoleAdd creates a new role.
func (s *EtcdServer) RoleAdd(ctx context.Context, r *pb.AuthRoleAddRequest) (*pb.AuthRoleAddResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthRoleAdd: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthRoleAddResponse), nil
}
// RoleGrantPermission grants a key-range permission to a role.
func (s *EtcdServer) RoleGrantPermission(ctx context.Context, r *pb.AuthRoleGrantPermissionRequest) (*pb.AuthRoleGrantPermissionResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthRoleGrantPermission: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthRoleGrantPermissionResponse), nil
}
// RoleGet retrieves a role's details.
func (s *EtcdServer) RoleGet(ctx context.Context, r *pb.AuthRoleGetRequest) (*pb.AuthRoleGetResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthRoleGet: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthRoleGetResponse), nil
}
// RoleList lists all roles.
func (s *EtcdServer) RoleList(ctx context.Context, r *pb.AuthRoleListRequest) (*pb.AuthRoleListResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthRoleList: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthRoleListResponse), nil
}
// RoleRevokePermission revokes a permission from a role.
func (s *EtcdServer) RoleRevokePermission(ctx context.Context, r *pb.AuthRoleRevokePermissionRequest) (*pb.AuthRoleRevokePermissionResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthRoleRevokePermission: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthRoleRevokePermissionResponse), nil
}
// RoleDelete removes a role.
func (s *EtcdServer) RoleDelete(ctx context.Context, r *pb.AuthRoleDeleteRequest) (*pb.AuthRoleDeleteResponse, error) {
resp, err := s.raftRequest(ctx, pb.InternalRaftRequest{AuthRoleDelete: r})
if err != nil {
return nil, err
}
return resp.(*pb.AuthRoleDeleteResponse), nil
}
// raftRequestOnce proposes the request through raft exactly once and
// returns the applied response or an error (from the proposal machinery
// or from applying the entry).
func (s *EtcdServer) raftRequestOnce(ctx context.Context, r pb.InternalRaftRequest) (proto.Message, error) {
result, err := s.processInternalRaftRequestOnce(ctx, r)
if err != nil {
return nil, err
}
if result.err != nil {
return nil, result.err
}
return result.resp, nil
}
// raftRequest proposes the request and retries as long as applying it
// fails with ErrAuthOldRevision (the auth state moved under the request).
// Any other outcome — success or failure — is returned immediately; note
// the retry loop itself is unbounded and relies on ctx/apply errors to end.
func (s *EtcdServer) raftRequest(ctx context.Context, r pb.InternalRaftRequest) (proto.Message, error) {
for {
resp, err := s.raftRequestOnce(ctx, r)
if err != auth.ErrAuthOldRevision {
return resp, err
}
}
}
// doSerialize handles the auth logic, with permissions checked by "chk", for a serialized request "get". Returns a non-nil error on authentication failure.
// It retries the check+read pair whenever the auth revision observed in the
// request no longer matches the store, so the permission check and the read
// are consistent with each other (TOCTOU protection).
func (s *EtcdServer) doSerialize(ctx context.Context, chk func(*auth.AuthInfo) error, get func()) error {
for {
ai, err := s.AuthInfoFromCtx(ctx)
if err != nil {
return err
}
if ai == nil {
// chk expects non-nil AuthInfo; use empty credentials
ai = &auth.AuthInfo{}
}
if err = chk(ai); err != nil {
// Stale credentials: re-resolve auth info and re-check.
if err == auth.ErrAuthOldRevision {
continue
}
return err
}
// fetch response for serialized request
get()
// empty credentials or current auth info means no need to retry
if ai.Revision == 0 || ai.Revision == s.authStore.Revision() {
return nil
}
// avoid TOCTOU error, retry of the request is required.
}
}
// processInternalRaftRequestOnce stamps the request with an ID and auth
// header, marshals it, proposes it through raft, and waits for the apply
// result keyed by that ID. It rejects work when the apply backlog is too
// large and times out proposals via Cfg.ReqTimeout().
func (s *EtcdServer) processInternalRaftRequestOnce(ctx context.Context, r pb.InternalRaftRequest) (*applyResult, error) {
// Backpressure: refuse new proposals when apply lags commit too far.
ai := s.getAppliedIndex()
ci := s.getCommittedIndex()
if ci > ai+maxGapBetweenApplyAndCommitIndex {
return nil, ErrTooManyRequests
}
r.Header = &pb.RequestHeader{
ID: s.reqIDGen.Next(),
}
// Attach the caller's identity so the applier can authorize the request.
authInfo, err := s.AuthInfoFromCtx(ctx)
if err != nil {
return nil, err
}
if authInfo != nil {
r.Header.Username = authInfo.Username
r.Header.AuthRevision = authInfo.Revision
}
data, err := r.Marshal()
if err != nil {
return nil, err
}
if len(data) > int(s.Cfg.MaxRequestBytes) {
return nil, ErrRequestTooLarge
}
id := r.ID
if id == 0 {
id = r.Header.ID
}
// Register for the apply notification before proposing to avoid a race.
ch := s.w.Register(id)
cctx, cancel := context.WithTimeout(ctx, s.Cfg.ReqTimeout())
defer cancel()
start := time.Now()
// NOTE(review): the error returned by Propose is ignored here; a failed
// proposal surfaces only as a timeout below — confirm this is intended.
s.r.Propose(cctx, data)
proposalsPending.Inc()
defer proposalsPending.Dec()
select {
case x := <-ch:
return x.(*applyResult), nil
case <-cctx.Done():
proposalsFailed.Inc()
s.w.Trigger(id, nil) // GC wait
return nil, s.parseProposeCtxErr(cctx.Err(), start)
case <-s.done:
return nil, ErrStopped
}
}
// Watchable returns a watchable interface attached to the etcdserver.
func (s *EtcdServer) Watchable() mvcc.WatchableKV { return s.KV() }
// linearizableReadLoop serves batched linearizable reads: each iteration
// swaps in a fresh notifier, asks raft for a read index tagged with a
// unique request ID, waits until this member's applied index reaches it,
// and then wakes every reader that queued up behind the old notifier.
func (s *EtcdServer) linearizableReadLoop() {
var rs raft.ReadState
for {
// Unique 8-byte tag used to match ReadIndex responses to this request.
ctx := make([]byte, 8)
binary.BigEndian.PutUint64(ctx, s.reqIDGen.Next())
select {
case <-s.readwaitc:
case <-s.stopping:
return
}
// Swap notifiers so new readers wait on the next round.
nextnr := newNotifier()
s.readMu.Lock()
nr := s.readNotifier
s.readNotifier = nextnr
s.readMu.Unlock()
cctx, cancel := context.WithTimeout(context.Background(), s.Cfg.ReqTimeout())
if err := s.r.ReadIndex(cctx, ctx); err != nil {
cancel()
if err == raft.ErrStopped {
return
}
plog.Errorf("failed to get read index from raft: %v", err)
nr.notify(err)
continue
}
cancel()
var (
timeout bool
done bool
)
// Drain read states until ours arrives or the request times out.
for !timeout && !done {
select {
case rs = <-s.r.readStateC:
done = bytes.Equal(rs.RequestCtx, ctx)
if !done {
// a previous request might time out. now we should ignore the response of it and
// continue waiting for the response of the current requests.
plog.Warningf("ignored out-of-date read index response (want %v, got %v)", rs.RequestCtx, ctx)
}
case <-time.After(s.Cfg.ReqTimeout()):
plog.Warningf("timed out waiting for read index response")
nr.notify(ErrTimeout)
timeout = true
case <-s.stopping:
return
}
}
if !done {
continue
}
// Block until the local store has applied up to the read index.
if ai := s.getAppliedIndex(); ai < rs.Index {
select {
case <-s.applyWait.Wait(rs.Index):
case <-s.stopping:
return
}
}
// unblock all l-reads requested at indices before rs.Index
nr.notify(nil)
}
}
// linearizableReadNotify registers this caller with the current read
// notifier, nudges linearizableReadLoop to start a round if idle, and
// blocks until the read index has been applied locally (or the context
// is done / the server stops).
func (s *EtcdServer) linearizableReadNotify(ctx context.Context) error {
s.readMu.RLock()
nc := s.readNotifier
s.readMu.RUnlock()
// signal linearizable loop for current notify if it hasn't been already
select {
case s.readwaitc <- struct{}{}:
default:
}
// wait for read state notification
select {
case <-nc.c:
return nc.err
case <-ctx.Done():
return ctx.Err()
case <-s.done:
return ErrStopped
}
}
// AuthInfoFromCtx resolves the caller's identity: first from the request
// context (token metadata); if absent and client-cert auth is enabled,
// from the TLS peer certificate. Returns (nil, nil) for anonymous callers.
func (s *EtcdServer) AuthInfoFromCtx(ctx context.Context) (*auth.AuthInfo, error) {
authInfo, err := s.AuthStore().AuthInfoFromCtx(ctx)
if authInfo != nil || err != nil {
return authInfo, err
}
if !s.Cfg.ClientCertAuthEnabled {
return nil, nil
}
// Fall back to identity carried by the client TLS certificate.
authInfo = s.AuthStore().AuthInfoFromTLS(ctx)
return authInfo, nil
}
| apache-2.0 |
sumbad/inscriptum | src/hub/auth/auth.effect.ts | 953 | import { AUTH_ACTION, AuthAction, AuthActionAuth } from 'hub/auth/auth.action';
import hub from 'hub';
import { defer, Observable, of } from 'rxjs';
import { filter, switchMap, map, catchError } from 'rxjs/operators';
import { auth, silentAuth } from 'services/Auth.service';
// Effect stream: reacts to AUTH actions by running either a silent token
// refresh or an interactive login redirect, then feeds AUTH_DONE or
// AUTH_FAIL back into the hub. Errors are logged and mapped to AUTH_FAIL
// so the stream itself never terminates.
export const auth$: Observable<AuthAction> = hub.$.pipe(
filter((action) => AUTH_ACTION.AUTH === action.type),
switchMap((d: AuthActionAuth) => {
// Default post-login destination when the action does not provide one.
const redirectUri = d.payload.redirectUri ?? `${document.location.origin}/drafts`;
// defer() so the auth call runs per subscription, not at pipeline build time.
return defer(() => d.payload.silent ? silentAuth() : auth(redirectUri)).pipe(
map(
(payload): AuthAction =>
({
type: AUTH_ACTION.AUTH_DONE,
payload,
})
),
catchError(
(error) => {
console.warn(error);
return of<AuthAction>({
type: AUTH_ACTION.AUTH_FAIL,
payload: error,
});
}
)
);
})
);
| apache-2.0 |
ChanJLee/EnglishBaby | EnglishBabyProject/app/src/main/java/com/chan/englishbaby/view/LessonDialog.java | 2164 | package com.chan.englishbaby.view;
import android.app.Dialog;
import android.content.Context;
import android.widget.RadioGroup;
import com.chan.englishbaby.injector.annotation.ContextLife;
import com.chan.englishbaby.utils.ConstUtil;
import com.chan.englishboby.R;
import javax.inject.Inject;
import butterknife.Bind;
import butterknife.ButterKnife;
import butterknife.OnClick;
/**
 * Modal dialog that lets the user pick a lesson difficulty level.
 *
 * <p>The chosen level (one of the {@code ConstUtil.ITEM_*} constants) is
 * delivered through the {@link OnLevelSelected} callback when the user
 * confirms; cancelling simply dismisses the dialog.
 */
public class LessonDialog extends Dialog {

    /** Callback invoked with the chosen level when the user confirms. */
    public interface OnLevelSelected {
        void onLevelSelected(short level);
    }

    private OnLevelSelected levelListener;

    @Bind(R.id.id_radio)
    RadioGroup levelGroup;

    @Inject
    public LessonDialog(@ContextLife("activity") Context context) {
        super(context, R.style.SimpleDialogTheme);
        setContentView(R.layout.dialog_lesson);
        ButterKnife.bind(this);
    }

    @OnClick(R.id.id_accept)
    void onAccept() {
        // Without a listener there is nothing to deliver; keep the dialog open
        // (matches the original behavior of returning before dismiss()).
        if (levelListener == null) return;
        levelListener.onLevelSelected(checkedLevel());
        dismiss();
    }

    @OnClick(R.id.id_cancel)
    void onCancel() {
        dismiss();
    }

    public void setOnLevelSelected(OnLevelSelected onLevelSelected) {
        levelListener = onLevelSelected;
    }

    /** Maps the checked radio button to its {@code ConstUtil} level constant. */
    private short checkedLevel() {
        int checkedId = levelGroup.getCheckedRadioButtonId();
        if (checkedId == R.id.id_none) {
            return ConstUtil.ITEM_NONE;
        }
        if (checkedId == R.id.id_level_0) {
            return ConstUtil.ITEM_LEVEL_0;
        }
        if (checkedId == R.id.id_level_1) {
            return ConstUtil.ITEM_LEVEL_1;
        }
        if (checkedId == R.id.id_level_2) {
            return ConstUtil.ITEM_LEVEL_2;
        }
        if (checkedId == R.id.id_level_3) {
            return ConstUtil.ITEM_LEVEL_3;
        }
        if (checkedId == R.id.id_level_4) {
            return ConstUtil.ITEM_LEVEL_4;
        }
        // Anything else (including id_level_5) selects the highest level,
        // mirroring the original switch's default branch.
        return ConstUtil.ITEM_LEVEL_5;
    }
}
| apache-2.0 |
codespare/RoslynClrHeapAllocationAnalyzer | ClrHeapAllocationsAnalyzer.Test/CallSiteImplicitAllocationAnalyzerTests.cs | 3629 | using System.Collections.Immutable;
using ClrHeapAllocationAnalyzer;
using Microsoft.CodeAnalysis.CSharp;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace ClrHeapAllocationsAnalyzer.Test
{
// Verifies that CallSiteImplicitAllocationAnalyzer reports hidden heap
// allocations at call sites: implicit params-array creation and boxing via
// non-overridden virtual calls on value types.
[TestClass]
public class CallSiteImplicitAllocationAnalyzerTests : AllocationAnalyzerTests
{
// Every call that relies on an implicit params array (or converts an array
// to object[]) should produce a diagnostic; an explicitly typed array
// passed to a matching params parameter should not.
[TestMethod]
public void CallSiteImplicitAllocation_Param()
{
var sampleProgram =
@"using System;
Params();
Params(1, 2);
Params(new [] { 1, 2}); // explicit, so no warning
ParamsWithObjects(new [] { 1, 2}); // explicit, but converted to objects, so stil la warning?!
// Only 4 args and above use the params overload of String.Format
var test = String.Format(""Testing {0}, {1}, {2}, {3}"", 1, ""blah"", 2.0m, 'c');
public void Params(params int[] args)
{
}
public void ParamsWithObjects(params object[] args)
{
}";
var analyser = new CallSiteImplicitAllocationAnalyzer();
var info = ProcessCode(analyser, sampleProgram, ImmutableArray.Create(SyntaxKind.InvocationExpression));
Assert.AreEqual(4, info.Allocations.Count);
// Diagnostic: (3,1): warning HeapAnalyzerImplicitParamsRule: This call site is calling into a function with a 'params' parameter. This results in an array allocation even if no parameter is passed in for the params parameter
AssertEx.ContainsDiagnostic(info.Allocations, id: CallSiteImplicitAllocationAnalyzer.ParamsParameterRule.Id, line: 3, character: 1);
// Diagnostic: (4,1): warning HeapAnalyzerImplicitParamsRule: This call site is calling into a function with a 'params' parameter. This results in an array allocation even if no parameter is passed in for the params parameter
AssertEx.ContainsDiagnostic(info.Allocations, id: CallSiteImplicitAllocationAnalyzer.ParamsParameterRule.Id, line: 4, character: 1);
// Diagnostic: (6,1): warning HeapAnalyzerImplicitParamsRule: This call site is calling into a function with a 'params' parameter. This results in an array allocation even if no parameter is passed in for the params parameter
AssertEx.ContainsDiagnostic(info.Allocations, id: CallSiteImplicitAllocationAnalyzer.ParamsParameterRule.Id, line: 6, character: 1);
// Diagnostic: (9,12): warning HeapAnalyzerImplicitParamsRule: This call site is calling into a function with a 'params' parameter. This results in an array allocation even if no parameter is passed in for the params parameter
AssertEx.ContainsDiagnostic(info.Allocations, id: CallSiteImplicitAllocationAnalyzer.ParamsParameterRule.Id, line: 9, character: 12);
}
// GetHashCode on a struct that does not override it requires boxing, so it
// should be flagged; a struct overriding GetHashCode should be clean.
[TestMethod]
public void CallSiteImplicitAllocation_NonOverridenMethodOnStruct()
{
var sampleProgram =
@"using System;
var normal = new Normal().GetHashCode();
var overridden = new OverrideToHashCode().GetHashCode();
struct Normal
{
}
struct OverrideToHashCode
{
public override int GetHashCode()
{
return -1;
}
}";
var analyser = new CallSiteImplicitAllocationAnalyzer();
var info = ProcessCode(analyser, sampleProgram, ImmutableArray.Create(SyntaxKind.InvocationExpression));
Assert.AreEqual(1, info.Allocations.Count);
// Diagnostic: (3,14): warning HeapAnalyzerValueTypeNonOverridenCallRule: Non-overriden virtual method call on a value type adds a boxing or constrained instruction
AssertEx.ContainsDiagnostic(info.Allocations, id: CallSiteImplicitAllocationAnalyzer.ValueTypeNonOverridenCallRule.Id, line: 3, character: 14);
}
}
| apache-2.0 |
pfranza/LanguagePropertyTranslator | src/main/java/com/peterfranza/propertytranslator/TranslatorDictionaryEvolutionConfiguration.java | 775 | package com.peterfranza.propertytranslator;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.shared.model.fileset.FileSet;
import com.peterfranza.propertytranslator.translators.TranslationType;
/**
 * Maven plugin configuration bean describing one dictionary-evolution step:
 * which translation type to apply, the set of property files to process,
 * and the delimiter used in the dictionary entries.
 *
 * <p>Fields are public because the Maven plugin framework injects them
 * directly via the {@code @Parameter} annotations.
 */
public class TranslatorDictionaryEvolutionConfiguration {
@Parameter(property = "translationType", alias = "translationType", required = true)
public TranslationType translationType;
@Parameter(required = true)
public FileSet fileset;
// Separator between key and translation in dictionary lines; defaults to '|'.
@Parameter(property = "delimiter", alias = "delimiter", required = true, defaultValue = "|")
public String delimiter = "|";
// NOTE(review): toString omits the fileset field — confirm whether that is
// intentional (FileSet may not print usefully) or an oversight.
@Override
public String toString() {
return "TranslatorDictionaryEvolutionConfiguration [translationType=" + translationType + ", delimiter="
+ delimiter + "]";
}
}
| apache-2.0 |
luj1985/dionysus | dionysus-rest/src/main/java/com/huixinpn/dionysus/dto/user/ConsultantData.java | 391 | package com.huixinpn.dionysus.dto.user;
import com.huixinpn.dionysus.domain.user.Consultant;
import lombok.Data;
import lombok.NoArgsConstructor;
/**
 * DTO exposing a {@link Consultant} over the REST API, adding the number of
 * appointments on top of the base user fields.
 *
 * <p>NOTE(review): the field name {@code appointmentaNum} looks like a typo
 * of "appointmentNum", but renaming it would change the Lombok-generated
 * getter/setter (and any serialized JSON key), so it is kept as-is.
 */
@Data
@NoArgsConstructor
public class ConsultantData extends UserData{
// Count of the consultant's appointments at the time the DTO was built.
private int appointmentaNum;
public ConsultantData(Consultant consultant){
super(consultant);
this.appointmentaNum = consultant.getAppointments().size();
}
}
| apache-2.0 |
brianhks/tablesaw | src/main/java/tablesaw/RuleMethod.java | 667 | package tablesaw;
import tablesaw.rules.Rule;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
/**
 * Binds a {@link Rule} instance to one of its {@link Method}s so that the
 * method can later be invoked reflectively against that rule.
 */
public class RuleMethod
{
	private final Rule rule;
	private final Method method;

	public RuleMethod(Rule rule, Method method)
	{
		this.rule = rule;
		this.method = method;
	}

	/**
	 * Invokes the bound zero-argument method on the rule.
	 *
	 * @return whatever the underlying method returns
	 */
	public Object invoke() throws InvocationTargetException, IllegalAccessException
	{
		return method.invoke(rule);
	}

	/**
	 * Invokes the bound single-argument method on the rule.
	 *
	 * @param arg the argument forwarded to the underlying method
	 */
	public void invoke(Object arg) throws InvocationTargetException, IllegalAccessException
	{
		method.invoke(rule, arg);
	}

	public Method getMethod()
	{
		return method;
	}
}
| apache-2.0 |
jthoenes/ips | ips/src/main/ruby/3rdparty/facets/core/facets/array/only.rb | 395 | class Array
# Returns the _only_ element in the array. Raises an IndexError if
# the array's size is not 1.
#
# [5].only # -> 5
# [1,2,3].only # -> IndexError
# [].only # -> IndexError
#
# CREDIT: Gavin Sinclair, Noah Gibbs
def only
unless size == 1
raise IndexError, "Array#only called on non-single-element array"
end
first
end
end
| apache-2.0 |
meruvian/yama-archetypes | starter/src/main/resources/archetype-resources/webapi/src/test/java/webapi/service/RoleServiceTest.java | 1468 | #set( $symbol_pound = '#' )
#set( $symbol_dollar = '$' )
#set( $symbol_escape = '\' )
package ${package}.webapi.service;
import static org.hamcrest.CoreMatchers.is;
import static org.junit.Assert.assertThat;
import javax.inject.Inject;
import javax.validation.ConstraintViolationException;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.meruvian.yama.core.role.Role;
import ${package}.webapi.Application;
import org.springframework.boot.test.SpringApplicationConfiguration;
import org.springframework.data.domain.Page;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.test.context.web.WebAppConfiguration;
import org.springframework.transaction.annotation.Transactional;
/**
 * Integration tests for {@code RoleService}, run against the Spring Boot
 * application context; {@code @Transactional} rolls back each test's data.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringApplicationConfiguration(classes = Application.class)
@WebAppConfiguration
@Transactional
public class RoleServiceTest {
@Inject
private RoleService roleService;
// The seed data is expected to contain role id "1" named ADMINISTRATOR.
@Test
public void testInitializedRoles() {
Role role = roleService.getRoleById("1");
assertThat(role.getName(), is("ADMINISTRATOR"));
}
// An empty-keyword search should return exactly the two seeded roles.
@Test
public void testNumberOfInitializedRole() {
Page<Role> roles = roleService.findRoleByKeyword("", null);
assertThat(roles.getTotalElements(), is(2L));
}
// Saving an empty Role must violate bean-validation constraints; the
// follow-up query forces a flush so the violation actually surfaces.
@Test(expected = ConstraintViolationException.class)
public void testRoleValidation() {
Role role = new Role();
roleService.saveRole(role);
roleService.findRoleByKeyword("", null);
}
}
| apache-2.0 |
weghst/typhon | typhon-dist/src/main/scripts/war/bs/B_s045.java | 1970 | /*
* Copyright 2014 The Skfiy Open Association.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package war.bs;
import org.skfiy.typhon.dobj.BSaSkill;
import org.skfiy.typhon.spi.war.AttackEntry;
import org.skfiy.typhon.spi.war.BSaScript;
import org.skfiy.typhon.spi.war.BSaWapper;
import org.skfiy.typhon.spi.war.FightObject;
import org.skfiy.typhon.spi.war.MultiAttackResult;
import org.skfiy.typhon.spi.war.RecoveryEntry;
/**
 * 周泰 (Zhou Tai) battle-skill script: deals one attack to the defender's
 * current fight target, then heals the attacker for 40% of its missing HP.
 * Both the damage entry and the recovery entry are returned in a single
 * {@link MultiAttackResult}.
 *
 * @author Kevin Zou <kevinz@skfiy.org>
 */
public class B_s045 extends BSaScript {
@Override
protected Object doExecute(BSaWapper bsaWapper) {
BSaSkill bsaSkill = bsaWapper.getBsaSkill();
int atk = getAtk(bsaWapper, bsaWapper.getAobj());
FightObject aobj = bsaWapper.getAobj();
MultiAttackResult mar = new MultiAttackResult();
// Resolve the single attack against the defender's chosen target.
FightObject goal = bsaWapper.getDefenderEntity().findFightGoal();
AttackEntry ae = getWarProvider().attack0(bsaWapper.getWarInfo().getTerrain(), aobj,
goal, atk, getDef(bsaWapper, goal), bsaSkill.getFactor() * bsaWapper.getFactor());
goal.decrementHp((int) ae.getVal());
mar.addTarget(ae);
// Self-heal: restore 40% of the attacker's missing HP (maxHp - hp).
int rehp = (int) ((aobj.getMaxHp() - aobj.getHp()) * 0.4);
aobj.incrementHp(rehp);
RecoveryEntry re = new RecoveryEntry();
re.setLab(aobj.getLab());
re.setType("HP");
re.setVal(rehp);
mar.addTarget(re);
return mar;
}
}
| apache-2.0 |
datenstrudel/bulbs-core | src/integTest/java/net/datenstrudel/bulbs/core/domain/model/infrastructure/DomainServiceLocatorIT.java | 747 | package net.datenstrudel.bulbs.core.domain.model.infrastructure;
import net.datenstrudel.bulbs.core.AbstractBulbsIT;
import net.datenstrudel.bulbs.core.domain.model.bulb.BulbsHwService;
import org.junit.Test;
import static org.junit.Assert.assertNotNull;
/**
 * Integration tests for {@code DomainServiceLocator}: verifies the singleton
 * accessor and Spring-backed bean lookup both resolve inside a running
 * application context.
 *
 * @author Thomas Wendzinski
 */
public class DomainServiceLocatorIT extends AbstractBulbsIT {
// The locator singleton must be available once the context is up.
@Test
public void testInstance() {
System.out.println("instance");
DomainServiceLocator result = DomainServiceLocator.instance();
assertNotNull(result);
}
// Bean lookup by type must resolve the BulbsHwService from the context.
@Test
public void testGetBean() {
System.out.println("getBean");
Object result = DomainServiceLocator.getBean(BulbsHwService.class);
assertNotNull(result);
}
}
| apache-2.0 |
open-o/nfvo | lcm/lcm/pub/nfvi/vim/vimadaptor.py | 4716 | # Copyright 2016 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import sys
import traceback
from requests import RequestException
from lcm.pub.nfvi.vim.lib.syscomm import fun_name
from lcm.pub.nfvi.vim import const
from lcm.pub.nfvi.vim.lib.vimexception import VimException
logger = logging.getLogger(__name__)
class VimAdaptor:
    """Facade that routes VIM operations to a concrete backend API.

    The backend (OpenStack or VMware multi-VIM) is chosen from
    ``connectInfo['vimtype']`` at construction time and logged in
    immediately. Results are normalized to ``[ret_code, data]`` where
    ret_code 0 means success and 1 carries an error message.
    """

    def __init__(self, connectInfo):
        logger.info("[VimAdaptor]connectInfo=%s" % connectInfo)
        # authInfo holds [0, auth_data] after a successful login,
        # [1, error_message] otherwise.
        self.apiImpl, self.authInfo = None, [1, "No auth info"]
        self.create_api(connectInfo)
        self.force_login(connectInfo)

    def create_api(self, connectInfo):
        """Instantiate the backend API object matching the configured vimtype.

        Unknown types leave ``apiImpl`` as None and record the error in
        ``authInfo`` so later calls fail with a message instead of logging in.
        """
        vimtype = connectInfo['vimtype'] if 'vimtype' in connectInfo else None
        logger.info("call %s, vimtype=%s" % (fun_name(), vimtype))
        if vimtype == const.VIM_OPENSTACK:
            # Imported lazily so only the selected backend's deps are required.
            from lcm.pub.nfvi.vim.api.openstack.api import OpenstackApi
            self.apiImpl = OpenstackApi()
        elif vimtype == const.VIM_VMWARE:
            from lcm.pub.nfvi.vim.api.multivim.api import MultiVimApi
            self.apiImpl = MultiVimApi()
        else:
            self.authInfo = [1, "Unsupported vimtype(%s)" % vimtype]

    def api_call(self, funname, fun, *args):
        """Invoke ``fun(auth, *args)`` and normalize any failure to [1, msg].

        NOTE(review): callers pass ``self.apiImpl.<method>`` as ``fun``; if
        ``apiImpl`` is None (unsupported vimtype) the attribute access raises
        at the call site before this guard runs — confirm intended.
        """
        logger.info("call %s%s" % (funname, str(args)))
        ret = None
        try:
            ret = fun(self.authInfo[1], *args) if self.authInfo[0] == 0 else self.authInfo
        except VimException as e:
            # str(e) works on both Python 2 and 3; e.message is Python-2-only.
            ret = [1, str(e)]
        except RequestException as e:
            logger.error("request=%s, url=%s" % (e.request.headers._store, e.request.url))
            logger.error(traceback.format_exc())
            # Some transport errors stringify to ''; fall back to exc_info.
            ret = [1, str(e) or str(sys.exc_info())]
        except Exception as ex:
            logger.error(traceback.format_exc())
            ret = [1, str(ex) or str(sys.exc_info())]
        except:  # noqa: E722 -- also normalize non-Exception errors (original behavior)
            logger.error(traceback.format_exc())
            ret = [1, str(sys.exc_info())]
        logger.info("[%s]ret=%s" % (funname, ret))
        return ret

    def force_login(self, connectInfo):
        """Log into the backend (if one was created) and store the result."""
        if self.apiImpl:
            logger.info("call %s(%s)" % (fun_name(), connectInfo))
            try:
                self.authInfo = self.apiImpl.login(connectInfo)
            except VimException as e:
                self.authInfo = [1, str(e)]
            except Exception as ex:
                logger.error(traceback.format_exc())
                logger.error(str(sys.exc_info()))
                self.authInfo = [1, str(ex) or str(sys.exc_info())]
            except:  # noqa: E722 -- see api_call
                logger.error(traceback.format_exc())
                self.authInfo = [1, str(sys.exc_info())]
            logger.info("self.authInfo=%s" % self.authInfo)

    # --- thin delegations to the backend; all return [ret_code, data] ---

    def query_net(self, net_id):
        return self.api_call(fun_name(), self.apiImpl.query_net, net_id)

    def query_nets(self):
        return self.api_call(fun_name(), self.apiImpl.query_nets)

    def query_subnet(self, subnet_id):
        return self.api_call(fun_name(), self.apiImpl.query_subnet, subnet_id)

    def query_port(self, port_id):
        return self.api_call(fun_name(), self.apiImpl.query_port, port_id)

    def create_image(self, data):
        return self.api_call(fun_name(), self.apiImpl.create_image, data)

    def get_image(self, image_id):
        return self.api_call(fun_name(), self.apiImpl.get_image, image_id)

    def get_images(self):
        return self.api_call(fun_name(), self.apiImpl.get_images)

    def delete_image(self, image_id):
        return self.api_call(fun_name(), self.apiImpl.delete_image, image_id)

    def create_network(self, data):
        return self.api_call(fun_name(), self.apiImpl.create_network, data)

    def delete_network(self, network_id):
        return self.api_call(fun_name(), self.apiImpl.delete_network, network_id)

    def delete_subnet(self, subnet_id):
        return self.api_call(fun_name(), self.apiImpl.delete_subnet, subnet_id)

    def create_port(self, data):
        return self.api_call(fun_name(), self.apiImpl.create_port, data)

    def delete_port(self, port_id):
        return self.api_call(fun_name(), self.apiImpl.delete_port, port_id)
vband/Sete-Pecados | Assets/Scripts/Mini_GananciaOLD/LoadImage.cs | 308 | using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
public class LoadImage : MonoBehaviour {

	// On startup, copy the correct face sprite chosen by the running
	// minigame ("minigame" object's mecanica component) into this UI Image.
	void Start () {
		var minigame = GameObject.Find("minigame").GetComponent<mecanica>();
		GetComponent<Image>().sprite = minigame.rosto_correto;
	}
}
| apache-2.0 |
evankyle/sync_gateway | src/github.com/couchbase/sync_gateway/rest/api.go | 7516 | // Copyright (c) 2012 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package rest
import (
"encoding/json"
"fmt"
"net/http"
httpprof "net/http/pprof"
"os"
"runtime"
"runtime/pprof"
"strconv"
"strings"
"github.com/couchbase/sg-bucket"
"github.com/couchbase/sync_gateway/base"
"github.com/couchbase/sync_gateway/db"
)
const ServerName = "Couchbase Sync Gateway"
const VersionNumber float64 = 1.0 // API/feature level
const VersionBuildNumberString = "@PRODUCT_VERSION@" // Real string substituted by Gerrit
const VersionCommitSHA = "@COMMIT_SHA@" // Real string substituted by Gerrit
// This appears in the "Server:" header of HTTP responses
var VersionString string
// This includes build number; appears in the response of "GET /" and the initial log message
var LongVersionString string
// init derives VersionString (the "Server:" header value) and
// LongVersionString from build-time constants. Gerrit substitutes real
// values into VersionBuildNumberString and VersionCommitSHA; if the '@'
// placeholder is still present this is an unofficial build and git
// metadata (GitBranch/GitCommit/GitDirty, defined elsewhere) is used.
func init() {
	if VersionBuildNumberString[0] != '@' {
		//Split version number and build number (optional)
		versionTokens := strings.Split(VersionBuildNumberString, "-")
		BuildVersionString := versionTokens[0]
		var BuildNumberString string
		if len(versionTokens) > 1 {
			// Trailing ';' visually separates build number from commit SHA.
			BuildNumberString = fmt.Sprintf("%s;", versionTokens[1])
		}
		// e.g. "Couchbase Sync Gateway/1.0.0(123;abcdef0)"
		LongVersionString = fmt.Sprintf("%s/%s(%s%.7s)",
			ServerName, BuildVersionString, BuildNumberString, VersionCommitSHA)
		VersionString = fmt.Sprintf("%s/%s", ServerName, BuildVersionString)
	} else {
		LongVersionString = fmt.Sprintf("%s/%s(%.7s%s)", ServerName, GitBranch, GitCommit, GitDirty)
		VersionString = fmt.Sprintf("%s/unofficial", ServerName)
	}
}
// HTTP handler for the root ("/")
// handleRoot serves the CouchDB-style welcome document for "GET /",
// flagging admin-port requests with an "ADMIN" key.
func (h *handler) handleRoot() error {
	body := map[string]interface{}{
		"couchdb": "Welcome",
		"version": LongVersionString,
		"vendor":  db.Body{"name": ServerName, "version": VersionNumber},
	}
	if h.privs == adminPrivs {
		body["ADMIN"] = true
	}
	h.writeJSON(body)
	return nil
}
// handleAllDbs returns the names of all databases on this server
// (the "_all_dbs" endpoint).
func (h *handler) handleAllDbs() error {
	h.writeJSON(h.server.AllDatabaseNames())
	return nil
}
// handleCompact triggers a database compaction and reports the number
// of revisions removed in the JSON response.
func (h *handler) handleCompact() error {
	revsDeleted, err := h.db.Compact()
	if err == nil {
		h.writeJSON(db.Body{"revs": revsDeleted})
	}
	return err
}
// handleVacuum removes orphaned attachments from the bucket and reports
// how many were deleted.
func (h *handler) handleVacuum() error {
	attsDeleted, err := db.VacuumAttachments(h.db.Bucket)
	if err == nil {
		h.writeJSON(db.Body{"atts": attsDeleted})
	}
	return err
}
// handleFlush deletes and recreates the database's bucket ("flush").
// Only buckets implementing sgbucket.DeleteableBucket support this;
// anything else gets a 503.
func (h *handler) handleFlush() error {
	bucket, ok := h.db.Bucket.(sgbucket.DeleteableBucket)
	if !ok {
		return base.HTTPErrorf(http.StatusServiceUnavailable, "Bucket does not support flush")
	}
	// Tear the database down, delete the underlying bucket, then re-create
	// the database from its saved config. The first error encountered wins.
	name := h.db.Name
	config := h.server.GetDatabaseConfig(name)
	h.server.RemoveDatabase(name)
	err := bucket.CloseAndDelete()
	if _, err2 := h.server.AddDatabaseFromConfig(config); err == nil {
		err = err2
	}
	return err
}
// handleResync re-runs the sync function over every document and reports
// how many documents had their channel assignments changed.
func (h *handler) handleResync() error {
	docsChanged, err := h.db.UpdateAllDocChannels(true, false)
	if err == nil {
		h.writeJSON(db.Body{"changes": docsChanged})
	}
	return err
}
// instanceStartTime returns the database start time in microseconds since
// the Unix epoch, as a JSON number (CouchDB's "instance_start_time" format).
func (h *handler) instanceStartTime() json.Number {
	return json.Number(strconv.FormatInt(h.db.StartTime.UnixNano()/1000, 10))
}
// handleGetDB returns the CouchDB-style database info document for
// "GET /{db}". HEAD requests only need the status line, so the body
// (and the LastSequence lookup) is skipped entirely.
func (h *handler) handleGetDB() error {
	if h.rq.Method == "HEAD" {
		return nil
	}
	lastSeq, err := h.db.LastSequence()
	if err != nil {
		return err
	}
	response := db.Body{
		"db_name":              h.db.Name,
		"update_seq":           lastSeq,
		"committed_update_seq": lastSeq,
		"instance_start_time":  h.instanceStartTime(),
		"compact_running":      false, // TODO: Implement this
		"purge_seq":            0,     // TODO: Should track this value
		"disk_format_version":  0,     // Probably meaningless, but add for compatibility
		//"doc_count": h.db.DocCount(), // Removed: too expensive to compute (#278)
	}
	h.writeJSON(response)
	return nil
}
// Stub handler for create-DB on the public API: returns HTTP 412 if the
// db already exists and 403 if it doesn't (databases can't be created
// over the public API). Fixes issue #562.
func (h *handler) handleCreateTarget() error {
	dbname := h.PathVar("targetdb")
	_, err := h.server.GetDatabase(dbname)
	if err != nil {
		return base.HTTPErrorf(http.StatusForbidden, "Creating a DB over the public API is unsupported")
	}
	return base.HTTPErrorf(http.StatusPreconditionFailed, "Database already exists")
}
// handleEFC handles _ensure_full_commit.
// no-op. CouchDB's replicator sends this, so don't barf. Status must be 201.
func (h *handler) handleEFC() error {
	h.writeJSONStatus(http.StatusCreated, db.Body{
		"ok":                  true,
		"instance_start_time": h.instanceStartTime(),
	})
	return nil
}
// ADMIN API to turn Go CPU profiling on/off.
//
// POST body: {"file": "<path>"}; the URL's {name} path variable selects a
// named runtime profile (heap, goroutine, ...). Semantics:
//   - name + file:    write a snapshot of that named profile to the file.
//   - no name + file: start CPU profiling into the file.
//   - no name, no file: stop a running CPU profile.
//   - name, no file:  400 error.
func (h *handler) handleProfiling() error {
	profileName := h.PathVar("name")
	var params struct {
		File string `json:"file"`
	}
	body, err := h.readBody()
	if err != nil {
		return err
	}
	if len(body) > 0 {
		if err = json.Unmarshal(body, &params); err != nil {
			return err
		}
	}
	if params.File != "" {
		f, err := os.Create(params.File)
		if err != nil {
			return err
		}
		if profileName != "" {
			defer f.Close()
			if profile := pprof.Lookup(profileName); profile != nil {
				profile.WriteTo(f, 0)
				base.Logf("Wrote %s profile to %s", profileName, params.File)
			} else {
				return base.HTTPErrorf(http.StatusNotFound, "No such profile %q", profileName)
			}
		} else {
			// NOTE: f is deliberately NOT closed here — StartCPUProfile
			// keeps writing to it until a later request stops the profile.
			base.Logf("Starting CPU profile to %s ...", params.File)
			pprof.StartCPUProfile(f)
		}
	} else {
		if profileName != "" {
			return base.HTTPErrorf(http.StatusBadRequest, "Missing JSON 'file' parameter")
		} else {
			base.Log("...ending CPU profile.")
			pprof.StopCPUProfile()
		}
	}
	return nil
}
// ADMIN API to dump Go heap profile
func (h *handler) handleHeapProfiling() error {
var params struct {
File string `json:"file"`
}
body, err := h.readBody()
if err != nil {
return err
}
if err = json.Unmarshal(body, ¶ms); err != nil {
return err
}
base.Logf("Dumping heap profile to %s ...", params.File)
f, err := os.Create(params.File)
if err != nil {
return err
}
pprof.WriteHeapProfile(f)
f.Close()
return nil
}
// The handlers below expose the standard net/http/pprof endpoints on the
// admin port by delegating to the corresponding httpprof handler.

// handlePprofGoroutine dumps stack traces of all current goroutines.
func (h *handler) handlePprofGoroutine() error {
	httpprof.Handler("goroutine").ServeHTTP(h.response, h.rq)
	return nil
}

// handlePprofCmdline reports the running program's command line.
func (h *handler) handlePprofCmdline() error {
	httpprof.Cmdline(h.response, h.rq)
	return nil
}

// handlePprofSymbol resolves program-counter values posted to it.
func (h *handler) handlePprofSymbol() error {
	httpprof.Symbol(h.response, h.rq)
	return nil
}

// handlePprofHeap serves heap allocation samples.
func (h *handler) handlePprofHeap() error {
	httpprof.Handler("heap").ServeHTTP(h.response, h.rq)
	return nil
}

// handlePprofProfile serves a CPU profile (duration set by the "seconds"
// query parameter, per net/http/pprof).
func (h *handler) handlePprofProfile() error {
	httpprof.Profile(h.response, h.rq)
	return nil
}

// handlePprofBlock serves goroutine blocking profiles.
func (h *handler) handlePprofBlock() error {
	httpprof.Handler("block").ServeHTTP(h.response, h.rq)
	return nil
}

// handlePprofThreadcreate serves OS-thread-creation profiles.
func (h *handler) handlePprofThreadcreate() error {
	httpprof.Handler("threadcreate").ServeHTTP(h.response, h.rq)
	return nil
}
// stats is the response payload for the ADMIN /_stats endpoint.
type stats struct {
	MemStats runtime.MemStats
}

// ADMIN API to expose runtime and other stats
func (h *handler) handleStats() error {
	st := stats{}
	runtime.ReadMemStats(&st.MemStats)
	h.writeJSON(st)
	return nil
}
| apache-2.0 |
fuzzmz/fever_saved_download | start.py | 2190 | __author__ = 'Constantin Serban'
import re
import urllib
from multiprocessing.dummy import Pool as ThreadPool
from sql_connect import main as gr
from optparse import OptionParser
import os
def get_links(items):
    """Collect image-URL regex matches (jpg/gif/png) from each row's first field.

    ``items`` is an iterable of rows (tuples/lists) whose first element is a
    text blob; ``None`` rows are skipped. Each match is the full group tuple
    returned by ``re.findall``, and matches within a single row are appended
    in reverse order of appearance, mirroring the original scan direction.
    """
    links = []
    for row in items:
        if row is None:
            continue
        found = re.findall(r"(http(s?):([/|.|\w|\s])*\.(?:jpg|gif|png))", row[0])
        for match in reversed(found):
            links.append(match)
    return links
def parallel_start(download_map):
    """Download every (link, location) pair using a pool of 4 worker threads."""
    workers = ThreadPool(4)
    results = workers.map(download_files, download_map)
    workers.close()
    workers.join()
def download_files(download_map):
    """Download one image to disk (best effort).

    ``download_map`` is a ``(match, location)`` pair where ``match`` is the
    regex group tuple produced by ``get_links`` (``match[0]`` is the full
    URL) and ``location`` is the destination directory.

    Fixes over the original: the destination file is only created after the
    download succeeds (no empty file / leaked handle on failure), the path
    is built with ``os.path.join`` instead of a hard-coded Windows ``"\\"``,
    and the file handle is closed via a context manager. A failed download
    is reported and skipped, as before.
    """
    link = download_map[0][0]
    location = download_map[1]
    filename = link[link.rfind('/') + 1:]
    try:
        data = urllib.urlopen(link).read()
    except IOError:
        print("Couldn't download image from " + link + "\n")
        return
    with open(os.path.join(location, filename), 'wb') as out:
        out.write(data)
def main():
    """Parse CLI options, fetch saved feed items, and download their images.

    Options:
        -s / --save      keep items marked as saved (do not unsave them)
        -d / --download  destination directory (default: "down" beside script)
        -c / --config    configuration file location
    """
    parser = OptionParser()
    parser.add_option("-s", "--save", action="store_true", dest="keep_saved", default=False,
                      help="Do not mark items as unsaved")
    parser.add_option("-d", "--download", action="store", dest="download_location", default=False,
                      help="Download location")
    parser.add_option("-c", "--config", action="store", dest="config_location", default=False,
                      help="Configuration file location")
    (options, args) = parser.parse_args()

    keep_saved = options.keep_saved
    download_location = options.download_location
    config_location = options.config_location

    if not download_location:
        # Default to a "down" directory beside this script; os.path.join
        # keeps the path portable instead of hard-coding a Windows "\\".
        download_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), "down")
    if not os.path.exists(download_location):
        os.makedirs(download_location)

    items = gr(keep_saved, config_location)
    links = get_links(items)
    # Pair every link with the destination directory for the thread pool.
    download_map = [(link, download_location) for link in links]
    parallel_start(download_map)
if __name__ == "__main__":
main() | apache-2.0 |
111pontes/ydk-py | cisco-ios-xe/ydk/models/cisco_ios_xe/Cisco_IOS_XE_bgp_oper.py | 70898 | """ Cisco_IOS_XE_bgp_oper
This module contains a collection of YANG definitions for
monitoring BGP information.Copyright (c) 2016\-2017 by Cisco Systems, Inc.All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class BgpAfiSafiEnum(Enum):
    """BGP address-family / subsequent-address-family identifiers.

    Auto-generated from the Cisco-IOS-XE-bgp-oper YANG model; member
    values are fixed by the model and must not change.
    """

    ipv4_mdt = 0
    ipv4_multicast = 1
    ipv4_unicast = 2
    ipv4_mvpn = 3
    ipv4_flowspec = 4
    ipv6_multicast = 5
    ipv6_unicast = 6
    ipv6_mvpn = 7
    ipv6_flowspec = 8
    l2vpn_vpls = 9
    l2vpn_e_vpn = 10
    nsap_unicast = 11
    rtfilter_unicast = 12
    vpnv4_multicast = 13
    vpnv4_unicast = 14
    vpnv6_unicast = 15
    vpnv6_multicast = 16
    vpnv4_flowspec = 17
    vpnv6_flowspec = 18

    @staticmethod
    def _meta_info():
        # Deferred import: the generated meta tables are only needed here.
        from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
        return meta._meta_table['BgpAfiSafiEnum']
class BgpFsmStateEnum(Enum):
    """BGP neighbor finite-state-machine states.

    Auto-generated from the Cisco-IOS-XE-bgp-oper YANG model; member
    values are fixed by the model and must not change.
    """

    idle = 0          # neighbor is in Idle state
    connect = 1       # neighbor is in Connect state
    active = 2        # neighbor is in Active state
    opensent = 3      # neighbor is in OpenSent state
    openconfirm = 4   # neighbor is in OpenConfirm state
    established = 5   # neighbor is in Established state
    nonnegotiated = 6 # neighbor is Non Negotiated

    @staticmethod
    def _meta_info():
        # Deferred import: the generated meta tables are only needed here.
        from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
        return meta._meta_table['BgpFsmStateEnum']
class BgpLinkEnum(Enum):
    """BGP peering link type (iBGP vs eBGP).

    Auto-generated from the Cisco-IOS-XE-bgp-oper YANG model; member
    values are fixed by the model and must not change.
    """

    internal = 0  # iBGP neighbors
    external = 1  # eBGP neighbors

    @staticmethod
    def _meta_info():
        # Deferred import: the generated meta tables are only needed here.
        from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
        return meta._meta_table['BgpLinkEnum']
class BgpModeEnum(Enum):
    """BGP TCP connection mode.

    Auto-generated from the Cisco-IOS-XE-bgp-oper YANG model; member
    values are fixed by the model and must not change.
    """

    active = 0   # active connection
    passive = 1  # passive connection

    @staticmethod
    def _meta_info():
        # Deferred import: the generated meta tables are only needed here.
        from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
        return meta._meta_table['BgpModeEnum']
class BgpOriginCodeEnum(Enum):
    """BGP route origin codes (IGP / EGP / incomplete).

    Auto-generated from the Cisco-IOS-XE-bgp-oper YANG model; member
    values are fixed by the model and must not change.
    """

    origin_igp = 0         # BGP origin code IGP
    origin_egp = 1         # BGP origin code EGP
    origin_incomplete = 2  # BGP origin code incomplete

    @staticmethod
    def _meta_info():
        # Deferred import: the generated meta tables are only needed here.
        from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
        return meta._meta_table['BgpOriginCodeEnum']
class BgpRouteOptionEnum(Enum):
    """Route-filtering options for BGP table queries.

    Auto-generated from the Cisco-IOS-XE-bgp-oper YANG model; member
    values are fixed by the model and must not change.
    """

    bgp_all_routes = 0           # All entries
    bgp_cidr_only_routes = 1     # CIDR ONLY route entries
    bgp_dampened_routes = 2      # Dampened route entries
    bgp_rib_fail_routes = 3      # Rib failure routes
    bgp_injected_routes = 4      # Injected route entries
    bgp_pending_routes = 5       # prefixes pending deletion
    bgp_inconsistent_routes = 6  # inconsistency paths

    @staticmethod
    def _meta_info():
        # Deferred import: the generated meta tables are only needed here.
        from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
        return meta._meta_table['BgpRouteOptionEnum']
class BgpRpkiStatusEnum(Enum):
    """RPKI validation status of a BGP path.

    Auto-generated from the Cisco-IOS-XE-bgp-oper YANG model; member
    values are fixed by the model and must not change.
    """

    rpki_valid = 0
    rpki_invalid = 1
    rpki_not_found = 2

    @staticmethod
    def _meta_info():
        # Deferred import: the generated meta tables are only needed here.
        from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
        return meta._meta_table['BgpRpkiStatusEnum']
class TcpFsmStateEnum(Enum):
    """TCP connection finite-state-machine states (RFC 793 names).

    Auto-generated from the Cisco-IOS-XE-bgp-oper YANG model; member
    values are fixed by the model and must not change.
    """

    closed = 0       # no connection
    listen = 1       # awaiting a connection request from any remote TCP
    synsent = 2      # sent a connection request, awaiting a matching one
    synrcvd = 3      # request received and sent, awaiting confirming ACK
    established = 4  # connection established
    finwait1 = 5     # awaiting remote termination request or ACK of ours
    finwait2 = 6     # awaiting termination request from the remote TCP
    closewait = 7    # awaiting termination request from the local user
    lastack = 8      # awaiting ACK of the termination request we sent
    closing = 9      # awaiting termination-request ACK from the remote
    timewait = 10    # waiting out the delay ensuring the remote saw our ACK

    @staticmethod
    def _meta_info():
        # Deferred import: the generated meta tables are only needed here.
        from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
        return meta._meta_table['TcpFsmStateEnum']
class BgpState(object):
"""
Data nodes for BGP entries.
.. attribute:: address_families
**type**\: :py:class:`AddressFamilies <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.AddressFamilies>`
.. attribute:: neighbors
**type**\: :py:class:`Neighbors <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.Neighbors>`
"""
_prefix = 'bgp-ios-xe-oper'
_revision = '2017-02-07'
def __init__(self):
self.address_families = BgpState.AddressFamilies()
self.address_families.parent = self
self.neighbors = BgpState.Neighbors()
self.neighbors.parent = self
class Neighbors(object):
"""
.. attribute:: neighbor
**type**\: list of :py:class:`Neighbor <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.Neighbors.Neighbor>`
"""
_prefix = 'bgp-ios-xe-oper'
_revision = '2017-02-07'
def __init__(self):
self.parent = None
self.neighbor = YList()
self.neighbor.parent = self
self.neighbor.name = 'neighbor'
class Neighbor(object):
"""
.. attribute:: afi_safi <key>
**type**\: :py:class:`BgpAfiSafiEnum <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpAfiSafiEnum>`
.. attribute:: vrf_name <key>
**type**\: str
.. attribute:: neighbor_id <key>
**type**\: str
.. attribute:: bgp_neighbor_counters
**type**\: :py:class:`BgpNeighborCounters <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.Neighbors.Neighbor.BgpNeighborCounters>`
.. attribute:: bgp_version
BGP version being used to communicate with the remote router
**type**\: int
**range:** 0..65535
.. attribute:: connection
**type**\: :py:class:`Connection <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.Neighbors.Neighbor.Connection>`
.. attribute:: description
**type**\: str
.. attribute:: installed_prefixes
number of installed prefixes
**type**\: int
**range:** 0..4294967295
.. attribute:: last_read
since BGP last received a message to this neighbor
**type**\: str
.. attribute:: last_write
since BGP last sent a message from this neighbor
**type**\: str
.. attribute:: link
**type**\: :py:class:`BgpLinkEnum <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpLinkEnum>`
.. attribute:: negotiated_cap
Information for bgp neighbor session negotiated capabilities
**type**\: list of str
.. attribute:: negotiated_keepalive_timers
**type**\: :py:class:`NegotiatedKeepaliveTimers <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.Neighbors.Neighbor.NegotiatedKeepaliveTimers>`
.. attribute:: prefix_activity
**type**\: :py:class:`PrefixActivity <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.Neighbors.Neighbor.PrefixActivity>`
.. attribute:: session_state
**type**\: :py:class:`BgpFsmStateEnum <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpFsmStateEnum>`
.. attribute:: transport
**type**\: :py:class:`Transport <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.Neighbors.Neighbor.Transport>`
.. attribute:: up_time
How long the bgp session has been up since the sessioin was established
**type**\: str
"""
_prefix = 'bgp-ios-xe-oper'
_revision = '2017-02-07'
def __init__(self):
self.parent = None
self.afi_safi = None
self.vrf_name = None
self.neighbor_id = None
self.bgp_neighbor_counters = BgpState.Neighbors.Neighbor.BgpNeighborCounters()
self.bgp_neighbor_counters.parent = self
self.bgp_version = None
self.connection = BgpState.Neighbors.Neighbor.Connection()
self.connection.parent = self
self.description = None
self.installed_prefixes = None
self.last_read = None
self.last_write = None
self.link = None
self.negotiated_cap = YLeafList()
self.negotiated_cap.parent = self
self.negotiated_cap.name = 'negotiated_cap'
self.negotiated_keepalive_timers = BgpState.Neighbors.Neighbor.NegotiatedKeepaliveTimers()
self.negotiated_keepalive_timers.parent = self
self.prefix_activity = BgpState.Neighbors.Neighbor.PrefixActivity()
self.prefix_activity.parent = self
self.session_state = None
self.transport = BgpState.Neighbors.Neighbor.Transport()
self.transport.parent = self
self.up_time = None
class NegotiatedKeepaliveTimers(object):
"""
.. attribute:: hold_time
Hold time
**type**\: int
**range:** 0..65535
.. attribute:: keepalive_interval
keepalive interval
**type**\: int
**range:** 0..65535
"""
_prefix = 'bgp-ios-xe-oper'
_revision = '2017-02-07'
def __init__(self):
self.parent = None
self.hold_time = None
self.keepalive_interval = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:negotiated-keepalive-timers'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.hold_time is not None:
return True
if self.keepalive_interval is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
return meta._meta_table['BgpState.Neighbors.Neighbor.NegotiatedKeepaliveTimers']['meta_info']
class BgpNeighborCounters(object):
"""
.. attribute:: inq_depth
Input Q depth
**type**\: int
**range:** 0..4294967295
.. attribute:: outq_depth
Output Q depth
**type**\: int
**range:** 0..4294967295
.. attribute:: received
**type**\: :py:class:`Received <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.Neighbors.Neighbor.BgpNeighborCounters.Received>`
.. attribute:: sent
**type**\: :py:class:`Sent <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.Neighbors.Neighbor.BgpNeighborCounters.Sent>`
"""
_prefix = 'bgp-ios-xe-oper'
_revision = '2017-02-07'
def __init__(self):
self.parent = None
self.inq_depth = None
self.outq_depth = None
self.received = BgpState.Neighbors.Neighbor.BgpNeighborCounters.Received()
self.received.parent = self
self.sent = BgpState.Neighbors.Neighbor.BgpNeighborCounters.Sent()
self.sent.parent = self
class Sent(object):
"""
.. attribute:: keepalives
KEEPALIVE messages
**type**\: int
**range:** 0..4294967295
.. attribute:: notifications
NOTIFICATION messages
**type**\: int
**range:** 0..4294967295
.. attribute:: opens
OPEN messages
**type**\: int
**range:** 0..4294967295
.. attribute:: route_refreshes
Route refresh messages
**type**\: int
**range:** 0..4294967295
.. attribute:: updates
UPDATE messages
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'bgp-ios-xe-oper'
_revision = '2017-02-07'
def __init__(self):
self.parent = None
self.keepalives = None
self.notifications = None
self.opens = None
self.route_refreshes = None
self.updates = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:sent'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.keepalives is not None:
return True
if self.notifications is not None:
return True
if self.opens is not None:
return True
if self.route_refreshes is not None:
return True
if self.updates is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
return meta._meta_table['BgpState.Neighbors.Neighbor.BgpNeighborCounters.Sent']['meta_info']
class Received(object):
"""
.. attribute:: keepalives
KEEPALIVE messages
**type**\: int
**range:** 0..4294967295
.. attribute:: notifications
NOTIFICATION messages
**type**\: int
**range:** 0..4294967295
.. attribute:: opens
OPEN messages
**type**\: int
**range:** 0..4294967295
.. attribute:: route_refreshes
Route refresh messages
**type**\: int
**range:** 0..4294967295
.. attribute:: updates
UPDATE messages
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'bgp-ios-xe-oper'
_revision = '2017-02-07'
def __init__(self):
self.parent = None
self.keepalives = None
self.notifications = None
self.opens = None
self.route_refreshes = None
self.updates = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:received'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.keepalives is not None:
return True
if self.notifications is not None:
return True
if self.opens is not None:
return True
if self.route_refreshes is not None:
return True
if self.updates is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
return meta._meta_table['BgpState.Neighbors.Neighbor.BgpNeighborCounters.Received']['meta_info']
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:bgp-neighbor-counters'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.inq_depth is not None:
return True
if self.outq_depth is not None:
return True
if self.received is not None and self.received._has_data():
return True
if self.sent is not None and self.sent._has_data():
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
return meta._meta_table['BgpState.Neighbors.Neighbor.BgpNeighborCounters']['meta_info']
class Connection(object):
"""
.. attribute:: last_reset
since the peering session was last reset
**type**\: str
.. attribute:: mode
**type**\: :py:class:`BgpModeEnum <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpModeEnum>`
.. attribute:: reset_reason
The reason for the last reset
**type**\: str
.. attribute:: state
TCP FSM state
**type**\: :py:class:`TcpFsmStateEnum <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.TcpFsmStateEnum>`
.. attribute:: total_dropped
number of times that a valid session has failed or been taken down
**type**\: int
**range:** 0..4294967295
.. attribute:: total_established
number of times a TCP and BGP connection has been successfully established
**type**\: int
**range:** 0..4294967295
"""
_prefix = 'bgp-ios-xe-oper'
_revision = '2017-02-07'
def __init__(self):
self.parent = None
self.last_reset = None
self.mode = None
self.reset_reason = None
self.state = None
self.total_dropped = None
self.total_established = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:connection'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.last_reset is not None:
return True
if self.mode is not None:
return True
if self.reset_reason is not None:
return True
if self.state is not None:
return True
if self.total_dropped is not None:
return True
if self.total_established is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
return meta._meta_table['BgpState.Neighbors.Neighbor.Connection']['meta_info']
class Transport(object):
"""
.. attribute:: foreign_host
Remote address to which the BGP session has established
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: foreign_port
Remote port used by the peer for the TCP session
**type**\: int
**range:** 0..4294967295
.. attribute:: local_host
Local address used for the TCP session
**type**\: one of the below types:
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
----
**type**\: str
**pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?
----
.. attribute:: local_port
Local TCP port used for TCP session
**type**\: int
**range:** 0..4294967295
.. attribute:: mss
Maximum Data segment size
**type**\: int
**range:** 0..4294967295
.. attribute:: path_mtu_discovery
**type**\: bool
"""
_prefix = 'bgp-ios-xe-oper'
_revision = '2017-02-07'
def __init__(self):
self.parent = None
self.foreign_host = None
self.foreign_port = None
self.local_host = None
self.local_port = None
self.mss = None
self.path_mtu_discovery = None
@property
def _common_path(self):
if self.parent is None:
raise YPYModelError('parent is not set . Cannot derive path.')
return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:transport'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return False
def _has_data(self):
if self.foreign_host is not None:
return True
if self.foreign_port is not None:
return True
if self.local_host is not None:
return True
if self.local_port is not None:
return True
if self.mss is not None:
return True
if self.path_mtu_discovery is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
return meta._meta_table['BgpState.Neighbors.Neighbor.Transport']['meta_info']
class PrefixActivity(object):
    """
    Grouping of the 'sent' and 'received' prefix-activity counter containers.

    .. attribute:: received

        **type**\: :py:class:`Received <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.Neighbors.Neighbor.PrefixActivity.Received>`

    .. attribute:: sent

        **type**\: :py:class:`Sent <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.Neighbors.Neighbor.PrefixActivity.Sent>`
    """

    # YANG module prefix/revision recorded by the ydk-py code generator.
    _prefix = 'bgp-ios-xe-oper'
    _revision = '2017-02-07'

    def __init__(self):
        self.parent = None
        # Children are linked back via .parent so _common_path can walk up
        # the containment tree when building the XPath.
        self.received = BgpState.Neighbors.Neighbor.PrefixActivity.Received()
        self.received.parent = self
        self.sent = BgpState.Neighbors.Neighbor.PrefixActivity.Sent()
        self.sent.parent = self

    class Sent(object):
        """
        .. attribute:: bestpaths

            Number of received prefixes installed as best paths
            **type**\: int
            **range:** 0..18446744073709551615

        .. attribute:: current_prefixes

            Current number of prefixes accepted
            **type**\: int
            **range:** 0..18446744073709551615

        .. attribute:: explicit_withdraw

            Number of times that a prefix has been withdrawn because it is no longer feasible
            **type**\: int
            **range:** 0..18446744073709551615

        .. attribute:: implicit_withdraw

            number of times that a prefix has been withdrawn and readvertised
            **type**\: int
            **range:** 0..18446744073709551615

        .. attribute:: multipaths

            Number of received prefixes installed as multipaths
            **type**\: int
            **range:** 0..18446744073709551615

        .. attribute:: total_prefixes

            Total number of prefixes accepted
            **type**\: int
            **range:** 0..18446744073709551615
        """

        _prefix = 'bgp-ios-xe-oper'
        _revision = '2017-02-07'

        def __init__(self):
            self.parent = None
            # Leaves default to None, meaning "value not present".
            self.bestpaths = None
            self.current_prefixes = None
            self.explicit_withdraw = None
            self.implicit_withdraw = None
            self.multipaths = None
            self.total_prefixes = None

        @property
        def _common_path(self):
            # Path is derived from the parent container; a detached node has no path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:sent'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # True as soon as any leaf is populated.
            if self.bestpaths is not None:
                return True
            if self.current_prefixes is not None:
                return True
            if self.explicit_withdraw is not None:
                return True
            if self.implicit_withdraw is not None:
                return True
            if self.multipaths is not None:
                return True
            if self.total_prefixes is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            # Import deferred to call time; resolves this node's generated metadata.
            from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
            return meta._meta_table['BgpState.Neighbors.Neighbor.PrefixActivity.Sent']['meta_info']

    class Received(object):
        """
        .. attribute:: bestpaths

            Number of received prefixes installed as best paths
            **type**\: int
            **range:** 0..18446744073709551615

        .. attribute:: current_prefixes

            Current number of prefixes accepted
            **type**\: int
            **range:** 0..18446744073709551615

        .. attribute:: explicit_withdraw

            Number of times that a prefix has been withdrawn because it is no longer feasible
            **type**\: int
            **range:** 0..18446744073709551615

        .. attribute:: implicit_withdraw

            number of times that a prefix has been withdrawn and readvertised
            **type**\: int
            **range:** 0..18446744073709551615

        .. attribute:: multipaths

            Number of received prefixes installed as multipaths
            **type**\: int
            **range:** 0..18446744073709551615

        .. attribute:: total_prefixes

            Total number of prefixes accepted
            **type**\: int
            **range:** 0..18446744073709551615
        """

        _prefix = 'bgp-ios-xe-oper'
        _revision = '2017-02-07'

        def __init__(self):
            self.parent = None
            # Leaves default to None, meaning "value not present".
            self.bestpaths = None
            self.current_prefixes = None
            self.explicit_withdraw = None
            self.implicit_withdraw = None
            self.multipaths = None
            self.total_prefixes = None

        @property
        def _common_path(self):
            # Path is derived from the parent container; a detached node has no path.
            if self.parent is None:
                raise YPYModelError('parent is not set . Cannot derive path.')
            return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:received'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # True as soon as any leaf is populated.
            if self.bestpaths is not None:
                return True
            if self.current_prefixes is not None:
                return True
            if self.explicit_withdraw is not None:
                return True
            if self.implicit_withdraw is not None:
                return True
            if self.multipaths is not None:
                return True
            if self.total_prefixes is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            # Import deferred to call time; resolves this node's generated metadata.
            from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
            return meta._meta_table['BgpState.Neighbors.Neighbor.PrefixActivity.Received']['meta_info']

    @property
    def _common_path(self):
        # Container path is appended to the owning neighbor's keyed path.
        if self.parent is None:
            raise YPYModelError('parent is not set . Cannot derive path.')
        return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:prefix-activity'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        # Delegates to the two child containers.
        if self.received is not None and self.received._has_data():
            return True
        if self.sent is not None and self.sent._has_data():
            return True
        return False

    @staticmethod
    def _meta_info():
        # Import deferred to call time; resolves this node's generated metadata.
        from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
        return meta._meta_table['BgpState.Neighbors.Neighbor.PrefixActivity']['meta_info']
@property
def _common_path(self):
    # Absolute XPath of this neighbor list entry; all three YANG list keys
    # (afi-safi, vrf-name, neighbor-id) must be set before a path can exist.
    if self.afi_safi is None:
        raise YPYModelError('Key property afi_safi is None')
    if self.vrf_name is None:
        raise YPYModelError('Key property vrf_name is None')
    if self.neighbor_id is None:
        raise YPYModelError('Key property neighbor_id is None')
    return '/Cisco-IOS-XE-bgp-oper:bgp-state/Cisco-IOS-XE-bgp-oper:neighbors/Cisco-IOS-XE-bgp-oper:neighbor[Cisco-IOS-XE-bgp-oper:afi-safi = ' + str(self.afi_safi) + '][Cisco-IOS-XE-bgp-oper:vrf-name = ' + str(self.vrf_name) + '][Cisco-IOS-XE-bgp-oper:neighbor-id = ' + str(self.neighbor_id) + ']'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Operational-state node: always read-only.
    return False
def _has_data(self):
    # True if any key, leaf, child container, or list entry is populated.
    if self.afi_safi is not None:
        return True
    if self.vrf_name is not None:
        return True
    if self.neighbor_id is not None:
        return True
    if self.bgp_neighbor_counters is not None and self.bgp_neighbor_counters._has_data():
        return True
    if self.bgp_version is not None:
        return True
    if self.connection is not None and self.connection._has_data():
        return True
    if self.description is not None:
        return True
    if self.installed_prefixes is not None:
        return True
    if self.last_read is not None:
        return True
    if self.last_write is not None:
        return True
    if self.link is not None:
        return True
    if self.negotiated_cap is not None:
        # negotiated_cap is a leaf-list: any non-None element counts as data.
        for child in self.negotiated_cap:
            if child is not None:
                return True
    if self.negotiated_keepalive_timers is not None and self.negotiated_keepalive_timers._has_data():
        return True
    if self.prefix_activity is not None and self.prefix_activity._has_data():
        return True
    if self.session_state is not None:
        return True
    if self.transport is not None and self.transport._has_data():
        return True
    if self.up_time is not None:
        return True
    return False
@staticmethod
def _meta_info():
    # Import deferred to call time; looks up this node's generated metadata.
    from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
    return meta._meta_table['BgpState.Neighbors.Neighbor']['meta_info']
@property
def _common_path(self):
    # Top-level container: the path is a fixed absolute XPath.
    return '/Cisco-IOS-XE-bgp-oper:bgp-state/Cisco-IOS-XE-bgp-oper:neighbors'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Operational-state node: always read-only.
    return False
def _has_data(self):
    # True if any neighbor list entry carries data.
    if self.neighbor is not None:
        for child_ref in self.neighbor:
            if child_ref._has_data():
                return True
    return False
@staticmethod
def _meta_info():
    # Import deferred to call time; looks up this node's generated metadata.
    from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
    return meta._meta_table['BgpState.Neighbors']['meta_info']
class AddressFamilies(object):
    """
    List container for per-(AFI/SAFI, VRF) BGP address-family state.

    .. attribute:: address_family

        **type**\: list of :py:class:`AddressFamily <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.AddressFamilies.AddressFamily>`
    """

    # YANG module prefix/revision recorded by the ydk-py code generator.
    _prefix = 'bgp-ios-xe-oper'
    _revision = '2017-02-07'

    def __init__(self):
        self.parent = None
        # YList keeps parent/name wiring so the generated runtime can build
        # keyed child paths for each entry.
        self.address_family = YList()
        self.address_family.parent = self
        self.address_family.name = 'address_family'

    class AddressFamily(object):
        """
        .. attribute:: afi_safi  <key>

            **type**\: :py:class:`BgpAfiSafiEnum <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpAfiSafiEnum>`

        .. attribute:: vrf_name  <key>

            **type**\: str

        .. attribute:: activities

            BGP activity information
            **type**\: :py:class:`Activities <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.AddressFamilies.AddressFamily.Activities>`

        .. attribute:: as_path

            **type**\: :py:class:`AsPath <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.AddressFamilies.AddressFamily.AsPath>`

        .. attribute:: bgp_neighbor_summaries

            Summary of neighbor
            **type**\: :py:class:`BgpNeighborSummaries <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.AddressFamilies.AddressFamily.BgpNeighborSummaries>`

        .. attribute:: bgp_table_version

            BGP table version number
            **type**\: int
            **range:** 0..18446744073709551615

        .. attribute:: filter_list

            **type**\: :py:class:`FilterList <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.AddressFamilies.AddressFamily.FilterList>`

        .. attribute:: path

            **type**\: :py:class:`Path <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.AddressFamilies.AddressFamily.Path>`

        .. attribute:: prefixes

            **type**\: :py:class:`Prefixes <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.AddressFamilies.AddressFamily.Prefixes>`

        .. attribute:: route_map

            **type**\: :py:class:`RouteMap <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.AddressFamilies.AddressFamily.RouteMap>`

        .. attribute:: router_id

            **type**\: one of the below types:

            **type**\: str
            **pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?

            ----
            **type**\: str
            **pattern:** ((\:\|[0\-9a\-fA\-F]{0,4})\:)([0\-9a\-fA\-F]{0,4}\:){0,5}((([0\-9a\-fA\-F]{0,4}\:)?(\:\|[0\-9a\-fA\-F]{0,4}))\|(((25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])\\.){3}(25[0\-5]\|2[0\-4][0\-9]\|[01]?[0\-9]?[0\-9])))(%[\\p{N}\\p{L}]+)?

            ----

        .. attribute:: routing_table_version

            Routing table version number
            **type**\: int
            **range:** 0..18446744073709551615

        .. attribute:: total_memory

            **type**\: int
            **range:** 0..18446744073709551615
        """

        _prefix = 'bgp-ios-xe-oper'
        _revision = '2017-02-07'

        def __init__(self):
            self.parent = None
            # List keys.
            self.afi_safi = None
            self.vrf_name = None
            # Child containers, each wired back via .parent for path derivation.
            self.activities = BgpState.AddressFamilies.AddressFamily.Activities()
            self.activities.parent = self
            self.as_path = BgpState.AddressFamilies.AddressFamily.AsPath()
            self.as_path.parent = self
            self.bgp_neighbor_summaries = BgpState.AddressFamilies.AddressFamily.BgpNeighborSummaries()
            self.bgp_neighbor_summaries.parent = self
            self.bgp_table_version = None
            self.filter_list = BgpState.AddressFamilies.AddressFamily.FilterList()
            self.filter_list.parent = self
            self.path = BgpState.AddressFamilies.AddressFamily.Path()
            self.path.parent = self
            self.prefixes = BgpState.AddressFamilies.AddressFamily.Prefixes()
            self.prefixes.parent = self
            self.route_map = BgpState.AddressFamilies.AddressFamily.RouteMap()
            self.route_map.parent = self
            self.router_id = None
            self.routing_table_version = None
            self.total_memory = None

        class Prefixes(object):
            """
            .. attribute:: memory_usage

                total memory usage in byte
                **type**\: int
                **range:** 0..18446744073709551615

            .. attribute:: total_entries

                total prefix entries
                **type**\: int
                **range:** 0..18446744073709551615
            """

            _prefix = 'bgp-ios-xe-oper'
            _revision = '2017-02-07'

            def __init__(self):
                self.parent = None
                self.memory_usage = None
                self.total_entries = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:prefixes'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                if self.memory_usage is not None:
                    return True
                if self.total_entries is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
                return meta._meta_table['BgpState.AddressFamilies.AddressFamily.Prefixes']['meta_info']

        class Path(object):
            """
            .. attribute:: memory_usage

                total memory usage in byte
                **type**\: int
                **range:** 0..18446744073709551615

            .. attribute:: total_entries

                total prefix entries
                **type**\: int
                **range:** 0..18446744073709551615
            """

            _prefix = 'bgp-ios-xe-oper'
            _revision = '2017-02-07'

            def __init__(self):
                self.parent = None
                self.memory_usage = None
                self.total_entries = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:path'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                if self.memory_usage is not None:
                    return True
                if self.total_entries is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
                return meta._meta_table['BgpState.AddressFamilies.AddressFamily.Path']['meta_info']

        class AsPath(object):
            """
            .. attribute:: memory_usage

                total memory usage in byte
                **type**\: int
                **range:** 0..18446744073709551615

            .. attribute:: total_entries

                total prefix entries
                **type**\: int
                **range:** 0..18446744073709551615
            """

            _prefix = 'bgp-ios-xe-oper'
            _revision = '2017-02-07'

            def __init__(self):
                self.parent = None
                self.memory_usage = None
                self.total_entries = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:as-path'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                if self.memory_usage is not None:
                    return True
                if self.total_entries is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
                return meta._meta_table['BgpState.AddressFamilies.AddressFamily.AsPath']['meta_info']

        class RouteMap(object):
            """
            .. attribute:: memory_usage

                total memory usage in byte
                **type**\: int
                **range:** 0..18446744073709551615

            .. attribute:: total_entries

                total prefix entries
                **type**\: int
                **range:** 0..18446744073709551615
            """

            _prefix = 'bgp-ios-xe-oper'
            _revision = '2017-02-07'

            def __init__(self):
                self.parent = None
                self.memory_usage = None
                self.total_entries = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:route-map'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                if self.memory_usage is not None:
                    return True
                if self.total_entries is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
                return meta._meta_table['BgpState.AddressFamilies.AddressFamily.RouteMap']['meta_info']

        class FilterList(object):
            """
            .. attribute:: memory_usage

                total memory usage in byte
                **type**\: int
                **range:** 0..18446744073709551615

            .. attribute:: total_entries

                total prefix entries
                **type**\: int
                **range:** 0..18446744073709551615
            """

            _prefix = 'bgp-ios-xe-oper'
            _revision = '2017-02-07'

            def __init__(self):
                self.parent = None
                self.memory_usage = None
                self.total_entries = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:filter-list'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                if self.memory_usage is not None:
                    return True
                if self.total_entries is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
                return meta._meta_table['BgpState.AddressFamilies.AddressFamily.FilterList']['meta_info']

        class Activities(object):
            """
            BGP activity information

            .. attribute:: paths

                **type**\: int
                **range:** 0..18446744073709551615

            .. attribute:: prefixes

                **type**\: int
                **range:** 0..18446744073709551615

            .. attribute:: scan_interval

                scan interval in second
                **type**\: str
            """

            _prefix = 'bgp-ios-xe-oper'
            _revision = '2017-02-07'

            def __init__(self):
                self.parent = None
                self.paths = None
                self.prefixes = None
                self.scan_interval = None

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:activities'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                if self.paths is not None:
                    return True
                if self.prefixes is not None:
                    return True
                if self.scan_interval is not None:
                    return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
                return meta._meta_table['BgpState.AddressFamilies.AddressFamily.Activities']['meta_info']

        class BgpNeighborSummaries(object):
            """
            Summary of neighbor

            .. attribute:: bgp_neighbor_summary

                **type**\: list of :py:class:`BgpNeighborSummary <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpState.AddressFamilies.AddressFamily.BgpNeighborSummaries.BgpNeighborSummary>`
            """

            _prefix = 'bgp-ios-xe-oper'
            _revision = '2017-02-07'

            def __init__(self):
                self.parent = None
                self.bgp_neighbor_summary = YList()
                self.bgp_neighbor_summary.parent = self
                self.bgp_neighbor_summary.name = 'bgp_neighbor_summary'

            class BgpNeighborSummary(object):
                """
                .. attribute:: id  <key>

                    **type**\: str

                .. attribute:: bgp_version

                    **type**\: int
                    **range:** 0..4294967295

                .. attribute:: input_queue

                    **type**\: int
                    **range:** 0..18446744073709551615

                .. attribute:: messages_received

                    **type**\: int
                    **range:** 0..18446744073709551615

                .. attribute:: messages_sent

                    **type**\: int
                    **range:** 0..18446744073709551615

                .. attribute:: output_queue

                    **type**\: int
                    **range:** 0..18446744073709551615

                .. attribute:: prefixes_received

                    **type**\: int
                    **range:** 0..18446744073709551615

                .. attribute:: state

                    **type**\: :py:class:`BgpFsmStateEnum <ydk.models.cisco_ios_xe.Cisco_IOS_XE_bgp_oper.BgpFsmStateEnum>`

                .. attribute:: table_version

                    **type**\: int
                    **range:** 0..18446744073709551615

                .. attribute:: up_time

                    **type**\: str
                """

                _prefix = 'bgp-ios-xe-oper'
                _revision = '2017-02-07'

                def __init__(self):
                    self.parent = None
                    self.id = None
                    self.bgp_version = None
                    self.input_queue = None
                    self.messages_received = None
                    self.messages_sent = None
                    self.output_queue = None
                    self.prefixes_received = None
                    self.state = None
                    self.table_version = None
                    self.up_time = None

                @property
                def _common_path(self):
                    # Keyed list entry: both the parent path and the 'id' key are required.
                    if self.parent is None:
                        raise YPYModelError('parent is not set . Cannot derive path.')
                    if self.id is None:
                        raise YPYModelError('Key property id is None')
                    return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:bgp-neighbor-summary[Cisco-IOS-XE-bgp-oper:id = ' + str(self.id) + ']'

                def is_config(self):
                    ''' Returns True if this instance represents config data else returns False '''
                    return False

                def _has_data(self):
                    if self.id is not None:
                        return True
                    if self.bgp_version is not None:
                        return True
                    if self.input_queue is not None:
                        return True
                    if self.messages_received is not None:
                        return True
                    if self.messages_sent is not None:
                        return True
                    if self.output_queue is not None:
                        return True
                    if self.prefixes_received is not None:
                        return True
                    if self.state is not None:
                        return True
                    if self.table_version is not None:
                        return True
                    if self.up_time is not None:
                        return True
                    return False

                @staticmethod
                def _meta_info():
                    from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
                    return meta._meta_table['BgpState.AddressFamilies.AddressFamily.BgpNeighborSummaries.BgpNeighborSummary']['meta_info']

            @property
            def _common_path(self):
                if self.parent is None:
                    raise YPYModelError('parent is not set . Cannot derive path.')
                return self.parent._common_path +'/Cisco-IOS-XE-bgp-oper:bgp-neighbor-summaries'

            def is_config(self):
                ''' Returns True if this instance represents config data else returns False '''
                return False

            def _has_data(self):
                if self.bgp_neighbor_summary is not None:
                    for child_ref in self.bgp_neighbor_summary:
                        if child_ref._has_data():
                            return True
                return False

            @staticmethod
            def _meta_info():
                from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
                return meta._meta_table['BgpState.AddressFamilies.AddressFamily.BgpNeighborSummaries']['meta_info']

        @property
        def _common_path(self):
            # Keyed list entry: both YANG keys must be set before a path can exist.
            if self.afi_safi is None:
                raise YPYModelError('Key property afi_safi is None')
            if self.vrf_name is None:
                raise YPYModelError('Key property vrf_name is None')
            return '/Cisco-IOS-XE-bgp-oper:bgp-state/Cisco-IOS-XE-bgp-oper:address-families/Cisco-IOS-XE-bgp-oper:address-family[Cisco-IOS-XE-bgp-oper:afi-safi = ' + str(self.afi_safi) + '][Cisco-IOS-XE-bgp-oper:vrf-name = ' + str(self.vrf_name) + ']'

        def is_config(self):
            ''' Returns True if this instance represents config data else returns False '''
            return False

        def _has_data(self):
            # True if any key, leaf, or child container is populated.
            if self.afi_safi is not None:
                return True
            if self.vrf_name is not None:
                return True
            if self.activities is not None and self.activities._has_data():
                return True
            if self.as_path is not None and self.as_path._has_data():
                return True
            if self.bgp_neighbor_summaries is not None and self.bgp_neighbor_summaries._has_data():
                return True
            if self.bgp_table_version is not None:
                return True
            if self.filter_list is not None and self.filter_list._has_data():
                return True
            if self.path is not None and self.path._has_data():
                return True
            if self.prefixes is not None and self.prefixes._has_data():
                return True
            if self.route_map is not None and self.route_map._has_data():
                return True
            if self.router_id is not None:
                return True
            if self.routing_table_version is not None:
                return True
            if self.total_memory is not None:
                return True
            return False

        @staticmethod
        def _meta_info():
            from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
            return meta._meta_table['BgpState.AddressFamilies.AddressFamily']['meta_info']

    @property
    def _common_path(self):
        # Top-level container: the path is a fixed absolute XPath.
        return '/Cisco-IOS-XE-bgp-oper:bgp-state/Cisco-IOS-XE-bgp-oper:address-families'

    def is_config(self):
        ''' Returns True if this instance represents config data else returns False '''
        return False

    def _has_data(self):
        # True if any address-family list entry carries data.
        if self.address_family is not None:
            for child_ref in self.address_family:
                if child_ref._has_data():
                    return True
        return False

    @staticmethod
    def _meta_info():
        from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
        return meta._meta_table['BgpState.AddressFamilies']['meta_info']
@property
def _common_path(self):
    # Root container of the model: fixed absolute XPath.
    return '/Cisco-IOS-XE-bgp-oper:bgp-state'
def is_config(self):
    ''' Returns True if this instance represents config data else returns False '''
    # Operational-state node: always read-only.
    return False
def _has_data(self):
    # True if either top-level child container carries data.
    if self.address_families is not None and self.address_families._has_data():
        return True
    if self.neighbors is not None and self.neighbors._has_data():
        return True
    return False
@staticmethod
def _meta_info():
    # Import deferred to call time; looks up this node's generated metadata.
    from ydk.models.cisco_ios_xe._meta import _Cisco_IOS_XE_bgp_oper as meta
    return meta._meta_table['BgpState']['meta_info']
| apache-2.0 |
zeit/now-cli | packages/now-go/test/fixtures/10-go-mod/index.go | 163 | package handler
import (
"fmt"
"net/http"
)
// Handler func
func Handler(w http.ResponseWriter, r *http.Request) {
fmt.Fprintf(w, "RANDOMNESS_PLACEHOLDER")
}
| apache-2.0 |
torrances/swtk-commons | commons-dict-wordnet-indexbyname/src/main/java/org/swtk/commons/dict/wordnet/indexbyname/instance/m/a/s/WordnetNounIndexNameInstanceMAS.java | 20915 | package org.swtk.commons.dict.wordnet.indexbyname.instance.m.a.s; import java.util.ArrayList; import java.util.Collection; import java.util.Map; import java.util.TreeMap; import org.swtk.common.dict.dto.wordnet.IndexNoun; import com.trimc.blogger.commons.utils.GsonUtils; public final class WordnetNounIndexNameInstanceMAS { private static Map<String, Collection<IndexNoun>> map = new TreeMap<String, Collection<IndexNoun>>(); static { add("{\"term\":\"masa\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06998760\"]}");
add("{\"term\":\"masai\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07011120\"]}");
add("{\"term\":\"mascara\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03729557\"]}");
add("{\"term\":\"mascarene grass\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12167340\"]}");
add("{\"term\":\"mascarpone\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07867530\"]}");
add("{\"term\":\"mascot\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10316895\"]}");
add("{\"term\":\"masculine\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06340158\"]}");
add("{\"term\":\"masculinisation\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13532958\"]}");
add("{\"term\":\"masculinity\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"04673972\", \"05015411\"]}");
add("{\"term\":\"masculinization\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13532958\"]}");
add("{\"term\":\"masdevallia\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12093223\"]}");
add("{\"term\":\"masefield\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11182783\"]}");
add("{\"term\":\"maser\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03729667\"]}");
add("{\"term\":\"maseru\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08978878\"]}");
add("{\"term\":\"mash\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07821230\", \"14972783\"]}");
add("{\"term\":\"mash bean\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07741018\", \"12599160\"]}");
add("{\"term\":\"mashed potato\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07727314\"]}");
add("{\"term\":\"masher\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"03729908\", \"10806873\"]}");
add("{\"term\":\"mashhad\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08931641\"]}");
add("{\"term\":\"mashi\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07006817\"]}");
add("{\"term\":\"mashie\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03730029\"]}");
add("{\"term\":\"mashie niblick\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03730114\"]}");
add("{\"term\":\"mashriq\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08809830\"]}");
add("{\"term\":\"masjid\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03730247\"]}");
add("{\"term\":\"mask\", \"synsetCount\":4, \"upperType\":\"NOUN\", \"ids\":[\"03730526\", \"08270371\", \"01051399\", \"03730361\"]}");
add("{\"term\":\"mask of pregnancy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14326172\"]}");
add("{\"term\":\"masked ball\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07463757\"]}");
add("{\"term\":\"masked shrew\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01894786\"]}");
add("{\"term\":\"masker\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10317508\"]}");
add("{\"term\":\"masking\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"03730689\", \"05721294\", \"01051609\"]}");
add("{\"term\":\"masking paper\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03730846\"]}");
add("{\"term\":\"masking piece\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03730689\"]}");
add("{\"term\":\"masking tape\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03730846\"]}");
add("{\"term\":\"masochism\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07509315\"]}");
add("{\"term\":\"masochist\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10317034\"]}");
add("{\"term\":\"mason\", \"synsetCount\":5, \"upperType\":\"NOUN\", \"ids\":[\"10130913\", \"10317198\", \"11182905\", \"11183031\", \"11183150\"]}");
add("{\"term\":\"mason and dixon line\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08532729\"]}");
add("{\"term\":\"mason bee\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02214279\"]}");
add("{\"term\":\"mason city\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"09109798\"]}");
add("{\"term\":\"mason jar\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03731091\"]}");
add("{\"term\":\"mason wasp\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"02216993\", \"02218273\"]}");
add("{\"term\":\"masonite\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03730997\"]}");
add("{\"term\":\"masonry\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"00610164\", \"08252434\", \"03731208\"]}");
add("{\"term\":\"masora\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06387247\"]}");
add("{\"term\":\"masorah\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06387247\"]}");
add("{\"term\":\"masorete\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10317322\"]}");
add("{\"term\":\"masorite\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10317322\"]}");
add("{\"term\":\"masoud\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11183332\"]}");
add("{\"term\":\"masqat\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08997805\"]}");
add("{\"term\":\"masque\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08270371\"]}");
add("{\"term\":\"masquer\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10317508\"]}");
add("{\"term\":\"masquerade\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"00757537\", \"03325464\", \"08270371\"]}");
add("{\"term\":\"masquerade ball\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07463757\"]}");
add("{\"term\":\"masquerade costume\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03325464\"]}");
add("{\"term\":\"masquerade party\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08270371\"]}");
add("{\"term\":\"masquerader\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10317508\"]}");
add("{\"term\":\"mass\", \"synsetCount\":9, \"upperType\":\"NOUN\", \"ids\":[\"07047100\", \"07047240\", \"05106651\", \"08197108\", \"09370952\", \"01044274\", \"07978738\", \"13796604\", \"05031420\"]}");
add("{\"term\":\"mass action\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06000900\"]}");
add("{\"term\":\"mass card\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06640957\"]}");
add("{\"term\":\"mass culture\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05760176\"]}");
add("{\"term\":\"mass defect\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11502224\"]}");
add("{\"term\":\"mass deficiency\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11502224\"]}");
add("{\"term\":\"mass energy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05033337\"]}");
add("{\"term\":\"mass hysteria\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14415548\"]}");
add("{\"term\":\"mass medium\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06272239\"]}");
add("{\"term\":\"mass meeting\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08375259\"]}");
add("{\"term\":\"mass murder\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00224850\"]}");
add("{\"term\":\"mass murderer\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10318010\"]}");
add("{\"term\":\"mass noun\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06330703\"]}");
add("{\"term\":\"mass number\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13737719\"]}");
add("{\"term\":\"mass production\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00916013\"]}");
add("{\"term\":\"mass rapid transit\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"04061325\"]}");
add("{\"term\":\"mass spectrograph\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03731862\"]}");
add("{\"term\":\"mass spectrometer\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03732007\"]}");
add("{\"term\":\"mass spectroscopy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00651986\"]}");
add("{\"term\":\"mass spectrum\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11521097\"]}");
add("{\"term\":\"mass unit\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13630457\"]}");
add("{\"term\":\"massachuset\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"09680078\", \"06923415\"]}");
add("{\"term\":\"massachusetts\", \"synsetCount\":4, \"upperType\":\"NOUN\", \"ids\":[\"06923415\", \"09118198\", \"09680078\", \"09117607\"]}");
add("{\"term\":\"massachusetts bay\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"09371133\"]}");
add("{\"term\":\"massachusetts bay colony\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"09118198\"]}");
add("{\"term\":\"massachusetts fern\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13251088\"]}");
add("{\"term\":\"massachusetts institute of technology\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03731459\"]}");
add("{\"term\":\"massacre\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00224850\"]}");
add("{\"term\":\"massage\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00660291\"]}");
add("{\"term\":\"massage parlor\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"03731607\", \"03731724\"]}");
add("{\"term\":\"massager\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10317650\"]}");
add("{\"term\":\"massasauga\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"01760318\", \"01760542\"]}");
add("{\"term\":\"massasauga rattler\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01760318\"]}");
add("{\"term\":\"massasoit\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11183572\"]}");
add("{\"term\":\"massawa\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08795771\"]}");
add("{\"term\":\"masse\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00501680\"]}");
add("{\"term\":\"masse shot\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00501680\"]}");
add("{\"term\":\"massed fire\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00991669\"]}");
add("{\"term\":\"massenet\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11183699\"]}");
add("{\"term\":\"masses\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08197108\"]}");
add("{\"term\":\"masseter\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05301755\"]}");
add("{\"term\":\"masseur\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10317869\"]}");
add("{\"term\":\"masseuse\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10317938\"]}");
add("{\"term\":\"massicot\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14957893\"]}");
add("{\"term\":\"massicotite\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14957893\"]}");
add("{\"term\":\"massif\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"09371357\"]}");
add("{\"term\":\"massif central\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"09371540\"]}");
add("{\"term\":\"massine\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11183849\"]}");
add("{\"term\":\"massiveness\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"05035003\", \"05111721\"]}");
add("{\"term\":\"massorete\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10317322\"]}");
add("{\"term\":\"mast\", \"synsetCount\":4, \"upperType\":\"NOUN\", \"ids\":[\"03732484\", \"07821848\", \"12300881\", \"03732251\"]}");
add("{\"term\":\"mast cell\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05615357\"]}");
add("{\"term\":\"mastaba\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03732558\"]}");
add("{\"term\":\"mastabah\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03732558\"]}");
add("{\"term\":\"mastalgia\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14352125\"]}");
add("{\"term\":\"mastectomy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00686761\"]}");
add("{\"term\":\"master\", \"synsetCount\":10, \"upperType\":\"NOUN\", \"ids\":[\"03902681\", \"10318149\", \"10318465\", \"10318579\", \"03732765\", \"10183826\", \"10318314\", \"10772598\", \"10408139\", \"10299827\"]}");
add("{\"term\":\"master bedroom\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03732956\"]}");
add("{\"term\":\"master class\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"08255945\"]}");
add("{\"term\":\"master copy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03732765\"]}");
add("{\"term\":\"master cylinder\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02893237\"]}");
add("{\"term\":\"master file\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06521893\"]}");
add("{\"term\":\"master in business\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06714122\"]}");
add("{\"term\":\"master in business administration\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06714122\"]}");
add("{\"term\":\"master in public affairs\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06714783\"]}");
add("{\"term\":\"master key\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03902681\"]}");
add("{\"term\":\"master of architecture\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06713607\"]}");
add("{\"term\":\"master of arts\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06713764\"]}");
add("{\"term\":\"master of arts in library science\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06713891\"]}");
add("{\"term\":\"master of arts in teaching\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06714014\"]}");
add("{\"term\":\"master of ceremonies\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10318917\"]}");
add("{\"term\":\"master of divinity\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06714258\"]}");
add("{\"term\":\"master of education\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06714359\"]}");
add("{\"term\":\"master of fine arts\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06714461\"]}");
add("{\"term\":\"master of laws\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06717753\"]}");
add("{\"term\":\"master of library science\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06714669\"]}");
add("{\"term\":\"master of literature\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06714563\"]}");
add("{\"term\":\"master of science\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06714902\"]}");
add("{\"term\":\"master of science in engineering\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06715010\"]}");
add("{\"term\":\"master of theology\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06715121\"]}");
add("{\"term\":\"master plan\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05908049\"]}");
add("{\"term\":\"master race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07984842\"]}");
add("{\"term\":\"master sergeant\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10319250\"]}");
add("{\"term\":\"mastering\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"00912554\", \"05765231\"]}");
add("{\"term\":\"mastermind\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"10146463\", \"10403515\"]}");
add("{\"term\":\"masterpiece\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"00037727\", \"03733096\"]}");
add("{\"term\":\"masters\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11184048\"]}");
add("{\"term\":\"mastership\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"00596261\", \"05646545\"]}");
add("{\"term\":\"masterstroke\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00037811\"]}");
add("{\"term\":\"masterwort\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12954796\"]}");
add("{\"term\":\"mastery\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"01130976\", \"14465996\", \"05649804\"]}");
add("{\"term\":\"masthead\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"03733229\", \"06356216\", \"06505149\"]}");
add("{\"term\":\"mastic\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"12782809\", \"14729012\", \"14920472\"]}");
add("{\"term\":\"mastic tree\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12782809\"]}");
add("{\"term\":\"mastication\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00279654\"]}");
add("{\"term\":\"masticophis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01734059\"]}");
add("{\"term\":\"masticophis bilineatus\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01734734\"]}");
add("{\"term\":\"masticophis flagellum\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01734405\"]}");
add("{\"term\":\"masticophis lateralis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01734582\"]}");
add("{\"term\":\"mastiff\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02110906\"]}");
add("{\"term\":\"mastiff bat\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02152786\"]}");
add("{\"term\":\"mastigomycota\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12997745\"]}");
add("{\"term\":\"mastigomycotina\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12997745\"]}");
add("{\"term\":\"mastigophora\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01418995\"]}");
add("{\"term\":\"mastigophoran\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01419226\"]}");
add("{\"term\":\"mastigophore\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01419226\"]}");
add("{\"term\":\"mastigoproctus\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01774265\"]}");
add("{\"term\":\"mastigoproctus giganteus\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01774407\"]}");
add("{\"term\":\"mastitis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14373122\"]}");
add("{\"term\":\"mastocyte\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05615357\"]}");
add("{\"term\":\"mastodon\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02508629\"]}");
add("{\"term\":\"mastodont\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02508629\"]}");
add("{\"term\":\"mastoid\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05549826\"]}");
add("{\"term\":\"mastoid bone\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05549826\"]}");
add("{\"term\":\"mastoid process\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05549826\"]}");
add("{\"term\":\"mastoidal\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05549826\"]}");
add("{\"term\":\"mastoidale\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05241205\"]}");
add("{\"term\":\"mastoidectomy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00687421\"]}");
add("{\"term\":\"mastoiditis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14373212\"]}");
add("{\"term\":\"mastopathy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14231824\"]}");
add("{\"term\":\"mastopexy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00683689\"]}");
add("{\"term\":\"mastotermes\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02227118\"]}");
add("{\"term\":\"mastotermes darwiniensis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02227365\"]}");
add("{\"term\":\"mastotermes electrodominicus\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02227733\"]}");
add("{\"term\":\"mastotermes electromexicus\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02227589\"]}");
add("{\"term\":\"mastotermitidae\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02226975\"]}");
add("{\"term\":\"masturbation\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00857303\"]}");
add("{\"term\":\"masturbator\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10319367\"]}");
} private static void add(final String JSON) { IndexNoun indexNoun = GsonUtils.toObject(JSON, IndexNoun.class); Collection<IndexNoun> list = (map.containsKey(indexNoun.getTerm())) ? map.get(indexNoun.getTerm()) : new ArrayList<IndexNoun>(); list.add(indexNoun); map.put(indexNoun.getTerm(), list); } public static Collection<IndexNoun> get(final String TERM) { return map.get(TERM); } public static boolean has(final String TERM) { return map.containsKey(TERM); } public static Collection<String> terms() { return map.keySet(); } } | apache-2.0 |
davido/buck | src/com/facebook/buck/cxx/Archive.java | 8130 | /*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.rules.AbstractBuildRule;
import com.facebook.buck.rules.AddToRuleKey;
import com.facebook.buck.rules.BuildContext;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildableContext;
import com.facebook.buck.rules.ExplicitBuildTargetSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.SourcePathRuleFinder;
import com.facebook.buck.rules.Tool;
import com.facebook.buck.rules.args.Arg;
import com.facebook.buck.rules.args.SourcePathArg;
import com.facebook.buck.rules.keys.SupportsInputBasedRuleKey;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.fs.FileScrubberStep;
import com.facebook.buck.step.fs.MkdirStep;
import com.facebook.buck.step.fs.RmStep;
import com.facebook.buck.util.MoreCollectors;
import com.google.common.base.Preconditions;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import java.nio.file.Path;
/**
* A {@link com.facebook.buck.rules.BuildRule} which builds an "ar" archive from input files
* represented as {@link com.facebook.buck.rules.SourcePath}.
*/
public class Archive extends AbstractBuildRule implements SupportsInputBasedRuleKey {

  @AddToRuleKey
  private final Archiver archiver;
  /** Platform-level flags passed to the archiver ahead of its generated options. */
  @AddToRuleKey
  private final ImmutableList<String> archiverFlags;
  @AddToRuleKey
  private final Tool ranlib;
  /** Platform-level flags passed to ranlib when an index step is required. */
  @AddToRuleKey
  private final ImmutableList<String> ranlibFlags;
  /** Whether the archive embeds copies of its inputs or only relative paths to them. */
  @AddToRuleKey
  private final Contents contents;
  @AddToRuleKey(stringify = true)
  private final Path output;
  @AddToRuleKey
  private final ImmutableList<SourcePath> inputs;

  private Archive(
      BuildRuleParams params,
      Archiver archiver,
      ImmutableList<String> archiverFlags,
      Tool ranlib,
      ImmutableList<String> ranlibFlags,
      Contents contents,
      Path output,
      ImmutableList<SourcePath> inputs) {
    super(params);
    // Thin archives only embed relative paths to their members, so the platform's
    // archiver must explicitly support them.
    Preconditions.checkState(
        contents == Contents.NORMAL || archiver.supportsThinArchives(),
        "%s: archive tool for this platform does not support thin archives",
        getBuildTarget());
    // Linker map mode flavors belong on link rules, never on static archives.
    Preconditions.checkArgument(
        !LinkerMapMode.FLAVOR_DOMAIN.containsAnyOf(params.getBuildTarget().getFlavors()),
        "Static archive rule %s should not have any Linker Map Mode flavors", this);
    this.archiver = archiver;
    this.archiverFlags = archiverFlags;
    this.ranlib = ranlib;
    this.ranlibFlags = ranlibFlags;
    this.contents = contents;
    this.output = output;
    this.inputs = inputs;
  }

  /**
   * Convenience overload that pulls the archiver, ranlib, and their default flags
   * from the given {@link CxxPlatform}.
   */
  public static Archive from(
      BuildTarget target,
      BuildRuleParams baseParams,
      SourcePathRuleFinder ruleFinder,
      CxxPlatform platform,
      Contents contents,
      Path output,
      ImmutableList<SourcePath> inputs) {
    return from(
        target,
        baseParams,
        ruleFinder,
        platform.getAr(),
        platform.getArflags(),
        platform.getRanlib(),
        platform.getRanlibflags(),
        contents,
        output,
        inputs);
  }

  /**
   * Construct an {@link com.facebook.buck.cxx.Archive} from a
   * {@link com.facebook.buck.rules.BuildRuleParams} object representing a target
   * node. In particular, make sure to trim dependencies to *only* those that
   * provide the input {@link com.facebook.buck.rules.SourcePath}.
   */
  public static Archive from(
      BuildTarget target,
      BuildRuleParams baseParams,
      SourcePathRuleFinder ruleFinder,
      Archiver archiver,
      ImmutableList<String> arFlags,
      Tool ranlib,
      ImmutableList<String> ranlibFlags,
      Contents contents,
      Path output,
      ImmutableList<SourcePath> inputs) {
    // Convert the input build params into ones specialized for this archive build rule.
    // In particular, we only depend on BuildRules directly from the input file
    // SourcePaths and on whatever the archiver tool itself requires.
    BuildRuleParams archiveParams =
        baseParams.copyWithChanges(
            target,
            Suppliers.ofInstance(ImmutableSortedSet.of()),
            Suppliers.ofInstance(
                ImmutableSortedSet.<BuildRule>naturalOrder()
                    .addAll(ruleFinder.filterBuildRuleInputs(inputs))
                    .addAll(archiver.getDeps(ruleFinder))
                    .build()));

    return new Archive(
        archiveParams,
        archiver,
        arFlags,
        ranlib,
        ranlibFlags,
        contents,
        output,
        inputs);
  }

  @Override
  public ImmutableList<Step> getBuildSteps(
      BuildContext context,
      BuildableContext buildableContext) {

    // Cache the archive we built.
    buildableContext.recordArtifact(output);

    SourcePathResolver resolver = context.getSourcePathResolver();

    // We only support packaging inputs that use the same filesystem root as the output, as thin
    // archives embed relative paths from output to input inside the archive. If this becomes a
    // limitation, we could make this rule uncacheable and allow thin archives to embed absolute
    // paths.
    for (SourcePath input : inputs) {
      Preconditions.checkState(
          resolver.getFilesystem(input).getRootPath()
              .equals(getProjectFilesystem().getRootPath()));
    }

    ImmutableList.Builder<Step> builder = ImmutableList.builder();
    builder.add(
        new MkdirStep(getProjectFilesystem(), output.getParent()),
        new RmStep(getProjectFilesystem(), output),
        new ArchiveStep(
            getProjectFilesystem(),
            archiver.getEnvironment(resolver),
            archiver.getCommandPrefix(resolver),
            archiverFlags,
            archiver.getArchiveOptions(contents == Contents.THIN),
            output,
            inputs.stream()
                .map(resolver::getRelativePath)
                .collect(MoreCollectors.toImmutableList()),
            archiver));

    // The archiver tool tells us whether a separate ranlib pass is needed.
    if (archiver.isRanLibStepRequired()) {
      builder.add(
          new RanlibStep(
              getProjectFilesystem(),
              ranlib.getEnvironment(resolver),
              ranlib.getCommandPrefix(resolver),
              ranlibFlags,
              output));
    }

    // Apply any archiver-specific file scrubbers to the finished output.
    if (!archiver.getScrubbers().isEmpty()) {
      builder.add(new FileScrubberStep(getProjectFilesystem(), output, archiver.getScrubbers()));
    }

    return builder.build();
  }

  /**
   * @return the {@link Arg} to use when using this archive. When thin archives are used, this will
   *     ensure that the inputs are also propagated as build time deps to whatever rule uses this
   *     archive.
   */
  public Arg toArg() {
    SourcePath archive = getSourcePathToOutput();
    return contents == Contents.NORMAL ?
        SourcePathArg.of(archive) :
        ThinArchiveArg.of(archive, inputs);
  }

  @Override
  public SourcePath getSourcePathToOutput() {
    return new ExplicitBuildTargetSourcePath(getBuildTarget(), output);
  }

  /** @return how this archive packages its contents. */
  public Contents getContents() {
    return contents;
  }

  /**
   * How this archive packages its contents.
   */
  public enum Contents {

    /**
     * This archive packages a copy of its inputs and can be used independently of its inputs.
     */
    NORMAL,

    /**
     * This archive only packages the relative paths to its inputs and so can only be used when its
     * inputs are available.
     */
    THIN,

  }

}
| apache-2.0 |
juangon/wildfly-swarm | testsuite/testsuite-microprofile-jwt/src/test/java/org/wildfly/swarm/microprofile/jwtauth/StaticKeyWithMPConfigTest.java | 3040 | package org.wildfly.swarm.microprofile.jwtauth;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.asset.ClassLoaderAsset;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.wildfly.swarm.Swarm;
import org.wildfly.swarm.arquillian.CreateSwarm;
import org.wildfly.swarm.jaxrs.JAXRSArchive;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
@RunWith(Arquillian.class)
public class StaticKeyWithMPConfigTest {

    /** Issuer the deployment's MP-JWT configuration expects in generated tokens. */
    private static final String ISSUER = "http://testsuite-jwt-issuer.io";

    /** Protected endpoint that echoes back the JWT it was successfully invoked with. */
    private static final String TOKEN_ENDPOINT = "http://localhost:8080/testsuite/mpjwt/token";

    @Deployment(testable = false)
    public static JAXRSArchive createDeployment() {
        return ShrinkWrap.create(JAXRSArchive.class)
                .addClass(TestApplication.class)
                .addClass(TokenResource.class)
                .addClass(KeyTool.class)
                .addClass(JwtTool.class)
                .addAsResource("project-empty-roles-static-mpconfig.yml", "project-defaults.yml")
                .addAsResource("emptyRoles.properties")
                .addAsResource(new ClassLoaderAsset("keys/pkcs8_bad_key.pem"), "pkcs8_bad_key.pem")
                .addAsResource(new ClassLoaderAsset("keys/pkcs8_good_key.pem"), "pkcs8_good_key.pem")
                .setContextRoot("/testsuite");
    }

    @Test
    @RunAsClient
    public void testThatStaticKeyIsVerified() throws Exception {
        // A token signed with the key the deployment is configured with must be
        // accepted, and the resource echoes the token back.
        final String jwt = signJwt("/keys/pkcs8_good_key.pem");
        assertEquals(jwt, callTokenEndpoint(jwt));
    }

    @Test
    @RunAsClient
    public void testThatStaticKeyIsFake() throws Exception {
        final String jwt = signJwt("/keys/pkcs8_bad_key.pem");
        // only if no body is returned, we know that the JWT was refused.
        assertNull(callTokenEndpoint(jwt));
    }

    /** Generates a signed JWT using the private key at the given classpath resource. */
    private String signJwt(final String keyResource) throws Exception {
        final KeyTool keyTool = KeyTool.newKeyTool(getClass().getResource(keyResource).toURI());
        return new JwtTool(keyTool, ISSUER).generateSignedJwt();
    }

    /**
     * Calls the token endpoint with the given JWT as a bearer token and returns the
     * first line of the response body, or {@code null} when the body is empty.
     */
    private static String callTokenEndpoint(final String jwt) throws Exception {
        final URL url = new URL(TOKEN_ENDPOINT);
        final URLConnection urlConnection = url.openConnection();
        urlConnection.addRequestProperty("Authorization", "Bearer " + jwt);
        try (InputStreamReader isr = new InputStreamReader(urlConnection.getInputStream());
             BufferedReader br = new BufferedReader(isr)) {
            return br.readLine();
        }
    }
}
| apache-2.0 |
MyersResearchGroup/iBioSim | analysis/src/main/java/edu/utah/ece/async/ibiosim/analysis/simulation/hierarchical/math/EventNode.java | 7628 | /*******************************************************************************
*
* This file is part of iBioSim. Please visit <http://www.async.ece.utah.edu/ibiosim>
* for the latest version of iBioSim.
*
* Copyright (C) 2017 University of Utah
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the Apache License. A copy of the license agreement is provided
* in the file named "LICENSE.txt" included with this software distribution
* and also available online at <http://www.async.ece.utah.edu/ibiosim/License>.
*
*******************************************************************************/
package edu.utah.ece.async.ibiosim.analysis.simulation.hierarchical.math;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import edu.utah.ece.async.ibiosim.analysis.simulation.hierarchical.states.EventState;
import edu.utah.ece.async.ibiosim.analysis.simulation.hierarchical.util.comp.TriggeredEvent;
/**
* A node that represents Events.
*
* @author Leandro Watanabe
* @author Chris Myers
* @author <a href="http://www.async.ece.utah.edu/ibiosim#Credits"> iBioSim Contributors </a>
* @version %I%
*/
public class EventNode extends HierarchicalNode {

    // Expression giving the delay between triggering and firing; null means no delay element.
    private FunctionNode delay;
    // Expression ranking simultaneously firing events; null means default priority (0).
    private FunctionNode priority;
    // Assignments executed when the event fires; lazily created in addEventAssignment().
    private List<FunctionNode> eventAssignments;

    /**
     * Creates an event node. The trigger math is attached later as a child
     * (see {@link #setTrigger(HierarchicalNode)}); Type.PLUS is the node type
     * used by the parent class for this purpose.
     */
    public EventNode() {
        super(Type.PLUS);
    }

    /**
     * Adds a new event state for a given model.
     *
     * @param index
     *          - the model index.
     * @return the freshly created, attached event state.
     */
    public EventState addEventState(int index) {
        // 'state' is inherited from HierarchicalNode; each model index has its own child state.
        EventState eventState = new EventState();
        state.getChild(index).setEventState(eventState);
        return eventState;
    }

    /**
     * Gets the node corresponding to the delay.
     *
     * @return the delay, or null if the event has no delay.
     */
    public HierarchicalNode getDelay() {
        return delay;
    }

    /**
     * Sets the node corresponding to the delay.
     *
     * @param delay
     *          - the new value of the delay.
     */
    public void setDelay(FunctionNode delay) {
        this.delay = delay;
    }

    /** Attaches the trigger expression as a child of this node. */
    public void setTrigger(HierarchicalNode node) {
        this.addChild(node);
    }

    /**
     * Gets the max disabled time.
     *
     * @param index
     *          - the model index.
     *
     * @return the max disabled time.
     */
    public double getMaxDisabledTime(int index) {
        return getRootState(index).getEventState().getMaxDisabledTime();
    }

    /**
     * Sets the max disable time.
     *
     * @param index
     *          - the model index.
     * @param maxDisabledTime
     *          - the time that was last seen disabled.
     */
    public void setMaxDisabledTime(int index, double maxDisabledTime) {
        getRootState(index).getEventState().setMaxDisabledTime(maxDisabledTime);
    }

    /**
     * Gets the min enabled time.
     *
     * @param index
     *          - the model index.
     *
     * @return the min enabled time.
     */
    public double getMinEnabledTime(int index) {
        return getRootState(index).getEventState().getMinEnabledTime();
    }

    /**
     * Sets the min enabled time.
     *
     * @param index
     *          - the model index.
     * @param minEnabledTime
     *          - the time that was first seen enabled.
     */
    public void setMinEnabledTime(int index, double minEnabledTime) {
        getRootState(index).getEventState().setMinEnabledTime(minEnabledTime);
    }

    /**
     * Registers a triggered (pending) event as non-persistent so it can be
     * cancelled when the trigger later evaluates to false.
     *
     * @param index
     *          - the model index.
     * @param event
     *          - the triggered event to track.
     */
    public void addTriggeredEvent(int index, TriggeredEvent event) {
        getRootState(index).getEventState().addNonPersistentEvent(event);
    }

    // Cancels every pending non-persistent event: each is flagged as "flipped"
    // (trigger no longer holds) and removed from the tracking list.
    private void untriggerNonPersistent(int index) {
        LinkedList<TriggeredEvent> nonPersistent = getRootState(index).getEventState().getNonPersistentEvents();
        while (!nonPersistent.isEmpty()) {
            TriggeredEvent node = nonPersistent.removeFirst();
            node.setFlipped();
        }
    }

    /**
     * Adds an event assignment node.
     *
     * @param eventAssignmentNode
     *          - the event assignment that will be executed when the event is fired.
     *
     */
    public void addEventAssignment(FunctionNode eventAssignmentNode) {
        if (eventAssignments == null) {
            eventAssignments = new ArrayList<>();
        }
        eventAssignments.add(eventAssignmentNode);
    }

    /**
     * Evaluates the time when the event should be fired.
     *
     * @param index
     *          - the model index.
     * @return the fire time (0 when there is no delay or the delay is deleted).
     */
    public double evaluateFireTime(int index) {
        double fireTime = 0;
        if (delay != null && !delay.isDeleted(index)) {
            fireTime = Evaluator.evaluateExpressionRecursive(delay.getMath(), index);
        }
        return fireTime;
    }

    /**
     * Evaluates the priority value.
     *
     * @param index
     *          - the model index.
     *
     * @return the priority value (0 when no priority is set or it is deleted).
     */
    public double evaluatePriority(int index) {
        double priorityValue = 0;
        if (priority != null && !priority.isDeleted(index)) {
            priorityValue = Evaluator.evaluateExpressionRecursive(priority.getMath(), index);
        }
        return priorityValue;
    }

    /**
     * Evaluates the event assignments.
     *
     * @param index
     *          - the model index.
     * @return an array with the evaluated event assignment values, or null when
     *         the event has no assignments.
     */
    public double[] computeEventAssignmentValues(int index) {
        double[] assignmentValues = null;
        if (eventAssignments != null) {
            int size = 0, assignmentIndex = 0;
            // First pass: total number of value slots across all assignments.
            for (FunctionNode eventAssignment : eventAssignments) {
                size += eventAssignment.getSize();
            }
            assignmentValues = new double[size];
            for (FunctionNode eventAssignment : eventAssignments) {
                // NOTE(review): 'subNode' is never used in the loop body; the same
                // expression (eventAssignment.getMath()) is re-evaluated once per
                // sub-node, so every slot for this assignment receives the same
                // value. Confirm whether the evaluation should depend on subNode.
                for (HierarchicalNode subNode : eventAssignment) {
                    double value = Evaluator.evaluateExpressionRecursive(eventAssignment.getMath(), index);
                    assignmentValues[assignmentIndex++] = value;
                }
            }
        }
        return assignmentValues;
    }

    /**
     * Computes the trigger value.
     *
     * @param index
     *          - the model index.
     * @return the trigger value (any non-zero evaluation counts as true).
     */
    public boolean computeTrigger(int index) {
        double triggerResult = Evaluator.evaluateExpressionRecursive(this, index);
        return triggerResult != 0;
    }

    /**
     * Evaluates whether the event is triggered at a specified time.
     *
     * @param time
     *          - the simulation time.
     * @param index
     *          - the model index.
     * @return true if the event evaluates to true at the given time. False otherwise.
     */
    public boolean isTriggeredAtTime(double time, int index) {
        boolean trigger = computeTrigger(index);
        // Local 'state' shadows the inherited field on purpose: this is the
        // per-model event state, not the node's own state container.
        EventState state = getRootState(index).getEventState();
        if (trigger) {
            // Record the earliest time the trigger has been observed enabled
            // after having been disabled (maxDisabledTime >= 0 means "was disabled").
            if (state.getMaxDisabledTime() >= 0 && time >= state.getMaxDisabledTime() && time <= state.getMinEnabledTime()) {
                state.setMinEnabledTime(time);
            }
            // Triggered only if the event was previously disabled and has been
            // enabled since (edge-triggered semantics).
            return state.getMaxDisabledTime() >= 0 && state.getMinEnabledTime() <= time;
        } else {
            // Trigger is false: cancel pending non-persistent events and push
            // the "last seen disabled" watermark forward.
            untriggerNonPersistent(index);
            if (time > state.getMaxDisabledTime()) {
                state.setMaxDisabledTime(time);
            }
            return false;
        }
    }

    /**
     * Sets the event priority.
     *
     * @param priority
     *          - the event priority node.
     */
    public void setPriority(FunctionNode priority) {
        this.priority = priority;
    }

    /**
     * Gets the event assignment nodes.
     *
     * @return the list of event assignments (may be null if none were added).
     */
    public List<FunctionNode> getEventAssignments() {
        return eventAssignments;
    }

    /**
     * Resets the state of the event.
     *
     * @param index
     *          - the model index.
     */
    public void resetEvents(int index) {
        getRootState(index).getEventState().reset();
    }
}
| apache-2.0 |
sverhoeven/octopus-job-ws | src/test/java/nl/esciencecenter/octopus/webservice/api/JobStatusResponseTest.java | 7115 | package nl.esciencecenter.octopus.webservice.api;
/*
* #%L
* Octopus Job Webservice
* %%
* Copyright (C) 2013 Nederlands eScience Center
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static com.yammer.dropwizard.testing.JsonHelpers.jsonFixture;
import static org.fest.assertions.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.HashMap;
import java.util.Map;
import nl.esciencecenter.octopus.engine.jobs.JobStatusImplementation;
import nl.esciencecenter.octopus.jobs.Job;
import nl.esciencecenter.octopus.jobs.JobStatus;
import org.junit.Test;
public class JobStatusResponseTest {

    /**
     * Canonical "RUNNING" response used as the baseline fixture in most tests:
     * running, not done, no exit code, no exception, scheduler says EXECUTING.
     */
    private JobStatusResponse getRunningJobStatus() {
        return new JobStatusResponse("RUNNING", true, false, null, null, executingInfo());
    }

    /** Builds the scheduler-specific info map reported for an executing job. */
    private Map<String, String> executingInfo() {
        Map<String, String> info = new HashMap<String, String>();
        info.put("status", "EXECUTING");
        return info;
    }

    @Test
    public void construct_JobStatus() {
        // Wrapping an Octopus JobStatus copies its state flags verbatim.
        Job job = mock(Job.class);
        JobStatus jobstatus = new JobStatusImplementation(job, "PENDING", null, null, false, false, null);
        JobStatusResponse status = new JobStatusResponse(jobstatus);
        JobStatusResponse expected = new JobStatusResponse("PENDING", false, false, null, null, null);
        assertThat(status).isEqualTo(expected);
    }

    @Test
    public void construct_Null() {
        // A null JobStatus yields the INITIAL state with every field empty/false.
        JobStatusResponse status = new JobStatusResponse(null);
        assertThat(status.getState()).isEqualTo("INITIAL");
        assertThat(status.getException()).isNull();
        assertThat(status.getSchedulerSpecficInformation()).isNull();
        assertThat(status.getExitCode()).isNull();
        assertThat(status.isDone()).isFalse();
        assertThat(status.isRunning()).isFalse();
    }

    @Test
    public void test_hashCode() {
        // Pins the hash of the baseline fixture; changes to hashCode() must be deliberate.
        JobStatusResponse status = getRunningJobStatus();
        assertThat(status.hashCode()).isEqualTo(-481497476);
    }

    @Test
    public void testEqual_null_false() {
        JobStatusResponse status = getRunningJobStatus();
        assertThat(status.equals(null)).isFalse();
    }

    @Test
    public void testEqual_sameInstance_true() {
        JobStatusResponse status = getRunningJobStatus();
        assertThat(status.equals(status)).isTrue();
    }

    @Test
    public void testEqual_sameConstruct_true() {
        JobStatusResponse status = getRunningJobStatus();
        JobStatusResponse expected = getRunningJobStatus();
        assertThat(status.equals(expected)).isTrue();
    }

    @Test
    public void testEqual_diffClass_false() {
        // equals() must reject objects of an unrelated type.
        JobStatusResponse status = getRunningJobStatus();
        assertThat(status.equals(42)).isFalse();
    }

    @Test
    public void testEqual_otherState_false() {
        JobStatusResponse status = getRunningJobStatus();
        JobStatusResponse expected = new JobStatusResponse("EXECUTING", true, false, null, null, executingInfo());
        assertThat(status.equals(expected)).isFalse();
    }

    @Test
    public void testEqual_otherNotRunning_false() {
        JobStatusResponse status = getRunningJobStatus();
        JobStatusResponse expected = new JobStatusResponse("RUNNING", false, false, null, null, executingInfo());
        assertThat(status.equals(expected)).isFalse();
    }

    @Test
    public void testEqual_otherDone_false() {
        JobStatusResponse status = getRunningJobStatus();
        JobStatusResponse expected = new JobStatusResponse("RUNNING", true, true, null, null, executingInfo());
        assertThat(status.equals(expected)).isFalse();
    }

    @Test
    public void testEqual_otherExitCode_false() {
        JobStatusResponse status = getRunningJobStatus();
        JobStatusResponse expected = new JobStatusResponse("RUNNING", true, false, 0, null, executingInfo());
        assertThat(status.equals(expected)).isFalse();
    }

    @Test
    public void testEqual_otherException_false() {
        JobStatusResponse status = getRunningJobStatus();
        Exception exception = new Exception();
        JobStatusResponse expected = new JobStatusResponse("RUNNING", true, false, null, exception, executingInfo());
        assertThat(status.equals(expected)).isFalse();
    }

    @Test
    public void testEqual_otherSchedulerInfo_false() {
        JobStatusResponse status = getRunningJobStatus();
        // Same state flags, but a different scheduler-specific info map.
        Map<String, String> info = new HashMap<String, String>();
        info.put("status", "r");
        JobStatusResponse expected = new JobStatusResponse("RUNNING", true, false, null, null, info);
        assertThat(status.equals(expected)).isFalse();
    }

    @Test
    public void test_toString() {
        JobStatusResponse status = getRunningJobStatus();
        String expected = "JobStatusResponse{RUNNING, true, false, null, null, {status=EXECUTING}}";
        assertThat(status.toString()).isEqualTo(expected);
    }

    @Test
    public void toJson_Done() throws IOException {
        // A completed job serializes with exit code and STOPPED scheduler status.
        String state = "DONE";
        Boolean done = true;
        int exitCode = 0;
        Exception exception = null;
        Map<String, String> info = new HashMap<String, String>();
        info.put("status", "STOPPED");
        JobStatusResponse status = new JobStatusResponse(state, false, done, exitCode, exception, info);
        assertThat(status.toJson()).isEqualTo(jsonFixture("fixtures/status.done.json"));
    }

    @Test
    public void toJson_Null() throws IOException {
        JobStatusResponse status = new JobStatusResponse(null);
        assertThat(status.toJson()).isEqualTo(jsonFixture("fixtures/status.initial.json"));
    }

    @Test
    public void toJson_Exception() throws IOException {
        // The exception message must survive JSON serialization.
        Exception exception = new Exception("Process cancelled by user.");
        JobStatusResponse status = new JobStatusResponse("KILLED", false, true, null, exception, null);
        assertThat(status.toJson()).isEqualTo(jsonFixture("fixtures/status.exception.json"));
    }
}
| apache-2.0 |
treeleafj/treeleaf | treeleaf-db/src/main/java/org/treeleaf/db/model/Model.java | 295 | package org.treeleaf.db.model;
import org.treeleaf.common.json.Jsoner;
import java.io.Serializable;
/**
* @Author leaf
* 2015/1/8 0008 1:05.
*/
public abstract class Model implements Serializable {

    /**
     * Renders this model as its JSON representation via {@link Jsoner}, so any
     * subclass logs/prints with all of its fields visible.
     */
    @Override
    public String toString() {
        return Jsoner.toJson(this);
    }
}
| apache-2.0 |
hai-nguyen/Impala | impala/src/main/java/hainguyen/impala/appsenum/Enums.java | 348 | package hainguyen.impala.appsenum;
/**
* Created by nguyenminhhai on 22/5/16.
*/
public class Enums {

    /**
     * Email validation error categories, each carrying a stable numeric id
     * (independent of ordinal, so reordering constants is safe).
     */
    public enum EmailErrorType {
        EMPTY(0),
        INVALID(1);

        // Stable numeric identifier for this error type.
        private final int id;

        EmailErrorType(int id) {
            this.id = id;
        }

        /** Returns the stable numeric id of this error type. */
        public int getId() {
            return id;
        }
    }
}
| apache-2.0 |
orioncode/orionplatform | orion_math/orion_math_core/src/main/java/com/orionplatform/math/linearalgebra/matrix/tasks/query/GetHadamartMatrixTask.java | 1007 | package com.orionplatform.math.linearalgebra.matrix.tasks.query;
import com.orionplatform.core.abstraction.Orion;
import com.orionplatform.math.linearalgebra.matrix.Matrix;
import com.orionplatform.math.number.ANumber;
import com.orionplatform.math.number.NumberRules;
public class GetHadamartMatrixTask extends Orion
{
public static synchronized Matrix run(int order)
{
NumberRules.hasNaturalNumberValue(order);
ANumber[][] hadamard = new ANumber[order][order];
hadamard[0][0] = ANumber.of(1);
for(int k = 1; k < order; k += k)
{
for(int i = 0; i < k; i++)
{
for(int j = 0; j < k; j++)
{
hadamard[i + k][j] = hadamard[i][j];
hadamard[i][j + k] = hadamard[i][j];
hadamard[i + k][j + k] = (hadamard[i][j].isOne()) ? ANumber.of(0) : ANumber.of(1);
}
}
}
return Matrix.of(hadamard);
}
} | apache-2.0 |
tkobayas/optaplanner | optaplanner-core/src/main/java/org/optaplanner/core/impl/score/stream/drools/uni/DroolsExistsUniConstraintStream.java | 2295 | /*
* Copyright 2021 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.impl.score.stream.drools.uni;
import org.optaplanner.core.api.score.stream.bi.BiJoiner;
import org.optaplanner.core.impl.score.stream.drools.DroolsConstraintFactory;
import org.optaplanner.core.impl.score.stream.drools.common.UniLeftHandSide;
public final class DroolsExistsUniConstraintStream<Solution_, A> extends DroolsAbstractUniConstraintStream<Solution_, A> {

    private final DroolsAbstractUniConstraintStream<Solution_, A> parent;
    private final UniLeftHandSide<A> leftHandSide;
    private final String streamName;

    /**
     * Wraps the parent stream with an (negated) existence condition on facts of
     * the given class, matched through the supplied joiners.
     */
    public <B> DroolsExistsUniConstraintStream(DroolsConstraintFactory<Solution_> constraintFactory,
            DroolsAbstractUniConstraintStream<Solution_, A> parent, boolean shouldExist, Class<B> otherClass,
            BiJoiner<A, B>... joiners) {
        super(constraintFactory);
        this.parent = parent;
        UniLeftHandSide<A> parentLeftHandSide = parent.getLeftHandSide();
        if (shouldExist) {
            this.leftHandSide = parentLeftHandSide.andExists(otherClass, joiners);
            this.streamName = "IfExists()";
        } else {
            this.leftHandSide = parentLeftHandSide.andNotExists(otherClass, joiners);
            this.streamName = "IfNotExists()";
        }
    }

    /** Distinctness is inherited unchanged from the parent stream. */
    @Override
    public boolean guaranteesDistinct() {
        return parent.guaranteesDistinct();
    }

    // ************************************************************************
    // Pattern creation
    // ************************************************************************

    @Override
    public UniLeftHandSide<A> getLeftHandSide() {
        return leftHandSide;
    }

    @Override
    public String toString() {
        int childCount = getChildStreams().size();
        return streamName + " with " + childCount + " children";
    }
}
| apache-2.0 |
arstherapia/dobrapsychoterapia | src/MES/SecurityBundle/Form/MesUserType.php | 1422 | <?php
namespace MES\SecurityBundle\Form;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\OptionsResolver\OptionsResolverInterface;
class MesUserType extends AbstractType {

    /**
     * Builds the user registration form: e-mail, username and a repeated
     * (confirmation) password field.
     *
     * Note: long array syntax has been normalized to short `[]` syntax, which
     * the file already used elsewhere.
     *
     * @param FormBuilderInterface $builder
     * @param array $options
     */
    public function buildForm(FormBuilderInterface $builder, array $options) {
        $builder
            ->add('email')
            ->add('username', 'text', [
                'label' => 'Nazwa użytkownika'
            ])
            ->add('password', 'repeated', [
                'type' => 'password',
                'invalid_message' => 'Hasła muszą się zgadzać',
                'options' => ['attr' => ['class' => 'password-field']],
                'required' => true,
                'first_options' => ['label' => 'Hasło'],
                'second_options' => ['label' => 'Powtórz hasło'],
            ])
        ;
    }

    /**
     * Binds the form to the MesUser entity.
     * NOTE(review): data_class points at MES\UserBundle while this form lives
     * in MES\SecurityBundle — confirm the entity namespace is intentional.
     *
     * @param OptionsResolverInterface $resolver
     */
    public function setDefaultOptions(OptionsResolverInterface $resolver) {
        $resolver->setDefaults([
            'data_class' => 'MES\UserBundle\Entity\MesUser'
        ]);
    }

    /**
     * @return string the unique form type name used by Symfony.
     */
    public function getName() {
        return 'mes_userbundle_mesuser';
    }
}
| apache-2.0 |
rafizanbaharum/cfi-gov | src/main/java/net/canang/cfi/core/vm/dao/impl/CfVoteDaoImpl.java | 25804 | package net.canang.cfi.core.vm.dao.impl;
import net.canang.cfi.core.dd.model.*;
import net.canang.cfi.core.so.dao.DaoSupport;
import net.canang.cfi.core.so.model.CfMetadata;
import net.canang.cfi.core.so.model.CfUser;
import net.canang.cfi.core.vm.dao.CfVoteDao;
import net.canang.cfi.core.vm.model.CfVote;
import net.canang.cfi.core.vm.model.CfVoteTransaction;
import net.canang.cfi.core.vm.model.CfVoteTransactionType;
import net.canang.cfi.core.vm.model.impl.CfVoteImpl;
import net.canang.cfi.core.vm.model.impl.CfVoteTransactionImpl;
import org.apache.commons.lang.Validate;
import org.apache.log4j.Logger;
import org.hibernate.Query;
import org.hibernate.Session;
import org.perf4j.aop.Profiled;
import org.springframework.stereotype.Repository;
import java.math.BigDecimal;
import java.sql.Timestamp;
import java.util.Date;
import java.util.List;
import static net.canang.cfi.core.so.model.CfMetaState.ACTIVE;
import static net.canang.cfi.core.vm.model.CfVoteTransactionType.*;
/**
* @author : alif haikal razak
*/
@Repository("voteDao")
public class CfVoteDaoImpl extends DaoSupport<Long, CfVote, CfVoteImpl> implements CfVoteDao {
private static final Logger log = Logger.getLogger(CfVoteDaoImpl.class);
    /** Creates a fresh, unpersisted {@link CfVoteImpl}. */
    @Override
    public CfVote newInstance() {
        CfVote vote = new CfVoteImpl();
        return vote;
    }
    /** Creates a fresh, unpersisted {@link CfVoteTransactionImpl}. */
    @Override
    public CfVoteTransaction newTransactionInstance() {
        CfVoteTransaction voteTx = new CfVoteTransactionImpl();
        return voteTx;
    }
    /**
     * Finds the single ACTIVE vote for the given budget period and SODO code,
     * or null when none exists. The query is cacheable.
     */
    @Override
    public CfVote find(CfPeriod period, CfSodoCode sodo) {
        Validate.notNull(period, "Budget cannot be empty");
        Validate.notNull(sodo, "Sodo cannot be empty");
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select v from CfVote v where " +
                "v.sodo = :sodo " +
                "and v.budget = :period " +
                "and v.metadata.state = :metaState");
        query.setEntity("sodo", sodo);
        query.setEntity("period", period);
        query.setCacheable(Boolean.TRUE);
        query.setInteger("metaState", ACTIVE.ordinal());
        return (CfVote) query.uniqueResult();
    }
    /** Lists every ACTIVE vote in the given period (timed by perf4j's @Profiled). */
    @Profiled
    @Override
    public List<CfVote> findByPeriod(CfPeriod period) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select v from CfVote v where " +
                "v.budget = :period " +
                "and v.metadata.state = :metaState");
        query.setEntity("period", period);
        query.setInteger("metaState", ACTIVE.ordinal());
        return (List<CfVote>) query.list();
    }
    /** Paged variant of {@code findByPeriod}: ACTIVE votes in the period, [offset, offset+limit). */
    @Override
    public List<CfVote> findByPeriod(CfPeriod period, Integer offset, Integer limit) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select v from CfVote v where " +
                "v.budget = :period " +
                "and v.metadata.state = :metaState");
        query.setEntity("period", period);
        query.setInteger("metaState", ACTIVE.ordinal());
        query.setFirstResult(offset);
        query.setMaxResults(limit);
        return (List<CfVote>) query.list();
    }
    /** Lists ACTIVE votes in the period for the given SODO code, ordered by id. */
    @Override
    public List<CfVote> findByPeriodAndSodo(CfPeriod period, CfSodoCode sodo) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select v from CfVote v where " +
                "v.budget = :period " +
                "and v.sodo = :sodo " +
                "and v.metadata.state = :metaState " +
                "order by v.id asc");
        query.setEntity("period", period);
        query.setEntity("sodo", sodo);
        query.setInteger("metaState", ACTIVE.ordinal());
        return (List<CfVote>) query.list();
    }
    /** Paged variant of {@code findByPeriodAndSodo}, ordered by id. */
    @Override
    public List<CfVote> findByPeriodAndSodo(CfPeriod period, CfSodoCode sodo, Integer offset, Integer limit) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select v from CfVote v where " +
                "v.budget = :period " +
                "and v.sodo = :sodo " +
                "and v.metadata.state = :metaState " +
                "order by v.id asc");
        query.setEntity("period", period);
        query.setEntity("sodo", sodo);
        query.setInteger("metaState", ACTIVE.ordinal());
        query.setFirstResult(offset);
        query.setMaxResults(limit);
        return (List<CfVote>) query.list();
    }
    /**
     * NOTE(review): unimplemented stub — always returns null, not an empty list.
     * Callers must null-check until this is implemented.
     */
    @Override
    public List<CfVote> find(CfFundCode fund, CfDepartmentCode department, CfProjectCode project, CfSubProjectCode subProjectCode, Integer offset, Integer limit) {
        return null;
    }
    /**
     * Paged wildcard search over code/description.
     * NOTE(review): only the parameter is upper-cased (like upper(:filter));
     * the columns are compared as-is, so matching is case-sensitive on stored data.
     */
    @Override
    public List<CfVote> find(String filter, Integer offset, Integer limit) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select v from CfVote v where " +
                "(v.code like upper(:filter) " +
                "or v.description like upper(:filter)) " +
                "and v.metadata.state = :metaState " +
                "order by v.id asc");
        // WILDCARD is inherited from DaoSupport (presumably "%") — verify there.
        query.setString("filter", WILDCARD + filter + WILDCARD);
        query.setInteger("metaState", ACTIVE.ordinal());
        query.setFirstResult(offset);
        query.setMaxResults(limit);
        return (List<CfVote>) query.list();
    }
    /** Counts all ACTIVE votes. */
    @Override
    public Integer count() {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select count(v) from CfVote v where " +
                "v.metadata.state = :metaState");
        query.setInteger("metaState", ACTIVE.ordinal());
        return ((Long) query.uniqueResult()).intValue();
    }
    /** NOTE(review): unimplemented stub — always returns null; callers must null-check. */
    @Override
    public Integer count(CfFundCode fund, CfDepartmentCode department, CfProjectCode project, CfSubProjectCode subProjectCode) {
        return null;
    }
    /**
     * Counts ACTIVE votes matching the wildcard filter on code/description.
     * NOTE(review): the HQL reads "where(" with no space (still tokenizes,
     * since '(' is a delimiter) and carries an "order by" that is pointless
     * for a count query — consider cleaning both up.
     */
    @Override
    public Integer count(String filter) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select count(v) from CfVote v where" +
                "(v.code like upper(:filter) " +
                "or v.description like upper(:filter)) " +
                "and v.metadata.state = :metaState " +
                "order by v.code");
        query.setString("filter", WILDCARD + filter + WILDCARD);
        query.setInteger("metaState", ACTIVE.ordinal());
        return ((Long) query.uniqueResult()).intValue();
    }
    /** Delegates to getSumSodo (defined elsewhere in this class) for the period/SODO balance. */
    @Override
    public BigDecimal getVoteBalance(CfPeriod period, CfSodoCode sodo) {
        return getSumSodo(period, sodo);
    }
    /** Delegates to getSumSodo (defined elsewhere in this class) for the account/period balance. */
    @Override
    public BigDecimal getVoteBalance(CfAccountCode account, CfPeriod period) {
        return getSumSodo(account, period);
    }
@Override
public void addTransaction(CfVote vote, CfVoteTransaction voteTx, CfUser user) {
Session session = sessionFactory.getCurrentSession();
voteTx.setVote(vote);
// prepare metadata
CfMetadata metadata = new CfMetadata();
metadata.setCreatedDate(new Timestamp(System.currentTimeMillis()));
metadata.setCreator(user.getId());
metadata.setState(ACTIVE);
voteTx.setMetadata(metadata);
session.save(vote);
}
    /** Persists each transaction in turn via {@link #addTransaction}. */
    @Override
    public void addTransactions(CfVote vote, List<CfVoteTransaction> voteTx, CfUser user) {
        for (CfVoteTransaction tx : voteTx) {
            addTransaction(vote, tx, user);
        }
    }
    /** Lists the vote's ACTIVE transactions, ordered by id. */
    @Override
    public List<CfVoteTransaction> findTransactions(CfVote vote) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select vt from CfVoteTransaction vt where " +
                "vt.vote = :vote " +
                "and vt.metadata.state = :metaState " +
                "order by vt.id asc");
        query.setEntity("vote", vote);
        query.setInteger("metaState", ACTIVE.ordinal());
        return (List<CfVoteTransaction>) query.list();
    }
    /** Paged variant of {@code findTransactions(vote)}, ordered by id. */
    @Override
    public List<CfVoteTransaction> findTransactions(CfVote vote, Integer offset, Integer limit) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select vt from CfVoteTransaction vt where " +
                "vt.vote = :vote " +
                "and vt.metadata.state = :metaState " +
                "order by vt.id asc");
        query.setEntity("vote", vote);
        query.setInteger("metaState", ACTIVE.ordinal());
        query.setFirstResult(offset);
        query.setMaxResults(limit);
        return (List<CfVoteTransaction>) query.list();
    }
    /** Lists ACTIVE transactions whose source number matches the given reference, ordered by id. */
    @Override
    public List<CfVoteTransaction> findTransactions(String referenceNo) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select vt from CfVoteTransaction vt where " +
                "vt.sourceNo = :sourceNo " +
                "and vt.metadata.state = :metaState " +
                "order by vt.id asc");
        query.setString("sourceNo", referenceNo);
        query.setInteger("metaState", ACTIVE.ordinal());
        return (List<CfVoteTransaction>) query.list();
    }
    /** Paged list of the vote's ACTIVE transactions made by the given spender, ordered by id. */
    @Override
    public List<CfVoteTransaction> findTransactions(CfVote vote, CfSodoCode spender, Integer offset, Integer limit) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select vt from CfVoteTransaction vt where " +
                "vt.vote = :vote " +
                "and vt.spender = :spender " +
                "and vt.metadata.state = :metaState " +
                "order by vt.id asc");
        query.setEntity("vote", vote);
        query.setEntity("spender", spender);
        query.setInteger("metaState", ACTIVE.ordinal());
        query.setFirstResult(offset);
        query.setMaxResults(limit);
        return (List<CfVoteTransaction>) query.list();
    }
    /** Counts the vote's ACTIVE transactions. */
    @Override
    public Integer countTransaction(CfVote vote) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select count(vt) from CfVoteTransaction vt where " +
                "vt.vote = :vote " +
                "and vt.metadata.state = :metaState");
        query.setEntity("vote", vote);
        query.setInteger("metaState", ACTIVE.ordinal());
        return ((Long) query.uniqueResult()).intValue();
    }
    /** Counts ACTIVE votes in the given period. */
    @Override
    public Integer count(CfPeriod period) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select count(v) from CfVote v where " +
                "v.budget = :period " +
                "and v.metadata.state = :metaState");
        query.setEntity("period", period);
        query.setInteger("metaState", ACTIVE.ordinal());
        return ((Long) query.uniqueResult()).intValue();
    }
    /** Counts ACTIVE votes in the given period for the given SODO code. */
    @Override
    public Integer count(CfPeriod period, CfSodoCode sodo) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select count(v) from CfVote v where " +
                "v.budget = :period " +
                "and v.sodo = :sodo " +
                "and v.metadata.state = :metaState");
        query.setEntity("period", period);
        query.setEntity("sodo", sodo);
        query.setInteger("metaState", ACTIVE.ordinal());
        return ((Long) query.uniqueResult()).intValue();
    }
    /** Returns true when at least one ACTIVE vote exists for the period/SODO pair (cacheable count query). */
    @Override
    public boolean isExists(CfPeriod period, CfSodoCode sodo) {
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select count(v.id) from CfVote v where " +
                "v.budget = :period " +
                "and v.sodo = :sodo " +
                "and v.metadata.state = :metaState");
        query.setEntity("period", period);
        query.setEntity("sodo", sodo);
        query.setCacheable(Boolean.TRUE);
        query.setInteger("metaState", ACTIVE.ordinal());
        return 0 < ((Long) query.uniqueResult()).intValue();
    }
    /**
     * Sums the vote's ACTIVE transaction amounts of the given transaction type.
     * Returns ZERO when there are no matching rows.
     * NOTE(review): the Validate.notNull(sum, ...) after the null-check is dead
     * code — sum can no longer be null at that point.
     */
    @Override
    public BigDecimal checkBalance(CfVoteTransactionType transactionType, CfVote vote) {
        // validate precondition
        Validate.notNull(vote, "Vote cannot be null");
        Validate.notNull(vote.getApprovedAmount(), "Approve amount cannot be null");
        // sum it up
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select sum(amount) from CfVoteTransaction vt where " +
                "vt.vote = :vote " +
                "and vt.transactionCode = :transactionCode " +
                "and vt.metadata.state = :metaState");
        query.setEntity("vote", vote);
        query.setInteger("transactionCode", transactionType.ordinal());
        query.setInteger("metaState", ACTIVE.ordinal());
        BigDecimal sum = (BigDecimal) query.uniqueResult();
        // if null, something is wrong
        if (null == sum) return BigDecimal.ZERO;
        Validate.notNull(sum, "Sum cannot be null");
        return sum;
    }
@Override
public BigDecimal checkShadowBalance(CfVote vote) {
// validate precondition
Validate.notNull(vote, "Vote cannot be null");
Validate.notNull(vote.getApprovedAmount(), "Approve amount cannot be null");
// sum it up
Session session = sessionFactory.getCurrentSession();
Query query1 = session.createQuery("select sum(amount) from CfVoteTransaction vt where " +
"vt.vote = :vote " +
"and vt.transactionCode in :transactionCode " +
"and vt.metadata.state = :metaState");
query1.setEntity("vote", vote);
query1.setParameterList("transactionCode",
new Integer[]{BALANCE_OPENING.ordinal(),
BALANCE_ADDITION.ordinal(),
BALANCE_VIREMENT.ordinal()});
query1.setInteger("metaState", ACTIVE.ordinal());
BigDecimal sum = (BigDecimal) query1.uniqueResult();
if (null == sum) sum = BigDecimal.ZERO;
Query query2 = session.createQuery("select sum(amount) from CfVoteTransaction vt where " +
"vt.vote = :vote " +
"and vt.transactionCode in :transactionCode " +
"and vt.metadata.state = :metaState");
query2.setEntity("vote", vote);
query1.setParameterList("transactionCode",
new Integer[]{CUMULATIVE_EXPENDITURE.ordinal()});
query2.setInteger("metaState", ACTIVE.ordinal());
BigDecimal expenses = (BigDecimal) query2.uniqueResult();
if (null == expenses) expenses = BigDecimal.ZERO;
return sum.subtract(expenses);
}
    /**
     * Current balance = (opening + additions + virements) - cumulative
     * expenditure over the vote's ACTIVE transactions.
     * NOTE(review): once checkShadowBalance is fixed, the two methods compute
     * the same quantity — confirm whether "shadow" should differ.
     */
    @Override
    public BigDecimal checkCurrentBalance(CfVote vote) {
        // validate precondition
        Validate.notNull(vote, "Vote cannot be null");
        Validate.notNull(vote.getApprovedAmount(), "Approve amount cannot be null");
        // sum it up
        Session session = sessionFactory.getCurrentSession();
        Query query1 = session.createQuery("select sum(amount) from CfVoteTransaction vt where " +
                "vt.vote = :vote " +
                "and vt.transactionCode in (:transactionCode) " +
                "and vt.metadata.state = :metaState");
        query1.setEntity("vote", vote);
        query1.setParameterList("transactionCode",
                new Integer[]{BALANCE_OPENING.ordinal(),
                        BALANCE_ADDITION.ordinal(),
                        BALANCE_VIREMENT.ordinal()});
        query1.setInteger("metaState", ACTIVE.ordinal());
        BigDecimal sum = (BigDecimal) query1.uniqueResult();
        if (null == sum) sum = BigDecimal.ZERO;
        Query query2 = session.createQuery("select sum(amount) from CfVoteTransaction vt where " +
                "vt.vote = :vote " +
                "and vt.transactionCode = :transactionCode " +
                "and vt.metadata.state = :metaState");
        query2.setEntity("vote", vote);
        query2.setInteger("transactionCode", CUMULATIVE_EXPENDITURE.ordinal());
        query2.setInteger("metaState", ACTIVE.ordinal());
        BigDecimal expenses = (BigDecimal) query2.uniqueResult();
        if (null == expenses) expenses = BigDecimal.ZERO;
        return sum.subtract(expenses);
    }
    /**
     * Sums the vote's ACTIVE transactions of the given type, restricted by sign.
     * NOTE(review): the sign filter looks inverted — positiveTx == true selects
     * amount &lt; 0 rows. Confirm whether "positive transaction" is meant in the
     * accounting sense (expenditures recorded as negative amounts).
     */
    @Override
    public BigDecimal checkBalance(CfVoteTransactionType transactionType, CfVote vote, boolean positiveTx) {
        // validate precondition
        Validate.notNull(vote, "Vote cannot be null");
        Validate.notNull(vote.getApprovedAmount(), "Approve amount cannot be null");
        String descriminator = "";
        if (positiveTx) descriminator = "and vt.amount < 0";
        else descriminator = "and vt.amount > 0";
        // sum it up
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select sum(amount) from CfVoteTransaction vt where " +
                "vt.vote = :vote " +
                "and vt.transactionCode = :transactionCode " +
                "and vt.metadata.state = :metaState " + descriminator);
        query.setEntity("vote", vote);
        query.setInteger("transactionCode", transactionType.ordinal());
        query.setInteger("metaState", ACTIVE.ordinal());
        BigDecimal sum = (BigDecimal) query.uniqueResult();
        // if null, something is wrong
        if (null == sum) return BigDecimal.ZERO;
        Validate.notNull(sum, "Sum cannot be null");
        return sum;
    }
    /**
     * Sums the vote's ACTIVE transactions of the given type created within
     * [startDate, endDate] (inclusive, by metadata creation date). Returns ZERO
     * when no rows match. The query is cacheable.
     */
    @Override
    public BigDecimal checkBalance(CfVoteTransactionType transactionType, CfVote vote, Date startDate, Date endDate) {
        // validate precondition
        Validate.notNull(vote, "Vote cannot be null");
        Validate.notNull(vote.getApprovedAmount(), "Approve amount cannot be null");
        // sum it up
        Session session = sessionFactory.getCurrentSession();
        Query query = session.createQuery("select sum(amount) from CfVoteTransaction vt where " +
                "vt.vote = :vote " +
                "and vt.metadata.createdDate >= :startDate " +
                "and vt.metadata.createdDate <= :endDate " +
                "and vt.transactionCode = :transactionCode " +
                "and vt.metadata.state = :metaState");
        query.setEntity("vote", vote);
        query.setDate("startDate", startDate);
        query.setDate("endDate", endDate);
        query.setCacheable(Boolean.TRUE);
        query.setInteger("transactionCode", transactionType.ordinal());
        query.setInteger("metaState", ACTIVE.ordinal());
        BigDecimal sum = (BigDecimal) query.uniqueResult();
        // if null, something is wrong
        if (null == sum) return BigDecimal.ZERO;
        Validate.notNull(sum, "Sum cannot be null");
        return sum;
    }
@Override
public BigDecimal checkBalance(CfVoteTransactionType transactionType, CfVote vote, Date startDate, Date endDate, boolean positiveTx) {
    // validate precondition
    Validate.notNull(vote, "Vote cannot be null");
    Validate.notNull(vote.getApprovedAmount(), "Approve amount cannot be null");
    // NOTE(review): for positiveTx the query filters on NEGATIVE amounts (and
    // vice versa) — presumably the check is against opposite-sign movements;
    // confirm with the callers.
    final String discriminator = positiveTx ? "and vt.amount < 0" : "and vt.amount > 0";
    // sum it up
    Session session = sessionFactory.getCurrentSession();
    Query query = session.createQuery("select sum(amount) from CfVoteTransaction vt where " +
            "vt.vote = :vote " +
            "and vt.metadata.createdDate >= :startDate " +
            "and vt.metadata.createdDate <= :endDate " +
            "and vt.transactionCode = :transactionCode " +
            "and vt.metadata.state = :metaState " + discriminator);
    query.setEntity("vote", vote);
    query.setDate("startDate", startDate);
    query.setDate("endDate", endDate);
    query.setInteger("transactionCode", transactionType.ordinal());
    query.setInteger("metaState", ACTIVE.ordinal());
    BigDecimal sum = (BigDecimal) query.uniqueResult();
    // sum(...) yields null when no rows match; treat that as a zero balance.
    // (The former Validate.notNull(sum, ...) after this guard was unreachable
    // dead code and has been removed.)
    return null == sum ? BigDecimal.ZERO : sum;
}
@Override
public BigDecimal checkShadowBalance(CfVote vote, Date startDate, Date endDate) {
    // validate precondition
    Validate.notNull(vote, "Vote cannot be null");
    Validate.notNull(vote.getApprovedAmount(), "Approve amount cannot be null");
    Session session = sessionFactory.getCurrentSession();
    // Credits: opening balances, additions and virements within the period.
    Query query1 = session.createQuery("select sum(amount) from CfVoteTransaction vt where " +
            "vt.vote = :vote " +
            "and vt.metadata.createdDate >= :startDate " +
            "and vt.metadata.createdDate <= :endDate " +
            "and vt.transactionCode in (:transactionCode) " +
            "and vt.metadata.state = :metaState");
    query1.setEntity("vote", vote);
    query1.setParameterList("transactionCode",
            new Integer[]{BALANCE_OPENING.ordinal(),
                    BALANCE_ADDITION.ordinal(),
                    BALANCE_VIREMENT.ordinal()});
    query1.setInteger("metaState", ACTIVE.ordinal());
    query1.setDate("startDate", startDate);
    query1.setDate("endDate", endDate);
    BigDecimal sum = (BigDecimal) query1.uniqueResult();
    if (null == sum) sum = BigDecimal.ZERO;
    // Debits: cumulative expenditure within the same period.
    Query query2 = session.createQuery("select sum(amount) from CfVoteTransaction vt where " +
            "vt.vote = :vote " +
            "and vt.metadata.createdDate >= :startDate " +
            "and vt.metadata.createdDate <= :endDate " +
            "and vt.transactionCode in (:transactionCode) " +
            "and vt.metadata.state = :metaState");
    query2.setEntity("vote", vote);
    // BUG FIX: this parameter list was previously bound on query1 instead of
    // query2, leaving query2's :transactionCode parameter unbound (and
    // clobbering query1's already-consumed binding).
    query2.setParameterList("transactionCode",
            new Integer[]{CUMULATIVE_EXPENDITURE.ordinal()});
    query2.setInteger("metaState", ACTIVE.ordinal());
    query2.setDate("startDate", startDate);
    query2.setDate("endDate", endDate);
    BigDecimal expenses = (BigDecimal) query2.uniqueResult();
    if (null == expenses) expenses = BigDecimal.ZERO;
    // shadow balance = credits - debits
    return sum.subtract(expenses);
}
@Override
public BigDecimal checkCurrentBalance(CfVote vote, Date startDate, Date endDate) {
    // Preconditions: a vote with an approved amount is required.
    Validate.notNull(vote, "Vote cannot be null");
    Validate.notNull(vote.getApprovedAmount(), "Approve amount cannot be null");
    final Session session = sessionFactory.getCurrentSession();
    // Credits: opening balances, additions and virements inside the period.
    final Query creditQuery = session.createQuery("select sum(amount) from CfVoteTransaction vt where " +
            "vt.vote = :vote " +
            "and vt.metadata.createdDate >= :startDate " +
            "and vt.metadata.createdDate <= :endDate " +
            "and vt.transactionCode in (:transactionCode) " +
            "and vt.metadata.state = :metaState");
    creditQuery.setEntity("vote", vote);
    creditQuery.setDate("startDate", startDate);
    creditQuery.setDate("endDate", endDate);
    creditQuery.setParameterList("transactionCode",
            new Integer[]{CfVoteTransactionType.BALANCE_OPENING.ordinal(),
                    CfVoteTransactionType.BALANCE_ADDITION.ordinal(),
                    CfVoteTransactionType.BALANCE_VIREMENT.ordinal()});
    creditQuery.setInteger("metaState", ACTIVE.ordinal());
    // sum(...) is null when no rows match; default to zero.
    final BigDecimal creditResult = (BigDecimal) creditQuery.uniqueResult();
    final BigDecimal credits = creditResult == null ? BigDecimal.ZERO : creditResult;
    // Debits: cumulative expenditure inside the same period.
    final Query debitQuery = session.createQuery("select sum(amount) from CfVoteTransaction vt where " +
            "vt.vote = :vote " +
            "and vt.metadata.createdDate >= :startDate " +
            "and vt.metadata.createdDate <= :endDate " +
            "and vt.transactionCode = :transactionCode " +
            "and vt.metadata.state = :metaState");
    debitQuery.setEntity("vote", vote);
    debitQuery.setDate("startDate", startDate);
    debitQuery.setDate("endDate", endDate);
    debitQuery.setInteger("transactionCode", CUMULATIVE_EXPENDITURE.ordinal());
    debitQuery.setInteger("metaState", ACTIVE.ordinal());
    final BigDecimal debitResult = (BigDecimal) debitQuery.uniqueResult();
    final BigDecimal debits = debitResult == null ? BigDecimal.ZERO : debitResult;
    // current balance = credits - debits
    return credits.subtract(debits);
}
/**
 * Sums the approved amounts of all votes in the given budget period whose
 * sodo belongs to the given account, counting only votes joined to an
 * ACTIVE transaction. May return null when nothing matches.
 */
private BigDecimal getSumSodo(CfAccountCode account, CfPeriod period) {
    final Session session = sessionFactory.getCurrentSession();
    final Query q = session.createQuery("select sum(v.approvedAmount) from CfVote v inner join v.transactions t " +
            "where " +
            "v.budget = :budget " +
            "and v.sodo.account = :account " +
            "and t.metadata.state = :metaState");
    q.setInteger("metaState", ACTIVE.ordinal());
    q.setEntity("account", account);
    q.setEntity("budget", period);
    return (BigDecimal) q.uniqueResult();
}
/**
 * Sums the approved amounts of all votes in the given budget period for the
 * given sodo code. May return null when no votes match.
 *
 * NOTE(review): the inner join on v.transactions can repeat a vote once per
 * ACTIVE transaction, which would multiply its approvedAmount in the sum —
 * confirm whether a distinct/exists form is needed here.
 */
private BigDecimal getSumSodo(CfPeriod period, CfSodoCode sodo) {
    Session session = sessionFactory.getCurrentSession();
    Query query = session.createQuery("select sum(v.approvedAmount) from CfVote v inner join v.transactions t " +
            "where v.budget = :period " +
            "and v.sodo = :sodo " +
            "and t.metadata.state = :metaState");
    query.setEntity("period", period);
    query.setEntity("sodo", sodo);
    query.setInteger("metaState", ACTIVE.ordinal());
    return (BigDecimal) query.uniqueResult();
}
}
| apache-2.0 |
toff63/eip | src/main/java/net/francesbagual/github/eip/pattern/router/contentbased/mdb/ContentBasedRouterMDB.java | 2036 | package net.francesbagual.github.eip.pattern.router.contentbased.mdb;
import javax.annotation.Resource;
import javax.ejb.ActivationConfigProperty;
import javax.ejb.MessageDriven;
import javax.inject.Inject;
import javax.jms.JMSContext;
import javax.jms.JMSDestinationDefinition;
import javax.jms.JMSDestinationDefinitions;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.Queue;
import javax.jms.TextMessage;
@JMSDestinationDefinitions(
        value = {
                @JMSDestinationDefinition(
                        name = "jms/queue/contentbasedgreeting",
                        interfaceName = "javax.jms.Queue",
                        destinationName = "contentbasedgreeting"
                ),
                @JMSDestinationDefinition(
                        name = "jms/queue/contentbasedecho",
                        interfaceName = "javax.jms.Queue",
                        destinationName = "contentbasedecho"
                )
        })
@MessageDriven(name = "ContentBasedRouterMDB", activationConfig = {
        @ActivationConfigProperty(propertyName = "destinationLookup", propertyValue = "queue/contentbasedrouter"),
        @ActivationConfigProperty(propertyName = "destinationType", propertyValue = "javax.jms.Queue"),
        @ActivationConfigProperty(propertyName = "acknowledgeMode", propertyValue = "Auto-acknowledge") })
public class ContentBasedRouterMDB implements MessageListener {

    /** Container-managed JMS context used to forward the routed message. */
    @Inject
    private JMSContext context;

    /** Destination for messages whose body starts with "greeting:". */
    @Resource(lookup = "jms/queue/contentbasedgreeting")
    private Queue greeting;

    /** Destination for messages whose body starts with "echo:". */
    @Resource(lookup = "jms/queue/contentbasedecho")
    private Queue echo;

    /** Fallback destination for messages matching no known prefix. */
    @Resource(lookup = "jms/queue/invalidmessage")
    private Queue invalidMessageQueue;

    /**
     * Content-based router: inspects the text payload and forwards the
     * message to the queue selected by its prefix. Non-text messages are
     * silently ignored.
     */
    @Override
    public void onMessage(Message message) {
        if (!(message instanceof TextMessage)) {
            return;
        }
        try {
            Queue destination = routeFor(((TextMessage) message).getText());
            context.createProducer().send(destination, message);
        } catch (JMSException exc) {
            exc.printStackTrace();
        }
    }

    /** Selects the destination queue from the message body's prefix. */
    private Queue routeFor(String text) {
        if (text.startsWith("greeting:")) {
            return greeting;
        }
        if (text.startsWith("echo:")) {
            return echo;
        }
        return invalidMessageQueue;
    }
}
| apache-2.0 |
Albul/support-date-time-pickers | support-date-time-pickers/src/main/java/com/albul/supportdatetimepickers/month/YearView.java | 11142 | /*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.albul.supportdatetimepickers.month;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Paint.Align;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.support.annotation.NonNull;
import android.support.v4.content.ContextCompat;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import com.albul.commonhelpers.CommonHelper;
import com.albul.supportdatetimepickers.R;
import com.albul.supportdatetimepickers.month.YearAdapter.CalendarMonth;
import java.util.Calendar;
/**
 * Abstract view rendering one year of a month picker: a header with the year
 * number and a 4x3 grid of month cells. Subclasses draw the individual cells
 * via {@link #drawYearMonth(Canvas, int, int, float, float)}.
 */
public abstract class YearView extends View {

    /** Grid geometry: 4 rows x 3 columns = 12 month cells. */
    public static final int NUM_ROWS = 4;
    public static final int NUM_COLUMNS = 3;
    /** First month rendered in the grid (cell 0). */
    public static final int FIRST_MONTH = Calendar.JANUARY;

    // Scratch calendar reused while drawing month cells.
    private final Calendar mCalendarForDraw;
    // Calendar positioned at January 1st of the displayed year (see setMonthParams).
    private final Calendar mCalendar;
    // Right-to-left layout flag; mirrors the column traversal order.
    private final boolean mIsRtl;

    protected final int mMonthTitleTextSize;
    protected final int mYearTextSize;
    protected final int mYearHeaderSize;
    // Height of a "0" glyph in the month-title paint; used for the selection rect.
    protected float mSelectedRectHeight;
    // Baseline y-offset of the year title inside the header.
    private float mYearTopPadding;
    private String mYearTitleTypeface;
    protected Typeface mDefNormalTypeface;
    protected Typeface mDefBoldTypeface;
    protected Paint mMonthTitlePaint;
    protected Paint mYearTitlePaint;
    protected Paint mSelectedRectPaint;
    protected int mPrimaryTextColor;
    protected int mSecondaryTextColor;
    protected int mSelectedMonthTextColor;
    protected int mDisabledDayTextColor;

    // Owning picker controller; supplies theme, accent color and range checks.
    protected final IMonthPickerController mController;

    // Quick reference to the width of this view, matches parent
    protected int mWidth;
    protected final int mRowHeight;
    // Optional listener for handling day click actions
    protected OnMonthClickListener mOnMonthClickListener;

    // Today's month/year, captured at construction time.
    protected int mCurrantMonth;
    protected int mCurrantYear;
    // Currently selected month/year, or -1 when nothing is selected.
    protected int mSelectedMonth;
    protected int mSelectedYear;
    // The year this view renders.
    protected int mYear;

    //--------------------------------------------------------------------------------------------------
    //Constructors
    //--------------------------------------------------------------------------------------------------

    public YearView(Context context) {
        this(context, null, null);
    }

    public YearView(Context context, AttributeSet attr, IMonthPickerController controller) {
        super(context, attr);
        mSelectedMonth = -1;
        mSelectedYear = -1;
        mController = controller;
        final Resources res = context.getResources();

        mCalendarForDraw = Calendar.getInstance();
        mCalendar = Calendar.getInstance();
        mIsRtl = CommonHelper.isLayoutRtl();
        mCurrantMonth = mCalendar.get(Calendar.MONTH);
        mCurrantYear = mCalendar.get(Calendar.YEAR);
        mYearTitleTypeface = res.getString(R.string.mdtp_sans_serif);
        mDefNormalTypeface = Typeface.create(Typeface.DEFAULT, Typeface.NORMAL);
        mDefBoldTypeface = Typeface.create(Typeface.DEFAULT, Typeface.BOLD);

        // Pick the text palette matching the controller's theme (dark vs light).
        final boolean darkTheme = mController != null && mController.isThemeDark();
        if (darkTheme) {
            mPrimaryTextColor = ContextCompat.getColor(context, R.color.mdtp_date_picker_text_normal_dark_theme);
            mSecondaryTextColor = ContextCompat.getColor(context, R.color.mdtp_date_picker_month_day_dark_theme);
            mDisabledDayTextColor = ContextCompat.getColor(context, R.color.mdtp_date_picker_text_disabled_dark_theme);
        } else {
            mPrimaryTextColor = ContextCompat.getColor(context, R.color.mdtp_date_picker_text_normal);
            mSecondaryTextColor = ContextCompat.getColor(context, R.color.mdtp_date_picker_month_day);
            mDisabledDayTextColor = ContextCompat.getColor(context, R.color.mdtp_date_picker_text_disabled);
        }
        mSelectedMonthTextColor = ContextCompat.getColor(context, R.color.mdtp_white);

        mMonthTitleTextSize = res.getDimensionPixelSize(R.dimen.mdtp_day_number_size);
        mYearTextSize = res.getDimensionPixelSize(R.dimen.mdtp_month_label_size);
        mYearHeaderSize = res.getDimensionPixelOffset(R.dimen.mdtp_year_list_item_header_height);
        mYearTopPadding = mYearHeaderSize / 2 + mYearTextSize * 0.4F;

        // Row height derived from the fixed animator height minus two header bands.
        mRowHeight = (res.getDimensionPixelOffset(R.dimen.mdtp_date_picker_view_animator_height)
                - getMonthHeaderSize() * 2) / NUM_ROWS;

        // Sets up any standard paints that will be used
        initView();
    }

    /**
     * Sets up the text and style properties for painting. Override this if you
     * want to use a different paint.
     */
    protected void initView() {
        mYearTitlePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mYearTitlePaint.setFakeBoldText(true);
        mYearTitlePaint.setTextSize(mYearTextSize);
        mYearTitlePaint.setTypeface(Typeface.create(mYearTitleTypeface, Typeface.BOLD));
        mYearTitlePaint.setColor(mPrimaryTextColor);
        mYearTitlePaint.setTextAlign(Align.CENTER);
        mYearTitlePaint.setStyle(Style.FILL);

        mMonthTitlePaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mMonthTitlePaint.setTextSize(mMonthTitleTextSize);
        mMonthTitlePaint.setStyle(Style.FILL);
        mMonthTitlePaint.setTextAlign(Align.CENTER);
        mMonthTitlePaint.setFakeBoldText(true);

        // Measure a digit once to size the selection rectangle height.
        final Rect bounds = new Rect();
        mMonthTitlePaint.getTextBounds("0", 0, 1, bounds);
        mSelectedRectHeight = bounds.height();

        mSelectedRectPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
        mSelectedRectPaint.setColor(mController.getAccentColor());
        mSelectedRectPaint.setStyle(Style.FILL);
    }

    //--------------------------------------------------------------------------------------------------

    public void setSelectedMonth(final int selectedMonth) {
        mSelectedMonth = selectedMonth;
    }

    public void setSelectedYear(int selectedYear) {
        mSelectedYear = selectedYear;
    }

    public void setOnDayClickListener(final OnMonthClickListener listener) {
        mOnMonthClickListener = listener;
    }

    /** Binds this view to the given year and resets the calendar to January 1st. */
    public void setMonthParams(final int year) {
        mYear = year;
        mCalendar.set(Calendar.YEAR, mYear);
        mCalendar.set(Calendar.MONTH, FIRST_MONTH);
        mCalendar.set(Calendar.DAY_OF_MONTH, 1);
    }

    public int getYear() {
        return mYear;
    }

    /** Called when the view is recycled by its adapter. */
    public void reuse() {
        requestLayout();
    }

    @Override
    protected void onSizeChanged(int w, int h, int oldw, int oldh) {
        mWidth = w;
    }

    //--------------------------------------------------------------------------------------------------
    //Draw methods
    //--------------------------------------------------------------------------------------------------

    @Override
    protected void onDraw(Canvas canvas) {
        drawYearTitle(canvas);
        drawYearMonths(canvas);
    }

    /** Draws the year number centered in the header band. */
    protected void drawYearTitle(final Canvas canvas) {
        final int year = mCalendar.get(Calendar.YEAR);
        canvas.drawText(getYearString(year), mWidth / 2, mYearTopPadding, mYearTitlePaint);
    }

    /**
     * A wrapper to the MonthHeaderSize to allow override it in children
     */
    protected int getMonthHeaderSize() {
        return mYearHeaderSize;
    }

    //return current year(String, format: yyyy)
    @NonNull
    private String getYearString(final int year) {
        return CommonHelper.formatInt(year, mController.getNumeralSystem());
    }

    /**
     * Walks the 4x3 grid cell by cell, delegating each cell to
     * {@link #drawYearMonth(Canvas, int, int, float, float)}. Column order is
     * mirrored for RTL layouts.
     */
    protected void drawYearMonths(final Canvas canvas) {
        final float columnW = mWidth / NUM_COLUMNS;
        final float rowH = mRowHeight;
        final float firstColumnX = mIsRtl ? mWidth - columnW / 2 : columnW / 2;
        final float nextColumnDelta = mIsRtl ? -columnW : columnW;
        float x = firstColumnX;
        float y = getMonthHeaderSize() + rowH / 2;
        for (int i = 1; i <= NUM_COLUMNS * NUM_ROWS; i++) {
            mCalendarForDraw.set(Calendar.MONTH, FIRST_MONTH - 1 + i);
            mCalendarForDraw.set(Calendar.YEAR, mYear);
            drawYearMonth(canvas, mCalendarForDraw.get(Calendar.YEAR), mCalendarForDraw.get(Calendar.MONTH), x, y);
            x += nextColumnDelta;
            // Wrap to the first column at the end of each row.
            if (i % NUM_COLUMNS == 0) {
                x = firstColumnX;
                y += rowH;
            }
        }
    }

    /** Draws a single month cell centered at (x, y). */
    public abstract void drawYearMonth(Canvas canvas, int year, int month, float x, float y);

    //--------------------------------------------------------------------------------------------------
    // Handler for event
    //--------------------------------------------------------------------------------------------------

    /**
     * Handles callbacks when the user clicks on a time object.
     */
    public interface OnMonthClickListener {
        void onMonthClick(YearView view, CalendarMonth month);
    }

    @Override
    public boolean onTouchEvent(@NonNull MotionEvent event) {
        switch (event.getAction()) {
            case MotionEvent.ACTION_UP:
                final int month = getMonthFromLocation(event.getX(), event.getY());
                if (month >= 0) {
                    onMonthClick(month);
                }
                break;
        }
        return true;
    }

    /**
     * Maps a touch location to a month index (0-based cell number).
     * Touches above the header yield a negative value, which the caller
     * rejects. NOTE(review): touches right of the last column or below the
     * last row can still produce an index >= 12 — presumably filtered later
     * by isOutOfRange; confirm.
     */
    public int getMonthFromLocation(final float x, final float y) {
        final float columnW = mWidth / NUM_COLUMNS;
        final float rowH = mRowHeight;
        final int cellX = (int) Math.floor(mIsRtl ? (mWidth - x) / columnW : x / columnW);
        final int cellY = (int) Math.floor((y - getMonthHeaderSize()) / rowH);
        return cellY * NUM_COLUMNS + cellX;
    }

    /**
     * Called when the user clicks on a month. Handles callbacks to the
     * {@link OnMonthClickListener} if one is set.
     * <p/>
     *
     * @param month The month that was clicked
     */
    private void onMonthClick(final int month) {
        if (mController.isOutOfRange(mYear, month)) {
            return;
        }
        if (mOnMonthClickListener != null) {
            mOnMonthClickListener.onMonthClick(this, new CalendarMonth(mYear, month));
        }
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Fixed height: header band plus the four month rows.
        setMeasuredDimension(MeasureSpec.getSize(widthMeasureSpec),
                getMonthHeaderSize() + mRowHeight * NUM_ROWS);
    }

    //--------------------------------------------------------------------------------------------------
}
hortonworks/cloudbreak | mock-infrastructure/src/main/java/com/sequenceiq/mock/experience/LiftieExperienceStoreService.java | 2899 | package com.sequenceiq.mock.experience;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.springframework.stereotype.Service;
import com.sequenceiq.mock.experience.response.liftie.LiftieClusterView;
import com.sequenceiq.mock.experience.response.liftie.ListClustersResponse;
import com.sequenceiq.mock.experience.response.liftie.PageStats;
import com.sequenceiq.mock.experience.response.liftie.StatusMessage;
@Service
public class LiftieExperienceStoreService {

    // In-memory store of mock Liftie clusters, keyed by cluster id.
    private final Map<String, LiftieClusterView> store = new ConcurrentHashMap<>();

    // Monotonically increasing counter used to build unique cluster ids.
    private final AtomicLong idCounter = new AtomicLong();

    // Returns the next unique id suffix.
    private String createID() {
        return String.valueOf(idCounter.getAndIncrement());
    }

    /**
     * Registers a new mock cluster for the given environment/tenant under an
     * id of the form "liftie<n>", starting in RUNNING state.
     */
    public void create(String env, String tenant) {
        String id = "liftie" + createID();
        StatusMessage clusterStatus = new StatusMessage();
        clusterStatus.setMessage("");
        clusterStatus.setStatus("RUNNING");
        store.put(id, new LiftieClusterView(id, id, env, tenant, "X", clusterStatus));
    }

    /**
     * Creates a cluster for the environment only if none exists for it yet.
     * NOTE(review): this check-then-act is not atomic; two concurrent calls
     * for the same env can both create a cluster — confirm whether that is
     * acceptable for the mock infrastructure.
     */
    public void createIfNotExist(String env, String tenant) {
        if (store.values().stream().noneMatch(cluster -> env.equals(cluster.getEnv()))) {
            create(env, tenant);
        }
    }

    /**
     * "Deletes" a cluster by marking its status DELETED; the entry itself
     * stays in the store.
     */
    public void deleteById(String id) {
        setStatusById(id, "DELETED");
    }

    // Updates the status of the cluster with the given id (no-op when absent);
    // returns the cluster, or null if it does not exist.
    public LiftieClusterView setStatusById(String id, String status) {
        LiftieClusterView cluster = store.get(id);
        if (cluster != null) {
            cluster.getClusterStatus().setStatus(status);
        }
        return cluster;
    }

    /** Lists all clusters belonging to the given environment as a single page. */
    public ListClustersResponse get(String env) {
        Map<String, LiftieClusterView> clusters = store.values().stream()
                .filter(cluster -> env.equals(cluster.getEnv()))
                .collect(Collectors.toMap(LiftieClusterView::getClusterId, Function.identity()));
        return create(clusters);
    }

    // Wraps the cluster map into a one-page ListClustersResponse.
    // NOTE(review): totalElements/totalPages are hard-coded to 1 regardless of
    // the map size — presumably sufficient for the mock; confirm.
    private ListClustersResponse create(Map<String, LiftieClusterView> clusters) {
        ListClustersResponse listClustersResponse = new ListClustersResponse();
        listClustersResponse.setClusters(clusters);
        PageStats pageStat = new PageStats();
        pageStat.setTotalElements(1);
        pageStat.setTotalPages(1);
        pageStat.setNumber(1);
        pageStat.setSize(clusters.size());
        listClustersResponse.setPage(pageStat);
        return listClustersResponse;
    }

    /** Returns the cluster with the given id, or null when unknown. */
    public LiftieClusterView getById(String id) {
        return store.get(id);
    }

    /** Replaces the cluster stored under id, forcing its id and name to match. */
    public LiftieClusterView changeById(String id, LiftieClusterView liftieClusterView) {
        liftieClusterView.setClusterId(id);
        liftieClusterView.setName(id);
        store.put(id, liftieClusterView);
        return liftieClusterView;
    }
}
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-simpleworkflow/src/main/java/com/amazonaws/services/simpleworkflow/model/transform/PendingTaskCountJsonUnmarshaller.java | 3011 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.simpleworkflow.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * PendingTaskCount JSON Unmarshaller.
 *
 * <p>Generated by the AWS SDK code generator — do not edit by hand; changes
 * will be overwritten on the next generation run.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class PendingTaskCountJsonUnmarshaller implements Unmarshaller<PendingTaskCount, JsonUnmarshallerContext> {

    /**
     * Reads a PendingTaskCount object from the JSON stream at the context's
     * current position, populating the "count" and "truncated" fields.
     */
    public PendingTaskCount unmarshall(JsonUnmarshallerContext context) throws Exception {
        PendingTaskCount pendingTaskCount = new PendingTaskCount();
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Fields of this object live one nesting level below the start token.
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null means "no object here" — return the empty instance.
        if (token == VALUE_NULL) {
            return pendingTaskCount;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("count", targetDepth)) {
                    context.nextToken();
                    pendingTaskCount.setCount(context.getUnmarshaller(Integer.class).unmarshall(context));
                }
                if (context.testExpression("truncated", targetDepth)) {
                    context.nextToken();
                    pendingTaskCount.setTruncated(context.getUnmarshaller(Boolean.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have climbed back out of the object we started in.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return pendingTaskCount;
    }

    private static PendingTaskCountJsonUnmarshaller instance;

    /**
     * Returns the shared unmarshaller, creating it lazily. Initialization is
     * unsynchronized; the class has no mutable state, so a duplicate
     * assignment under concurrency is harmless.
     */
    public static PendingTaskCountJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new PendingTaskCountJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
webadvancedservicescom/magento | dev/tests/integration/testsuite/Magento/Core/Model/_files/design/frontend/Vendor/default/web/scripts.js | 108 | /**
* @copyright Copyright (c) 2014 X.commerce, Inc. (http://www.magentocommerce.com)
*/
/* scripts.js */
| apache-2.0 |
klu2/structurizr-java | structurizr-spring/test/unit/com/structurizr/analysis/SpringMvcControllerComponentFinderStrategyTests.java | 1425 | package com.structurizr.analysis;
import com.structurizr.Workspace;
import com.structurizr.model.Component;
import com.structurizr.model.Container;
import com.structurizr.model.Model;
import com.structurizr.model.SoftwareSystem;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class SpringMvcControllerComponentFinderStrategyTests {

    /**
     * Verifies that SpringMvcControllerComponentFinderStrategy discovers the
     * controller class in the scanned test package and registers it as a
     * component with the "Spring MVC Controller" technology.
     */
    @Test
    public void test_findComponents_FindsSpringMvcControllers() throws Exception {
        // Minimal workspace -> software system -> container hierarchy to host components.
        Workspace workspace = new Workspace("Name", "Description");
        Model model = workspace.getModel();
        SoftwareSystem softwareSystem = model.addSoftwareSystem("Name", "Description");
        Container container = softwareSystem.addContainer("Name", "Description", "Technology");

        ComponentFinder componentFinder = new ComponentFinder(
                container,
                "test.SpringMvcControllerComponentFinderStrategy",
                new SpringMvcControllerComponentFinderStrategy()
        );
        componentFinder.findComponents();

        // Exactly one controller lives in the scanned package.
        assertEquals(1, container.getComponents().size());
        Component component = container.getComponentWithName("SomeController");
        assertEquals("test.SpringMvcControllerComponentFinderStrategy.SomeController", component.getType().getType());
        assertEquals("", component.getDescription());
        assertEquals("Spring MVC Controller", component.getTechnology());
    }
}
| apache-2.0 |
fungku/netsuite-php | src/Classes/GetServerTimeRequest.php | 830 | <?php
/**
* This file is part of the SevenShores/NetSuite library
* AND originally from the NetSuite PHP Toolkit.
*
* New content:
* @package ryanwinchester/netsuite-php
* @copyright Copyright (c) Ryan Winchester
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache-2.0
* @link https://github.com/ryanwinchester/netsuite-php
*
* Original content:
* @copyright Copyright (c) NetSuite Inc.
* @license https://raw.githubusercontent.com/ryanwinchester/netsuite-php/master/original/NetSuite%20Application%20Developer%20License%20Agreement.txt
* @link http://www.netsuite.com/portal/developers/resources/suitetalk-sample-applications.shtml
*
* generated: 2019-06-12 10:27:00 AM PDT
*/
namespace NetSuite\Classes;
/**
 * SOAP request object for the NetSuite getServerTime operation.
 * The operation takes no arguments, so the parameter type map is empty.
 */
class GetServerTimeRequest {
    static $paramtypesmap = array(
    );
}
| apache-2.0 |
hinkleung/coolweather | app/src/main/java/com/example/hasee/coolweather/gson/Suggestion.java | 586 | package com.example.hasee.coolweather.gson;
import com.google.gson.annotations.SerializedName;
/**
 * Gson model for the "suggestion" block of the weather JSON response.
 * Fields are public because Gson populates them directly via reflection;
 * {@code @SerializedName} maps the abbreviated JSON keys to readable names.
 *
 * Created by hasee on 2017/4/22.
 */
public class Suggestion {

    // JSON key "comf": comfort index advice.
    @SerializedName("comf")
    public Comfort comfort;

    // JSON key "cw": car-wash index advice.
    @SerializedName("cw")
    public CarWash carWash;

    // JSON key "sport" (matches the field name, so no annotation needed).
    public Sport sport;

    public class Comfort {
        // JSON key "txt": human-readable suggestion text.
        @SerializedName("txt")
        public String info;
    }

    public class CarWash {
        // JSON key "txt": human-readable suggestion text.
        @SerializedName("txt")
        public String info;
    }

    public class Sport {
        // JSON key "txt": human-readable suggestion text.
        @SerializedName("txt")
        public String info;
    }
}
| apache-2.0 |
radicalbit/ambari | ambari-server/src/test/java/org/apache/ambari/server/security/authorization/TestUsers.java | 15430 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.security.authorization;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.sql.SQLException;
import java.util.List;
import org.apache.ambari.server.AmbariException;
import org.apache.ambari.server.H2DatabaseCleaner;
import org.apache.ambari.server.orm.GuiceJpaInitializer;
import org.apache.ambari.server.orm.InMemoryDefaultTestModule;
import org.apache.ambari.server.orm.dao.GroupDAO;
import org.apache.ambari.server.orm.dao.PermissionDAO;
import org.apache.ambari.server.orm.dao.PrincipalDAO;
import org.apache.ambari.server.orm.dao.PrincipalTypeDAO;
import org.apache.ambari.server.orm.dao.ResourceDAO;
import org.apache.ambari.server.orm.dao.ResourceTypeDAO;
import org.apache.ambari.server.orm.dao.UserDAO;
import org.apache.ambari.server.orm.entities.PermissionEntity;
import org.apache.ambari.server.orm.entities.PrincipalEntity;
import org.apache.ambari.server.orm.entities.PrincipalTypeEntity;
import org.apache.ambari.server.orm.entities.ResourceEntity;
import org.apache.ambari.server.orm.entities.ResourceTypeEntity;
import org.apache.ambari.server.orm.entities.UserEntity;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.Mockito;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.crypto.password.PasswordEncoder;
import com.google.inject.Guice;
import com.google.inject.Inject;
import com.google.inject.Injector;
import junit.framework.Assert;
public class TestUsers {
private Injector injector;
@Inject
protected Users users;
@Inject
protected UserDAO userDAO;
@Inject
protected GroupDAO groupDAO;
@Inject
protected PermissionDAO permissionDAO;
@Inject
protected ResourceDAO resourceDAO;
@Inject
protected ResourceTypeDAO resourceTypeDAO;
@Inject
protected PrincipalTypeDAO principalTypeDAO;
@Inject
protected PrincipalDAO principalDAO;
@Inject
protected PasswordEncoder passwordEncoder;
/**
 * Builds a fresh Guice injector with an in-memory database for each test,
 * authenticates as "admin", and seeds the AMBARI.ADMINISTRATOR permission
 * that the Users API relies on.
 */
@Before
public void setup() throws AmbariException {
    InMemoryDefaultTestModule module = new InMemoryDefaultTestModule();
    injector = Guice.createInjector(module);
    injector.getInstance(GuiceJpaInitializer.class);
    injector.injectMembers(this);

    // Authenticate as "admin" so authorization checks pass in the tests.
    Authentication auth = new UsernamePasswordAuthenticationToken("admin", null);
    SecurityContextHolder.getContext().setAuthentication(auth);

    // create admin permission
    ResourceTypeEntity resourceTypeEntity = new ResourceTypeEntity();
    resourceTypeEntity.setId(ResourceType.AMBARI.getId());
    resourceTypeEntity.setName(ResourceType.AMBARI.name());
    resourceTypeDAO.create(resourceTypeEntity);

    ResourceEntity resourceEntity = new ResourceEntity();
    resourceEntity.setId(ResourceEntity.AMBARI_RESOURCE_ID);
    resourceEntity.setResourceType(resourceTypeEntity);
    resourceDAO.create(resourceEntity);

    PrincipalTypeEntity principalTypeEntity = new PrincipalTypeEntity();
    principalTypeEntity.setName("ROLE");
    principalTypeEntity = principalTypeDAO.merge(principalTypeEntity);

    PrincipalEntity principalEntity = new PrincipalEntity();
    principalEntity.setPrincipalType(principalTypeEntity);
    principalEntity = principalDAO.merge(principalEntity);

    // Register the AMBARI.ADMINISTRATOR permission itself.
    PermissionEntity adminPermissionEntity = new PermissionEntity();
    adminPermissionEntity.setId(PermissionEntity.AMBARI_ADMINISTRATOR_PERMISSION);
    adminPermissionEntity.setPermissionName(PermissionEntity.AMBARI_ADMINISTRATOR_PERMISSION_NAME);
    adminPermissionEntity.setPrincipal(principalEntity);
    adminPermissionEntity.setResourceType(resourceTypeEntity);
    permissionDAO.create(adminPermissionEntity);
}
/** Drops the in-memory database after each test so tests stay isolated. */
@After
public void tearDown() throws AmbariException, SQLException {
    H2DatabaseCleaner.clearDatabaseAndStopPersistenceService(injector);
}
/**
 * A user is removable only while at least one other admin remains:
 * the last administrator must never be deletable.
 */
@Test
public void testIsUserCanBeRemoved() throws Exception {
    users.createUser("admin", "admin", UserType.LOCAL, true, true);
    users.createUser("admin222", "admin222", UserType.LOCAL, true, true);
    // Two admins exist, so either one may be removed.
    Assert.assertTrue(users.isUserCanBeRemoved(userDAO.findUserByName("admin")));
    Assert.assertTrue(users.isUserCanBeRemoved(userDAO.findUserByName("admin222")));

    users.removeUser(users.getAnyUser("admin222"));
    // "admin" is now the last admin and must not be removable.
    Assert.assertFalse(users.isUserCanBeRemoved(userDAO.findUserByName("admin")));

    users.createUser("user", "user");
    // Adding a plain (non-admin) user does not change that.
    Assert.assertFalse(users.isUserCanBeRemoved(userDAO.findUserByName("admin")));

    users.createUser("admin333", "admin333", UserType.LOCAL, true, true);
    // With a second admin again, both become removable.
    Assert.assertTrue(users.isUserCanBeRemoved(userDAO.findUserByName("admin")));
    Assert.assertTrue(users.isUserCanBeRemoved(userDAO.findUserByName("admin333")));
}
@Test
public void testModifyPassword_UserByAdmin() throws Exception {
    users.createUser("admin", "admin", UserType.LOCAL, true, true);
    users.createUser("user", "user");

    // The stored password is a hash, never the plain text.
    UserEntity storedUser = userDAO.findUserByName("user");
    assertNotSame("user", storedUser.getUserPassword());
    assertTrue(passwordEncoder.matches("user", storedUser.getUserPassword()));

    // An administrator may reset another user's password by presenting the admin credentials.
    users.modifyPassword("user", "admin", "user_new_password");
    assertTrue(passwordEncoder.matches("user_new_password", userDAO.findUserByName("user").getUserPassword()));
}
@Test
public void testRevokeAdminPrivilege() throws Exception {
    // Revoking the admin privilege is reflected on subsequent look-ups.
    users.createUser("old_admin", "old_admin", UserType.LOCAL, true, true);
    final User oldAdmin = users.getAnyUser("old_admin");
    users.revokeAdminPrivilege(oldAdmin.getUserId());
    Assert.assertFalse(users.getAnyUser("old_admin").isAdmin());
}
@Test
public void testGrantAdminPrivilege() throws Exception {
    // Granting the admin privilege is reflected on subsequent look-ups.
    users.createUser("user", "user");
    final User regularUser = users.getAnyUser("user");
    users.grantAdminPrivilege(regularUser.getUserId());
    assertTrue(users.getAnyUser("user").isAdmin());
}
@Test
public void testCreateGetRemoveUser() throws Exception {
// Create three users: one with defaults, one explicitly inactive local, one active LDAP admin.
users.createUser("user1", "user1");
users.createUser("user", "user", UserType.LOCAL, false, false);
users.createUser("user_ldap", "user_ldap", UserType.LDAP, true, true);
User createdUser = users.getUser("user", UserType.LOCAL);
User createdUser1 = users.getAnyUser("user1");
User createdLdapUser = users.getUser("user_ldap", UserType.LDAP);
// createUser(name, password) defaults: active, non-LDAP, non-admin.
Assert.assertEquals("user1", createdUser1.getUserName());
Assert.assertEquals(true, createdUser1.isActive());
Assert.assertEquals(false, createdUser1.isLdapUser());
Assert.assertEquals(false, createdUser1.isAdmin());
// The explicit flags (inactive, local, non-admin) are honored.
Assert.assertEquals("user", createdUser.getUserName());
Assert.assertEquals(false, createdUser.isActive());
Assert.assertEquals(false, createdUser.isLdapUser());
Assert.assertEquals(false, createdUser.isAdmin());
// The LDAP admin flags are honored.
Assert.assertEquals("user_ldap", createdLdapUser.getUserName());
Assert.assertEquals(true, createdLdapUser.isActive());
Assert.assertEquals(true, createdLdapUser.isLdapUser());
Assert.assertEquals(true, createdLdapUser.isAdmin());
// getAnyUser() finds users regardless of type; unknown names yield null.
assertEquals("user", users.getAnyUser("user").getUserName());
assertEquals("user_ldap", users.getAnyUser("user_ldap").getUserName());
Assert.assertNull(users.getAnyUser("non_existing"));
// Creating a duplicate user must fail, even when the name differs only by case.
try {
users.createUser("user1", "user1");
Assert.fail("It shouldn't be possible to create duplicate user");
} catch (AmbariException e) {
// expected
}
try {
users.createUser("USER1", "user1");
Assert.fail("It shouldn't be possible to create duplicate user");
} catch (AmbariException e) {
// expected
}
// All three users appear in the full listing.
List<User> userList = users.getAllUsers();
Assert.assertEquals(3, userList.size());
// getAnyUser() is case-insensitive on the user name.
assertEquals("user", users.getAnyUser("USER").getUserName());
assertEquals("user_ldap", users.getAnyUser("USER_LDAP").getUserName());
Assert.assertNull(users.getAnyUser("non_existing"));
// Look-up by numeric id round-trips.
User userById = users.getUser(createdUser.getUserId());
assertNotNull(userById);
assertEquals(createdUser.getUserId(), userById.getUserId());
// An invalid id yields null rather than throwing.
User userByInvalidId = users.getUser(-1);
assertNull(userByInvalidId);
// getUserIfUnique() returns the user when exactly one account matches the name.
Assert.assertNotNull(users.getUserIfUnique("user"));
// Removing a user deletes it and shrinks the overall listing.
Assert.assertEquals(3, users.getAllUsers().size());
users.removeUser(users.getAnyUser("user1"));
Assert.assertNull(users.getAnyUser("user1"));
Assert.assertEquals(2, users.getAllUsers().size());
}
@Test
public void testSetUserActive() throws Exception {
    users.createUser("user", "user");

    // Toggling the active flag is persisted and round-trips.
    users.setUserActive("user", false);
    Assert.assertEquals(false, users.getAnyUser("user").isActive());
    users.setUserActive("user", true);
    Assert.assertEquals(true, users.getAnyUser("user").isActive());

    // Activating an unknown user must be rejected. Catch only the documented
    // failure type (as testSetUserLdap/testSetGroupLdap already do) so that an
    // unexpected error still fails the test instead of being swallowed by a
    // blanket catch (Exception).
    try {
        users.setUserActive("fake user", true);
        Assert.fail("It shouldn't be possible to call setUserActive() on non-existing user");
    } catch (AmbariException ex) {
        // expected: unknown user name
    }
}
@Test
public void testSetUserLdap() throws Exception {
    users.createUser("user", "user");
    users.createUser("user_ldap", "user_ldap", UserType.LDAP, true, false);

    // Converting a local user flags it as LDAP-backed.
    users.setUserLdap("user");
    assertTrue(users.getAnyUser("user").isLdapUser());

    // Converting an unknown user is rejected.
    try {
        users.setUserLdap("fake user");
        Assert.fail("It shouldn't be possible to call setUserLdap() on non-existing user");
    } catch (AmbariException expected) {
        // expected: unknown user name
    }
}
@Test
public void testSetGroupLdap() throws Exception {
    users.createGroup("group", GroupType.LOCAL);

    // Converting an existing group flags it as LDAP-backed.
    users.setGroupLdap("group");
    Group convertedGroup = users.getGroup("group");
    Assert.assertNotNull(convertedGroup);
    Assert.assertTrue(convertedGroup.isLdapGroup());

    // Converting an unknown group is rejected.
    try {
        users.setGroupLdap("fake group");
        Assert.fail("It shouldn't be possible to call setGroupLdap() on non-existing group");
    } catch (AmbariException expected) {
        // expected: unknown group name
    }
}
@Test
public void testCreateGetRemoveGroup() throws Exception {
    final String firstGroupName = "engineering1";
    final String secondGroupName = "engineering2";
    users.createGroup(firstGroupName, GroupType.LOCAL);
    users.createGroup(secondGroupName, GroupType.LOCAL);

    // A freshly created group is local (non-LDAP) and visible through the DAO.
    final Group createdGroup = users.getGroup(firstGroupName);
    assertNotNull(createdGroup);
    assertEquals(false, createdGroup.isLdapGroup());
    assertEquals(firstGroupName, createdGroup.getGroupName());
    assertNotNull(groupDAO.findGroupByName(firstGroupName));

    // Both groups show up in the full listing, via the service and the DAO alike.
    final List<Group> allGroups = users.getAllGroups();
    assertEquals(2, allGroups.size());
    assertEquals(2, groupDAO.findAll().size());

    // Removing one group leaves exactly the other behind.
    users.removeGroup(createdGroup);
    assertNull(users.getGroup(createdGroup.getGroupName()));
    assertEquals(1, users.getAllGroups().size());
}
@Test
public void testMembers() throws Exception {
    final String groupName = "engineering";
    final String groupName2 = "engineering2";
    users.createGroup(groupName, GroupType.LOCAL);
    users.createGroup(groupName2, GroupType.LOCAL);
    users.createUser("user1", "user1");
    users.createUser("user2", "user2");
    users.createUser("user3", "user3");

    // Membership is tracked per group; the second group stays empty.
    users.addMemberToGroup(groupName, "user1");
    users.addMemberToGroup(groupName, "user2");
    assertEquals(2, users.getAllMembers(groupName).size());
    assertEquals(0, users.getAllMembers(groupName2).size());

    // Asking for all members of an unknown group is an error. Catch the
    // documented failure type so unexpected errors still fail the test.
    try {
        users.getAllMembers("non existing");
        Assert.fail("It shouldn't be possible to call getAllMembers() on non-existing group");
    } catch (AmbariException ex) {
        // expected: unknown group name
    }

    // getGroupMembers(), by contrast, reports an unknown group as null.
    // (The original assertEquals(actual, null) had the expected/actual
    // arguments reversed; assertNull states the intent directly.)
    assertNull(users.getGroupMembers("unexisting"));

    // Removing a member leaves only the remaining one attached to the group.
    users.removeMemberFromGroup(groupName, "user1");
    assertEquals(1, groupDAO.findGroupByName(groupName).getMemberEntities().size());
    assertEquals("user2", groupDAO.findGroupByName(groupName).getMemberEntities().iterator().next().getUser().getUserName());
}
@Test
public void testModifyPassword_UserByHimselfPasswordOk() throws Exception {
    // Authenticate as the user whose password will be changed.
    Authentication authentication = new UsernamePasswordAuthenticationToken("user", null);
    SecurityContextHolder.getContext().setAuthentication(authentication);
    users.createUser("user", "user");

    // The stored password is a hash, never the plain text.
    UserEntity storedUser = userDAO.findUserByName("user");
    assertNotSame("user", storedUser.getUserPassword());
    assertTrue(passwordEncoder.matches("user", storedUser.getUserPassword()));

    // Presenting the correct current password allows the change.
    users.modifyPassword("user", "user", "user_new_password");
    assertTrue(passwordEncoder.matches("user_new_password", userDAO.findUserByName("user").getUserPassword()));
}
@Test
public void testModifyPassword_UserByHimselfPasswordNotOk() throws Exception {
    // Authenticate as the user whose password will be changed.
    Authentication authentication = new UsernamePasswordAuthenticationToken("user", null);
    SecurityContextHolder.getContext().setAuthentication(authentication);
    users.createUser("user", "user");

    // The stored password is a hash, never the plain text.
    UserEntity storedUser = userDAO.findUserByName("user");
    assertNotSame("user", storedUser.getUserPassword());
    assertTrue(passwordEncoder.matches("user", storedUser.getUserPassword()));

    // Presenting a wrong current password must be rejected.
    try {
        users.modifyPassword("user", "admin", "user_new_password");
        Assert.fail("Exception should be thrown here as password is incorrect");
    } catch (AmbariException expected) {
        // expected: wrong current password
    }
}
@Test
public void testModifyPassword_UserByNonAdmin() throws Exception {
    // Authenticate as "user2", a regular (non-admin) account.
    Authentication authentication = new UsernamePasswordAuthenticationToken("user2", null);
    SecurityContextHolder.getContext().setAuthentication(authentication);
    users.createUser("user", "user");
    users.createUser("user2", "user2");

    // The stored password is a hash, never the plain text.
    UserEntity storedUser = userDAO.findUserByName("user");
    assertNotSame("user", storedUser.getUserPassword());
    assertTrue(passwordEncoder.matches("user", storedUser.getUserPassword()));

    // A non-admin must not be able to change another user's password.
    try {
        users.modifyPassword("user", "user2", "user_new_password");
        Assert.fail("Exception should be thrown here as user2 can't change password of user");
    } catch (AmbariException expected) {
        // expected: insufficient privileges
    }
}
@Test
@Ignore // TODO @Transactional annotation breaks this test
public void testCreateUserDefaultParams() throws Exception {
    // createUser(name, password) should delegate with the LOCAL/active/non-admin defaults.
    final Users usersSpy = Mockito.spy(users);
    usersSpy.createUser("user", "user");
    Mockito.verify(usersSpy).createUser("user", "user", UserType.LOCAL, true, false);
}
}
| apache-2.0 |
SAP/openui5 | src/sap.m/test/sap/m/demokit/sample/RadioButtonGroup/Component.js | 207 | sap.ui.define([
"sap/ui/core/UIComponent"
], function (UIComponent) {
"use strict";
// Minimal sample component: metadata is loaded from the component's
// manifest.json (manifest: "json"), so no further configuration lives here.
return UIComponent.extend("sap.m.sample.RadioButtonGroup.Component", {
metadata: {
manifest: "json"
}
});
}); | apache-2.0 |
jloisel/quiz | src/main/java/io/teamed/quizz/package-info.java | 106 | /**
* Readable and Writeable Text implementations.
*
* @author jerome
*
*/
package io.teamed.quizz;
| apache-2.0 |