code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
# GAPIC client configuration for the google.bigtable.v2.Bigtable data API.
# The generated client reads this mapping to build per-method retry and
# timeout settings (all durations are in milliseconds).
config = {
    "interfaces": {
        "google.bigtable.v2.Bigtable": {
            # gRPC status codes eligible for retry, grouped by whether the
            # calling method is idempotent (safe to retry) or not.
            "retry_codes": {
                "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
                "non_idempotent": [],
            },
            # Named backoff profiles referenced by the methods below.
            # Retry delay grows by retry_delay_multiplier per attempt up to
            # max_retry_delay_millis; total_timeout_millis caps the overall
            # deadline across all attempts.
            "retry_params": {
                # Profile for unary (request/response) calls.
                "default": {
                    "initial_retry_delay_millis": 100,
                    "retry_delay_multiplier": 1.3,
                    "max_retry_delay_millis": 60000,
                    "initial_rpc_timeout_millis": 20000,
                    "rpc_timeout_multiplier": 1.0,
                    "max_rpc_timeout_millis": 20000,
                    "total_timeout_millis": 600000,
                },
                # Profile for streaming calls: same backoff, but a one-hour
                # total timeout to accommodate long-running row scans.
                "streaming": {
                    "initial_retry_delay_millis": 100,
                    "retry_delay_multiplier": 1.3,
                    "max_retry_delay_millis": 60000,
                    "initial_rpc_timeout_millis": 20000,
                    "rpc_timeout_multiplier": 1.0,
                    "max_rpc_timeout_millis": 20000,
                    "total_timeout_millis": 3600000,
                },
            },
            # Per-RPC settings: each method picks a retry-code group and a
            # backoff profile by name from the sections above.
            "methods": {
                "ReadRows": {
                    "timeout_millis": 3600000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "streaming",
                },
                "SampleRowKeys": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "default",
                },
                "MutateRow": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "default",
                },
                "MutateRows": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "idempotent",
                    "retry_params_name": "default",
                },
                # Conditional/read-modify-write mutations are not idempotent,
                # so they are never retried.
                "CheckAndMutateRow": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default",
                },
                "ReadModifyWriteRow": {
                    "timeout_millis": 60000,
                    "retry_codes_name": "non_idempotent",
                    "retry_params_name": "default",
                },
            },
        }
    }
}
| dhermes/google-cloud-python | bigtable/google/cloud/bigtable_v2/gapic/bigtable_client_config.py | Python | apache-2.0 | 2,407 |
/*******************************************************************************
* Copyright 2013
* Ubiquitous Knowledge Processing (UKP) Lab
* Technische Universität Darmstadt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
/**
* Contains components which are fundamental for all similarity measures.
*/
package dkpro.similarity.algorithms; | TitasNandi/Summer_Project | dkpro-similarity-master/dkpro-similarity-algorithms-core-asl/src/main/java/dkpro/similarity/algorithms/package-info.java | Java | apache-2.0 | 937 |
package org.apereo.cas.config;
import org.apereo.cas.authentication.AuthenticationServiceSelectionPlan;
import org.apereo.cas.configuration.CasConfigurationProperties;
import org.apereo.cas.consent.ConsentEngine;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.web.flow.CasWebflowConfigurer;
import org.apereo.cas.web.flow.CasWebflowExecutionPlan;
import org.apereo.cas.web.flow.CasWebflowExecutionPlanConfigurer;
import org.apereo.cas.web.flow.CheckConsentRequiredAction;
import org.apereo.cas.web.flow.ConfirmConsentAction;
import org.apereo.cas.web.flow.ConsentWebflowConfigurer;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.webflow.definition.registry.FlowDefinitionRegistry;
import org.springframework.webflow.engine.builder.support.FlowBuilderServices;
import org.springframework.webflow.execution.Action;
/**
 * This is {@link CasConsentWebflowConfiguration}.
 *
 * <p>Registers the consent-related webflow actions and flow configurer with
 * the CAS login webflow. Only activated when a {@code consentRepository}
 * bean is present in the context.</p>
 *
 * @author Misagh Moayyed
 * @since 5.1.0
 */
@Configuration("casConsentWebflowConfiguration")
@EnableConfigurationProperties(CasConfigurationProperties.class)
@ConditionalOnBean(name = "consentRepository")
public class CasConsentWebflowConfiguration implements CasWebflowExecutionPlanConfigurer {
    // Registry that holds the main login flow definition; the consent flow
    // configurer below augments it with consent states.
    @Autowired
    @Qualifier("loginFlowRegistry")
    private FlowDefinitionRegistry loginFlowDefinitionRegistry;
    @Autowired
    private FlowBuilderServices flowBuilderServices;
    // Resolves the effective service from the authentication request before
    // consent decisions are made.
    @Autowired
    @Qualifier("authenticationServiceSelectionPlan")
    private AuthenticationServiceSelectionPlan authenticationRequestServiceSelectionStrategies;
    @Autowired
    @Qualifier("consentEngine")
    private ConsentEngine consentEngine;
    @Autowired
    private ApplicationContext applicationContext;
    @Autowired
    @Qualifier("servicesManager")
    private ServicesManager servicesManager;
    @Autowired
    private CasConfigurationProperties casProperties;
    /**
     * Webflow action that decides whether attribute-release consent is
     * required for the current request. Overridable by supplying a bean
     * named {@code checkConsentRequiredAction}.
     *
     * @return the check-consent-required action
     */
    @ConditionalOnMissingBean(name = "checkConsentRequiredAction")
    @Bean
    public Action checkConsentRequiredAction() {
        return new CheckConsentRequiredAction(servicesManager,
            authenticationRequestServiceSelectionStrategies, consentEngine, casProperties);
    }
    /**
     * Webflow action that records the user's consent decision.
     * Overridable by supplying a bean named {@code confirmConsentAction}.
     *
     * @return the confirm-consent action
     */
    @ConditionalOnMissingBean(name = "confirmConsentAction")
    @Bean
    public Action confirmConsentAction() {
        return new ConfirmConsentAction(servicesManager,
            authenticationRequestServiceSelectionStrategies, consentEngine, casProperties);
    }
    /**
     * Builds the consent webflow configurer. Depends on the default webflow
     * configurer so the base login flow exists before it is augmented.
     *
     * @return the consent webflow configurer
     */
    @ConditionalOnMissingBean(name = "consentWebflowConfigurer")
    @Bean
    @DependsOn("defaultWebflowConfigurer")
    public CasWebflowConfigurer consentWebflowConfigurer() {
        return new ConsentWebflowConfigurer(flowBuilderServices, loginFlowDefinitionRegistry,
            applicationContext, casProperties);
    }
    @Override
    public void configureWebflowExecutionPlan(final CasWebflowExecutionPlan plan) {
        // Register the consent configurer with the overall webflow plan.
        plan.registerWebflowConfigurer(consentWebflowConfigurer());
    }
}
| tduehr/cas | support/cas-server-support-consent-webflow/src/main/java/org/apereo/cas/config/CasConsentWebflowConfiguration.java | Java | apache-2.0 | 3,498 |
/* The copyright in this software is being made available under the BSD
* License, included below. This software may be subject to other third party
* and contributor rights, including patent rights, and no such rights are
* granted under this license.
*
* Copyright (c) 2010-2012, ITU/ISO/IEC
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the ITU/ISO/IEC nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
#include <vector>
#include <algorithm>
#include <ostream>
#include "NALread.h"
#include "TLibCommon/NAL.h"
#include "TLibCommon/TComBitStream.h"
using namespace std;
//! \ingroup TLibDecoder
//! \{
/**
 * Convert a NAL unit payload to RBSP in place: strip tile markers
 * (0x000002) and emulation-prevention bytes (0x000003 -> 0x0000),
 * recording tile-marker byte offsets on the given bitstream.
 *
 * \param nalUnitBuf  NAL payload on entry; resized to the RBSP on return.
 * \param pcBitstream receives the tile-marker locations and count.
 */
static void convertPayloadToRBSP(vector<uint8_t>& nalUnitBuf, TComInputBitstream *pcBitstream)
{
  unsigned zeroCount = 0;
  vector<uint8_t>::iterator it_read, it_write;
  // vector instead of raw new[]: released on every exit path (the original
  // leaked the array if an exception was thrown mid-loop).
  vector<UInt> auiStoredTileMarkerLocation(MAX_MARKER_PER_NALU);
  // Remove tile markers and note the bitstream location
  for (it_read = it_write = nalUnitBuf.begin(); it_read != nalUnitBuf.end(); it_read++ )
  {
    Bool bTileMarkerFound = false;
    // Guard size() >= 3 first: nalUnitBuf.size() - 2 is size_t arithmetic
    // and underflows to a huge value for buffers shorter than 2 bytes,
    // which made the original comparison pass and read past the buffer end.
    if ( nalUnitBuf.size() >= 3 &&
         (size_t)( it_read - nalUnitBuf.begin() ) < ( nalUnitBuf.size() - 2 ) )
    {
      if ( (*(it_read) == 0x00) && (*(it_read+1) == 0x00) && (*(it_read+2) == 0x02) ) // tile marker detected
      {
        it_read += 2;
        UInt uiDistance = (UInt) (it_write - nalUnitBuf.begin());
        UInt uiCount = pcBitstream->getTileMarkerLocationCount();
        bTileMarkerFound = true;
        pcBitstream->setTileMarkerLocation( uiCount, uiDistance );
        auiStoredTileMarkerLocation[uiCount] = uiDistance;
        pcBitstream->setTileMarkerLocationCount( uiCount + 1 );
      }
    }
    if (!bTileMarkerFound)
    {
      *it_write = *it_read;
      it_write++;
    }
  }
  nalUnitBuf.resize(it_write - nalUnitBuf.begin());
  // Strip emulation prevention: a 0x03 that follows two zero bytes is
  // removed, and any stored tile-marker offsets past it shift down by one.
  for (it_read = it_write = nalUnitBuf.begin(); it_read != nalUnitBuf.end(); it_read++, it_write++)
  {
    if (zeroCount == 2 && *it_read == 0x03)
    {
      // update tile marker location
      UInt uiDistance = (UInt) (it_read - nalUnitBuf.begin());
      for (UInt uiIdx=0; uiIdx<pcBitstream->getTileMarkerLocationCount(); uiIdx++)
      {
        if (auiStoredTileMarkerLocation[ uiIdx ] >= uiDistance)
        {
          pcBitstream->setTileMarkerLocation( uiIdx, pcBitstream->getTileMarkerLocation( uiIdx )-1 );
        }
      }
      it_read++;
      zeroCount = 0;
      // Defensive: if the 0x03 was the final byte, stop before the loop
      // body below dereferences end() (the original read out of bounds).
      if (it_read == nalUnitBuf.end())
      {
        break;
      }
    }
    zeroCount = (*it_read == 0x00) ? zeroCount+1 : 0;
    *it_write = *it_read;
  }
  nalUnitBuf.resize(it_write - nalUnitBuf.begin());
}
/**
 * create a NALunit structure with given header values and storage for
 * a bitstream
 *
 * Strips emulation-prevention/tile-marker bytes from nalUnitBuf, wraps the
 * resulting RBSP in a TComInputBitstream owned by the NAL unit, and decodes
 * the NAL header fields (forbidden bit, ref idc, unit type, and for slice
 * NAL units the temporal id / output flag).
 */
void read(InputNALUnit& nalu, vector<uint8_t>& nalUnitBuf)
{
  /* perform anti-emulation prevention */
  // Temporary bitstream used only to collect tile-marker locations while
  // the payload is converted to RBSP in place.
  TComInputBitstream *pcBitstream = new TComInputBitstream(NULL);
  convertPayloadToRBSP(nalUnitBuf, pcBitstream);
  nalu.m_Bitstream = new TComInputBitstream(&nalUnitBuf);
  // copy the tile marker location information
  nalu.m_Bitstream->setTileMarkerLocationCount( pcBitstream->getTileMarkerLocationCount() );
  for (UInt uiIdx=0; uiIdx < nalu.m_Bitstream->getTileMarkerLocationCount(); uiIdx++)
  {
    nalu.m_Bitstream->setTileMarkerLocation( uiIdx, pcBitstream->getTileMarkerLocation(uiIdx) );
  }
  // The temporary bitstream is no longer needed once locations are copied.
  delete pcBitstream;
  TComInputBitstream& bs = *nalu.m_Bitstream;
  // NAL unit header: forbidden_zero_bit(1) | nal_ref_idc(2) | nal_unit_type(5)
  bool forbidden_zero_bit = bs.read(1);
  assert(forbidden_zero_bit == 0);
  nalu.m_RefIDC = (NalRefIdc) bs.read(2);
  nalu.m_UnitType = (NalUnitType) bs.read(5);
  switch (nalu.m_UnitType)
  {
  case NAL_UNIT_CODED_SLICE:
  case NAL_UNIT_CODED_SLICE_IDR:
  case NAL_UNIT_CODED_SLICE_CDR:
    {
      // Slice NAL units additionally carry temporal_id(3), output_flag(1)
      // and a reserved_one_4bits field that must equal 1.
      nalu.m_TemporalID = bs.read(3);
      nalu.m_OutputFlag = bs.read(1);
      unsigned reserved_one_4bits = bs.read(4);
      assert(reserved_one_4bits == 1);
    }
    break;
  default:
    // Non-slice NAL units: defaults for temporal id and output flag.
    nalu.m_TemporalID = 0;
    nalu.m_OutputFlag = true;
    break;
  }
}
//! \}
| lheric/GitlHEVCAnalyzer | appgitlhevcdecoder/HM-5.2/source/Lib/TLibDecoder/NALread.cpp | C++ | apache-2.0 | 5,455 |
/*
* Copyright 2017-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.openstacknetworking.impl;
import com.google.common.collect.ImmutableSet;
import org.apache.felix.scr.annotations.Activate;
import org.apache.felix.scr.annotations.Component;
import org.apache.felix.scr.annotations.Deactivate;
import org.apache.felix.scr.annotations.Reference;
import org.apache.felix.scr.annotations.ReferenceCardinality;
import org.apache.felix.scr.annotations.Service;
import org.onlab.packet.IpAddress;
import org.onlab.packet.MacAddress;
import org.onlab.util.Tools;
import org.onosproject.event.ListenerRegistry;
import org.onosproject.net.Host;
import org.onosproject.net.HostId;
import org.onosproject.net.host.HostEvent;
import org.onosproject.net.host.HostListener;
import org.onosproject.net.host.HostService;
import org.onosproject.openstacknetworking.api.InstancePort;
import org.onosproject.openstacknetworking.api.InstancePortEvent;
import org.onosproject.openstacknetworking.api.InstancePortListener;
import org.onosproject.openstacknetworking.api.InstancePortService;
import org.slf4j.Logger;
import java.util.Set;
import java.util.stream.Collectors;
import static org.onosproject.openstacknetworking.api.InstancePortEvent.Type.OPENSTACK_INSTANCE_PORT_DETECTED;
import static org.onosproject.openstacknetworking.api.InstancePortEvent.Type.OPENSTACK_INSTANCE_PORT_UPDATED;
import static org.onosproject.openstacknetworking.api.InstancePortEvent.Type.OPENSTACK_INSTANCE_PORT_VANISHED;
import static org.onosproject.openstacknetworking.impl.HostBasedInstancePort.ANNOTATION_NETWORK_ID;
import static org.onosproject.openstacknetworking.impl.HostBasedInstancePort.ANNOTATION_PORT_ID;
import static org.slf4j.LoggerFactory.getLogger;
/**
 * Provides implementation of administering and interfacing host based instance ports.
 * It also provides instance port events for the hosts mapped to OpenStack VM interface.
 *
 * <p>Hosts that carry both the network-ID and port-ID annotations (and at
 * least one IP address) are treated as instance ports; host events for such
 * hosts are translated into instance port events for registered listeners.</p>
 */
@Service
@Component(immediate = true)
public class HostBasedInstancePortManager
        extends ListenerRegistry<InstancePortEvent, InstancePortListener>
        implements InstancePortService {
    protected final Logger log = getLogger(getClass());
    @Reference(cardinality = ReferenceCardinality.MANDATORY_UNARY)
    protected HostService hostService;
    private final HostListener hostListener = new InternalHostListener();
    @Activate
    protected void activate() {
        hostService.addListener(hostListener);
        log.info("Started");
    }
    @Deactivate
    protected void deactivate() {
        hostService.removeListener(hostListener);
        log.info("Stopped");
    }
    @Override
    public InstancePort instancePort(MacAddress macAddress) {
        Host host = hostService.getHost(HostId.hostId(macAddress));
        if (host == null || !isValidHost(host)) {
            return null;
        }
        return HostBasedInstancePort.of(host);
    }
    @Override
    public InstancePort instancePort(IpAddress ipAddress, String osNetId) {
        // A fixed IP is only unique within a network, so match on both.
        return validInstancePorts().stream()
                .filter(instPort -> instPort.networkId().equals(osNetId))
                .filter(instPort -> instPort.ipAddress().equals(ipAddress))
                .findAny().orElse(null);
    }
    @Override
    public InstancePort instancePort(String osPortId) {
        return validInstancePorts().stream()
                .filter(instPort -> instPort.portId().equals(osPortId))
                .findAny().orElse(null);
    }
    @Override
    public Set<InstancePort> instancePorts() {
        return ImmutableSet.copyOf(validInstancePorts());
    }
    @Override
    public Set<InstancePort> instancePorts(String osNetId) {
        Set<InstancePort> instPorts = validInstancePorts().stream()
                .filter(instPort -> instPort.networkId().equals(osNetId))
                .collect(Collectors.toSet());
        return ImmutableSet.copyOf(instPorts);
    }
    /**
     * Snapshot of all annotated hosts converted to instance ports.
     * Centralizes the host-to-port pipeline previously duplicated across
     * the query methods above.
     *
     * @return set of instance ports derived from valid hosts
     */
    private Set<InstancePort> validInstancePorts() {
        return Tools.stream(hostService.getHosts())
                .filter(this::isValidHost)
                .map(HostBasedInstancePort::of)
                .collect(Collectors.toSet());
    }
    // A host maps to an instance port only if it has at least one IP address
    // and both OpenStack annotations (network ID and port ID) populated.
    private boolean isValidHost(Host host) {
        return !host.ipAddresses().isEmpty() &&
                host.annotations().value(ANNOTATION_NETWORK_ID) != null &&
                host.annotations().value(ANNOTATION_PORT_ID) != null;
    }
    /**
     * Translates host events for valid (annotated) hosts into the
     * corresponding instance port events.
     */
    private class InternalHostListener implements HostListener {
        @Override
        public boolean isRelevant(HostEvent event) {
            Host host = event.subject();
            if (!isValidHost(host)) {
                log.debug("Invalid host detected, ignore it {}", host);
                return false;
            }
            return true;
        }
        @Override
        public void event(HostEvent event) {
            InstancePort instPort = HostBasedInstancePort.of(event.subject());
            InstancePortEvent instPortEvent;
            switch (event.type()) {
                case HOST_UPDATED:
                    instPortEvent = new InstancePortEvent(
                            OPENSTACK_INSTANCE_PORT_UPDATED,
                            instPort);
                    log.debug("Instance port is updated: {}", instPort);
                    process(instPortEvent);
                    break;
                case HOST_ADDED:
                    instPortEvent = new InstancePortEvent(
                            OPENSTACK_INSTANCE_PORT_DETECTED,
                            instPort);
                    log.debug("Instance port is detected: {}", instPort);
                    process(instPortEvent);
                    break;
                case HOST_REMOVED:
                    instPortEvent = new InstancePortEvent(
                            OPENSTACK_INSTANCE_PORT_VANISHED,
                            instPort);
                    log.debug("Instance port is disabled: {}", instPort);
                    process(instPortEvent);
                    break;
                default:
                    // Other host event types carry no instance-port meaning.
                    break;
            }
        }
    }
}
| sdnwiselab/onos | apps/openstacknetworking/src/main/java/org/onosproject/openstacknetworking/impl/HostBasedInstancePortManager.java | Java | apache-2.0 | 6,879 |
package com.deliveredtechnologies.rulebook.runner.test.rulebooks;
/**
 * Sample POJO rule with no annotations whatsoever.
 *
 * <p>NOTE(review): presumably a negative fixture used to verify that the
 * rule scanner/runner skips classes lacking rule annotations — confirm
 * against the runner tests.</p>
 */
public class SampleRuleWithoutRuleAnnotation {
}
| Clayton7510/RuleBook | rulebook-core/src/test/java/com/deliveredtechnologies/rulebook/runner/test/rulebooks/SampleRuleWithoutRuleAnnotation.java | Java | apache-2.0 | 176 |
package play.curator.lock;
import java.util.concurrent.TimeUnit;
import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.framework.recipes.locks.InterProcessLock;
import org.apache.curator.framework.recipes.locks.InterProcessSemaphoreMutex;
import org.apache.curator.retry.ExponentialBackoffRetry;
public class LockClient {
    public static final String NAMESPACE = "test/locks";
    public static final String LOCK_NODE = "lockClient1";
    /**
     * Starts a process to test locking across Zookeeper using Curator
     * @param args hostport waitTime processTime
     * @throws Exception
     */
    public static void main(String[] args)
        throws Exception
    {
        final String hostPort = args[0];
        final long maxWaitMillis = Long.parseLong(args[1]);
        final long workMillis = Long.parseLong(args[2]);
        new LockClient(hostPort, maxWaitMillis, workMillis).run();
    }
    private final String connectionString;
    private final long waitTime;
    private final long processTime;
    public LockClient(String connectionString, long waitTime, long processTime) {
        this.connectionString = connectionString;
        this.waitTime = waitTime;
        this.processTime = processTime;
    }
    /**
     * Connects to ZooKeeper, attempts to take the shared lock for up to
     * {@code waitTime} ms, holds it for {@code processTime} ms while
     * "processing", then releases the lock and closes the client.
     */
    public void run()
        throws Exception
    {
        final CuratorFramework client = CuratorFrameworkFactory.builder()
            .connectString(connectionString)
            .connectionTimeoutMs(3000)
            .namespace(NAMESPACE)
            .retryPolicy(new ExponentialBackoffRetry(1000, 3))
            .build();
        try {
            client.start();
            final InterProcessLock mutex = new InterProcessSemaphoreMutex(client, LOCK_NODE);
            System.out.println("Locking...");
            final boolean acquired = mutex.acquire(waitTime, TimeUnit.MILLISECONDS);
            if (!acquired) {
                System.out.println("Could not aquire lock. Exiting...");
                return;
            }
            try {
                System.out.println("Aquired Lock!");
                System.out.println("Beginning 'Processing' ...");
                Thread.sleep(processTime);
                System.out.println("'Processing' finished...");
            } finally {
                mutex.release();
                System.out.println("Lock Released");
            }
        } finally {
            client.close();
        }
    }
}
| jbaiera/zookeeper-play | src/main/java/play/curator/lock/LockClient.java | Java | apache-2.0 | 2,027 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.sql.parser.mysql.parser;
import org.apache.shardingsphere.sql.parser.api.parser.SQLLexer;
import org.apache.shardingsphere.sql.parser.api.parser.SQLParser;
import org.apache.shardingsphere.sql.parser.spi.DatabaseTypedSQLParserFacade;
/**
 * SQL parser facade for MySQL.
 *
 * <p>Supplies the MySQL-specific lexer and parser classes to the
 * database-typed SQL parser SPI, keyed by the database type name.</p>
 */
public final class MySQLParserFacade implements DatabaseTypedSQLParserFacade {
    /**
     * @return the lexer class used to tokenize MySQL SQL text
     */
    @Override
    public Class<? extends SQLLexer> getLexerClass() {
        return MySQLLexer.class;
    }
    /**
     * @return the parser class used to parse MySQL SQL text
     */
    @Override
    public Class<? extends SQLParser> getParserClass() {
        return MySQLParser.class;
    }
    /**
     * @return the database type name this facade serves
     */
    @Override
    public String getDatabaseType() {
        return "MySQL";
    }
}
| apache/incubator-shardingsphere | shardingsphere-sql-parser/shardingsphere-sql-parser-dialect/shardingsphere-sql-parser-mysql/src/main/java/org/apache/shardingsphere/sql/parser/mysql/parser/MySQLParserFacade.java | Java | apache-2.0 | 1,510 |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
using System;
using System.Management.Automation;
using System.Net;
using Microsoft.Azure.Common.Authentication.Models;
using Microsoft.WindowsAzure.Commands.ServiceManagement.Extensions;
using Microsoft.WindowsAzure.Commands.ServiceManagement.Helpers;
using Microsoft.WindowsAzure.Commands.ServiceManagement.Properties;
using Microsoft.WindowsAzure.Commands.Utilities.Common;
using Microsoft.WindowsAzure.Management.Compute.Models;
using Microsoft.WindowsAzure.Management.Compute;
using Hyak.Common;
namespace Microsoft.WindowsAzure.Commands.ServiceManagement.HostedServices
{
    /// <summary>
    /// Update deployment configuration, upgrade or status.
    /// Exposes three parameter sets: "Upgrade" (new package + config),
    /// "Config" (configuration only) and "Status" (running/suspended).
    /// </summary>
    [Cmdlet(VerbsCommon.Set, "AzureDeployment"), OutputType(typeof(ManagementOperationContext))]
    public class SetAzureDeploymentCommand : ServiceManagementBaseCmdlet
    {
        /// <summary>Selects the "Upgrade" parameter set.</summary>
        [Parameter(Position = 0, Mandatory = true, ParameterSetName = "Upgrade", HelpMessage = "Upgrade Deployment")]
        public SwitchParameter Upgrade
        {
            get;
            set;
        }
        /// <summary>Selects the "Config" parameter set.</summary>
        [Parameter(Position = 0, Mandatory = true, ParameterSetName = "Config", HelpMessage = "Change Configuration of Deployment")]
        public SwitchParameter Config
        {
            get;
            set;
        }
        /// <summary>Selects the "Status" parameter set.</summary>
        [Parameter(Position = 0, Mandatory = true, ParameterSetName = "Status", HelpMessage = "Change Status of Deployment")]
        public SwitchParameter Status
        {
            get;
            set;
        }
        /// <summary>Name of the cloud service that hosts the deployment.</summary>
        [Parameter(Position = 1, Mandatory = true, ParameterSetName = "Upgrade", ValueFromPipelineByPropertyName = true, HelpMessage = "Service name")]
        [Parameter(Position = 1, Mandatory = true, ParameterSetName = "Config", ValueFromPipelineByPropertyName = true, HelpMessage = "Service name")]
        [Parameter(Position = 1, Mandatory = true, ParameterSetName = "Status", ValueFromPipelineByPropertyName = true, HelpMessage = "Service name")]
        [ValidateNotNullOrEmpty]
        public string ServiceName
        {
            get;
            set;
        }
        /// <summary>Local path or blob URI of the .cspkg package to deploy (Upgrade only).</summary>
        [Parameter(Position = 2, Mandatory = true, ParameterSetName = "Upgrade", HelpMessage = "Package location. This parameter should have the local file path or URI to a .cspkg in blob storage whose storage account is part of the same subscription/project.")]
        [ValidateNotNullOrEmpty]
        public string Package
        {
            get;
            set;
        }
        /// <summary>Path to the .cscfg configuration file on disk.</summary>
        [Parameter(Position = 2, Mandatory = true, ParameterSetName = "Config", HelpMessage = "Configuration file path. This parameter should specifiy a .cscfg file on disk.")]
        [Parameter(Position = 3, Mandatory = true, ParameterSetName = "Upgrade", HelpMessage = "Configuration file path. This parameter should specifiy a .cscfg file on disk.")]
        [ValidateNotNullOrEmpty]
        public string Configuration
        {
            get;
            set;
        }
        /// <summary>Target deployment slot: Staging or Production.</summary>
        [Parameter(Position = 4, Mandatory = true, ParameterSetName = "Upgrade", ValueFromPipelineByPropertyName = true, HelpMessage = "Deployment slot. Staging | Production")]
        [Parameter(Position = 3, Mandatory = true, ParameterSetName = "Config", ValueFromPipelineByPropertyName = true, HelpMessage = "Deployment slot. Staging | Production")]
        [Parameter(Position = 2, Mandatory = true, ParameterSetName = "Status", ValueFromPipelineByPropertyName = true, HelpMessage = "Deployment slot. Staging | Production")]
        [ValidateSet(Model.DeploymentSlotType.Staging, Model.DeploymentSlotType.Production, IgnoreCase = true)]
        public string Slot
        {
            get;
            set;
        }
        /// <summary>Upgrade mode; defaults to Auto when unset or unparsable.</summary>
        [Parameter(Position = 5, ParameterSetName = "Upgrade", HelpMessage = "Upgrade mode. Auto | Manual | Simultaneous")]
        [ValidateSet(Model.UpgradeType.Auto, Model.UpgradeType.Manual, Model.UpgradeType.Simultaneous, IgnoreCase = true)]
        public string Mode
        {
            get;
            set;
        }
        /// <summary>Label for the new deployment; defaults to the service name.</summary>
        [Parameter(Position = 6, Mandatory = false, ParameterSetName = "Upgrade", HelpMessage = "Label name for the new deployment. Default: <Service Name> + <date time>")]
        [ValidateNotNullOrEmpty]
        public string Label
        {
            get;
            set;
        }
        /// <summary>Optional single role to upgrade; all roles when omitted.</summary>
        [Parameter(Position = 7, Mandatory = false, ParameterSetName = "Upgrade", HelpMessage = "Name of role to upgrade.")]
        public string RoleName
        {
            get;
            set;
        }
        /// <summary>Forces the upgrade even when it would otherwise be blocked.</summary>
        [Parameter(Position = 8, Mandatory = false, ParameterSetName = "Upgrade", HelpMessage = "Force upgrade.")]
        public SwitchParameter Force
        {
            get;
            set;
        }
        /// <summary>New deployment status for the Status parameter set.</summary>
        [Parameter(Position = 3, Mandatory = true, ParameterSetName = "Status", HelpMessage = "New deployment status. Running | Suspended")]
        [ValidateSet(Model.DeploymentStatus.Running, Model.DeploymentStatus.Suspended, IgnoreCase = true)]
        public string NewStatus
        {
            get;
            set;
        }
        /// <summary>Extension configurations to apply alongside the change.</summary>
        [Parameter(Position = 9, ValueFromPipelineByPropertyName = true, Mandatory = false, ParameterSetName = "Upgrade", HelpMessage = "Extension configurations.")]
        [Parameter(Position = 4, ValueFromPipelineByPropertyName = true, Mandatory = false, ParameterSetName = "Config", HelpMessage = "HelpMessage")]
        public ExtensionConfigurationInput[] ExtensionConfiguration
        {
            get;
            set;
        }
        /// <summary>
        /// Dispatches on the active parameter set: uploads/validates extension
        /// configuration, then performs the upgrade, configuration change, or
        /// status change against the target slot.
        /// </summary>
        public void ExecuteCommand()
        {
            string configString = string.Empty;
            if (!string.IsNullOrEmpty(Configuration))
            {
                configString = GeneralUtilities.GetConfiguration(Configuration);
            }
            ExtensionConfiguration extConfig = null;
            if (ExtensionConfiguration != null)
            {
                // Reject configurations that apply extensions of the same type twice.
                string errorConfigInput = null;
                if (!ExtensionManager.Validate(ExtensionConfiguration, out errorConfigInput))
                {
                    throw new Exception(string.Format(Resources.ServiceExtensionCannotApplyExtensionsInSameType, errorConfigInput));
                }
                // Upload any certificates referenced by the extensions first.
                foreach (ExtensionConfigurationInput context in ExtensionConfiguration)
                {
                    if (context != null && context.X509Certificate != null)
                    {
                        ExecuteClientActionNewSM(
                            null,
                            string.Format(Resources.ServiceExtensionUploadingCertificate, CommandRuntime, context.X509Certificate.Thumbprint),
                            () => this.ComputeClient.ServiceCertificates.Create(this.ServiceName, CertUtilsNewSM.Create(context.X509Certificate)));
                    }
                }
                // Fetch a slot's deployment, tolerating "not found" (returns null).
                Func<DeploymentSlot, DeploymentGetResponse> func = t =>
                {
                    DeploymentGetResponse d = null;
                    try
                    {
                        d = this.ComputeClient.Deployments.GetBySlot(this.ServiceName, t);
                    }
                    catch (CloudException ex)
                    {
                        if (ex.Response.StatusCode != HttpStatusCode.NotFound && IsVerbose() == false)
                        {
                            this.WriteExceptionDetails(ex);
                        }
                    }
                    return d;
                };
                var slotType = (DeploymentSlot)Enum.Parse(typeof(DeploymentSlot), this.Slot, true);
                DeploymentGetResponse currentDeployment = null;
                InvokeInOperationContext(() => currentDeployment = func(slotType));
                // The peer slot is consulted so extension state can be merged
                // across Staging/Production.
                var peerSlottype = slotType == DeploymentSlot.Production ? DeploymentSlot.Staging : DeploymentSlot.Production;
                DeploymentGetResponse peerDeployment = null;
                InvokeInOperationContext(() => peerDeployment = func(peerSlottype));
                ExtensionManager extensionMgr = new ExtensionManager(this, ServiceName);
                extConfig = extensionMgr.Add(currentDeployment, peerDeployment, ExtensionConfiguration, this.Slot);
            }
            // Upgrade Parameter Set
            if (string.Compare(ParameterSetName, "Upgrade", StringComparison.OrdinalIgnoreCase) == 0)
            {
                // Local packages are uploaded to the current storage account and
                // deleted afterwards; URI packages are used in place.
                bool removePackage = false;
                var storageName = Profile.Context.Subscription.GetProperty(AzureSubscription.Property.StorageAccount);
                Uri packageUrl = null;
                if (Package.StartsWith(Uri.UriSchemeHttp, StringComparison.OrdinalIgnoreCase) ||
                    Package.StartsWith(Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase))
                {
                    packageUrl = new Uri(Package);
                }
                else
                {
                    if (string.IsNullOrEmpty(storageName))
                    {
                        throw new ArgumentException(Resources.CurrentStorageAccountIsNotSet);
                    }
                    var progress = new ProgressRecord(0, Resources.WaitForUploadingPackage, Resources.UploadingPackage);
                    WriteProgress(progress);
                    removePackage = true;
                    InvokeInOperationContext(() => packageUrl = RetryCall(s => AzureBlob.UploadPackageToBlob(this.StorageClient, storageName, Package, null)));
                }
                DeploymentUpgradeMode upgradeMode;
                if (!Enum.TryParse<DeploymentUpgradeMode>(Mode, out upgradeMode))
                {
                    upgradeMode = DeploymentUpgradeMode.Auto;
                }
                var upgradeDeploymentInput = new DeploymentUpgradeParameters
                {
                    Mode = upgradeMode,
                    Configuration = configString,
                    ExtensionConfiguration = extConfig,
                    PackageUri = packageUrl,
                    Label = Label ?? ServiceName,
                    Force = Force.IsPresent
                };
                if (!string.IsNullOrEmpty(RoleName))
                {
                    upgradeDeploymentInput.RoleToUpgrade = RoleName;
                }
                InvokeInOperationContext(() =>
                {
                    try
                    {
                        ExecuteClientActionNewSM(
                            upgradeDeploymentInput,
                            CommandRuntime.ToString(),
                            () => this.ComputeClient.Deployments.UpgradeBySlot(
                                this.ServiceName,
                                (DeploymentSlot)Enum.Parse(typeof(DeploymentSlot), this.Slot, true),
                                upgradeDeploymentInput));
                        // Clean up the temporary package blob after a successful upgrade.
                        if (removePackage == true)
                        {
                            this.RetryCall(s =>
                            AzureBlob.DeletePackageFromBlob(
                                    this.StorageClient,
                                    storageName,
                                    packageUrl));
                        }
                    }
                    catch (CloudException ex)
                    {
                        this.WriteExceptionDetails(ex);
                    }
                });
            }
            else if (string.Compare(this.ParameterSetName, "Config", StringComparison.OrdinalIgnoreCase) == 0)
            {
                // Config parameter set
                var changeDeploymentStatusParams = new DeploymentChangeConfigurationParameters
                {
                    Configuration = configString,
                    ExtensionConfiguration = extConfig
                };
                ExecuteClientActionNewSM(
                    changeDeploymentStatusParams,
                    CommandRuntime.ToString(),
                    () => this.ComputeClient.Deployments.ChangeConfigurationBySlot(
                        this.ServiceName,
                        (DeploymentSlot)Enum.Parse(typeof(DeploymentSlot), this.Slot, true),
                        changeDeploymentStatusParams));
            }
            else
            {
                // Status parameter set
                var updateDeploymentStatusParams = new DeploymentUpdateStatusParameters
                {
                    Status = (UpdatedDeploymentStatus)Enum.Parse(typeof(UpdatedDeploymentStatus), this.NewStatus, true)
                };
                ExecuteClientActionNewSM(
                    null,
                    CommandRuntime.ToString(),
                    () => this.ComputeClient.Deployments.UpdateStatusByDeploymentSlot(
                        this.ServiceName,
                        (DeploymentSlot)Enum.Parse(typeof(DeploymentSlot), this.Slot, true),
                        updateDeploymentStatusParams));
            }
        }
        /// <summary>Cmdlet entry point: initializes the SM profile and runs the command.</summary>
        protected override void OnProcessRecord()
        {
            ServiceManagementProfile.Initialize();
            this.ExecuteCommand();
        }
    }
}
| praveennet/azure-powershell | src/ServiceManagement/Compute/Commands.ServiceManagement/HostedServices/SetAzureDeployment.cs | C# | apache-2.0 | 13,839 |
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
# hdp_host(variable_name) -> String
#
# Puppet rvalue function: resolves the named top-scope variable and returns
# its value, or an empty string when the variable is unset/empty.
module Puppet::Parser::Functions
  newfunction(:hdp_host, :type => :rvalue) do |args|
    arg_list = function_hdp_args_as_array(args)
    variable_name = arg_list[0]
    value = lookupvar("::" + variable_name)
    if function_hdp_is_empty(value)
      ""
    else
      value
    end
  end
end
| telefonicaid/fiware-cosmos-ambari | ambari-agent/src/main/puppet/modules/hdp/lib/puppet/parser/functions/hdp_host.rb | Ruby | apache-2.0 | 1,024 |
/*
* Copyright 2015-2025 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package sockslib.common;
import java.security.Principal;
/**
 * The interface <code>Credentials</code> represents a credential: a principal
 * together with its password, as used during SOCKS authentication.
 *
 * @author Youchao Feng
 * @version 1.0
 * @date May 14, 2015 2:35:26 PM
 */
public interface Credentials {
  /**
   * Returns the principal associated with this credential.
   *
   * @return the principal.
   */
  Principal getUserPrincipal();
  /**
   * Returns the password associated with this credential.
   *
   * @return the password.
   */
  String getPassword();
}
| fengyouchao/fucksocks | src/main/java/sockslib/common/Credentials.java | Java | apache-2.0 | 1,041 |
/*
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
using System;
using System.IO;
using System.Threading;
using System.Web;
using System.Web.Configuration;
using System.Web.Security;
using System.Xml;
using DotNetCasClient.Configuration;
using DotNetCasClient.Logging;
using DotNetCasClient.Security;
using DotNetCasClient.State;
using DotNetCasClient.Utils;
using DotNetCasClient.Validation;
using DotNetCasClient.Validation.Schema.Cas20;
using DotNetCasClient.Validation.TicketValidator;
using System.Collections.Generic;
namespace DotNetCasClient
{
/// <summary>
/// CasAuthentication exposes a public API for use in working with CAS Authentication
/// in the .NET framework. It also exposes all configured CAS client configuration
/// parameters as public static properties.
/// </summary>
/// <author>Marvin S. Addison</author>
/// <author>Scott Holodak</author>
/// <author>William G. Thompson, Jr.</author>
/// <author>Catherine D. Winfrey</author>
public sealed class CasAuthentication
{
        #region Constants
        // Qualified name of the SAML element that carries the session index in a
        // single sign-out message.
        private const string XML_SESSION_INDEX_ELEMENT_NAME = "samlp:SessionIndex";
        // Names of the request parameters the CAS server sends on the proxy
        // callback request (see ProcessProxyCallbackRequest).
        private const string PARAM_PROXY_GRANTING_TICKET_IOU = "pgtIou";
        private const string PARAM_PROXY_GRANTING_TICKET = "pgtId";
        #endregion
        #region Fields
        // Loggers
        private static readonly Logger configLogger = new Logger(Category.Config);
        private static readonly Logger protoLogger = new Logger(Category.Protocol);
        private static readonly Logger securityLogger = new Logger(Category.Security);
        // Thread-safe initialization: 'initialized' flips to true exactly once,
        // inside Initialize(), under LockObject (double-checked locking).
        private static readonly object LockObject;
        private static bool initialized;
        // System.Web/Authentication and System.Web/Authentication/Forms static classes
        internal static AuthenticationSection AuthenticationConfig;
        internal static CasClientConfiguration CasClientConfig;
        // Ticket validator fields (resolved from configuration in Initialize())
        private static string ticketValidatorName;
        private static ITicketValidator ticketValidator;
        // Ticket manager fields
        private static string serviceTicketManagerProvider;
        private static IServiceTicketManager serviceTicketManager;
        // Proxy ticket fields
        private static string proxyTicketManagerProvider;
        private static IProxyTicketManager proxyTicketManager;
        // Gateway fields
        private static bool gateway;
        private static string gatewayStatusCookieName;
        // Configuration fields (copied from web.config and logged in Initialize())
        private static string formsLoginUrl;
        private static TimeSpan formsTimeout;
        private static string casServerLoginUrl;
        private static string casServerUrlPrefix;
        private static long ticketTimeTolerance;
        private static string serverName;
        private static bool renew;
        private static bool redirectAfterValidation;
        private static bool singleSignOut;
        private static string notAuthorizedUrl;
        private static string cookiesRequiredUrl;
        private static string gatewayParameterName;
        private static string proxyCallbackParameterName;
        private static string casProxyCallbackUrl;
        private static bool requireCasForMissingContentTypes;
        private static string[] requireCasForContentTypes;
        private static string[] bypassCasForHandlers;
        // Provide reliable way for arbitrary components in forms
        // authentication pipeline to access CAS principal.
        // [ThreadStatic]: the value is per-thread, set during request processing.
        [ThreadStatic]
        private static ICasPrincipal currentPrincipal;
        // XML Reader Settings for SAML parsing.
        private static XmlReaderSettings xmlReaderSettings;
        // XML Name Table for namespace resolution in SSO SAML Parsing routine
        private static NameTable xmlNameTable;
        // XML Namespace Manager for namespace resolution in SSO SAML Parsing routine
        private static XmlNamespaceManager xmlNamespaceManager;
        #endregion
#region Methods
        /// <summary>
        /// Static constructor: allocates the lock object that guards the
        /// one-time configuration setup performed by <see cref="Initialize"/>.
        /// </summary>
        static CasAuthentication()
        {
            LockObject = new object();
        }
        /// <summary>
        /// Current authenticated principal or null if current user is unauthenticated.
        /// Backed by a [ThreadStatic] field, so the value is per-thread and only
        /// meaningful on the thread that performed authentication for the request.
        /// </summary>
        public static ICasPrincipal CurrentPrincipal
        {
            get { return currentPrincipal; }
        }
        /// <summary>
        /// Initializes configuration-related properties and validates configuration.
        /// Runs its body at most once per AppDomain (double-checked locking), then
        /// on every call forwards Initialize() to the configured collaborators.
        /// Throws a configuration exception (via LogAndThrowConfigurationException)
        /// on any invalid or missing setting.
        /// </summary>
        public static void Initialize()
        {
            // NOTE(review): 'initialized' is not volatile; this double-checked
            // locking relies on it — confirm the memory-model assumptions hold
            // for the targeted runtimes.
            if (!initialized)
            {
                lock (LockObject)
                {
                    if (!initialized)
                    {
                        // CAS piggybacks on cookie-based Forms authentication;
                        // all three checks below reject unusable configurations.
                        FormsAuthentication.Initialize();
                        AuthenticationConfig = (AuthenticationSection)WebConfigurationManager.GetSection("system.web/authentication");
                        CasClientConfig = CasClientConfiguration.Config;
                        if (AuthenticationConfig == null)
                        {
                            LogAndThrowConfigurationException(
                                "The CAS authentication provider requires Forms authentication to be enabled in web.config.");
                        }
                        if (AuthenticationConfig.Mode != AuthenticationMode.Forms)
                        {
                            LogAndThrowConfigurationException(
                                "The CAS authentication provider requires Forms authentication to be enabled in web.config.");
                        }
                        if (FormsAuthentication.CookieMode != HttpCookieMode.UseCookies)
                        {
                            LogAndThrowConfigurationException(
                                "CAS requires Forms Authentication to use cookies (cookieless='UseCookies').");
                        }
                        // Shared reader settings / namespace manager for the SAML
                        // single sign-out parsing routine.
                        xmlReaderSettings = new XmlReaderSettings();
                        xmlReaderSettings.ConformanceLevel = ConformanceLevel.Auto;
                        xmlReaderSettings.IgnoreWhitespace = true;
                        xmlNameTable = new NameTable();
                        xmlNamespaceManager = new XmlNamespaceManager(xmlNameTable);
                        xmlNamespaceManager.AddNamespace("cas", "http://www.yale.edu/tp/cas");
                        // NOTE(review): the three URNs below contain a space after
                        // "urn:" — looks unintentional, but XML namespace URIs are
                        // compared literally, so verify against the SAML parsing
                        // code before changing them.
                        xmlNamespaceManager.AddNamespace("saml", "urn: oasis:names:tc:SAML:1.0:assertion");
                        xmlNamespaceManager.AddNamespace("saml2", "urn: oasis:names:tc:SAML:1.0:assertion");
                        xmlNamespaceManager.AddNamespace("samlp", "urn: oasis:names:tc:SAML:1.0:protocol");
                        formsLoginUrl = AuthenticationConfig.Forms.LoginUrl;
                        formsTimeout = AuthenticationConfig.Forms.Timeout;
                        if (string.IsNullOrEmpty(CasClientConfig.CasServerUrlPrefix))
                        {
                            LogAndThrowConfigurationException("The CasServerUrlPrefix is required");
                        }
                        // Copy every casClientConfig setting into a static field,
                        // logging each value for diagnostics.
                        casServerUrlPrefix = CasClientConfig.CasServerUrlPrefix;
                        configLogger.Info("casServerUrlPrefix = " + casServerUrlPrefix);
                        casServerLoginUrl = CasClientConfig.CasServerLoginUrl;
                        configLogger.Info("casServerLoginUrl = " + casServerLoginUrl);
                        ticketValidatorName = CasClientConfig.TicketValidatorName;
                        configLogger.Info("ticketValidatorName = " + ticketValidatorName);
                        ticketTimeTolerance = CasClientConfig.TicketTimeTolerance;
                        configLogger.Info("ticketTimeTolerance = " + ticketTimeTolerance);
                        serverName = CasClientConfig.ServerName;
                        configLogger.Info("serverName = " + serverName);
                        renew = CasClientConfig.Renew;
                        configLogger.Info("renew = " + renew);
                        gateway = CasClientConfig.Gateway;
                        configLogger.Info("gateway = " + gateway);
                        gatewayStatusCookieName = CasClientConfig.GatewayStatusCookieName;
                        configLogger.Info("gatewayStatusCookieName = " + gatewayStatusCookieName);
                        redirectAfterValidation = CasClientConfig.RedirectAfterValidation;
                        configLogger.Info("redirectAfterValidation = " + redirectAfterValidation);
                        singleSignOut = CasClientConfig.SingleSignOut;
                        configLogger.Info("singleSignOut = " + singleSignOut);
                        serviceTicketManagerProvider = CasClientConfig.ServiceTicketManager;
                        configLogger.Info("serviceTicketManagerProvider = " + serviceTicketManagerProvider);
                        proxyTicketManagerProvider = CasClientConfig.ProxyTicketManager;
                        configLogger.Info("proxyTicketManagerProvider = " + proxyTicketManagerProvider);
                        notAuthorizedUrl = CasClientConfig.NotAuthorizedUrl;
                        configLogger.Info("notAuthorizedUrl = " + notAuthorizedUrl);
                        cookiesRequiredUrl = CasClientConfig.CookiesRequiredUrl;
                        configLogger.Info("cookiesRequiredUrl = " + cookiesRequiredUrl);
                        gatewayParameterName = CasClientConfig.GatewayParameterName;
                        configLogger.Info("gatewayParameterName = " + gatewayParameterName);
                        proxyCallbackParameterName = CasClientConfig.ProxyCallbackParameterName;
                        configLogger.Info("proxyCallbackParameterName = " + proxyCallbackParameterName);
                        casProxyCallbackUrl = CasClientConfig.ProxyCallbackUrl;
                        configLogger.Info("proxyCallbackUrl = " + casProxyCallbackUrl);
                        requireCasForMissingContentTypes = CasClientConfig.RequireCasForMissingContentTypes;
                        configLogger.Info("requireCasForMissingContentTypes = " + requireCasForMissingContentTypes);
                        requireCasForContentTypes = CasClientConfig.RequireCasForContentTypes;
                        configLogger.Info("requireCasForContentTypes = " + requireCasForContentTypes);
                        bypassCasForHandlers = CasClientConfig.BypassCasForHandlers;
                        configLogger.Info("bypassCasForHandlers = " + bypassCasForHandlers);
                        // Resolve the ticket validator: well-known names first, then
                        // an arbitrary type name looked up via reflection.
                        // NOTE(review): String.Compare(a, b) == 0 is culture-sensitive;
                        // confirm ordinal comparison is not required for these names.
                        if (!String.IsNullOrEmpty(ticketValidatorName))
                        {
                            if (String.Compare(CasClientConfiguration.CAS10_TICKET_VALIDATOR_NAME,ticketValidatorName) == 0)
                                ticketValidator = new Cas10TicketValidator();
                            else if (String.Compare(CasClientConfiguration.CAS20_TICKET_VALIDATOR_NAME, ticketValidatorName) == 0)
                                ticketValidator = new Cas20ServiceTicketValidator();
                            else if (String.Compare(CasClientConfiguration.SAML11_TICKET_VALIDATOR_NAME, ticketValidatorName) == 0)
                                ticketValidator = new Saml11TicketValidator();
                            else
                            {
                                // the ticket validator name is not recognized, let's try to get it using Reflection then
                                Type ticketValidatorType = Type.GetType(ticketValidatorName, false, true);
                                if (ticketValidatorType != null)
                                {
                                    if (typeof(ITicketValidator).IsAssignableFrom(ticketValidatorType))
                                        ticketValidator = (ITicketValidator)Activator.CreateInstance(ticketValidatorType);
                                    else
                                        LogAndThrowConfigurationException("Ticket validator type is not correct " + ticketValidatorName);
                                }
                                else
                                    LogAndThrowConfigurationException("Could not find ticket validatory type " + ticketValidatorName);
                            }
                            configLogger.Info("TicketValidator type = " + ticketValidator.GetType().ToString());
                        }
                        else
                            LogAndThrowConfigurationException("Ticket validator name missing");
                        // Resolve the (optional) service ticket manager the same way:
                        // built-in cache name first, then reflection.
                        if (String.IsNullOrEmpty(serviceTicketManagerProvider))
                        {
                            // Web server cannot maintain ticket state, verify tickets, perform SSO, etc.
                        }
                        else
                        {
                            if (String.Compare(CasClientConfiguration.CACHE_SERVICE_TICKET_MANAGER, serviceTicketManagerProvider) == 0)
                            {
#if NET20 || NET35
                                // Use the service ticket manager that implements an in-memory cache supported by .NET 2.0/3.5.
                                serviceTicketManager = new CacheServiceTicketManager();
#endif
#if NET40 || NET45
                                // Use the service ticket manager that implements an in-memory cache supported by .NET 4.x.
                                serviceTicketManager = new MemoryCacheServiceTicketManager();
#endif
                            }
                            else
                            {
                                // the service ticket manager is not recognized, let's try to get it using Reflection then
                                Type serviceTicketManagerType = Type.GetType(serviceTicketManagerProvider, false, true);
                                if (serviceTicketManagerType != null)
                                {
                                    if (typeof(IServiceTicketManager).IsAssignableFrom(serviceTicketManagerType))
                                        serviceTicketManager = (IServiceTicketManager)Activator.CreateInstance(serviceTicketManagerType);
                                    else
                                        LogAndThrowConfigurationException("Service Ticket Manager type is not correct " + serviceTicketManagerProvider);
                                }
                                else
                                    LogAndThrowConfigurationException("Could not find Service Ticket Manager type " + serviceTicketManagerProvider);
                            }
                            configLogger.Info("ServiceTicketManager type = " + serviceTicketManager.GetType().ToString());
                        }
                        // Resolve the (optional) proxy ticket manager.
                        if (String.IsNullOrEmpty(proxyTicketManagerProvider))
                        {
                            // Web server cannot generate proxy tickets
                        }
                        else
                        {
                            if (String.Compare(CasClientConfiguration.CACHE_PROXY_TICKET_MANAGER, proxyTicketManagerProvider) == 0)
                            {
#if NET20 || NET35
                                // Use the proxy ticket manager that implements an in-memory cache supported by .NET 2.0/3.5.
                                proxyTicketManager = new CacheProxyTicketManager();
#endif
#if NET40 || NET45
                                // Use the proxy ticket manager that implements an in-memory cache supported by .NET 4.x.
                                proxyTicketManager = new MemoryCacheProxyTicketManager();
#endif
                            }
                            else
                            {
                                // the proxy ticket manager is not recognized, let's try to get it using Reflection then
                                Type proxyTicketManagerType = Type.GetType(proxyTicketManagerProvider, false, true);
                                if (proxyTicketManagerType != null)
                                {
                                    if (typeof(IProxyTicketManager).IsAssignableFrom(proxyTicketManagerType))
                                        proxyTicketManager = (IProxyTicketManager)Activator.CreateInstance(proxyTicketManagerType);
                                    else
                                        LogAndThrowConfigurationException("Proxy Ticket Manager type is not correct " + proxyTicketManagerProvider);
                                }
                                else
                                    LogAndThrowConfigurationException("Could not find Proxy Ticket Manager type " + proxyTicketManagerProvider);
                            }
                            configLogger.Info("ProxyTicketManager type = " + proxyTicketManager.GetType().ToString());
                        }
                        // Validate configuration
                        bool haveServerName = !String.IsNullOrEmpty(serverName);
                        if (!haveServerName)
                        {
                            LogAndThrowConfigurationException(CasClientConfiguration.SERVER_NAME + " cannot be null or empty.");
                        }
                        if (String.IsNullOrEmpty(casServerLoginUrl))
                        {
                            LogAndThrowConfigurationException(CasClientConfiguration.CAS_SERVER_LOGIN_URL + " cannot be null or empty.");
                        }
                        if (serviceTicketManager == null && singleSignOut)
                        {
                            LogAndThrowConfigurationException("Single Sign Out support requires a ServiceTicketManager.");
                        }
                        if (gateway && renew)
                        {
                            LogAndThrowConfigurationException("Gateway and Renew functionalities are mutually exclusive");
                        }
                        if (!redirectAfterValidation)
                        {
                            LogAndThrowConfigurationException(
                                "Forms Authentication based modules require RedirectAfterValidation to be set to true.");
                        }
                        initialized = true;
                    }
                }
                // Let each configured collaborator perform its own setup.
                // NOTE(review): these run inside the outer 'if (!initialized)'
                // block, i.e. only on the call(s) racing the first initialization
                // — confirm this is the intended lifecycle.
                if (ServiceTicketManager != null) ServiceTicketManager.Initialize();
                if (ProxyTicketManager != null) ProxyTicketManager.Initialize();
                if (TicketValidator != null) TicketValidator.Initialize();
            }
        }
        /// <summary>
        /// Obtain a Proxy ticket and redirect to the foreign service url with
        /// that ticket included in the url. The foreign service must be configured
        /// to accept the ticket.
        /// </summary>
        /// <remarks>Equivalent to <c>ProxyRedirect(url, "ticket", false)</c>.</remarks>
        /// <param name="url">The foreign service to redirect to</param>
        /// <exception cref="ArgumentNullException">The url supplied is null</exception>
        /// <exception cref="ArgumentException">The url supplied is empty</exception>
        public static void ProxyRedirect(string url)
        {
            ProxyRedirect(url, "ticket", false);
        }
        /// <summary>
        /// Obtain a Proxy ticket and redirect to the foreign service url with
        /// that ticket included in the url. The foreign service must be configured
        /// to accept the ticket.
        /// </summary>
        /// <remarks>Uses the default ticket parameter name <c>"ticket"</c>.</remarks>
        /// <param name="url">The foreign service to redirect to</param>
        /// <param name="endResponse">
        /// Boolean indicating whether or not to short circuit the remaining request
        /// pipeline events
        /// </param>
        /// <exception cref="ArgumentNullException">The url supplied is null</exception>
        /// <exception cref="ArgumentException">The url supplied is empty</exception>
        public static void ProxyRedirect(string url, bool endResponse)
        {
            ProxyRedirect(url, "ticket", endResponse);
        }
        /// <summary>
        /// Obtain a Proxy ticket and redirect to the foreign service url with
        /// that ticket included in the url. The foreign service must be configured
        /// to accept the ticket.
        /// </summary>
        /// <remarks>Does not end the response (pipeline events continue to run).</remarks>
        /// <param name="url">The foreign service to redirect to</param>
        /// <param name="proxyTicketUrlParameter">
        /// The ticket parameter to include in the remote service Url.
        /// </param>
        /// <exception cref="ArgumentNullException">
        /// The url or proxyTicketUrlParameter supplied is null
        /// </exception>
        /// <exception cref="ArgumentException">
        /// The url or proxyTicketUrlParameter supplied is empty
        /// </exception>
        public static void ProxyRedirect(string url, string proxyTicketUrlParameter)
        {
            ProxyRedirect(url, proxyTicketUrlParameter, false);
        }
/// <summary>
/// </summary>
/// <param name="url">The foreign service to redirect to</param>
/// <param name="proxyTicketUrlParameter">
/// The ticket parameter to include in the remote service Url.
/// </param>
/// <param name="endResponse">
/// Boolean indicating whether or not to short circuit the remaining request
/// pipeline events
/// </param>
/// <exception cref="ArgumentNullException">
/// The url or proxyTicketUrlParameter supplied is null
/// </exception>
/// <exception cref="ArgumentException">
/// The url or proxyTicketUrlParametersupplied is empty
/// </exception>
public static void ProxyRedirect(string url, string proxyTicketUrlParameter, bool endResponse)
{
CommonUtils.AssertNotNullOrEmpty(url, "url parameter cannot be null or empty.");
CommonUtils.AssertNotNull(proxyTicketUrlParameter, "proxyTicketUrlParameter parameter cannot be null or empty.");
HttpContext context = HttpContext.Current;
HttpResponse response = context.Response;
string proxyRedirectUrl = UrlUtil.GetProxyRedirectUrl(url, proxyTicketUrlParameter);
response.Redirect(proxyRedirectUrl, endResponse);
}
        /// <summary>
        /// Attempts to connect to the CAS server to retrieve a proxy ticket
        /// for the target URL specified.
        /// </summary>
        /// <remarks>
        /// Problems retrieving proxy tickets are generally caused by SSL misconfiguration.
        /// The CAS server must be configured to trust the SSL certificate on the web application's
        /// server.  The CAS server will attempt to establish an SSL connection to this web
        /// application server to confirm that the proxy ticket request is legitimate.  If the
        /// server does not trust the SSL certificate or the certificate authority/chain of the SSL
        /// certificate, the request will fail.
        /// </remarks>
        /// <param name="targetServiceUrl">The target Url to obtain a proxy ticket for</param>
        /// <returns>
        /// A proxy ticket for the target Url, or null if the CAS server reported
        /// a proxy failure. Transport/parse errors are raised as operation
        /// exceptions rather than returned.
        /// </returns>
        public static string GetProxyTicketIdFor(string targetServiceUrl)
        {
            CommonUtils.AssertNotNullOrEmpty(targetServiceUrl, "targetServiceUrl parameter cannot be null or empty.");
            if (ServiceTicketManager == null)
            {
                LogAndThrowConfigurationException("Proxy authentication requires a ServiceTicketManager");
            }
            // Resolve the caller's CAS ticket chain: forms ticket -> service
            // ticket -> server-side CasAuthenticationTicket (holds the PGT).
            FormsAuthenticationTicket formsAuthTicket = GetFormsAuthenticationTicket();
            if (formsAuthTicket == null)
            {
                LogAndThrowOperationException("The request is not authenticated (does not have a CAS Service or Proxy ticket).");
            }
            if (string.IsNullOrEmpty(formsAuthTicket.UserData))
            {
                LogAndThrowOperationException("The request does not have a CAS Service Ticket.");
            }
            CasAuthenticationTicket casTicket = ServiceTicketManager.GetTicket(formsAuthTicket.UserData);
            if (casTicket == null)
            {
                LogAndThrowOperationException("The request does not have a valid CAS Service Ticket.");
            }
            string proxyTicketResponse = null;
            try
            {
                // Ask the CAS server for a proxy ticket; any transport failure is
                // reported via LogAndThrowOperationException (presumably throws —
                // the null check below also guards the no-throw case).
                string proxyUrl = UrlUtil.ConstructProxyTicketRequestUrl(casTicket.ProxyGrantingTicket, targetServiceUrl);
                proxyTicketResponse = HttpUtil.PerformHttpGet(proxyUrl, true);
            }
            catch
            {
                LogAndThrowOperationException("Unable to obtain CAS Proxy Ticket.");
            }
            if (String.IsNullOrEmpty(proxyTicketResponse))
            {
                LogAndThrowOperationException("Unable to obtain CAS Proxy Ticket (response was empty)");
            }
            string proxyTicket = null;
            try
            {
                // Parse the CAS 2.0 service response; success yields the ticket,
                // failure is only logged (method then returns null).
                ServiceResponse serviceResponse = ServiceResponse.ParseResponse(proxyTicketResponse);
                if (serviceResponse.IsProxySuccess)
                {
                    ProxySuccess success = (ProxySuccess)serviceResponse.Item;
                    if (!String.IsNullOrEmpty(success.ProxyTicket))
                    {
                        protoLogger.Info(String.Format("Proxy success: {0}", success.ProxyTicket));
                    }
                    proxyTicket = success.ProxyTicket;
                }
                else
                {
                    ProxyFailure failure = (ProxyFailure)serviceResponse.Item;
                    if (!String.IsNullOrEmpty(failure.Message) && !String.IsNullOrEmpty(failure.Code))
                    {
                        protoLogger.Info(String.Format("Proxy failure: {0} ({1})", failure.Message, failure.Code));
                    }
                    else if (!String.IsNullOrEmpty(failure.Message))
                    {
                        protoLogger.Info(String.Format("Proxy failure: {0}", failure.Message));
                    }
                    else if (!String.IsNullOrEmpty(failure.Code))
                    {
                        protoLogger.Info(String.Format("Proxy failure: Code {0}", failure.Code));
                    }
                }
            }
            catch (InvalidOperationException)
            {
                LogAndThrowOperationException("CAS Server response does not conform to CAS 2.0 schema");
            }
            return proxyTicket;
        }
        /// <summary>
        /// Redirects the current request to the CAS Login page, honoring the
        /// configured Renew setting.
        /// </summary>
        public static void RedirectToLoginPage()
        {
            RedirectToLoginPage(Renew);
        }
/// <summary>
/// Redirects the current request to the Login page and requires renewed
/// CAS credentials
/// </summary>
public static void RedirectToLoginPage(bool forceRenew)
{
Initialize();
HttpContext context = HttpContext.Current;
HttpResponse response = context.Response;
string redirectUrl = UrlUtil.ConstructLoginRedirectUrl(false, forceRenew);
protoLogger.Info("Redirecting to " + redirectUrl);
response.Redirect(redirectUrl, false);
}
/// <summary>
/// Redirects the current request to the Cookies Required page
/// </summary>
public static void RedirectToCookiesRequiredPage()
{
Initialize();
HttpContext context = HttpContext.Current;
HttpResponse response = context.Response;
response.Redirect(UrlUtil.ResolveUrl(CookiesRequiredUrl), false);
}
/// <summary>
/// Redirects the current request to the Not Authorized page
/// </summary>
public static void RedirectToNotAuthorizedPage()
{
Initialize();
HttpContext context = HttpContext.Current;
HttpResponse response = context.Response;
response.Redirect(UrlUtil.ResolveUrl(NotAuthorizedUrl), false);
}
/// <summary>
/// Redirects the current request back to the requested page without
/// the CAS ticket artifact in the URL.
/// </summary>
internal static void RedirectFromLoginCallback()
{
Initialize();
HttpContext context = HttpContext.Current;
HttpRequest request = context.Request;
HttpResponse response = context.Response;
if (RequestEvaluator.GetRequestHasGatewayParameter())
{
// TODO: Only set Success if request is authenticated? Otherwise Failure.
// Doesn't make a difference from a security perspective, but may be clearer for users
SetGatewayStatusCookie(GatewayStatus.Success);
}
response.Redirect(UrlUtil.RemoveCasArtifactsFromUrl(request.Url.AbsoluteUri), false);
}
/// <summary>
/// Redirects the current request back to the requested page without
/// the gateway callback artifact in the URL.
/// </summary>
internal static void RedirectFromFailedGatewayCallback()
{
Initialize();
HttpContext context = HttpContext.Current;
HttpRequest request = context.Request;
HttpResponse response = context.Response;
SetGatewayStatusCookie(GatewayStatus.Failed);
string urlWithoutCasArtifact = UrlUtil.RemoveCasArtifactsFromUrl(request.Url.AbsoluteUri);
response.Redirect(urlWithoutCasArtifact, false);
}
/// <summary>
/// Attempt to perform a CAS gateway authentication. This causes a transparent
/// redirection out to the CAS server and back to the requesting page with or
/// without a CAS service ticket. If the user has already authenticated for
/// another service against the CAS server and the CAS server supports Single
/// Sign On, this will result in the user being automatically authenticated.
/// Otherwise, the user will remain anonymous.
/// </summary>
/// <param name="ignoreGatewayStatusCookie">
/// The Gateway Status Cookie reflects whether a gateway authentication has
/// already been attempted, in which case the redirection is generally
/// unnecessary. This property allows you to override the behavior and
/// perform a redirection regardless of whether it has already been attempted.
/// </param>
public static void GatewayAuthenticate(bool ignoreGatewayStatusCookie)
{
Initialize();
HttpContext context = HttpContext.Current;
HttpResponse response = context.Response;
HttpApplication application = context.ApplicationInstance;
if (!ignoreGatewayStatusCookie)
{
if (GetGatewayStatus() != GatewayStatus.NotAttempted)
{
return;
}
}
SetGatewayStatusCookie(GatewayStatus.Attempting);
string redirectUrl = UrlUtil.ConstructLoginRedirectUrl(true, false);
protoLogger.Info("Performing gateway redirect to " + redirectUrl);
response.Redirect(redirectUrl, false);
application.CompleteRequest();
}
/// <summary>
/// Logs the user out of the application and attempts to perform a Single Sign
/// Out against the CAS server. If the CAS server is configured to support
/// Single Sign Out, this will prevent users from gateway authenticating
/// to other services. The CAS server will attempt to notify any other
/// applications to revoke the session. Each of the applications must be
/// configured to maintain session state on the server. In the case of
/// ASP.NET web applications using DotNetCasClient, this requires defining a
/// serviceTicketManager. The configuration for other client types (Java,
/// PHP) varies based on the client implementation. Consult the Apereo wiki
/// for more details.
/// </summary>
public static void SingleSignOut()
{
Initialize();
HttpContext context = HttpContext.Current;
HttpResponse response = context.Response;
// Necessary for ASP.NET MVC Support.
if (context.User.Identity.IsAuthenticated)
{
ClearAuthCookie();
string singleSignOutRedirectUrl = UrlUtil.ConstructSingleSignOutRedirectUrl();
// Leave endResponse as true. This will throw a handled ThreadAbortException
// but it is necessary to support SingleSignOut in ASP.NET MVC applications.
response.Redirect(singleSignOutRedirectUrl, true);
}
}
        /// <summary>
        /// Process SingleSignOut requests originating from another web application by removing the ticket
        /// from the ServiceTicketManager (assuming one is configured).  Without a ServiceTicketManager
        /// configured, this web application cannot respect external SingleSignOut requests.
        /// When a sign-out POST is recognized, this method answers the CAS server
        /// with a plain-text 200 "OK" and completes the request.
        /// </summary>
        internal static void ProcessSingleSignOutRequest()
        {
            HttpContext context = HttpContext.Current;
            HttpRequest request = context.Request;
            HttpResponse response = context.Response;
            protoLogger.Debug("Examining request for single sign-out signature");
            if (request.HttpMethod == "POST" && request.Form["logoutRequest"] != null)
            {
                protoLogger.Debug("Attempting to get CAS service ticket from request");
                // TODO: Should we be checking to make sure that this special POST is coming from a trusted source?
                //       It would be tricky to do this by IP address because there might be a white list or something.
                // NOTE(review): the guard reads request.Form, but the value comes from
                // request.Params (form + query string + cookies + server variables) —
                // confirm the mismatch is intentional.
                string casTicket = ExtractSingleSignOutTicketFromSamlResponse(request.Params["logoutRequest"]);
                if (!String.IsNullOrEmpty(casTicket))
                {
                    protoLogger.Info("Processing single sign-out request for " + casTicket);
                    ServiceTicketManager.RevokeTicket(casTicket);
                    protoLogger.Debug("Successfully removed " + casTicket);
                    // Minimal acknowledgment for the CAS back-channel request;
                    // stop normal page processing.
                    response.StatusCode = 200;
                    response.ContentType = "text/plain";
                    response.Clear();
                    response.Write("OK");
                    context.ApplicationInstance.CompleteRequest();
                }
            }
        }
/// <summary>
/// Process a Proxy Callback request from the CAS server. Proxy Callback requests occur as a part
/// of a proxy ticket request. When the web application requests a proxy ticket for a third party
/// service from the CAS server, the CAS server attempts to connect back to the web application
/// over an HTTPS connection. The success of this callback is essential for the proxy ticket
/// request to succeed. Failures are generally caused by SSL configuration errors. See the
/// description of the SingleSignOut method for more details. Assuming the SSL configuration is
/// correct, this method is responsible for handling the callback from the CAS server. For
/// more details, see the CAS protocol specification.
/// </summary>
/// <returns>
/// A Boolean indicating whether or not the proxy callback request is valid and mapped to a valid,
/// outstanding Proxy Granting Ticket IOU.
/// </returns>
internal static bool ProcessProxyCallbackRequest()
{
HttpContext context = HttpContext.Current;
HttpApplication application = context.ApplicationInstance;
HttpRequest request = context.Request;
HttpResponse response = context.Response;
string proxyGrantingTicketIou = request.Params[PARAM_PROXY_GRANTING_TICKET_IOU];
string proxyGrantingTicket = request.Params[PARAM_PROXY_GRANTING_TICKET];
if (String.IsNullOrEmpty(proxyGrantingTicket))
{
protoLogger.Info("Invalid request - {0} parameter not found", PARAM_PROXY_GRANTING_TICKET);
return false;
}
else if (String.IsNullOrEmpty(proxyGrantingTicketIou))
{
protoLogger.Info("Invalid request - {0} parameter not found", PARAM_PROXY_GRANTING_TICKET_IOU);
return false;
}
protoLogger.Info("Recieved proxyGrantingTicketId [{0}] for proxyGrantingTicketIou [{1}]", proxyGrantingTicket, proxyGrantingTicketIou);
ProxyTicketManager.InsertProxyGrantingTicketMapping(proxyGrantingTicketIou, proxyGrantingTicket);
// TODO: Consider creating a DotNetCasClient.Validation.Schema.Cas20.ProxySuccess object and serializing it.
response.Write("<?xml version=\"1.0\"?>");
response.Write("<casClient:proxySuccess xmlns:casClient=\"http://www.yale.edu/tp/casClient\" />");
application.CompleteRequest();
return true;
}
        /// <summary>
        /// Validates a ticket contained in the URL, presumably generated by
        /// the CAS server after a successful authentication. The actual ticket
        /// validation is performed by the configured TicketValidator
        /// (i.e., CAS 1.0, CAS 2.0, SAML 1.0). If the validation succeeds, the
        /// request is authenticated and a FormsAuthenticationCookie and
        /// corresponding CasAuthenticationTicket are created for the purpose of
        /// authenticating subsequent requests (see ProcessRequestAuthentication
        /// method). If the validation fails, the authentication status remains
        /// unchanged (generally the user is and remains anonymous).
        /// </summary>
        internal static void ProcessTicketValidation()
        {
            HttpContext context = HttpContext.Current;
            HttpApplication app = context.ApplicationInstance;
            HttpRequest request = context.Request;
            CasAuthenticationTicket casTicket;
            ICasPrincipal principal;
            string ticket = request[TicketValidator.ArtifactParameterName];
            try
            {
                // Attempt to authenticate the ticket and resolve to an ICasPrincipal
                principal = TicketValidator.Validate(ticket);
                // Save the ticket in the FormsAuthTicket. Encrypt the ticket and send it as a cookie.
                casTicket = new CasAuthenticationTicket(
                    ticket,
                    UrlUtil.RemoveCasArtifactsFromUrl(request.Url.AbsoluteUri),
                    request.UserHostAddress,
                    principal.Assertion
                );
                // Carry proxy information on the server-side ticket when proxy
                // support is configured and the CAS response included one.
                // NOTE(review): principal.ProxyGrantingTicket is assigned to the
                // *Iou* property here — presumably it holds the IOU at this point
                // in the CAS flow; confirm the naming is intentional.
                if (ProxyTicketManager != null && !string.IsNullOrEmpty(principal.ProxyGrantingTicket))
                {
                    casTicket.ProxyGrantingTicketIou = principal.ProxyGrantingTicket;
                    casTicket.Proxies.AddRange(principal.Proxies);
                    string proxyGrantingTicket = ProxyTicketManager.GetProxyGrantingTicket(casTicket.ProxyGrantingTicketIou);
                    if (!string.IsNullOrEmpty(proxyGrantingTicket))
                    {
                        casTicket.ProxyGrantingTicket = proxyGrantingTicket;
                    }
                }
                // TODO: Check the last 2 parameters. We want to take the from/to dates from the FormsAuthenticationTicket. However, we may need to do some clock drift correction.
                FormsAuthenticationTicket formsAuthTicket = CreateFormsAuthenticationTicket(principal.Identity.Name, FormsAuthentication.FormsCookiePath, ticket, null, null);
                SetAuthCookie(formsAuthTicket);
                // Also save the ticket in the server store (if configured)
                if (ServiceTicketManager != null)
                {
                    ServiceTicketManager.UpdateTicketExpiration(casTicket, formsAuthTicket.Expiration);
                }
                // Jump directly to EndRequest. Don't allow the Page and/or Handler to execute.
                // EndRequest will redirect back without the ticket in the URL
                app.CompleteRequest();
                return;
            }
            catch (TicketValidationException e)
            {
                // Leave principal null. This might not have been a CAS service ticket.
                protoLogger.Error("Ticket validation error: " + e);
            }
        }
        /// <summary>
        /// Attempts to authenticate requests subsequent to the initial authentication
        /// request (handled by ProcessTicketValidation).  This method looks for a
        /// FormsAuthenticationCookie containing a FormsAuthenticationTicket and attempts
        /// to confirms its validitiy.  It either contains the CAS service ticket or a
        /// reference to a CasAuthenticationTicket stored in the ServiceTicketManager
        /// (if configured).  If it succeeds, the context.User and Thread.CurrentPrincipal
        /// are set with a ICasPrincipal and the current request is considered
        /// authenticated.  Otherwise, the current request is effectively anonymous.
        /// </summary>
        internal static void ProcessRequestAuthentication()
        {
            HttpContext context = HttpContext.Current;
            // Look for a valid FormsAuthenticationTicket encrypted in a cookie.
            CasAuthenticationTicket casTicket = null;
            FormsAuthenticationTicket formsAuthenticationTicket = GetFormsAuthenticationTicket();
            if (formsAuthenticationTicket != null)
            {
                ICasPrincipal principal;
                if (ServiceTicketManager != null)
                {
                    // The cookie's UserData field carries the CAS service ticket used
                    // as the lookup key into the server-side ticket store.
                    string serviceTicket = formsAuthenticationTicket.UserData;
                    casTicket = ServiceTicketManager.GetTicket(serviceTicket);
                    if (casTicket != null)
                    {
                        IAssertion assertion = casTicket.Assertion;
                        if (!ServiceTicketManager.VerifyClientTicket(casTicket))
                        {
                            securityLogger.Warn("CasAuthenticationTicket failed verification: " + casTicket);
                            // Deletes the invalid FormsAuthentication cookie from the client.
                            ClearAuthCookie();
                            ServiceTicketManager.RevokeTicket(serviceTicket);
                            // Don't give this request a User/Principal.  Remove it if it was created
                            // by the underlying FormsAuthenticationModule or another module.
                            principal = null;
                        }
                        else
                        {
                            // Late-resolve the proxy granting ticket if the IOU is known but the
                            // PGT callback had not yet arrived when the ticket was stored.
                            if (ProxyTicketManager != null && !string.IsNullOrEmpty(casTicket.ProxyGrantingTicketIou) && string.IsNullOrEmpty(casTicket.ProxyGrantingTicket))
                            {
                                string proxyGrantingTicket = ProxyTicketManager.GetProxyGrantingTicket(casTicket.ProxyGrantingTicketIou);
                                if (!string.IsNullOrEmpty(proxyGrantingTicket))
                                {
                                    casTicket.ProxyGrantingTicket = proxyGrantingTicket;
                                }
                            }
                            principal = new CasPrincipal(assertion);
                        }
                    }
                    else
                    {
                        // This didn't resolve to a ticket in the TicketStore.  Revoke it.
                        ClearAuthCookie();
                        securityLogger.Debug("Revoking ticket " + serviceTicket);
                        ServiceTicketManager.RevokeTicket(serviceTicket);
                        // Don't give this request a User/Principal.  Remove it if it was created
                        // by the underlying FormsAuthenticationModule or another module.
                        principal = null;
                    }
                }
                else
                {
                    // No server-side store configured: trust the decrypted cookie alone.
                    principal = new CasPrincipal(new Assertion(formsAuthenticationTicket.Name));
                }
                // Publish the outcome (possibly null) to the request and the current thread.
                context.User = principal;
                Thread.CurrentPrincipal = principal;
                currentPrincipal = principal;
                if (principal == null)
                {
                    // Remove the cookie from the client
                    ClearAuthCookie();
                }
                else
                {
                    // Extend the expiration of the cookie if FormsAuthentication is configured to do so.
                    if (FormsAuthentication.SlidingExpiration)
                    {
                        FormsAuthenticationTicket newTicket = FormsAuthentication.RenewTicketIfOld(formsAuthenticationTicket);
                        if (newTicket != null && newTicket != formsAuthenticationTicket)
                        {
                            SetAuthCookie(newTicket);
                            if (ServiceTicketManager != null)
                            {
                                ServiceTicketManager.UpdateTicketExpiration(casTicket, newTicket.Expiration);
                            }
                        }
                    }
                }
            }
        }
/// <summary>
/// Attempts to set the GatewayStatus client cookie. If the cookie is not
/// present and equal to GatewayStatus.Attempting when a CAS Gateway request
/// comes in (indicated by the presence of the 'gatewayParameterName'
/// defined in web.config appearing in the URL), the server knows that the
/// client is not accepting session cookies and will optionally redirect
/// the user to the 'cookiesRequiredUrl' (also defined in web.config). If
/// 'cookiesRequiredUrl' is not defined but 'gateway' is, every page request
/// will result in a round-trip to the CAS server.
/// </summary>
/// <param name="gatewayStatus">The GatewayStatus to attempt to store</param>
internal static void SetGatewayStatusCookie(GatewayStatus gatewayStatus)
{
Initialize();
HttpContext current = HttpContext.Current;
HttpCookie cookie = new HttpCookie(GatewayStatusCookieName, gatewayStatus.ToString());
cookie.HttpOnly = false;
cookie.Path = FormsAuthentication.FormsCookiePath;
cookie.Secure = false;
if (FormsAuthentication.CookieDomain != null)
{
cookie.Domain = FormsAuthentication.CookieDomain;
}
// Add it to the request collection for later processing during this request
current.Request.Cookies.Remove(GatewayStatusCookieName);
current.Request.Cookies.Add(cookie);
// Add it to the response collection for delivery to client
current.Response.Cookies.Add(cookie);
}
/// <summary>
/// Retrieves the GatewayStatus from the client cookie.
/// </summary>
/// <returns>
/// The GatewayStatus stored in the cookie if present, otherwise
/// GatewayStatus.NotAttempted.
/// </returns>
public static GatewayStatus GetGatewayStatus()
{
Initialize();
HttpContext context = HttpContext.Current;
HttpCookie cookie = context.Request.Cookies[GatewayStatusCookieName];
GatewayStatus status;
if (cookie != null && !string.IsNullOrEmpty(cookie.Value))
{
try
{
// Parse the value out of the cookie
status = (GatewayStatus) Enum.Parse(typeof (GatewayStatus), cookie.Value);
}
catch (ArgumentException)
{
// If the cookie contains an invalid value, clear the cookie
// and return GatewayStatus.NotAttempted
SetGatewayStatusCookie(GatewayStatus.NotAttempted);
status = GatewayStatus.NotAttempted;
}
}
else
{
// Use the default value GatewayStatus.NotAttempted
status = GatewayStatus.NotAttempted;
}
return status;
}
/// <summary>
/// Sends a blank and expired FormsAuthentication cookie to the
/// client response. This effectively removes the FormsAuthentication
/// cookie and revokes the FormsAuthenticationTicket. It also removes
/// the cookie from the current Request object, preventing subsequent
/// code from being able to access it during the execution of the
/// current request.
/// </summary>
public static void ClearAuthCookie()
{
Initialize();
HttpContext current = HttpContext.Current;
// Don't let anything see the incoming cookie
current.Request.Cookies.Remove(FormsAuthentication.FormsCookieName);
// Remove the cookie from the response collection (by adding an expired/empty version).
HttpCookie cookie = new HttpCookie(FormsAuthentication.FormsCookieName);
cookie.Expires = DateTime.Now.AddMonths(-1);
cookie.Domain = FormsAuthentication.CookieDomain;
cookie.Path = FormsAuthentication.FormsCookiePath;
current.Response.Cookies.Add(cookie);
}
/// <summary>
/// Encrypts a FormsAuthenticationTicket in an HttpCookie (using
/// GetAuthCookie) and includes it in both the request and the response.
/// </summary>
/// <param name="clientTicket">The FormsAuthenticationTicket to encode</param>
public static void SetAuthCookie(FormsAuthenticationTicket clientTicket)
{
Initialize();
HttpContext current = HttpContext.Current;
if (!current.Request.IsSecureConnection && FormsAuthentication.RequireSSL)
{
throw new HttpException("Connection not secure while creating secure cookie");
}
// Obtain the forms authentication cookie from the ticket
HttpCookie authCookie = GetAuthCookie(clientTicket);
// Clear the previous cookie from the current HTTP request
current.Request.Cookies.Remove(FormsAuthentication.FormsCookieName);
// Store the new cookie in both the request and response objects
current.Request.Cookies.Add(authCookie);
current.Response.Cookies.Add(authCookie);
}
/// <summary>
/// Creates an HttpCookie containing an encrypted FormsAuthenticationTicket,
/// which in turn contains a CAS service ticket.
/// </summary>
/// <param name="ticket">The FormsAuthenticationTicket to encode</param>
/// <returns>An HttpCookie containing the encrypted FormsAuthenticationTicket</returns>
public static HttpCookie GetAuthCookie(FormsAuthenticationTicket ticket)
{
Initialize();
string str = FormsAuthentication.Encrypt(ticket);
if (String.IsNullOrEmpty(str))
{
throw new HttpException("Unable to encrypt cookie ticket");
}
HttpCookie cookie = new HttpCookie(FormsAuthentication.FormsCookieName, str);
// Per http://support.microsoft.com/kb/900111 :
// In ASP.NET 2.0, forms authentication cookies are HttpOnly cookies.
// HttpOnly cookies cannot be accessed through client script. This
// functionality helps reduce the chances of replay attacks.
cookie.HttpOnly = true;
cookie.Path = FormsAuthentication.FormsCookiePath;
cookie.Secure = FormsAuthentication.RequireSSL;
if (FormsAuthentication.CookieDomain != null)
{
cookie.Domain = FormsAuthentication.CookieDomain;
}
if (ticket.IsPersistent)
{
cookie.Expires = ticket.Expiration;
}
return cookie;
}
/// <summary>
/// Creates a FormsAuthenticationTicket for storage on the client.
/// The UserData field contains the CAS Service Ticket which can be
/// used by the server-side ServiceTicketManager to retrieve additional
/// details about the ticket (e.g. assertions)
/// </summary>
/// <param name="netId">User associated with the ticket</param>
/// <param name="cookiePath">Relative path on server in which cookie is valid</param>
/// <param name="serviceTicket">CAS service ticket</param>
/// <param name="validFromDate">Ticket valid from date</param>
/// <param name="validUntilDate">Ticket valid too date</param>
/// <returns>Instance of a FormsAuthenticationTicket</returns>
public static FormsAuthenticationTicket CreateFormsAuthenticationTicket(string netId, string cookiePath, string serviceTicket, DateTime? validFromDate, DateTime? validUntilDate)
{
Initialize();
protoLogger.Debug("Creating FormsAuthenticationTicket for " + serviceTicket);
DateTime fromDate = validFromDate.HasValue ? validFromDate.Value : DateTime.Now;
DateTime toDate = validUntilDate.HasValue ? validUntilDate.Value : fromDate.Add(FormsTimeout);
FormsAuthenticationTicket ticket = new FormsAuthenticationTicket(
2,
netId,
fromDate,
toDate,
false,
serviceTicket,
cookiePath ?? FormsAuthentication.FormsCookiePath
);
return ticket;
}
/// <summary>
/// Looks for a FormsAuthentication cookie and attempts to
/// parse a valid, non-expired FormsAuthenticationTicket.
/// It ensures that the UserData field has a value (presumed
/// to be a CAS Service Ticket).
/// </summary>
/// <returns>
/// Returns the FormsAuthenticationTicket contained in the
/// cookie or null if any issues are encountered.
/// </returns>
public static FormsAuthenticationTicket GetFormsAuthenticationTicket()
{
Initialize();
HttpContext context = HttpContext.Current;
HttpCookie cookie = context.Request.Cookies[FormsAuthentication.FormsCookieName];
if (cookie == null)
{
return null;
}
if (cookie.Expires != DateTime.MinValue && cookie.Expires < DateTime.Now)
{
ClearAuthCookie();
return null;
}
if (String.IsNullOrEmpty(cookie.Value))
{
ClearAuthCookie();
return null;
}
FormsAuthenticationTicket formsAuthTicket;
try
{
formsAuthTicket = FormsAuthentication.Decrypt(cookie.Value);
}
catch
{
ClearAuthCookie();
return null;
}
if (formsAuthTicket == null)
{
ClearAuthCookie();
return null;
}
if (formsAuthTicket.Expired)
{
ClearAuthCookie();
return null;
}
if (String.IsNullOrEmpty(formsAuthTicket.UserData))
{
ClearAuthCookie();
return null;
}
return formsAuthTicket;
}
/// <summary>
/// Extracts the CAS ticket from the SAML message supplied.
/// </summary>
/// <param name="xmlAsString">SAML message from CAS server</param>
/// <returns>The CAS ticket contained in SAML message</returns>
private static string ExtractSingleSignOutTicketFromSamlResponse(string xmlAsString)
{
XmlParserContext xmlParserContext = new XmlParserContext(null, xmlNamespaceManager, null, XmlSpace.None);
string elementText = null;
if (!String.IsNullOrEmpty(xmlAsString) && !String.IsNullOrEmpty(XML_SESSION_INDEX_ELEMENT_NAME))
{
using (TextReader textReader = new StringReader(xmlAsString))
{
XmlReader reader = XmlReader.Create(textReader, xmlReaderSettings, xmlParserContext);
bool foundElement = reader.ReadToFollowing(XML_SESSION_INDEX_ELEMENT_NAME);
if (foundElement)
{
elementText = reader.ReadElementString();
}
reader.Close();
}
}
return elementText;
}
        /// <summary>
        /// Logs the message to the configuration logger, then throws a
        /// CasConfigurationException carrying the same message.
        /// </summary>
        /// <param name="message">description of the configuration problem</param>
        private static void LogAndThrowConfigurationException(string message)
        {
            configLogger.Error(message);
            throw new CasConfigurationException(message);
        }
        /// <summary>
        /// Logs the message to the protocol logger, then throws an
        /// InvalidOperationException carrying the same message.
        /// </summary>
        /// <param name="message">description of the invalid operation</param>
        private static void LogAndThrowOperationException(string message)
        {
            protoLogger.Error(message);
            throw new InvalidOperationException(message);
        }
#endregion
#region Properties
        /// <summary>
        /// Name of ticket validator that validates CAS tickets using a
        /// particular protocol.  Valid values are Cas10, Cas20, and Saml11.
        /// </summary>
        public static string TicketValidatorName
        {
            get
            {
                // Initialize() lazily loads configuration on first access; every
                // accessor in this region follows the same pattern.
                Initialize();
                return ticketValidatorName;
            }
        }
        /// <summary>
        /// An instance of the TicketValidator specified in the
        /// TicketValidatorName property.  This will either be an instance of
        /// a Cas10TicketValidator, Cas20TicketValidator, or
        /// Saml11TicketValidator.
        /// </summary>
        internal static ITicketValidator TicketValidator
        {
            get
            {
                Initialize();
                return ticketValidator;
            }
        }
        /// <summary>
        /// The ticket manager to use to store tickets returned by the CAS server
        /// for validation, revocation, and single sign out support.
        /// <remarks>
        /// Currently supported values: CacheServiceTicketManager
        /// </remarks>
        /// </summary>
        public static string ServiceTicketManagerProvider
        {
            get
            {
                Initialize();
                return serviceTicketManagerProvider;
            }
        }
        /// <summary>
        /// An instance of the provider specified in the ServiceTicketManagerProvider property.
        /// ServiceTicketManager will be null if no serviceTicketManager is
        /// defined in web.config.  If a ServiceTicketManager is defined, this will allow
        /// access to and revocation of outstanding CAS service tickets along with
        /// additional information about the service tickets (i.e., IP address,
        /// assertions, etc.).
        /// </summary>
        public static IServiceTicketManager ServiceTicketManager
        {
            get
            {
                Initialize();
                return serviceTicketManager;
            }
        }
        /// <summary>
        /// The ticket manager to use to store and resolve ProxyGrantingTicket IOUs to
        /// ProxyGrantingTickets
        /// <remarks>
        /// Currently supported values: CacheProxyTicketManager
        /// </remarks>
        /// </summary>
        public static string ProxyTicketManagerProvider
        {
            get
            {
                Initialize();
                return proxyTicketManagerProvider;
            }
        }
        /// <summary>
        /// An instance of the provider specified in the ProxyTicketManagerProvider property.
        /// ProxyTicketManager will be null if no proxyTicketManager is
        /// defined in web.config.  If a ProxyTicketManager is defined, this will allow
        /// generation of proxy tickets for external sites and services.
        /// </summary>
        public static IProxyTicketManager ProxyTicketManager
        {
            get
            {
                Initialize();
                return proxyTicketManager;
            }
        }
        /// <summary>
        /// Enable CAS gateway feature, see https://apereo.github.io/cas/5.1.x/protocol/CAS-Protocol-Specification.html section 2.1.1.
        /// Default is false.
        /// </summary>
        public static bool Gateway
        {
            get
            {
                Initialize();
                return gateway;
            }
        }
        /// <summary>
        /// The name of the cookie used to store the Gateway status (NotAttempted,
        /// Success, Failed).  This cookie is used to prevent the client from
        /// attempting to gateway authenticate every request.
        /// </summary>
        public static string GatewayStatusCookieName
        {
            get
            {
                Initialize();
                return gatewayStatusCookieName;
            }
        }
        /// <summary>
        /// The Forms LoginUrl property set in system.web/authentication/forms
        /// </summary>
        public static string FormsLoginUrl
        {
            get
            {
                Initialize();
                return formsLoginUrl;
            }
        }
        /// <summary>
        /// The Forms Timeout property set in system.web/authentication/forms
        /// </summary>
        public static TimeSpan FormsTimeout
        {
            get
            {
                Initialize();
                return formsTimeout;
            }
        }
        /// <summary>
        /// URL of CAS login form.
        /// </summary>
        public static string CasServerLoginUrl
        {
            get
            {
                Initialize();
                return casServerLoginUrl;
            }
        }
        /// <summary>
        /// URL to root of CAS server application.  For example, if your
        /// CasServerLoginUrl is https://fed.example.com/cas/login
        /// then your CasServerUrlPrefix would be https://fed.example.com/cas/
        /// </summary>
        public static string CasServerUrlPrefix
        {
            get
            {
                Initialize();
                return casServerUrlPrefix;
            }
        }
        /// <summary>
        /// SAML ticket validator property to allow at most the given time
        /// difference in ms between artifact (ticket) timestamp and CAS server
        /// system time.  Increasing this may have negative security consequences;
        /// we recommend fixing sources of clock drift rather than increasing
        /// this value.
        /// </summary>
        public static long TicketTimeTolerance
        {
            get
            {
                Initialize();
                return ticketTimeTolerance;
            }
        }
        /// <summary>
        /// The server name of the server hosting the client application.  Service URL
        /// will be dynamically constructed using this value if Service is not specified.
        /// e.g. https://app.princeton.edu/
        /// </summary>
        public static string ServerName
        {
            get
            {
                Initialize();
                return serverName;
            }
        }
        /// <summary>
        /// Force user to reauthenticate to CAS before accessing this application.
        /// This provides additional security at the cost of usability since it effectively
        /// disables SSO for this application.
        /// </summary>
        public static bool Renew
        {
            get
            {
                Initialize();
                return renew;
            }
        }
        /// <summary>
        /// Whether to redirect to the same URL after ticket validation, but without the ticket
        /// in the parameter.
        /// </summary>
        public static bool RedirectAfterValidation
        {
            get
            {
                Initialize();
                return redirectAfterValidation;
            }
        }
        /// <summary>
        /// Specifies whether external single sign out requests should be processed.
        /// </summary>
        public static bool ProcessIncomingSingleSignOutRequests
        {
            get
            {
                Initialize();
                // Backed by the 'singleSignOut' configuration field.
                return singleSignOut;
            }
        }
        /// <summary>
        /// The URL to redirect to when the request has a valid CAS ticket but the user is
        /// not authorized to access the URL or resource.  If this option is set, users will
        /// be redirected to this URL.  If it is not set, the user will be redirected to the
        /// CAS login screen with a Renew option in the URL (to force for alternate credential
        /// collection).
        /// </summary>
        public static string NotAuthorizedUrl
        {
            get
            {
                Initialize();
                return notAuthorizedUrl;
            }
        }
        /// <summary>
        /// The URL to redirect to when the client is not accepting session
        /// cookies.  This condition is detected only when gateway is enabled.
        /// This will lock the users onto a specific page.  Otherwise, every
        /// request will cause a silent round-trip to the CAS server, adding
        /// a parameter to the URL.
        /// </summary>
        public static string CookiesRequiredUrl
        {
            get
            {
                Initialize();
                return cookiesRequiredUrl;
            }
        }
        /// <summary>
        /// The URL parameter to append to outbound CAS request's ServiceName
        /// when initiating an automatic CAS Gateway request.  This parameter
        /// plays a role in detecting whether or not the client has cookies
        /// enabled.  The default value is 'gatewayResponse' and only needs to
        /// be explicitly defined if that URL parameter has a meaning elsewhere
        /// in your application.
        /// </summary>
        public static string GatewayParameterName
        {
            get
            {
                Initialize();
                return gatewayParameterName;
            }
        }
        /// <summary>
        /// The URL parameter to append to outbound CAS proxy request's pgtUrl
        /// when initiating an proxy ticket service validation.  This is used
        /// to determine whether the request is originating from the CAS server
        /// and contains a pgtIou.
        /// </summary>
        public static string ProxyCallbackParameterName
        {
            get
            {
                Initialize();
                return proxyCallbackParameterName;
            }
        }
        /// <summary>
        /// URL for CAS Proxy callback
        /// </summary>
        public static String CasProxyCallbackUrl
        {
            get
            {
                Initialize();
                return casProxyCallbackUrl;
            }
        }
        /// <summary>
        /// Specifies whether to require CAS for requests that have null/empty content-types
        /// </summary>
        public static bool RequireCasForMissingContentTypes
        {
            get
            {
                Initialize();
                return requireCasForMissingContentTypes;
            }
        }
        /// <summary>
        /// Content-types for which CAS authentication will be required
        /// </summary>
        public static string[] RequireCasForContentTypes
        {
            get
            {
                Initialize();
                return requireCasForContentTypes;
            }
        }
        /// <summary>
        /// Handlers for which CAS authentication will be bypassed.
        /// </summary>
        public static string[] BypassCasForHandlers
        {
            get
            {
                Initialize();
                return bypassCasForHandlers;
            }
        }
#endregion
}
} | apereo/dotnet-cas-client | DotNetCasClient/CasAuthentication.cs | C# | apache-2.0 | 71,180 |
#include <3rdparty/yaml-cpp/node/node.h>
#include "nodebuilder.h"
#include "nodeevents.h"
namespace YAML
{
	// Produces an independent copy of a node graph by replaying the source
	// node's event stream into a fresh NodeBuilder and returning its root.
	Node Clone(const Node& node)
	{
		NodeBuilder builder;
		NodeEvents events(node);
		events.Emit(builder);
		return builder.Root();
	}
}
| izenecloud/izenelib | source/3rdparty/yaml-cpp/node.cpp | C++ | apache-2.0 | 272 |
package com.miaotu.adapter;
import android.content.Context;
import android.content.Intent;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.TextView;
import com.koushikdutta.urlimageviewhelper.UrlImageViewHelper;
import com.miaotu.R;
import com.miaotu.activity.PersonCenterActivity;
import com.miaotu.model.GroupUserInfo;
import com.miaotu.util.Util;
import com.miaotu.view.CircleImageView;
import java.util.List;
/**
 * ListView adapter that renders the members of a group as rows showing an
 * avatar ({@link CircleImageView}) and a nickname. Tapping an avatar opens
 * that member's profile ({@link PersonCenterActivity}).
 *
 * Created by Jayden on 2015/5/29.
 */
public class GroupUserAdapter extends BaseAdapter{
    private List<GroupUserInfo> groupInfos;
    private LayoutInflater mLayoutInflater = null;
    private Context mContext;

    public GroupUserAdapter(Context context, List<GroupUserInfo> groupInfos){
        this.groupInfos = groupInfos;
        mLayoutInflater = LayoutInflater.from(context);
        this.mContext = context;
    }

    @Override
    public int getCount() {
        return groupInfos == null?0:groupInfos.size();
    }

    @Override
    public Object getItem(int i) {
        // Fixed: guard against a null backing list so getItem() agrees with
        // getCount() instead of throwing a NullPointerException.
        return groupInfos == null ? null : groupInfos.get(i);
    }

    @Override
    public long getItemId(int i) {
        return i;
    }

    @Override
    public View getView(final int i, View view, ViewGroup viewGroup) {
        ViewHolder holder;
        if(view == null){
            view = mLayoutInflater.inflate(R.layout.item_group_user, null);
            holder = new ViewHolder();
            holder.ivPhoto = (CircleImageView) view.findViewById(R.id.iv_head_photo);
            holder.tvName = (TextView) view.findViewById(R.id.tv_name);
            // Attach the click listener once per recycled row (instead of on
            // every bind); the bound position travels via the view tag, which
            // is refreshed below on each call to getView().
            holder.ivPhoto.setOnClickListener(new View.OnClickListener() {
                @Override
                public void onClick(View v) {
                    if(!Util.isNetworkConnected(mContext)) {
                        return;
                    }
                    int pos = (int) v.getTag();
                    Intent intent = new Intent(mContext, PersonCenterActivity.class);
                    intent.putExtra("uid", groupInfos.get(pos).getUid());
                    mContext.startActivity(intent);
                }
            });
            view.setTag(holder);
        }else {
            holder = (ViewHolder) view.getTag();
        }
        UrlImageViewHelper.setUrlDrawable(holder.ivPhoto, groupInfos.get(i).getHeadurl(),
                R.drawable.icon_default_head_photo);
        holder.tvName.setText(groupInfos.get(i).getNickname());
        // Keep the tag current so the shared click listener resolves the row
        // that is actually bound to this recycled view.
        holder.ivPhoto.setTag(i);
        return view;
    }

    /**
     * Cached row widgets. Declared static so holder instances do not retain an
     * implicit reference to the enclosing adapter (standard ViewHolder pattern).
     */
    public static class ViewHolder{
        CircleImageView ivPhoto;
        TextView tvName;
    }
}
| miaotu3/Mitotu | MiaoTu/src/main/java/com/miaotu/adapter/GroupUserAdapter.java | Java | apache-2.0 | 2,624 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
#nullable enable
using System.Collections.Generic;
namespace Microsoft.CodeAnalysis.Shared.Extensions
{
internal static class IAssemblySymbolExtensions
{
private const string AttributeSuffix = "Attribute";
public static bool ContainsNamespaceName(
this List<IAssemblySymbol> assemblies,
string namespaceName)
{
// PERF: Expansion of "assemblies.Any(a => a.NamespaceNames.Contains(namespaceName))"
// to avoid allocating a lambda.
foreach (var a in assemblies)
{
if (a.NamespaceNames.Contains(namespaceName))
{
return true;
}
}
return false;
}
public static bool ContainsTypeName(this List<IAssemblySymbol> assemblies, string typeName, bool tryWithAttributeSuffix = false)
{
if (!tryWithAttributeSuffix)
{
// PERF: Expansion of "assemblies.Any(a => a.TypeNames.Contains(typeName))"
// to avoid allocating a lambda.
foreach (var a in assemblies)
{
if (a.TypeNames.Contains(typeName))
{
return true;
}
}
}
else
{
var attributeName = typeName + AttributeSuffix;
foreach (var a in assemblies)
{
var typeNames = a.TypeNames;
if (typeNames.Contains(typeName) || typeNames.Contains(attributeName))
{
return true;
}
}
}
return false;
}
public static bool IsSameAssemblyOrHasFriendAccessTo(this IAssemblySymbol assembly, IAssemblySymbol toAssembly)
{
return
Equals(assembly, toAssembly) ||
(assembly.IsInteractive && toAssembly.IsInteractive) ||
toAssembly.GivesAccessTo(assembly);
}
}
}
| nguerrera/roslyn | src/Workspaces/Core/Portable/Shared/Extensions/IAssemblySymbolExtensions.cs | C# | apache-2.0 | 2,282 |
package com.custardsource.parfait.spring;
import java.util.Random;
@Profiled
public class DelayingBean {
    // Per-instance delay in milliseconds, fixed at construction (0-99).
    private final int delay;

    public DelayingBean() {
        this.delay = new Random().nextInt(100);
    }

    /**
     * Sleeps for this bean's randomly chosen delay, simulating a unit of
     * timed work for profiling.
     *
     * @throws RuntimeException wrapping an {@link InterruptedException} if the
     *         sleeping thread is interrupted; the thread's interrupt status is
     *         restored before rethrowing.
     */
    @Profiled
    public void doThing() {
        try {
            Thread.sleep(delay);
        } catch (InterruptedException e) {
            // Fixed: restore the interrupt flag so callers further up the
            // stack can still observe the interruption.
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }
}
| akshayahn/parfait | parfait-spring/src/test/java/com/custardsource/parfait/spring/DelayingBean.java | Java | apache-2.0 | 355 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-09-06 01:35
from __future__ import unicode_literals
from django.db import migrations, models
def load_settings(apps, schema_editor):
    """Seed the ``receiving_data`` flag (disabled) into climate_data.Setting.

    Uses the historical model from the migration state, per Django's
    data-migration convention.
    """
    setting_model = apps.get_model("climate_data", "Setting")
    receiving_data = setting_model(name="receiving_data", value="0")
    receiving_data.save()
class Migration(migrations.Migration):
    # Depends on 0013, which introduced the Setting model this data
    # migration populates.
    dependencies = [
        ('climate_data', '0013_setting'),
    ]

    # Data-only migration: seed the default settings row.
    # NOTE(review): no reverse_code is supplied to RunPython, so this
    # migration is irreversible as written — confirm that is intended.
    operations = [
        migrations.RunPython(load_settings)
    ]
| qubs/data-centre | climate_data/migrations/0014_auto_20160906_0135.py | Python | apache-2.0 | 506 |
/*
Copyright 2012, Strategic Gains, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.strategicgains.restexpress.response;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
/**
 * Maps serialization format names to their {@link ResponseProcessor}
 * instances and resolves which processor should handle a request, falling
 * back to a configurable default format when no format is supplied.
 *
 * @author toddf
 * @since May 14, 2012
 */
public class ResponseProcessorResolver
{
	private Map<String, ResponseProcessor> processors = new HashMap<String, ResponseProcessor>();
	private String defaultFormat;

	public ResponseProcessorResolver()
	{
		super();
	}

	public ResponseProcessorResolver(Map<String, ResponseProcessor> processors, String defaultFormat)
	{
		super();
		this.processors.putAll(processors);
		this.defaultFormat = defaultFormat;
	}

	/**
	 * Registers a processor for the given format name.
	 *
	 * @return the processor previously registered for that format, or null
	 */
	public ResponseProcessor put(String format, ResponseProcessor processor)
	{
		return processors.put(format, processor);
	}

	public void setDefaultFormat(String format)
	{
		this.defaultFormat = format;
	}

	/**
	 * Resolves the processor for the requested format, or the default
	 * processor when the requested format is null or blank.
	 */
	public ResponseProcessor resolve(String requestFormat)
	{
		boolean hasFormat = (requestFormat != null && !requestFormat.trim().isEmpty());
		return hasFormat ? resolveViaSpecifiedFormat(requestFormat) : getDefault();
	}

	/**
	 * Returns the processor registered for the default format, or null when
	 * no default is configured.
	 */
	public ResponseProcessor getDefault()
	{
		return resolveViaSpecifiedFormat(defaultFormat);
	}

	private ResponseProcessor resolveViaSpecifiedFormat(String format)
	{
		boolean blank = (format == null || format.trim().isEmpty());
		return blank ? null : processors.get(format);
	}

	/**
	 * @return the set of format names with registered processors
	 */
	public Collection<String> getSupportedFormats()
	{
		return processors.keySet();
	}
}
| kushalagrawal/RestExpress | src/java/com/strategicgains/restexpress/response/ResponseProcessorResolver.java | Java | apache-2.0 | 2,010 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.example.fhir.osgi;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import javax.inject.Inject;
import org.apache.camel.CamelContext;
import org.apache.camel.ServiceStatus;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.Configuration;
import org.ops4j.pax.exam.CoreOptions;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.junit.PaxExam;
import org.ops4j.pax.exam.spi.reactors.ExamReactorStrategy;
import org.ops4j.pax.exam.spi.reactors.PerClass;
import org.ops4j.pax.tinybundles.core.TinyBundles;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.ops4j.pax.exam.CoreOptions.options;
import static org.ops4j.pax.exam.CoreOptions.streamBundle;
import static org.ops4j.pax.exam.CoreOptions.when;
import static org.ops4j.pax.exam.karaf.options.KarafDistributionOption.debugConfiguration;
import static org.ops4j.pax.exam.karaf.options.KarafDistributionOption.editConfigurationFilePut;
@RunWith(PaxExam.class)
@ExamReactorStrategy(PerClass.class)
public class FhirOsgiIT {
    // Camel context created inside the provisioned Karaf container; injected
    // by pax-exam once the container and feature installation are up.
    @Inject
    private CamelContext context;
    @Configuration
    public Option[] config() throws IOException {
        return options(
            PaxExamOptions.KARAF.option(),
            PaxExamOptions.CAMEL_FHIR.option(),
            // Provision this module's example bundle (built into target/) into
            // the container as a stream.
            streamBundle(
                TinyBundles.bundle()
                    .read(
                        Files.newInputStream(
                            Paths.get("target")
                                .resolve("camel-example-fhir-osgi.jar")))
                    .build()),
            // Flip the flag to true to make the container wait for a remote
            // debugger on port 5005 before running the test.
            when(false)
                .useOptions(
                    debugConfiguration("5005", true)),
            // Copy the example's configuration file into Karaf's etc/ so the
            // route picks up its settings.
            CoreOptions.composite(editConfigurationFilePut(
                    "etc/org.apache.camel.example.fhir.osgi.configuration.cfg",
                    new File("org.apache.camel.example.fhir.osgi.configuration.cfg")))
        );
    }
    @Test
    public void testRouteStatus() {
        assertNotNull(context);
        assertEquals("Route status is incorrect!", ServiceStatus.Started, context.getRouteController().getRouteStatus("fhir-example-osgi"));
    }
}
| kevinearls/camel | examples/camel-example-fhir-osgi/src/test/java/org/apache/camel/example/fhir/osgi/FhirOsgiIT.java | Java | apache-2.0 | 3,055 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package myservice.mynamespace.web;
import java.io.IOException;
import java.lang.Override;import java.lang.RuntimeException;import java.util.ArrayList;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import myservice.mynamespace.data.Storage;
import myservice.mynamespace.service.DemoBatchProcessor;
import myservice.mynamespace.service.DemoEdmProvider;
import myservice.mynamespace.service.DemoEntityCollectionProcessor;
import myservice.mynamespace.service.DemoEntityProcessor;
import myservice.mynamespace.service.DemoPrimitiveProcessor;
import org.apache.olingo.server.api.OData;
import org.apache.olingo.server.api.ODataHttpHandler;
import org.apache.olingo.server.api.ServiceMetadata;
import org.apache.olingo.commons.api.edmx.EdmxReference;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Front-controller servlet for the OData demo service.
 *
 * <p>Each HTTP session gets its own {@link Storage} instance so data survives
 * across requests of the same client. Every request builds an Olingo handler,
 * registers the demo processors and delegates processing to it.
 */
public class DemoServlet extends HttpServlet {

  private static final long serialVersionUID = 1L;
  private static final Logger LOG = LoggerFactory.getLogger(DemoServlet.class);

  @Override
  protected void service(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException {
    try {
      // One Storage per session: create it lazily on first access.
      HttpSession session = request.getSession(true);
      Storage storage = (Storage) session.getAttribute(Storage.class.getName());
      if (storage == null) {
        storage = new Storage();
        session.setAttribute(Storage.class.getName(), storage);
      }

      // Assemble the OData handler from the EDM provider and register all processors.
      OData odata = OData.newInstance();
      ServiceMetadata metadata =
          odata.createServiceMetadata(new DemoEdmProvider(), new ArrayList<EdmxReference>());
      ODataHttpHandler handler = odata.createHandler(metadata);
      handler.register(new DemoEntityCollectionProcessor(storage));
      handler.register(new DemoEntityProcessor(storage));
      handler.register(new DemoPrimitiveProcessor(storage));
      handler.register(new DemoBatchProcessor(storage));

      // Hand the request over to Olingo.
      handler.process(request, response);
    } catch (RuntimeException e) {
      LOG.error("Server Error occurred in ExampleServlet", e);
      throw new ServletException(e);
    }
  }
}
| apache/olingo-odata4 | samples/tutorials/p11_batch/src/main/java/myservice/mynamespace/web/DemoServlet.java | Java | apache-2.0 | 3,114 |
package ingvar.android.processor.service;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.os.IBinder;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Future;
import java.util.concurrent.ScheduledFuture;
import ingvar.android.processor.exception.ProcessorException;
import ingvar.android.processor.observation.IObserver;
import ingvar.android.processor.observation.ScheduledObserver;
import ingvar.android.processor.persistence.Time;
import ingvar.android.processor.task.AbstractTask;
import ingvar.android.processor.task.Execution;
import ingvar.android.processor.task.ITask;
import ingvar.android.processor.task.ScheduledExecution;
import ingvar.android.processor.util.LW;
/**
* Wrapper for processing service.
* Just provide helper methods.
* Logged under DEBUG level.
*
* <br/><br/>Created by Igor Zubenko on 2015.03.19.
*/
/**
 * Wrapper for processing service.
 * Just provide helper methods.
 * Logged under DEBUG level.
 *
 * <br/><br/>Created by Igor Zubenko on 2015.03.19.
 *
 * @param <S> concrete type of the wrapped {@link ProcessorService}
 */
public class Processor<S extends ProcessorService> {

    public static final String TAG = Processor.class.getSimpleName();

    /** Service class used to build the binding {@link Intent}. */
    private Class<? extends ProcessorService> serviceClass;
    /** Tasks queued via {@link #planExecute} until the service becomes bound. */
    private Map<AbstractTask, IObserver[]> plannedTasks;
    private ServiceConnection connection;
    /** Bound service instance, null while not bound. */
    private S service;

    public Processor(Class<? extends ProcessorService> serviceClass) {
        this.serviceClass = serviceClass;
        this.service = null;
        this.connection = new Connection();
        this.plannedTasks = new ConcurrentHashMap<>();
    }

    /**
     * Send task for execution.
     *
     * @param task task
     * @param observers task observers
     * @return {@link Future} of task execution
     * @throws ProcessorException if the service is not bound yet
     */
    public Execution execute(AbstractTask task, IObserver... observers) {
        ensureBound();
        return service.execute(task, observers);
    }

    /**
     * If service is bound execute task, otherwise add to queue.
     *
     * @param task task
     * @param observers task observers
     */
    public void planExecute(AbstractTask task, IObserver... observers) {
        if(isBound()) {
            execute(task, observers);
        } else {
            plannedTasks.put(task, observers);
            LW.d(TAG, "Queued task %s", task);
        }
    }

    /**
     * Schedule task for single execution.
     * If task with same key & cache class already exists it will be cancelled and their observers will be removed.
     *
     * @param task task
     * @param delay the time from now to delay execution (millis)
     * @param observers task observers
     * @return {@link ScheduledFuture} of task execution
     * @throws ProcessorException if the service is not bound yet
     */
    public ScheduledExecution schedule(AbstractTask task, long delay, ScheduledObserver... observers) {
        ensureBound();
        return service.schedule(task, delay, observers);
    }

    /**
     * Schedule task for multiple executions.
     * If task with same key & cache class already exists it will be cancelled and their observers will be removed.
     *
     * @param task task
     * @param initialDelay the time to delay first execution
     * @param delay the delay between the termination of one execution and the commencement of the next.
     * @param observers task observers
     * @return {@link ScheduledFuture} of task execution
     * @throws ProcessorException if the service is not bound yet
     */
    public ScheduledExecution schedule(AbstractTask task, long initialDelay, long delay, ScheduledObserver... observers) {
        ensureBound();
        return service.schedule(task, initialDelay, delay, observers);
    }

    /**
     * Cancel a scheduled task.
     *
     * @param task task
     * @throws ProcessorException if the service is not bound yet
     */
    public void cancel(AbstractTask task) {
        ensureBound();
        service.cancel(task);
    }

    /**
     * Get the scheduled execution for a task.
     *
     * @param task task
     * @return scheduled execution
     * @throws ProcessorException if the service is not bound yet
     */
    public ScheduledExecution getScheduled(AbstractTask task) {
        ensureBound();
        return service.getScheduled(task);
    }

    /**
     * Remove registered observers from task.
     *
     * @param task task
     * @throws ProcessorException if the service is not bound yet
     */
    public void removeObservers(ITask task) {
        ensureBound(); // previously failed with an uninformative NullPointerException
        service.getObserverManager().remove(task);
    }

    /**
     * Obtain task result from cache.
     *
     * @param key result identifier
     * @param dataClass single result item class
     * @param expiryTime how much time data consider valid in the repository
     * @param <R> returned result class
     * @return cached result if exists and did not expired, null otherwise
     * @throws ProcessorException if the service is not bound yet
     */
    public <R> R obtainFromCache(Object key, Class dataClass, long expiryTime) {
        ensureBound(); // previously failed with an uninformative NullPointerException
        return service.getCacheManager().obtain(key, dataClass, expiryTime);
    }

    /**
     * Obtain task result from cache if exists.
     *
     * @param key result identifier
     * @param dataClass single result item class
     * @param <R> returned result class
     * @return cached result if exists, null otherwise
     */
    public <R> R obtainFromCache(Object key, Class dataClass) {
        return obtainFromCache(key, dataClass, Time.ALWAYS_RETURNED);
    }

    /**
     * Remove a single entry from the cache.
     *
     * @param key result identifier
     * @param dataClass single result item class
     * @throws ProcessorException if the service is not bound yet
     */
    public void removeFromCache(Object key, Class dataClass) {
        ensureBound();
        service.getCacheManager().remove(key, dataClass);
    }

    /**
     * Remove all data by class.
     *
     * @param dataClass data class
     * @throws ProcessorException if the service is not bound yet
     */
    public void clearCache(Class dataClass) {
        ensureBound();
        service.getCacheManager().remove(dataClass);
    }

    /**
     * Remove all data from cache.
     *
     * @throws ProcessorException if the service is not bound yet
     */
    public void clearCache() {
        ensureBound();
        service.clearCache();
    }

    /**
     * Bind service to context.
     *
     * @param context context
     */
    public void bind(Context context) {
        LW.d(TAG, "Bind service '%s' to context '%s'", serviceClass.getSimpleName(), context.getClass().getSimpleName());
        Intent intent = new Intent(context, serviceClass);
        context.startService(intent); //keep service alive after context unbound.
        if(!context.bindService(intent, connection, Context.BIND_AUTO_CREATE)) {
            throw new ProcessorException("Connection is not made. Maybe you forgot add your service to AndroidManifest.xml?");
        }
    }

    /**
     * Unbind service from context.
     * Remove all planned tasks if exist.
     *
     * @param context context the service was bound to
     */
    public void unbind(Context context) {
        LW.d(TAG, "Unbind service '%s' from context '%s'", serviceClass.getSimpleName(), context.getClass().getSimpleName());
        if(service != null) {
            service.removeObservers(context);
        }
        context.unbindService(connection);
        plannedTasks.clear();
        service = null;
    }

    /**
     * Check bound service or not.
     *
     * @return true if bound, false otherwise
     */
    public boolean isBound() {
        return service != null;
    }

    /**
     * Get service.
     *
     * @return service or null if not bound
     */
    public S getService() {
        return service;
    }

    /**
     * Throws a {@link ProcessorException} when the service is not bound yet.
     * Centralizes the guard so every service-touching method fails consistently
     * (instead of the mix of explicit checks and NullPointerExceptions before).
     */
    private void ensureBound() {
        if(!isBound()) {
            throw new ProcessorException("Service is not bound yet!");
        }
    }

    private class Connection implements ServiceConnection {

        @Override
        @SuppressWarnings("unchecked")
        public void onServiceConnected(ComponentName name, IBinder service) {
            LW.d(TAG, "Service '%s' connected.", name);
            Processor.this.service = (S) ((ProcessorService.ProcessorBinder) service).getService();
            // Flush tasks queued while we were unbound.
            if(plannedTasks.size() > 0) {
                LW.d(TAG, "Execute planned %d tasks.", plannedTasks.size());
                for (Map.Entry<AbstractTask, IObserver[]> entry : plannedTasks.entrySet()) {
                    Processor.this.service.execute(entry.getKey(), entry.getValue());
                }
                plannedTasks.clear();
            }
        }

        @Override
        public void onServiceDisconnected(ComponentName name) {
            LW.d(TAG, "Service '%s' disconnected.", name);
            plannedTasks.clear();
            Processor.this.service = null;
        }
    }
}
| orwir/processor | core/src/main/java/ingvar/android/processor/service/Processor.java | Java | apache-2.0 | 8,123 |
/*******************************************************************************
* Copyright (c) quickfixengine.org All rights reserved.
*
* This file is part of the QuickFIX FIX Engine
*
* This file may be distributed under the terms of the quickfixengine.org
* license as defined by quickfixengine.org and appearing in the file
* LICENSE included in the packaging of this file.
*
* This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING
* THE WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE.
*
* See http://www.quickfixengine.org/LICENSE for licensing information.
*
* Contact ask@quickfixengine.org if any conditions of this licensing
* are not clear to you.
******************************************************************************/
package quickfix.field;
import quickfix.DecimalField;
/**
 * FIX field 1066 (OfferSwapPoints), carried as a decimal value.
 */
public class OfferSwapPoints extends DecimalField
{
    static final long serialVersionUID = 20050617;

    /** FIX tag number of this field. */
    public static final int FIELD = 1066;

    /** Creates the field with no value set. */
    public OfferSwapPoints()
    {
        super(FIELD);
    }

    /** Creates the field with the given exact decimal value. */
    public OfferSwapPoints(java.math.BigDecimal data)
    {
        super(FIELD, data);
    }

    /**
     * Creates the field from a double.
     *
     * <p>Uses {@link java.math.BigDecimal#valueOf(double)} so the value keeps the
     * double's canonical decimal representation instead of its full binary
     * expansion (e.g. 0.1 stays "0.1" rather than 0.1000000000000000055...).
     */
    public OfferSwapPoints(double data)
    {
        super(FIELD, java.math.BigDecimal.valueOf(data));
    }
}
| Forexware/quickfixj | src/main/java/quickfix/field/OfferSwapPoints.java | Java | apache-2.0 | 1,279 |
// Copyright 2017 The Prometheus Authors
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package index
import (
"container/heap"
"encoding/binary"
"runtime"
"sort"
"strings"
"sync"
"github.com/prometheus/prometheus/pkg/labels"
)
// allPostingsKey is the special (empty) label under which the postings list
// containing every series ID is stored.
var allPostingsKey = labels.Label{}

// AllPostingsKey returns the label key that is used to store the postings list of all existing IDs.
func AllPostingsKey() (name, value string) {
	return allPostingsKey.Name, allPostingsKey.Value
}
// MemPostings holds postings list for series ID per label pair. They may be written
// to out of order.
// ensureOrder() must be called once before any reads are done. This allows for quick
// unordered batch fills on startup.
type MemPostings struct {
	// mtx guards m and ordered.
	mtx sync.RWMutex
	// m maps label name -> label value -> list of series IDs.
	m map[string]map[string][]uint64
	// ordered reports whether all postings lists are kept sorted on insert.
	ordered bool
}

// NewMemPostings returns a memPostings that's ready for reads and writes.
func NewMemPostings() *MemPostings {
	return &MemPostings{
		m:       make(map[string]map[string][]uint64, 512),
		ordered: true,
	}
}

// NewUnorderedMemPostings returns a memPostings that is not safe to be read from
// until ensureOrder was called once.
func NewUnorderedMemPostings() *MemPostings {
	return &MemPostings{
		m:       make(map[string]map[string][]uint64, 512),
		ordered: false,
	}
}
// SortedKeys returns a list of sorted label keys of the postings.
func (p *MemPostings) SortedKeys() []labels.Label {
	p.mtx.RLock()
	keys := make([]labels.Label, 0, len(p.m))

	for n, e := range p.m {
		for v := range e {
			keys = append(keys, labels.Label{Name: n, Value: v})
		}
	}
	// Release the lock before sorting; keys is a private copy.
	p.mtx.RUnlock()

	sort.Slice(keys, func(i, j int) bool {
		if d := strings.Compare(keys[i].Name, keys[j].Name); d != 0 {
			return d < 0
		}
		return keys[i].Value < keys[j].Value
	})
	return keys
}

// LabelNames returns all the unique label names.
func (p *MemPostings) LabelNames() []string {
	p.mtx.RLock()
	defer p.mtx.RUnlock()
	n := len(p.m)
	if n == 0 {
		return nil
	}

	// n-1 capacity: the allPostingsKey entry is excluded below.
	names := make([]string, 0, n-1)
	for name := range p.m {
		if name != allPostingsKey.Name {
			names = append(names, name)
		}
	}
	return names
}

// LabelValues returns label values for the given name.
func (p *MemPostings) LabelValues(name string) []string {
	p.mtx.RLock()
	defer p.mtx.RUnlock()

	values := make([]string, 0, len(p.m[name]))
	for v := range p.m[name] {
		values = append(values, v)
	}
	return values
}
// PostingsStats contains cardinality based statistics for postings.
type PostingsStats struct {
	CardinalityMetricsStats []Stat
	CardinalityLabelStats   []Stat
	LabelValueStats         []Stat
	LabelValuePairsStats    []Stat
	NumLabelPairs           int
}

// Stats calculates the cardinality statistics from postings.
// The label argument selects the label name whose per-value cardinality is
// reported in CardinalityMetricsStats (typically "__name__").
func (p *MemPostings) Stats(label string) *PostingsStats {
	// Only the top-N entries of each statistic are kept.
	const maxNumOfRecords = 10
	var size uint64

	p.mtx.RLock()

	metrics := &maxHeap{}
	labels := &maxHeap{}
	labelValueLength := &maxHeap{}
	labelValuePairs := &maxHeap{}
	numLabelPairs := 0

	metrics.init(maxNumOfRecords)
	labels.init(maxNumOfRecords)
	labelValueLength.init(maxNumOfRecords)
	labelValuePairs.init(maxNumOfRecords)

	for n, e := range p.m {
		// Skip the allPostingsKey entry (empty label name).
		if n == "" {
			continue
		}
		labels.push(Stat{Name: n, Count: uint64(len(e))})
		numLabelPairs += len(e)
		size = 0
		// NOTE: "name" here iterates over label *values* of label name n;
		// "values" is the postings list (series IDs) for that value.
		for name, values := range e {
			if n == label {
				metrics.push(Stat{Name: name, Count: uint64(len(values))})
			}
			labelValuePairs.push(Stat{Name: n + "=" + name, Count: uint64(len(values))})
			size += uint64(len(name))
		}
		// Accumulated byte length of all values of label n.
		labelValueLength.push(Stat{Name: n, Count: size})
	}

	p.mtx.RUnlock()

	return &PostingsStats{
		CardinalityMetricsStats: metrics.get(),
		CardinalityLabelStats:   labels.get(),
		LabelValueStats:         labelValueLength.get(),
		LabelValuePairsStats:    labelValuePairs.get(),
		NumLabelPairs:           numLabelPairs,
	}
}
// Get returns a postings list for the given label pair.
func (p *MemPostings) Get(name, value string) Postings {
	var lp []uint64
	p.mtx.RLock()
	l := p.m[name]
	if l != nil {
		lp = l[value]
	}
	p.mtx.RUnlock()

	if lp == nil {
		return EmptyPostings()
	}
	return newListPostings(lp...)
}

// All returns a postings list over all documents ever added.
func (p *MemPostings) All() Postings {
	return p.Get(AllPostingsKey())
}
// EnsureOrder ensures that all postings lists are sorted. After it returns all further
// calls to add and addFor will insert new IDs in a sorted manner.
func (p *MemPostings) EnsureOrder() {
	p.mtx.Lock()
	defer p.mtx.Unlock()

	if p.ordered {
		return
	}

	// Sort all postings lists concurrently with one worker per logical CPU.
	n := runtime.GOMAXPROCS(0)
	workc := make(chan []uint64)

	var wg sync.WaitGroup
	wg.Add(n)

	for i := 0; i < n; i++ {
		go func() {
			for l := range workc {
				sort.Slice(l, func(a, b int) bool { return l[a] < l[b] })
			}
			wg.Done()
		}()
	}

	for _, e := range p.m {
		for _, l := range e {
			workc <- l
		}
	}
	close(workc)
	wg.Wait()

	p.ordered = true
}
// Delete removes all ids in the given map from the postings lists.
func (p *MemPostings) Delete(deleted map[uint64]struct{}) {
	var keys, vals []string

	// Collect all keys relevant for deletion once. New keys added afterwards
	// can by definition not be affected by any of the given deletes.
	p.mtx.RLock()
	for n := range p.m {
		keys = append(keys, n)
	}
	p.mtx.RUnlock()

	for _, n := range keys {
		p.mtx.RLock()
		vals = vals[:0]
		for v := range p.m[n] {
			vals = append(vals, v)
		}
		p.mtx.RUnlock()

		// For each posting we first analyse whether the postings list is affected by the deletes.
		// If yes, we actually reallocate a new postings list.
		for _, l := range vals {
			// Only lock for processing one postings list so we don't block reads for too long.
			p.mtx.Lock()

			found := false
			for _, id := range p.m[n][l] {
				if _, ok := deleted[id]; ok {
					found = true
					break
				}
			}
			if !found {
				p.mtx.Unlock()
				continue
			}
			// Rebuild the list without the deleted IDs.
			repl := make([]uint64, 0, len(p.m[n][l]))

			for _, id := range p.m[n][l] {
				if _, ok := deleted[id]; !ok {
					repl = append(repl, id)
				}
			}
			if len(repl) > 0 {
				p.m[n][l] = repl
			} else {
				delete(p.m[n], l)
			}
			p.mtx.Unlock()
		}
		// Drop the label name entirely when no values remain.
		p.mtx.Lock()
		if len(p.m[n]) == 0 {
			delete(p.m, n)
		}
		p.mtx.Unlock()
	}
}
// Iter calls f for each postings list. It aborts if f returns an error and returns it.
func (p *MemPostings) Iter(f func(labels.Label, Postings) error) error {
	p.mtx.RLock()
	defer p.mtx.RUnlock()

	for n, e := range p.m {
		for v, p := range e {
			if err := f(labels.Label{Name: n, Value: v}, newListPostings(p...)); err != nil {
				return err
			}
		}
	}
	return nil
}

// Add a label set to the postings index.
func (p *MemPostings) Add(id uint64, lset labels.Labels) {
	p.mtx.Lock()

	for _, l := range lset {
		p.addFor(id, l)
	}
	// Every series is additionally indexed under the special all-postings key.
	p.addFor(id, allPostingsKey)

	p.mtx.Unlock()
}

// addFor appends id to the postings list of label l.
// The caller must hold the write lock.
func (p *MemPostings) addFor(id uint64, l labels.Label) {
	nm, ok := p.m[l.Name]
	if !ok {
		nm = map[string][]uint64{}
		p.m[l.Name] = nm
	}
	list := append(nm[l.Value], id)
	nm[l.Value] = list

	if !p.ordered {
		return
	}
	// There is no guarantee that no higher ID was inserted before as they may
	// be generated independently before adding them to postings.
	// We repair order violations on insert. The invariant is that the first n-1
	// items in the list are already sorted.
	for i := len(list) - 1; i >= 1; i-- {
		if list[i] >= list[i-1] {
			break
		}
		list[i], list[i-1] = list[i-1], list[i]
	}
}
// ExpandPostings returns the postings expanded as a slice.
func ExpandPostings(p Postings) (res []uint64, err error) {
	for p.Next() {
		res = append(res, p.At())
	}
	return res, p.Err()
}

// Postings provides iterative access over a postings list.
type Postings interface {
	// Next advances the iterator and returns true if another value was found.
	Next() bool

	// Seek advances the iterator to value v or greater and returns
	// true if a value was found.
	Seek(v uint64) bool

	// At returns the value at the current iterator position.
	At() uint64

	// Err returns the last error of the iterator.
	Err() error
}

// errPostings is an empty iterator that always errors.
type errPostings struct {
	err error
}

func (e errPostings) Next() bool       { return false }
func (e errPostings) Seek(uint64) bool { return false }
func (e errPostings) At() uint64       { return 0 }
func (e errPostings) Err() error       { return e.err }

// emptyPostings is the shared sentinel returned by EmptyPostings.
var emptyPostings = errPostings{}

// EmptyPostings returns a postings list that's always empty.
// NOTE: Returning EmptyPostings sentinel when index.Postings struct has no postings is recommended.
// It triggers optimized flow in other functions like Intersect, Without etc.
func EmptyPostings() Postings {
	return emptyPostings
}

// ErrPostings returns new postings that immediately error.
func ErrPostings(err error) Postings {
	return errPostings{err}
}
// Intersect returns a new postings list over the intersection of the
// input postings.
func Intersect(its ...Postings) Postings {
	if len(its) == 0 {
		return EmptyPostings()
	}
	if len(its) == 1 {
		return its[0]
	}
	// Intersection with the empty sentinel is always empty.
	for _, p := range its {
		if p == EmptyPostings() {
			return EmptyPostings()
		}
	}

	return newIntersectPostings(its...)
}

type intersectPostings struct {
	arr []Postings
	cur uint64 // candidate value; valid once all iterators agree on it
}

func newIntersectPostings(its ...Postings) *intersectPostings {
	return &intersectPostings{arr: its}
}

func (it *intersectPostings) At() uint64 {
	return it.cur
}

// doNext seeks every iterator to cur; whenever one overshoots, the candidate
// is raised and the scan restarts until all iterators agree.
func (it *intersectPostings) doNext() bool {
Loop:
	for {
		for _, p := range it.arr {
			if !p.Seek(it.cur) {
				return false
			}
			if p.At() > it.cur {
				it.cur = p.At()
				continue Loop
			}
		}
		return true
	}
}

func (it *intersectPostings) Next() bool {
	// Advance every iterator once, tracking the maximum as the next candidate.
	for _, p := range it.arr {
		if !p.Next() {
			return false
		}
		if p.At() > it.cur {
			it.cur = p.At()
		}
	}
	return it.doNext()
}

func (it *intersectPostings) Seek(id uint64) bool {
	it.cur = id
	return it.doNext()
}

func (it *intersectPostings) Err() error {
	for _, p := range it.arr {
		if p.Err() != nil {
			return p.Err()
		}
	}
	return nil
}
// Merge returns a new iterator over the union of the input iterators.
func Merge(its ...Postings) Postings {
	if len(its) == 0 {
		return EmptyPostings()
	}
	if len(its) == 1 {
		return its[0]
	}

	p, ok := newMergedPostings(its)
	if !ok {
		return EmptyPostings()
	}
	return p
}

// postingsHeap is a min-heap of iterators ordered by their current value.
type postingsHeap []Postings

func (h postingsHeap) Len() int           { return len(h) }
func (h postingsHeap) Less(i, j int) bool { return h[i].At() < h[j].At() }
func (h *postingsHeap) Swap(i, j int)     { (*h)[i], (*h)[j] = (*h)[j], (*h)[i] }

func (h *postingsHeap) Push(x interface{}) {
	*h = append(*h, x.(Postings))
}

func (h *postingsHeap) Pop() interface{} {
	old := *h
	n := len(old)
	x := old[n-1]
	*h = old[0 : n-1]
	return x
}

type mergedPostings struct {
	h           postingsHeap
	initialized bool
	cur         uint64
	err         error
}

func newMergedPostings(p []Postings) (m *mergedPostings, nonEmpty bool) {
	ph := make(postingsHeap, 0, len(p))

	for _, it := range p {
		// NOTE: mergedPostings struct requires the user to issue an initial Next.
		if it.Next() {
			ph = append(ph, it)
		} else {
			if it.Err() != nil {
				return &mergedPostings{err: it.Err()}, true
			}
		}
	}

	if len(ph) == 0 {
		return nil, false
	}
	return &mergedPostings{h: ph}, true
}

func (it *mergedPostings) Next() bool {
	if it.h.Len() == 0 || it.err != nil {
		return false
	}

	// The user must issue an initial Next.
	if !it.initialized {
		heap.Init(&it.h)
		it.cur = it.h[0].At()
		it.initialized = true
		return true
	}

	for {
		cur := it.h[0]
		if !cur.Next() {
			// Iterator exhausted (or failed): drop it from the heap.
			heap.Pop(&it.h)
			if cur.Err() != nil {
				it.err = cur.Err()
				return false
			}
			if it.h.Len() == 0 {
				return false
			}
		} else {
			// Value of top of heap has changed, re-heapify.
			heap.Fix(&it.h, 0)
		}

		// Skip duplicates across the merged iterators.
		if it.h[0].At() != it.cur {
			it.cur = it.h[0].At()
			return true
		}
	}
}

func (it *mergedPostings) Seek(id uint64) bool {
	if it.h.Len() == 0 || it.err != nil {
		return false
	}
	if !it.initialized {
		if !it.Next() {
			return false
		}
	}
	for it.cur < id {
		cur := it.h[0]
		if !cur.Seek(id) {
			heap.Pop(&it.h)
			if cur.Err() != nil {
				it.err = cur.Err()
				return false
			}
			if it.h.Len() == 0 {
				return false
			}
		} else {
			// Value of top of heap has changed, re-heapify.
			heap.Fix(&it.h, 0)
		}

		it.cur = it.h[0].At()
	}
	return true
}

// Value receiver is sufficient: At only reads the current value.
func (it mergedPostings) At() uint64 {
	return it.cur
}

func (it mergedPostings) Err() error {
	return it.err
}
// Without returns a new postings list that contains all elements from the full list that
// are not in the drop list.
func Without(full, drop Postings) Postings {
	if full == EmptyPostings() {
		return EmptyPostings()
	}

	if drop == EmptyPostings() {
		return full
	}
	return newRemovedPostings(full, drop)
}

type removedPostings struct {
	full, remove Postings

	cur         uint64
	initialized bool
	// fok/rok cache whether full/remove still have a current element.
	fok, rok bool
}

func newRemovedPostings(full, remove Postings) *removedPostings {
	return &removedPostings{
		full:   full,
		remove: remove,
	}
}

func (rp *removedPostings) At() uint64 {
	return rp.cur
}

func (rp *removedPostings) Next() bool {
	// Both sub-iterators need an initial Next before comparing values.
	if !rp.initialized {
		rp.fok = rp.full.Next()
		rp.rok = rp.remove.Next()
		rp.initialized = true
	}
	for {
		if !rp.fok {
			return false
		}

		if !rp.rok {
			// Nothing left to remove: pass through the full iterator.
			rp.cur = rp.full.At()
			rp.fok = rp.full.Next()
			return true
		}

		fcur, rcur := rp.full.At(), rp.remove.At()
		if fcur < rcur {
			rp.cur = fcur
			rp.fok = rp.full.Next()

			return true
		} else if rcur < fcur {
			// Forward the remove postings to the right position.
			rp.rok = rp.remove.Seek(fcur)
		} else {
			// Skip the current posting.
			rp.fok = rp.full.Next()
		}
	}
}

func (rp *removedPostings) Seek(id uint64) bool {
	if rp.cur >= id {
		return true
	}

	rp.fok = rp.full.Seek(id)
	rp.rok = rp.remove.Seek(id)
	rp.initialized = true

	return rp.Next()
}

func (rp *removedPostings) Err() error {
	if rp.full.Err() != nil {
		return rp.full.Err()
	}

	return rp.remove.Err()
}
// ListPostings implements the Postings interface over a plain list.
type ListPostings struct {
	list []uint64 // remaining (not yet consumed) values
	cur  uint64   // current value, 0 when exhausted
}

func NewListPostings(list []uint64) Postings {
	return newListPostings(list...)
}

func newListPostings(list ...uint64) *ListPostings {
	return &ListPostings{list: list}
}

func (it *ListPostings) At() uint64 {
	return it.cur
}

func (it *ListPostings) Next() bool {
	if len(it.list) > 0 {
		it.cur = it.list[0]
		it.list = it.list[1:]
		return true
	}
	it.cur = 0
	return false
}

func (it *ListPostings) Seek(x uint64) bool {
	// If the current value satisfies, then return.
	if it.cur >= x {
		return true
	}
	if len(it.list) == 0 {
		return false
	}

	// Do binary search between current position and end.
	i := sort.Search(len(it.list), func(i int) bool {
		return it.list[i] >= x
	})
	if i < len(it.list) {
		it.cur = it.list[i]
		it.list = it.list[i+1:]
		return true
	}
	it.list = nil
	return false
}

// Err always returns nil; a plain in-memory list cannot fail.
func (it *ListPostings) Err() error {
	return nil
}
// bigEndianPostings implements the Postings interface over a byte stream of
// big endian numbers.
type bigEndianPostings struct {
	list []byte // remaining encoded values, 4 bytes each
	cur  uint32
}

func newBigEndianPostings(list []byte) *bigEndianPostings {
	return &bigEndianPostings{list: list}
}

func (it *bigEndianPostings) At() uint64 {
	return uint64(it.cur)
}

func (it *bigEndianPostings) Next() bool {
	if len(it.list) >= 4 {
		it.cur = binary.BigEndian.Uint32(it.list)
		it.list = it.list[4:]
		return true
	}
	return false
}

func (it *bigEndianPostings) Seek(x uint64) bool {
	if uint64(it.cur) >= x {
		return true
	}

	num := len(it.list) / 4
	// Do binary search between current position and end.
	i := sort.Search(num, func(i int) bool {
		return binary.BigEndian.Uint32(it.list[i*4:]) >= uint32(x)
	})
	if i < num {
		j := i * 4
		it.cur = binary.BigEndian.Uint32(it.list[j:])
		it.list = it.list[j+4:]
		return true
	}
	it.list = nil
	return false
}

// Err always returns nil; decoding from an in-memory buffer cannot fail.
func (it *bigEndianPostings) Err() error {
	return nil
}
| GoogleCloudPlatform/prometheus-engine | vendor/github.com/prometheus/prometheus/tsdb/index/postings.go | GO | apache-2.0 | 16,432 |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
namespace Restful.Collections.Generic
{
/// <summary>
/// 表示键值对的线程安全泛型集合
/// </summary>
/// <typeparam name="TKey">键类型</typeparam>
/// <typeparam name="TValue">值类型</typeparam>
[Serializable]
public class ThreadSafeDictionary<TKey, TValue> : IDictionary<TKey, TValue>
{
private object syncRoot;
private readonly IDictionary<TKey, TValue> dictionary;
public ThreadSafeDictionary()
{
this.dictionary = new Dictionary<TKey, TValue>();
}
public ThreadSafeDictionary( IDictionary<TKey, TValue> dictionary )
{
this.dictionary = dictionary;
}
public object SyncRoot
{
get
{
if( syncRoot == null )
{
Interlocked.CompareExchange( ref syncRoot, new object(), null );
}
return syncRoot;
}
}
#region IDictionary<TKey,TValue> Members
public bool ContainsKey( TKey key )
{
return dictionary.ContainsKey( key );
}
public void Add( TKey key, TValue value )
{
lock( SyncRoot )
{
dictionary.Add( key, value );
}
}
public bool Remove( TKey key )
{
lock( SyncRoot )
{
return dictionary.Remove( key );
}
}
public bool TryGetValue( TKey key, out TValue value )
{
return dictionary.TryGetValue( key, out value );
}
public TValue this[TKey key]
{
get
{
return dictionary[key];
}
set
{
lock( SyncRoot )
{
dictionary[key] = value;
}
}
}
public ICollection<TKey> Keys
{
get
{
lock( SyncRoot )
{
return dictionary.Keys;
}
}
}
public ICollection<TValue> Values
{
get
{
lock( SyncRoot )
{
return dictionary.Values;
}
}
}
#endregion
#region ICollection<KeyValuePair<TKey,TValue>> Members
public void Add( KeyValuePair<TKey, TValue> item )
{
lock( SyncRoot )
{
dictionary.Add( item );
}
}
public void Clear()
{
lock( SyncRoot )
{
dictionary.Clear();
}
}
public bool Contains( KeyValuePair<TKey, TValue> item )
{
return dictionary.Contains( item );
}
public void CopyTo( KeyValuePair<TKey, TValue>[] array, int arrayIndex )
{
lock( SyncRoot )
{
dictionary.CopyTo( array, arrayIndex );
}
}
public bool Remove( KeyValuePair<TKey, TValue> item )
{
lock( SyncRoot )
{
return dictionary.Remove( item );
}
}
public int Count
{
get { return dictionary.Count; }
}
public bool IsReadOnly
{
get { return false; }
}
#endregion
#region IEnumerable<KeyValuePair<TKey,TValue>> Members
IEnumerator<KeyValuePair<TKey, TValue>> IEnumerable<KeyValuePair<TKey, TValue>>.GetEnumerator()
{
lock( SyncRoot )
{
KeyValuePair<TKey, TValue>[] pairArray = new KeyValuePair<TKey, TValue>[dictionary.Count];
this.dictionary.CopyTo( pairArray, 0 );
return Array.AsReadOnly( pairArray ).GetEnumerator();
}
}
#endregion
#region IEnumerable Members
public IEnumerator GetEnumerator()
{
return ( (IEnumerable<KeyValuePair<TKey, TValue>>)this ).GetEnumerator();
}
#endregion
}
}
| chinadev/Restful | src/Restful/Restful/Collections/Generic/ThreadSafeDictionary.cs | C# | apache-2.0 | 4,347 |
/*
* Copyright (C) 2014 Pedro Vicente Gómez Sánchez.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pedrogomez.renderers.sample.ui.renderers;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.TextView;
import butterknife.Bind;
import butterknife.ButterKnife;
import com.pedrogomez.renderers.sample.R;
import com.pedrogomez.renderers.sample.model.Video;
import java.text.DateFormat;
import java.util.Date;
/**
* VideoRenderer created to contains the live video presentation logic. This VideoRenderer subtype
* change the inflated layout and override the renderer algorithm to add a new phase to render the
* date.
*
* @author Pedro Vicente Gómez Sánchez.
*/
/**
 * VideoRenderer created to contains the live video presentation logic. This VideoRenderer subtype
 * change the inflated layout and override the renderer algorithm to add a new phase to render the
 * date.
 *
 * @author Pedro Vicente Gómez Sánchez.
 */
public class LiveVideoRenderer extends VideoRenderer {

  @Bind(R.id.date) TextView date;

  @Override protected View inflate(LayoutInflater inflater, ViewGroup parent) {
    View inflatedView = inflater.inflate(R.layout.live_video_renderer, parent, false);
    ButterKnife.bind(this, inflatedView);
    return inflatedView;
  }

  @Override protected void setUpView(View rootView) {
    /*
     * Empty implementation substituted with the usage of ButterKnife library by Jake Wharton.
     */
  }

  // Live videos always show the static "live" label instead of the video title.
  @Override protected void renderLabel() {
    getLabel().setText(getContext().getString(R.string.live_label));
  }

  // Live videos never show the favorite marker.
  @Override protected void renderMarker(Video video) {
    getMarker().setVisibility(View.GONE);
  }

  @Override public void render() {
    super.render();
    renderDate();
  }

  /** Renders the current date/time into the date view. */
  private void renderDate() {
    // DateFormat.getDateTimeInstance() replaces the deprecated Date.toLocaleString()
    // (deprecated since JDK 1.1) while keeping locale-dependent formatting.
    String now = DateFormat.getDateTimeInstance().format(new Date());
    date.setText(now);
  }
}
| manolovn/Renderers | sample/src/main/java/com/pedrogomez/renderers/sample/ui/renderers/LiveVideoRenderer.java | Java | apache-2.0 | 2,132 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.execute;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.logging.log4j.Logger;
import org.apache.geode.InternalGemFireException;
import org.apache.geode.SystemFailure;
import org.apache.geode.cache.LowMemoryException;
import org.apache.geode.cache.TransactionException;
import org.apache.geode.cache.client.internal.ProxyCache;
import org.apache.geode.cache.execute.Function;
import org.apache.geode.cache.execute.FunctionContext;
import org.apache.geode.cache.execute.FunctionException;
import org.apache.geode.cache.execute.FunctionInvocationTargetException;
import org.apache.geode.cache.execute.FunctionService;
import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.cache.execute.ResultSender;
import org.apache.geode.cache.query.QueryInvalidException;
import org.apache.geode.distributed.internal.DM;
import org.apache.geode.distributed.internal.DistributionManager;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.cache.tier.sockets.ServerConnection;
import org.apache.geode.internal.i18n.LocalizedStrings;
import org.apache.geode.internal.logging.LogService;
import org.apache.geode.internal.logging.log4j.LocalizedMessage;
/**
* Abstract implementation of InternalExecution interface.
*
* @since GemFire 5.8LA
*
*/
public abstract class AbstractExecution implements InternalExecution {
  private static final Logger logger = LogService.getLogger();

  // True when memberMappedArg supplies per-member arguments instead of args.
  protected boolean isMemberMappedArgument;
  protected MemberMappedArgument memberMappedArg;
  // Plain argument object passed to every invocation (when not member-mapped).
  protected Object args;
  protected ResultCollector rc;
  // Filter keys/routing objects used to narrow the execution target set.
  protected Set filter = new HashSet();
  protected boolean hasRoutingObjects;
  // Set when this execution is a retry after a failure (HA re-execution).
  protected volatile boolean isReExecute = false;
  protected volatile boolean isClientServerMode = false;
  // Ids of nodes on which a previous attempt failed; consulted on re-execution.
  protected Set<String> failedNodes = new HashSet<String>();
  // True when executing by Function instance (function must be serialized),
  // false when executing by registered id; see execute(String)/execute(Function).
  protected boolean isFnSerializationReqd;
  /***
   * yjing The following code is added to get a set of function executing nodes by the data aware
   * procedure
   */
  protected Collection<InternalDistributedMember> executionNodes = null;

  // Callback notified once the set of execution nodes is known (or reset on retry).
  public static interface ExecutionNodesListener {
    public void afterExecutionNodesSet(AbstractExecution execution);
    public void reset();
  }

  protected ExecutionNodesListener executionNodesListener = null;
  protected boolean waitOnException = false;
  protected boolean forwardExceptions = false;
  protected boolean ignoreDepartedMembers = false;
  protected ProxyCache proxyCache;

  // Cache of function id -> encoded function attributes, shared across executions.
  private final static ConcurrentHashMap<String, byte[]> idToFunctionAttributes =
      new ConcurrentHashMap<String, byte[]>();

  // Function-state encoding: bit 0 = HA, bit 1 = hasResult, bit 2 = optimizeForWrite,
  // bit 3 = re-execute (see getFunctionState/getReexecuteFunctionState).
  public static final byte NO_HA_NO_HASRESULT_NO_OPTIMIZEFORWRITE = 0;
  public static final byte NO_HA_HASRESULT_NO_OPTIMIZEFORWRITE = 2;
  public static final byte HA_HASRESULT_NO_OPTIMIZEFORWRITE = 3;
  public static final byte NO_HA_NO_HASRESULT_OPTIMIZEFORWRITE = 4;
  public static final byte NO_HA_HASRESULT_OPTIMIZEFORWRITE = 6;
  public static final byte HA_HASRESULT_OPTIMIZEFORWRITE = 7;
  public static final byte HA_HASRESULT_NO_OPTIMIZEFORWRITE_REEXECUTE = 11;
  public static final byte HA_HASRESULT_OPTIMIZEFORWRITE_REEXECUTE = 15;
public static byte getFunctionState(boolean isHA, boolean hasResult, boolean optimizeForWrite) {
if (isHA) {
if (hasResult) {
if (optimizeForWrite) {
return HA_HASRESULT_OPTIMIZEFORWRITE;
} else {
return HA_HASRESULT_NO_OPTIMIZEFORWRITE;
}
}
return (byte) 1; // ERROR scenario
} else {
if (hasResult) {
if (optimizeForWrite) {
return NO_HA_HASRESULT_OPTIMIZEFORWRITE;
} else {
return NO_HA_HASRESULT_NO_OPTIMIZEFORWRITE;
}
} else {
if (optimizeForWrite) {
return NO_HA_NO_HASRESULT_OPTIMIZEFORWRITE;
} else {
return NO_HA_NO_HASRESULT_NO_OPTIMIZEFORWRITE;
}
}
}
}
public static byte getReexecuteFunctionState(byte fnState) {
if (fnState == HA_HASRESULT_NO_OPTIMIZEFORWRITE) {
return HA_HASRESULT_NO_OPTIMIZEFORWRITE_REEXECUTE;
} else if (fnState == HA_HASRESULT_OPTIMIZEFORWRITE) {
return HA_HASRESULT_OPTIMIZEFORWRITE_REEXECUTE;
}
throw new InternalGemFireException("Wrong fnState provided.");
}
  protected AbstractExecution() {}

  /**
   * Copy constructor: carries over arguments, result collector, member-mapped argument,
   * proxy cache and the client/server + serialization flags from an existing execution.
   */
  protected AbstractExecution(AbstractExecution ae) {
    if (ae.args != null) {
      this.args = ae.args;
    }
    if (ae.rc != null) {
      this.rc = ae.rc;
    }
    if (ae.memberMappedArg != null) {
      this.memberMappedArg = ae.memberMappedArg;
    }
    this.isMemberMappedArgument = ae.isMemberMappedArgument;
    this.isClientServerMode = ae.isClientServerMode;
    if (ae.proxyCache != null) {
      this.proxyCache = ae.proxyCache;
    }
    this.isFnSerializationReqd = ae.isFnSerializationReqd;
  }

  /** Copy constructor used when (re-)executing after a failure. */
  protected AbstractExecution(AbstractExecution ae, boolean isReExecute) {
    this(ae);
    this.isReExecute = isReExecute;
  }
  public boolean isMemberMappedArgument() {
    return this.isMemberMappedArgument;
  }

  /**
   * Returns the argument for the given member: the member-specific argument when a
   * MemberMappedArgument is in use, otherwise the common argument object.
   */
  public Object getArgumentsForMember(String memberId) {
    if (!isMemberMappedArgument) {
      return this.args;
    } else {
      return this.memberMappedArg.getArgumentsForMember(memberId);
    }
  }

  public MemberMappedArgument getMemberMappedArgument() {
    return this.memberMappedArg;
  }

  public Object getArguments() {
    return this.args;
  }

  public ResultCollector getResultCollector() {
    return this.rc;
  }

  public Set getFilter() {
    return this.filter;
  }

  /**
   * Marks this execution as a re-execution and resets any execution-nodes listener.
   * Returns {@code this} for chaining.
   */
  public AbstractExecution setIsReExecute() {
    this.isReExecute = true;
    if (this.executionNodesListener != null) {
      this.executionNodesListener.reset();
    }
    return this;
  }

  public boolean isReExecute() {
    return isReExecute;
  }

  public Set<String> getFailedNodes() {
    return this.failedNodes;
  }

  public void addFailedNode(String failedNode) {
    this.failedNodes.add(failedNode);
  }

  public void clearFailedNodes() {
    this.failedNodes.clear();
  }

  public boolean isClientServerMode() {
    return isClientServerMode;
  }

  public boolean isFnSerializationReqd() {
    return isFnSerializationReqd;
  }

  public Collection<InternalDistributedMember> getExecutionNodes() {
    return this.executionNodes;
  }

  /**
   * Requests capture of the execution-node set: initializes executionNodes to an empty
   * set (the non-null marker checked by setExecutionNodes) and registers the listener.
   */
  public void setRequireExecutionNodes(ExecutionNodesListener listener) {
    this.executionNodes = Collections.emptySet();
    this.executionNodesListener = listener;
  }

  /**
   * Records the execution-node set, but only if capture was requested via
   * setRequireExecutionNodes; notifies the listener, if any.
   */
  public void setExecutionNodes(Set<InternalDistributedMember> nodes) {
    if (this.executionNodes != null) {
      this.executionNodes = nodes;
      if (this.executionNodesListener != null) {
        this.executionNodesListener.afterExecutionNodesSet(this);
      }
    }
  }
  /**
   * Executes a partitioned-region function on this (the local) member.
   *
   * Transactional executions (and non-DistributionManager DMs) run inline on the calling
   * thread; otherwise the work is handed to the DistributionManager's function executor,
   * unless the server connection requested local-thread execution. In every path, if the
   * function declares a result but never called lastResult(), an exception is set on the
   * result sender.
   */
  public void executeFunctionOnLocalPRNode(final Function fn, final FunctionContext cx,
      final PartitionedRegionFunctionResultSender sender, DM dm, boolean isTx) {
    if (dm instanceof DistributionManager && !isTx) {
      // NOTE(review): the byte values 1 ("execute locally") and 3 ("executed locally")
      // are ServerConnection protocol markers — confirm against ServerConnection.
      if (ServerConnection.isExecuteFunctionOnLocalNodeOnly().byteValue() == 1) {
        ServerConnection.executeFunctionOnLocalNodeOnly((byte) 3);// executed locally
        executeFunctionLocally(fn, cx, sender, dm);
        if (!sender.isLastResultReceived() && fn.hasResult()) {
          ((InternalResultSender) sender).setException(new FunctionException(
              LocalizedStrings.ExecuteFunction_THE_FUNCTION_0_DID_NOT_SENT_LAST_RESULT
                  .toString(fn.getId())));
        }
      } else {
        // Hand off to the function-executor thread pool.
        final DistributionManager newDM = (DistributionManager) dm;
        newDM.getFunctionExcecutor().execute(new Runnable() {
          public void run() {
            executeFunctionLocally(fn, cx, sender, newDM);
            if (!sender.isLastResultReceived() && fn.hasResult()) {
              ((InternalResultSender) sender).setException(new FunctionException(
                  LocalizedStrings.ExecuteFunction_THE_FUNCTION_0_DID_NOT_SENT_LAST_RESULT
                      .toString(fn.getId())));
            }
          }
        });
      }
    } else {
      // Transactions must stay on the calling thread.
      executeFunctionLocally(fn, cx, sender, dm);
      if (!sender.isLastResultReceived() && fn.hasResult()) {
        ((InternalResultSender) sender).setException(new FunctionException(
            LocalizedStrings.ExecuteFunction_THE_FUNCTION_0_DID_NOT_SENT_LAST_RESULT
                .toString(fn.getId())));
      }
    }
  }
  // Bug41118: in case of a loner DistributedSystem (or a transaction) do local execution
  // on the calling thread, otherwise hand the execution to the FunctionExecutor pool of
  // the DistributionManager.
  /**
   * Executes a function on this (the local) member. If the function declares a result but
   * never called lastResult(), an exception is set on the result sender.
   */
  public void executeFunctionOnLocalNode(final Function<?> fn, final FunctionContext cx,
      final ResultSender sender, DM dm, final boolean isTx) {
    if (dm instanceof DistributionManager && !isTx) {
      final DistributionManager newDM = (DistributionManager) dm;
      newDM.getFunctionExcecutor().execute(new Runnable() {
        public void run() {
          executeFunctionLocally(fn, cx, sender, newDM);
          if (!((InternalResultSender) sender).isLastResultReceived() && fn.hasResult()) {
            ((InternalResultSender) sender).setException(new FunctionException(
                LocalizedStrings.ExecuteFunction_THE_FUNCTION_0_DID_NOT_SENT_LAST_RESULT
                    .toString(fn.getId())));
          }
        }
      });
    } else {
      executeFunctionLocally(fn, cx, sender, dm);
      if (!((InternalResultSender) sender).isLastResultReceived() && fn.hasResult()) {
        ((InternalResultSender) sender).setException(new FunctionException(
            LocalizedStrings.ExecuteFunction_THE_FUNCTION_0_DID_NOT_SENT_LAST_RESULT
                .toString(fn.getId())));
      }
    }
  }
  /**
   * Invokes {@code fn.execute(cx)} on the calling thread, recording function statistics.
   *
   * FunctionInvocationTargetException and BucketMovedException are wrapped: for HA
   * functions inside an InternalFunctionInvocationTargetException, otherwise directly in
   * a FunctionException; both are routed to handleException(). VirtualMachineError
   * triggers SystemFailure handling and is rethrown; any other Throwable is passed to
   * handleException().
   */
  public void executeFunctionLocally(final Function<?> fn, final FunctionContext cx,
      final ResultSender sender, DM dm) {
    FunctionStats stats = FunctionStats.getFunctionStats(fn.getId(), dm.getSystem());
    try {
      long start = stats.startTime();
      stats.startFunctionExecution(fn.hasResult());
      if (logger.isDebugEnabled()) {
        logger.debug("Executing Function: {} on local node with context: {}", fn.getId(),
            cx.toString());
      }
      fn.execute(cx);
      stats.endFunctionExecution(start, fn.hasResult());
    } catch (FunctionInvocationTargetException fite) {
      FunctionException functionException = null;
      if (fn.isHA()) {
        functionException =
            new FunctionException(new InternalFunctionInvocationTargetException(fite.getMessage()));
      } else {
        functionException = new FunctionException(fite);
      }
      handleException(functionException, fn, cx, sender, dm);
    } catch (BucketMovedException bme) {
      FunctionException functionException = null;
      if (fn.isHA()) {
        functionException =
            new FunctionException(new InternalFunctionInvocationTargetException(bme));
      } else {
        functionException = new FunctionException(bme);
      }
      handleException(functionException, fn, cx, sender, dm);
    } catch (VirtualMachineError e) {
      SystemFailure.initiateFailure(e);
      throw e;
    } catch (Throwable t) {
      SystemFailure.checkFailure();
      handleException(t, fn, cx, sender, dm);
    }
  }
  /**
   * Executes a function by registered id. The function object itself is not serialized
   * (isFnSerializationReqd = false); it is looked up in the local FunctionService.
   *
   * @throws FunctionException if the name is null or no function is registered under it
   */
  public ResultCollector execute(final String functionName) {
    if (functionName == null) {
      throw new FunctionException(
          LocalizedStrings.ExecuteFunction_THE_INPUT_FUNCTION_FOR_THE_EXECUTE_FUNCTION_REQUEST_IS_NULL
              .toLocalizedString());
    }
    this.isFnSerializationReqd = false;
    Function functionObject = FunctionService.getFunction(functionName);
    if (functionObject == null) {
      throw new FunctionException(
          LocalizedStrings.ExecuteFunction_FUNCTION_NAMED_0_IS_NOT_REGISTERED
              .toLocalizedString(functionName));
    }
    return executeFunction(functionObject);
  }

  /**
   * Executes a function instance. The instance must be serialized to the executing nodes
   * (isFnSerializationReqd = true). An HA function must declare a result, and getId()
   * must be non-null.
   *
   * @throws FunctionException if the function is null or HA without a result
   * @throws IllegalArgumentException if the function's id is null
   */
  public ResultCollector execute(Function function) throws FunctionException {
    if (function == null) {
      throw new FunctionException(
          LocalizedStrings.ExecuteFunction_THE_INPUT_FUNCTION_FOR_THE_EXECUTE_FUNCTION_REQUEST_IS_NULL
              .toLocalizedString());
    }
    if (function.isHA() && !function.hasResult()) {
      throw new FunctionException(
          LocalizedStrings.FunctionService_FUNCTION_ATTRIBUTE_MISMATCH.toLocalizedString());
    }
    String id = function.getId();
    if (id == null) {
      throw new IllegalArgumentException(
          LocalizedStrings.ExecuteFunction_THE_FUNCTION_GET_ID_RETURNED_NULL.toLocalizedString());
    }
    this.isFnSerializationReqd = true;
    return executeFunction(function);
  }
  /**
   * Enables/disables waiting on exceptions. Note: this also toggles forwarding of
   * exceptions to the result collector (setForwardExceptions).
   */
  public void setWaitOnExceptionFlag(boolean waitOnException) {
    this.setForwardExceptions(waitOnException);
    this.waitOnException = waitOnException;
  }

  public boolean getWaitOnExceptionFlag() {
    return this.waitOnException;
  }

  public void setForwardExceptions(boolean forward) {
    this.forwardExceptions = forward;
  }

  public boolean isForwardExceptions() {
    return forwardExceptions;
  }

  /**
   * When enabled, departed members are ignored; this implies waiting on exceptions
   * (setWaitOnExceptionFlag(true)).
   */
  @Override
  public void setIgnoreDepartedMembers(boolean ignore) {
    this.ignoreDepartedMembers = ignore;
    if (ignore) {
      setWaitOnExceptionFlag(true);
    }
  }

  public boolean isIgnoreDepartedMembers() {
    return this.ignoreDepartedMembers;
  }
  /** Performs the actual execution of the (validated) function; implemented by subclasses. */
  protected abstract ResultCollector executeFunction(Function fn);

  /**
   * validates whether a function should execute in presence of transaction and HeapCritical
   * members. If the function is the first operation in a transaction, bootstraps the function.
   *
   * @param function the function
   * @param targetMembers the set of members the function will be executed on
   * @throws TransactionException if more than one nodes are targeted within a transaction
   * @throws LowMemoryException if the set contains a heap critical member
   */
  public abstract void validateExecution(Function function, Set targetMembers);
public LocalResultCollector<?, ?> getLocalResultCollector(Function function,
final ResultCollector<?, ?> rc) {
if (rc instanceof LocalResultCollector) {
return (LocalResultCollector) rc;
} else {
return new LocalResultCollectorImpl(function, rc, this);
}
}
  /**
   * Returns the function attributes defined by the functionId, returns null if no function is found
   * for the specified functionId
   *
   * @param functionId id the function was registered under
   * @return the encoded function attributes, or null if unknown
   * @throws FunctionException if functionID passed is null
   * @since GemFire 6.6
   */
  public byte[] getFunctionAttributes(String functionId) {
    if (functionId == null) {
      throw new FunctionException(LocalizedStrings.FunctionService_0_PASSED_IS_NULL
          .toLocalizedString("functionId instance "));
    }
    return idToFunctionAttributes.get(functionId);
  }

  /** Drops the cached attributes for the given function id, if present. */
  public void removeFunctionAttributes(String functionId) {
    idToFunctionAttributes.remove(functionId);
  }

  /** Caches the encoded attributes for the given function id. */
  public void addFunctionAttributes(String functionId, byte[] functionAttributes) {
    idToFunctionAttributes.put(functionId, functionAttributes);
  }
  /**
   * Routes an execution exception to the result sender.
   *
   * For functions with a result: when waiting on / forwarding exceptions, the exception
   * itself is sent as the last result (QueryInvalidException causes are flattened to a
   * message-only FunctionException because they can contain non-serializable objects);
   * otherwise the exception is set on the internal result sender. Functions without a
   * result only log a warning.
   */
  private void handleException(Throwable functionException, final Function fn,
      final FunctionContext cx, final ResultSender sender, DM dm) {
    FunctionStats stats = FunctionStats.getFunctionStats(fn.getId(), dm.getSystem());
    if (logger.isDebugEnabled()) {
      logger.debug("Exception occurred on local node while executing Function: {}", fn.getId(),
          functionException);
    }
    stats.endFunctionExecutionWithException(fn.hasResult());
    if (fn.hasResult()) {
      if (waitOnException || forwardExceptions) {
        if (functionException instanceof FunctionException
            && functionException.getCause() instanceof QueryInvalidException) {
          // Handle this exception differently since it can contain
          // non-serializable objects.
          // java.io.NotSerializableException: antlr.CommonToken
          // create a new FunctionException on the original one's message (not cause).
          functionException = new FunctionException(functionException.getLocalizedMessage());
        }
        sender.lastResult(functionException);
      } else {
        ((InternalResultSender) sender).setException(functionException);
      }
    } else {
      logger.warn(LocalizedMessage.create(LocalizedStrings.FunctionService_EXCEPTION_ON_LOCAL_NODE),
          functionException);
    }
  }
}
| pivotal-amurmann/geode | geode-core/src/main/java/org/apache/geode/internal/cache/execute/AbstractExecution.java | Java | apache-2.0 | 17,394 |
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import logging
import uuid
import six
from stevedore import named
from oslo.config import cfg
from oslo.messaging import serializer as msg_serializer
from oslo.utils import timeutils
# Config options registered on the transport's config object by
# Notifier.__init__().
_notifier_opts = [
    cfg.MultiStrOpt('notification_driver',
                    default=[],
                    help='Driver or drivers to handle sending notifications.'),
    cfg.ListOpt('notification_topics',
                default=['notifications', ],
                deprecated_name='topics',
                deprecated_group='rpc_notifier2',
                help='AMQP topic used for OpenStack notifications.'),
]

_LOG = logging.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class _Driver(object):
    """Abstract base class for notification drivers loaded via stevedore."""

    def __init__(self, conf, topics, transport):
        self.conf = conf
        self.topics = topics
        self.transport = transport

    @abc.abstractmethod
    def notify(self, ctxt, msg, priority, retry):
        """Deliver a single notification message (subclass responsibility)."""
        pass
class Notifier(object):
"""Send notification messages.
The Notifier class is used for sending notification messages over a
messaging transport or other means.
Notification messages follow the following format::
{'message_id': six.text_type(uuid.uuid4()),
'publisher_id': 'compute.host1',
'timestamp': timeutils.utcnow(),
'priority': 'WARN',
'event_type': 'compute.create_instance',
'payload': {'instance_id': 12, ... }}
A Notifier object can be instantiated with a transport object and a
publisher ID:
notifier = messaging.Notifier(get_transport(CONF), 'compute')
and notifications are sent via drivers chosen with the notification_driver
config option and on the topics chosen with the notification_topics config
option.
Alternatively, a Notifier object can be instantiated with a specific
driver or topic::
notifier = notifier.Notifier(RPC_TRANSPORT,
'compute.host',
driver='messaging',
topic='notifications')
Notifier objects are relatively expensive to instantiate (mostly the cost
of loading notification drivers), so it is possible to specialize a given
Notifier object with a different publisher id using the prepare() method::
notifier = notifier.prepare(publisher_id='compute')
notifier.info(ctxt, event_type, payload)
"""
    def __init__(self, transport, publisher_id=None,
                 driver=None, topic=None,
                 serializer=None, retry=None):
        """Construct a Notifier object.

        :param transport: the transport to use for sending messages
        :type transport: oslo.messaging.Transport
        :param publisher_id: field in notifications sent, for example
                             'compute.host1'
        :type publisher_id: str
        :param driver: a driver to lookup from oslo.messaging.notify.drivers
        :type driver: str
        :param topic: the topic which to send messages on
        :type topic: str
        :param serializer: an optional entity serializer
        :type serializer: Serializer
        :param retry: a connection retry configuration;
                      None or -1 means to retry forever,
                      0 means no retry,
                      N means N retries
        :type retry: int
        """
        transport.conf.register_opts(_notifier_opts)

        self.transport = transport
        self.publisher_id = publisher_id
        self.retry = retry

        # Explicit arguments win over the config options.
        self._driver_names = ([driver] if driver is not None
                              else transport.conf.notification_driver)

        self._topics = ([topic] if topic is not None
                        else transport.conf.notification_topics)
        self._serializer = serializer or msg_serializer.NoOpSerializer()

        # Driver loading is the expensive part of constructing a Notifier;
        # prepare() exists so it only has to happen once.
        self._driver_mgr = named.NamedExtensionManager(
            'oslo.messaging.notify.drivers',
            names=self._driver_names,
            invoke_on_load=True,
            invoke_args=[transport.conf],
            invoke_kwds={
                'topics': self._topics,
                'transport': self.transport,
            }
        )
    # Sentinel letting prepare() distinguish "argument not supplied" from an
    # explicit None.
    _marker = object()

    def prepare(self, publisher_id=_marker, retry=_marker):
        """Return a specialized Notifier instance.

        Returns a new Notifier instance with the supplied publisher_id. Allows
        sending notifications from multiple publisher_ids without the overhead
        of notification driver loading.

        :param publisher_id: field in notifications sent, for example
                             'compute.host1'
        :type publisher_id: str
        :param retry: a connection retry configuration;
                      None or -1 means to retry forever,
                      0 means no retry,
                      N means N retries
        :type retry: int
        """
        return _SubNotifier._prepare(self, publisher_id, retry=retry)
def _notify(self, ctxt, event_type, payload, priority, publisher_id=None,
retry=None):
payload = self._serializer.serialize_entity(ctxt, payload)
ctxt = self._serializer.serialize_context(ctxt)
msg = dict(message_id=six.text_type(uuid.uuid4()),
publisher_id=publisher_id or self.publisher_id,
event_type=event_type,
priority=priority,
payload=payload,
timestamp=six.text_type(timeutils.utcnow()))
def do_notify(ext):
try:
ext.obj.notify(ctxt, msg, priority, retry or self.retry)
except Exception as e:
_LOG.exception("Problem '%(e)s' attempting to send to "
"notification system. Payload=%(payload)s",
dict(e=e, payload=payload))
if self._driver_mgr.extensions:
self._driver_mgr.map(do_notify)
    # The methods below are thin per-priority wrappers around _notify().

    def audit(self, ctxt, event_type, payload):
        """Send a notification at audit level.

        :param ctxt: a request context dict
        :type ctxt: dict
        :param event_type: describes the event, for example
                           'compute.create_instance'
        :type event_type: str
        :param payload: the notification payload
        :type payload: dict
        :raises: MessageDeliveryFailure
        """
        self._notify(ctxt, event_type, payload, 'AUDIT')

    def debug(self, ctxt, event_type, payload):
        """Send a notification at debug level.

        :param ctxt: a request context dict
        :type ctxt: dict
        :param event_type: describes the event, for example
                           'compute.create_instance'
        :type event_type: str
        :param payload: the notification payload
        :type payload: dict
        :raises: MessageDeliveryFailure
        """
        self._notify(ctxt, event_type, payload, 'DEBUG')

    def info(self, ctxt, event_type, payload):
        """Send a notification at info level.

        :param ctxt: a request context dict
        :type ctxt: dict
        :param event_type: describes the event, for example
                           'compute.create_instance'
        :type event_type: str
        :param payload: the notification payload
        :type payload: dict
        :raises: MessageDeliveryFailure
        """
        self._notify(ctxt, event_type, payload, 'INFO')

    def warn(self, ctxt, event_type, payload):
        """Send a notification at warning level.

        :param ctxt: a request context dict
        :type ctxt: dict
        :param event_type: describes the event, for example
                           'compute.create_instance'
        :type event_type: str
        :param payload: the notification payload
        :type payload: dict
        :raises: MessageDeliveryFailure
        """
        self._notify(ctxt, event_type, payload, 'WARN')

    # Alias for callers that use the full level name.
    warning = warn

    def error(self, ctxt, event_type, payload):
        """Send a notification at error level.

        :param ctxt: a request context dict
        :type ctxt: dict
        :param event_type: describes the event, for example
                           'compute.create_instance'
        :type event_type: str
        :param payload: the notification payload
        :type payload: dict
        :raises: MessageDeliveryFailure
        """
        self._notify(ctxt, event_type, payload, 'ERROR')

    def critical(self, ctxt, event_type, payload):
        """Send a notification at critical level.

        :param ctxt: a request context dict
        :type ctxt: dict
        :param event_type: describes the event, for example
                           'compute.create_instance'
        :type event_type: str
        :param payload: the notification payload
        :type payload: dict
        :raises: MessageDeliveryFailure
        """
        self._notify(ctxt, event_type, payload, 'CRITICAL')

    def sample(self, ctxt, event_type, payload):
        """Send a notification at sample level.

        Sample notifications are for high-frequency events
        that typically contain small payloads. eg: "CPU = 70%"

        Not all drivers support the sample level
        (log, for example) so these could be dropped.

        :param ctxt: a request context dict
        :type ctxt: dict
        :param event_type: describes the event, for example
                           'compute.create_instance'
        :type event_type: str
        :param payload: the notification payload
        :type payload: dict
        :raises: MessageDeliveryFailure
        """
        self._notify(ctxt, event_type, payload, 'SAMPLE')
class _SubNotifier(Notifier):
    """Specialization of a Notifier that shares the parent's already-loaded
    drivers and serializer while overriding publisher_id and/or retry.

    Created via Notifier.prepare().
    """

    _marker = Notifier._marker

    def __init__(self, base, publisher_id, retry):
        self._base = base
        self.transport = base.transport
        self.publisher_id = publisher_id
        self.retry = retry

        # Reuse the base notifier's serializer and driver manager instead of
        # paying the driver-loading cost again.
        self._serializer = self._base._serializer
        self._driver_mgr = self._base._driver_mgr

    def _notify(self, ctxt, event_type, payload, priority):
        super(_SubNotifier, self)._notify(ctxt, event_type, payload, priority)

    @classmethod
    def _prepare(cls, base, publisher_id=_marker, retry=_marker):
        # Fall back to the base notifier's values for any argument the caller
        # did not supply (the _marker sentinel allows an explicit None).
        if publisher_id is cls._marker:
            publisher_id = base.publisher_id
        if retry is cls._marker:
            retry = base.retry
        return cls(base, publisher_id, retry=retry)
| eayunstack/oslo.messaging | oslo/messaging/notify/notifier.py | Python | apache-2.0 | 11,163 |
// Copyright 2015 The LUCI Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package authdb
import (
"fmt"
"net"
"go.chromium.org/luci/auth/identity"
"go.chromium.org/luci/server/auth/service/protocol"
)
// validateAuthDB returns nil if AuthDB looks correct.
//
// It checks that each group is well-formed (see validateAuthGroup), that each
// IP whitelist parses (see validateIPWhitelist) and, when realms are present,
// that each realm's bindings reference in-bounds permission and condition
// indexes (see validateRealm).
func validateAuthDB(db *protocol.AuthDB) error {
	groups := make(map[string]*protocol.AuthGroup, len(db.Groups))
	for _, g := range db.Groups {
		groups[g.Name] = g
	}
	for name := range groups {
		if err := validateAuthGroup(name, groups); err != nil {
			return err
		}
	}
	for _, wl := range db.IpWhitelists {
		if err := validateIPWhitelist(wl); err != nil {
			// (fixed "whitlist" typo in this error message)
			return fmt.Errorf("auth: bad IP whitelist %q - %s", wl.Name, err)
		}
	}
	if db.Realms != nil {
		perms := uint32(len(db.Realms.Permissions))
		conds := uint32(len(db.Realms.Conditions))
		for _, realm := range db.Realms.Realms {
			if err := validateRealm(realm, perms, conds); err != nil {
				return fmt.Errorf("auth: bad realm %q - %s", realm.Name, err)
			}
		}
	}
	return nil
}
// validateAuthGroup returns nil if AuthGroup looks correct.
//
// "Correct" means every member is a parsable identity, every glob is a
// parsable identity glob, every nested group exists in `groups`, and the
// group does not participate in a dependency cycle.
func validateAuthGroup(name string, groups map[string]*protocol.AuthGroup) error {
	g := groups[name]
	for _, ident := range g.Members {
		if _, err := identity.MakeIdentity(ident); err != nil {
			return fmt.Errorf("auth: invalid identity %q in group %q - %s", ident, name, err)
		}
	}
	for _, glob := range g.Globs {
		if _, err := identity.MakeGlob(glob); err != nil {
			return fmt.Errorf("auth: invalid glob %q in group %q - %s", glob, name, err)
		}
	}
	for _, nested := range g.Nested {
		if groups[nested] == nil {
			return fmt.Errorf("auth: unknown nested group %q in group %q", nested, name)
		}
	}
	if cycle := findGroupCycle(name, groups); len(cycle) != 0 {
		return fmt.Errorf("auth: dependency cycle found - %v", cycle)
	}
	return nil
}
// findGroupCycle searches for a group dependency cycle that contains group
// `name`. Returns list of groups that form the cycle if found, empty list
// if no cycles. Unknown groups are considered empty.
//
// Implemented as a depth-first search over the "nested group" edges.
func findGroupCycle(name string, groups map[string]*protocol.AuthGroup) []string {
	// Set of groups that are completely explored (all subtree is traversed).
	visited := map[string]bool{}

	// Stack of groups that are being explored now. In case a cycle is detected
	// it would contain that cycle.
	var visiting []string

	// Recursively explores `group` subtree, returns true if finds a cycle.
	var visit func(string) bool
	visit = func(group string) bool {
		g := groups[group]
		if g == nil {
			// Unknown groups have no edges; mark as done.
			visited[group] = true
			return false
		}
		visiting = append(visiting, group)
		for _, nested := range g.GetNested() {
			// Cross edge. Can happen in diamond-like graph, not a cycle.
			if visited[nested] {
				continue
			}
			// If `group` references its own ancestor -> cycle is detected.
			for _, v := range visiting {
				if v == nested {
					return true
				}
			}
			// Explore subtree.
			if visit(nested) {
				return true
			}
		}
		// Fully explored: pop from the stack and mark visited.
		visiting = visiting[:len(visiting)-1]
		visited[group] = true
		return false
	}
	visit(name)
	return visiting // will contain a cycle, if any
}
// validateIPWhitelist checks that every subnet in the whitelist is a valid
// CIDR string.
func validateIPWhitelist(wl *protocol.AuthIPWhitelist) error {
	for _, subnet := range wl.Subnets {
		_, _, err := net.ParseCIDR(subnet)
		if err != nil {
			return fmt.Errorf("bad subnet %q - %s", subnet, err)
		}
	}
	return nil
}
// validateRealm checks indexes of permissions and conditions in bindings.
//
// permsCount and condsCount are the lengths of the AuthDB-level permission and
// condition tables that the binding indexes point into.
func validateRealm(r *protocol.Realm, permsCount, condsCount uint32) error {
	for _, b := range r.Bindings {
		for _, perm := range b.Permissions {
			if perm >= permsCount {
				return fmt.Errorf("referencing out-of-bounds permission: %d>=%d", perm, permsCount)
			}
		}
		for _, cond := range b.Conditions {
			if cond >= condsCount {
				return fmt.Errorf("referencing out-of-bounds condition: %d>=%d", cond, condsCount)
			}
		}
	}
	return nil
}
| luci/luci-go | server/auth/authdb/validation.go | GO | apache-2.0 | 4,474 |
/*
* Minimalist Object Storage, (C) 2014 Minio, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Minio - Object storage inspired by Amazon S3 and Facebook Haystack.
package main
| flandr/minio | doc.go | GO | apache-2.0 | 701 |
#!/usr/bin/env python
# Copyright (c) 2015 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""pylint error checking."""
from __future__ import print_function
import json
import os
import re
import sys
from pylint import lint
from pylint.reporters import text
from six.moves import cStringIO as StringIO
# Pylint message codes enabled for this check; everything else is disabled.
# http://pylint-messages.wikidot.com/all-codes
ENABLED_CODES=(
    # refactor category
    "R0801", "R0911", "R0912", "R0913", "R0914", "R0915",
    # warning category
    "W0612", "W0613", "W0703",
    # convention category
    "C1001")

# File (relative to the repo root) holding the accepted/known pylint errors.
KNOWN_PYLINT_EXCEPTIONS_FILE = "tools/pylint_exceptions"
class LintOutput(object):
    """One parsed line of pylint's parseable-text output.

    Also caches the source file most recently read by get_line_content(),
    since consecutive errors usually refer to the same file.
    """

    # Class-level cache: last file read and its lines.
    _cached_filename = None
    _cached_content = None

    def __init__(self, filename, lineno, line_content, code, message,
                 lintoutput):
        self.filename = filename
        self.lineno = lineno
        self.line_content = line_content
        self.code = code
        self.message = message
        self.lintoutput = lintoutput

    @classmethod
    def get_duplicate_code_location(cls, remaining_lines):
        """Extract (filename, lineno) from the continuation line pylint emits
        for duplicate-code reports (format: '==module.path:lineno')."""
        module, lineno = remaining_lines.pop(0)[2:].split(":")
        filename = module.replace(".", os.sep) + ".py"
        return filename, int(lineno)

    @classmethod
    def get_line_content(cls, filename, lineno):
        """Return the first non-empty source line at or after 1-based lineno."""
        if cls._cached_filename != filename:
            with open(filename) as f:
                cls._cached_content = list(f.readlines())
                cls._cached_filename = filename
        # find first non-empty line
        lineno -= 1
        while True:
            line_content = cls._cached_content[lineno].rstrip()
            lineno +=1
            if line_content:
                return line_content

    @classmethod
    def from_line(cls, line, remaining_lines):
        """Parse one pylint output line into a LintOutput, or return None if
        the line is not an error report. May consume continuation lines from
        remaining_lines (duplicate-code reports)."""
        m = re.search(r"(\S+):(\d+): \[(\S+)(, \S*)?] (.*)", line)
        if not m:
            return None
        matched = m.groups()
        filename, lineno, code, message = (matched[0], int(matched[1]),
                                           matched[2], matched[-1])
        # duplicate code output needs special handling
        if "duplicate-code" in code:
            filename, lineno = cls.get_duplicate_code_location(remaining_lines)
            # fixes incorrectly reported file path
            line = line.replace(matched[0], filename)
        line_content = cls.get_line_content(filename, lineno)
        return cls(filename, lineno, line_content, code, message,
                   line.rstrip())

    @classmethod
    def from_msg_to_dict(cls, msg):
        """From the output of pylint msg, to a dict, where each key
        is a unique error identifier, value is a list of LintOutput
        """
        result = {}
        lines = msg.splitlines()
        while lines:
            line = lines.pop(0)
            obj = cls.from_line(line, lines)
            if not obj:
                continue
            key = obj.key()
            if key not in result:
                result[key] = []
            result[key].append(obj)
        return result

    def key(self):
        # Identifies an error by its message plus the offending source text,
        # so it stays stable when line numbers shift.
        return self.message, self.line_content.strip()

    def json(self):
        return json.dumps(self.__dict__)

    def review_str(self):
        return ("File %(filename)s\nLine %(lineno)d:%(line_content)s\n"
                "%(code)s: %(message)s" % self.__dict__)
class ErrorKeys(object):
    """(De)serialization of the set of known-error keys, one JSON list per
    line, with '#' comment lines."""

    @classmethod
    def print_json(cls, errors, output=sys.stdout):
        """Write a header comment plus one JSON-encoded error key per line."""
        print("# automatically generated by tools/lintstack.py", file=output)
        for i in sorted(errors.keys()):
            print(json.dumps(i), file=output)

    @classmethod
    def from_file(cls, filename):
        """Load error keys from filename, skipping comments and blank lines.

        :returns: a set of tuples, one per recorded error key.
        """
        keys = set()
        # Use a context manager so the file handle is closed (the original
        # iterated over a bare open() and leaked it), and skip blank lines,
        # which would otherwise crash json.loads().
        with open(filename) as f:
            for line in f:
                line = line.strip()
                if line and not line.startswith("#"):
                    d = json.loads(line)
                    keys.add(tuple(d))
        return keys
def run_pylint():
    """Run pylint over the 'murano' package and return its parseable output.

    Only the checks listed in ENABLED_CODES are enabled; all other checks
    are disabled.
    """
    buff = StringIO()
    reporter = text.ParseableTextReporter(output=buff)
    args = ["-rn", "--disable=all", "--enable=" + ",".join(ENABLED_CODES),"murano"]
    lint.Run(args, reporter=reporter, exit=False)
    val = buff.getvalue()
    buff.close()
    return val
def generate_error_keys(msg=None):
    """Regenerate the known-exceptions file from pylint output.

    :param msg: pre-captured pylint output; when None, pylint is run.
    """
    print("Generating", KNOWN_PYLINT_EXCEPTIONS_FILE)
    if msg is None:
        msg = run_pylint()
    errors = LintOutput.from_msg_to_dict(msg)
    with open(KNOWN_PYLINT_EXCEPTIONS_FILE, "w") as f:
        ErrorKeys.print_json(errors, output=f)
def validate(newmsg=None):
    """Compare a fresh pylint run against the known-exceptions file.

    Prints every error not present in the known set and exits with
    status 1 when any new error is found.

    :param newmsg: pre-captured pylint output; when None, pylint is run.
    """
    print("Loading", KNOWN_PYLINT_EXCEPTIONS_FILE)
    known = ErrorKeys.from_file(KNOWN_PYLINT_EXCEPTIONS_FILE)
    if newmsg is None:
        print("Running pylint. Be patient...")
        newmsg = run_pylint()
    errors = LintOutput.from_msg_to_dict(newmsg)
    print()
    print("Unique errors reported by pylint: was %d, now %d."
          % (len(known), len(errors)))
    passed = True
    for err_key, err_list in errors.items():
        for err in err_list:
            if err_key not in known:
                # A new error that is not in the exceptions file.
                print()
                print(err.lintoutput)
                print(err.review_str())
                passed = False
    if passed:
        print("Congrats! pylint check passed.")
        # Report known exceptions that no longer occur so the file can
        # be trimmed by regenerating it.
        redundant = known - set(errors.keys())
        if redundant:
            print("Extra credit: some known pylint exceptions disappeared.")
            for i in sorted(redundant):
                print(json.dumps(i))
            print("Consider regenerating the exception file if you will.")
    else:
        print()
        print("Please fix the errors above. If you believe they are false "
              "positives, run 'tools/lintstack.py generate' to overwrite.")
        sys.exit(1)
def usage():
    # Print command-line help for this tool (runtime text kept as-is).
    print("""Usage: tools/lintstack.py [generate|validate]
To generate pylint_exceptions file: tools/lintstack.py generate
To validate the current commit: tools/lintstack.py
""")
def main():
    # Dispatch on the first CLI argument; default action validates the
    # current tree against the known-exceptions file.
    option = "validate"
    if len(sys.argv) > 1:
        option = sys.argv[1]
    if option == "generate":
        generate_error_keys()
    elif option == "validate":
        validate()
    else:
        usage()

if __name__ == "__main__":
    main()
| chenyujie/hybrid-murano | tools/lintstack.py | Python | apache-2.0 | 6,700 |
# Copyright 2013 Josh Durgin
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from lxml import etree
import webob
from cinder.api import extensions
from cinder.api.v1 import volumes
from cinder import context
from cinder import db
from cinder import exception
from cinder import flags
from cinder import test
from cinder.tests.api import fakes
from cinder.tests.api.v2 import stubs
from cinder.tests.image import fake as fake_image
from cinder.volume import api as volume_api
FLAGS = flags.FLAGS
NS = '{http://docs.openstack.org/volume/api/v1}'
TEST_SNAPSHOT_UUID = '00000000-0000-0000-0000-000000000001'
def stub_snapshot_get(self, context, snapshot_id):
    """Fake volume_api.API.get_snapshot; only TEST_SNAPSHOT_UUID exists."""
    if snapshot_id != TEST_SNAPSHOT_UUID:
        raise exception.NotFound
    return {
        'id': snapshot_id,
        'volume_id': 12,
        'status': 'available',
        'volume_size': 100,
        'created_at': None,
        'display_name': 'Default name',
        'display_description': 'Default description',
    }
class VolumeApiTest(test.TestCase):
def setUp(self):
super(VolumeApiTest, self).setUp()
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
fake_image.stub_out_image_service(self.stubs)
self.controller = volumes.VolumeController(self.ext_mgr)
self.stubs.Set(db, 'volume_get_all', stubs.stub_volume_get_all)
self.stubs.Set(db, 'volume_get_all_by_project',
stubs.stub_volume_get_all_by_project)
self.stubs.Set(volume_api.API, 'get', stubs.stub_volume_get)
self.stubs.Set(volume_api.API, 'delete', stubs.stub_volume_delete)
def test_volume_create(self):
self.stubs.Set(volume_api.API, "create", stubs.stub_volume_create)
vol = {"size": 100,
"display_name": "Volume Test Name",
"display_description": "Volume Test Desc",
"availability_zone": "zone1:host1"}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v1/volumes')
res_dict = self.controller.create(req, body)
expected = {'volume': {'status': 'fakestatus',
'display_description': 'Volume Test Desc',
'availability_zone': 'zone1:host1',
'display_name': 'Volume Test Name',
'attachments': [{'device': '/',
'server_id': 'fakeuuid',
'id': '1',
'volume_id': '1'}],
'bootable': 'false',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {},
'id': '1',
'created_at': datetime.datetime(1, 1, 1,
1, 1, 1),
'size': 100}}
self.assertEqual(res_dict, expected)
def test_volume_create_with_type(self):
vol_type = FLAGS.default_volume_type
db.volume_type_create(context.get_admin_context(),
dict(name=vol_type, extra_specs={}))
db_vol_type = db.volume_type_get_by_name(context.get_admin_context(),
vol_type)
vol = {"size": 100,
"display_name": "Volume Test Name",
"display_description": "Volume Test Desc",
"availability_zone": "zone1:host1",
"volume_type": db_vol_type['name'], }
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v1/volumes')
res_dict = self.controller.create(req, body)
self.assertEquals(res_dict['volume']['volume_type'],
db_vol_type['name'])
def test_volume_creation_fails_with_bad_size(self):
vol = {"size": '',
"display_name": "Volume Test Name",
"display_description": "Volume Test Desc",
"availability_zone": "zone1:host1"}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v1/volumes')
self.assertRaises(exception.InvalidInput,
self.controller.create,
req,
body)
def test_volume_create_with_image_id(self):
self.stubs.Set(volume_api.API, "create", stubs.stub_volume_create)
self.ext_mgr.extensions = {'os-image-create': 'fake'}
test_id = "c905cedb-7281-47e4-8a62-f26bc5fc4c77"
vol = {"size": '1',
"display_name": "Volume Test Name",
"display_description": "Volume Test Desc",
"availability_zone": "nova",
"imageRef": test_id}
expected = {'volume': {'status': 'fakestatus',
'display_description': 'Volume Test Desc',
'availability_zone': 'nova',
'display_name': 'Volume Test Name',
'attachments': [{'device': '/',
'server_id': 'fakeuuid',
'id': '1',
'volume_id': '1'}],
'bootable': 'false',
'volume_type': 'vol_type_name',
'image_id': test_id,
'snapshot_id': None,
'source_volid': None,
'metadata': {},
'id': '1',
'created_at': datetime.datetime(1, 1, 1,
1, 1, 1),
'size': '1'}}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v1/volumes')
res_dict = self.controller.create(req, body)
self.assertEqual(res_dict, expected)
def test_volume_create_with_image_id_and_snapshot_id(self):
self.stubs.Set(volume_api.API, "create", stubs.stub_volume_create)
self.stubs.Set(volume_api.API, "get_snapshot", stub_snapshot_get)
self.ext_mgr.extensions = {'os-image-create': 'fake'}
vol = {"size": '1',
"display_name": "Volume Test Name",
"display_description": "Volume Test Desc",
"availability_zone": "cinder",
"imageRef": 'c905cedb-7281-47e4-8a62-f26bc5fc4c77',
"source_volid": None,
"snapshot_id": TEST_SNAPSHOT_UUID}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v1/volumes')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_volume_create_with_image_id_is_integer(self):
self.stubs.Set(volume_api.API, "create", stubs.stub_volume_create)
self.ext_mgr.extensions = {'os-image-create': 'fake'}
vol = {"size": '1',
"display_name": "Volume Test Name",
"display_description": "Volume Test Desc",
"availability_zone": "cinder",
"imageRef": 1234}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v1/volumes')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_volume_create_with_image_id_not_uuid_format(self):
self.stubs.Set(volume_api.API, "create", stubs.stub_volume_create)
self.ext_mgr.extensions = {'os-image-create': 'fake'}
vol = {"size": '1',
"display_name": "Volume Test Name",
"display_description": "Volume Test Desc",
"availability_zone": "cinder",
"imageRef": '12345'}
body = {"volume": vol}
req = fakes.HTTPRequest.blank('/v1/volumes')
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.create,
req,
body)
def test_volume_update(self):
self.stubs.Set(volume_api.API, "update", stubs.stub_volume_update)
updates = {
"display_name": "Updated Test Name",
}
body = {"volume": updates}
req = fakes.HTTPRequest.blank('/v1/volumes/1')
res_dict = self.controller.update(req, '1', body)
expected = {'volume': {
'status': 'fakestatus',
'display_description': 'displaydesc',
'availability_zone': 'fakeaz',
'display_name': 'Updated Test Name',
'attachments': [{
'id': '1',
'volume_id': '1',
'server_id': 'fakeuuid',
'device': '/',
}],
'bootable': 'false',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {},
'id': '1',
'created_at': datetime.datetime(1, 1, 1, 1, 1, 1),
'size': 1,
}}
self.assertEquals(res_dict, expected)
def test_volume_update_metadata(self):
self.stubs.Set(volume_api.API, "update", stubs.stub_volume_update)
updates = {
"metadata": {"qos_max_iops": 2000}
}
body = {"volume": updates}
req = fakes.HTTPRequest.blank('/v1/volumes/1')
res_dict = self.controller.update(req, '1', body)
expected = {'volume': {
'status': 'fakestatus',
'display_description': 'displaydesc',
'availability_zone': 'fakeaz',
'display_name': 'displayname',
'attachments': [{
'id': '1',
'volume_id': '1',
'server_id': 'fakeuuid',
'device': '/',
}],
'bootable': 'false',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {"qos_max_iops": 2000},
'id': '1',
'created_at': datetime.datetime(1, 1, 1, 1, 1, 1),
'size': 1,
}}
self.assertEquals(res_dict, expected)
def test_update_empty_body(self):
body = {}
req = fakes.HTTPRequest.blank('/v1/volumes/1')
self.assertRaises(webob.exc.HTTPUnprocessableEntity,
self.controller.update,
req, '1', body)
def test_update_invalid_body(self):
body = {'display_name': 'missing top level volume key'}
req = fakes.HTTPRequest.blank('/v1/volumes/1')
self.assertRaises(webob.exc.HTTPUnprocessableEntity,
self.controller.update,
req, '1', body)
def test_update_not_found(self):
self.stubs.Set(volume_api.API, "get", stubs.stub_volume_get_notfound)
updates = {
"display_name": "Updated Test Name",
}
body = {"volume": updates}
req = fakes.HTTPRequest.blank('/v1/volumes/1')
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.update,
req, '1', body)
def test_volume_list(self):
self.stubs.Set(volume_api.API, 'get_all',
stubs.stub_volume_get_all_by_project)
req = fakes.HTTPRequest.blank('/v1/volumes')
res_dict = self.controller.index(req)
expected = {'volumes': [{'status': 'fakestatus',
'display_description': 'displaydesc',
'availability_zone': 'fakeaz',
'display_name': 'displayname',
'attachments': [{'device': '/',
'server_id': 'fakeuuid',
'id': '1',
'volume_id': '1'}],
'bootable': 'false',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {},
'id': '1',
'created_at': datetime.datetime(1, 1, 1,
1, 1, 1),
'size': 1}]}
self.assertEqual(res_dict, expected)
def test_volume_list_detail(self):
self.stubs.Set(volume_api.API, 'get_all',
stubs.stub_volume_get_all_by_project)
req = fakes.HTTPRequest.blank('/v1/volumes/detail')
res_dict = self.controller.index(req)
expected = {'volumes': [{'status': 'fakestatus',
'display_description': 'displaydesc',
'availability_zone': 'fakeaz',
'display_name': 'displayname',
'attachments': [{'device': '/',
'server_id': 'fakeuuid',
'id': '1',
'volume_id': '1'}],
'bootable': 'false',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {},
'id': '1',
'created_at': datetime.datetime(1, 1, 1,
1, 1, 1),
'size': 1}]}
self.assertEqual(res_dict, expected)
def test_volume_list_by_name(self):
def stub_volume_get_all_by_project(context, project_id, marker, limit,
sort_key, sort_dir):
return [
stubs.stub_volume(1, display_name='vol1'),
stubs.stub_volume(2, display_name='vol2'),
stubs.stub_volume(3, display_name='vol3'),
]
self.stubs.Set(db, 'volume_get_all_by_project',
stub_volume_get_all_by_project)
# no display_name filter
req = fakes.HTTPRequest.blank('/v1/volumes')
resp = self.controller.index(req)
self.assertEqual(len(resp['volumes']), 3)
# filter on display_name
req = fakes.HTTPRequest.blank('/v1/volumes?display_name=vol2')
resp = self.controller.index(req)
self.assertEqual(len(resp['volumes']), 1)
self.assertEqual(resp['volumes'][0]['display_name'], 'vol2')
# filter no match
req = fakes.HTTPRequest.blank('/v1/volumes?display_name=vol4')
resp = self.controller.index(req)
self.assertEqual(len(resp['volumes']), 0)
def test_volume_list_by_status(self):
def stub_volume_get_all_by_project(context, project_id, marker, limit,
sort_key, sort_dir):
return [
stubs.stub_volume(1, display_name='vol1', status='available'),
stubs.stub_volume(2, display_name='vol2', status='available'),
stubs.stub_volume(3, display_name='vol3', status='in-use'),
]
self.stubs.Set(db, 'volume_get_all_by_project',
stub_volume_get_all_by_project)
# no status filter
req = fakes.HTTPRequest.blank('/v1/volumes')
resp = self.controller.index(req)
self.assertEqual(len(resp['volumes']), 3)
# single match
req = fakes.HTTPRequest.blank('/v1/volumes?status=in-use')
resp = self.controller.index(req)
self.assertEqual(len(resp['volumes']), 1)
self.assertEqual(resp['volumes'][0]['status'], 'in-use')
# multiple match
req = fakes.HTTPRequest.blank('/v1/volumes?status=available')
resp = self.controller.index(req)
self.assertEqual(len(resp['volumes']), 2)
for volume in resp['volumes']:
self.assertEqual(volume['status'], 'available')
# multiple filters
req = fakes.HTTPRequest.blank('/v1/volumes?status=available&'
'display_name=vol1')
resp = self.controller.index(req)
self.assertEqual(len(resp['volumes']), 1)
self.assertEqual(resp['volumes'][0]['display_name'], 'vol1')
self.assertEqual(resp['volumes'][0]['status'], 'available')
# no match
req = fakes.HTTPRequest.blank('/v1/volumes?status=in-use&'
'display_name=vol1')
resp = self.controller.index(req)
self.assertEqual(len(resp['volumes']), 0)
def test_volume_show(self):
req = fakes.HTTPRequest.blank('/v1/volumes/1')
res_dict = self.controller.show(req, '1')
expected = {'volume': {'status': 'fakestatus',
'display_description': 'displaydesc',
'availability_zone': 'fakeaz',
'display_name': 'displayname',
'attachments': [{'device': '/',
'server_id': 'fakeuuid',
'id': '1',
'volume_id': '1'}],
'bootable': 'false',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {},
'id': '1',
'created_at': datetime.datetime(1, 1, 1,
1, 1, 1),
'size': 1}}
self.assertEqual(res_dict, expected)
def test_volume_show_no_attachments(self):
def stub_volume_get(self, context, volume_id):
return stubs.stub_volume(volume_id, attach_status='detached')
self.stubs.Set(volume_api.API, 'get', stub_volume_get)
req = fakes.HTTPRequest.blank('/v1/volumes/1')
res_dict = self.controller.show(req, '1')
expected = {'volume': {'status': 'fakestatus',
'display_description': 'displaydesc',
'availability_zone': 'fakeaz',
'display_name': 'displayname',
'attachments': [],
'bootable': 'false',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {},
'id': '1',
'created_at': datetime.datetime(1, 1, 1,
1, 1, 1),
'size': 1}}
self.assertEqual(res_dict, expected)
def test_volume_show_bootable(self):
def stub_volume_get(self, context, volume_id):
return (stubs.stub_volume(volume_id,
volume_glance_metadata=dict(foo='bar')))
self.stubs.Set(volume_api.API, 'get', stub_volume_get)
req = fakes.HTTPRequest.blank('/v1/volumes/1')
res_dict = self.controller.show(req, '1')
expected = {'volume': {'status': 'fakestatus',
'display_description': 'displaydesc',
'availability_zone': 'fakeaz',
'display_name': 'displayname',
'attachments': [{'device': '/',
'server_id': 'fakeuuid',
'id': '1',
'volume_id': '1'}],
'bootable': 'true',
'volume_type': 'vol_type_name',
'snapshot_id': None,
'source_volid': None,
'metadata': {},
'id': '1',
'created_at': datetime.datetime(1, 1, 1,
1, 1, 1),
'size': 1}}
self.assertEqual(res_dict, expected)
def test_volume_show_no_volume(self):
self.stubs.Set(volume_api.API, "get", stubs.stub_volume_get_notfound)
req = fakes.HTTPRequest.blank('/v1/volumes/1')
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.show,
req,
1)
def test_volume_delete(self):
req = fakes.HTTPRequest.blank('/v1/volumes/1')
resp = self.controller.delete(req, 1)
self.assertEqual(resp.status_int, 202)
def test_volume_delete_no_volume(self):
self.stubs.Set(volume_api.API, "get", stubs.stub_volume_get_notfound)
req = fakes.HTTPRequest.blank('/v1/volumes/1')
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.delete,
req,
1)
def test_admin_list_volumes_limited_to_project(self):
req = fakes.HTTPRequest.blank('/v1/fake/volumes',
use_admin_context=True)
res = self.controller.index(req)
self.assertTrue('volumes' in res)
self.assertEqual(1, len(res['volumes']))
def test_admin_list_volumes_all_tenants(self):
req = fakes.HTTPRequest.blank('/v1/fake/volumes?all_tenants=1',
use_admin_context=True)
res = self.controller.index(req)
self.assertTrue('volumes' in res)
self.assertEqual(3, len(res['volumes']))
def test_all_tenants_non_admin_gets_all_tenants(self):
req = fakes.HTTPRequest.blank('/v1/fake/volumes?all_tenants=1')
res = self.controller.index(req)
self.assertTrue('volumes' in res)
self.assertEqual(1, len(res['volumes']))
def test_non_admin_get_by_project(self):
req = fakes.HTTPRequest.blank('/v1/fake/volumes')
res = self.controller.index(req)
self.assertTrue('volumes' in res)
self.assertEqual(1, len(res['volumes']))
class VolumeSerializerTest(test.TestCase):
def _verify_volume_attachment(self, attach, tree):
for attr in ('id', 'volume_id', 'server_id', 'device'):
self.assertEqual(str(attach[attr]), tree.get(attr))
def _verify_volume(self, vol, tree):
self.assertEqual(tree.tag, NS + 'volume')
for attr in ('id', 'status', 'size', 'availability_zone', 'created_at',
'display_name', 'display_description', 'volume_type',
'snapshot_id'):
self.assertEqual(str(vol[attr]), tree.get(attr))
for child in tree:
print child.tag
self.assertTrue(child.tag in (NS + 'attachments', NS + 'metadata'))
if child.tag == 'attachments':
self.assertEqual(1, len(child))
self.assertEqual('attachment', child[0].tag)
self._verify_volume_attachment(vol['attachments'][0], child[0])
elif child.tag == 'metadata':
not_seen = set(vol['metadata'].keys())
for gr_child in child:
self.assertTrue(gr_child.get("key") in not_seen)
self.assertEqual(str(vol['metadata'][gr_child.get("key")]),
gr_child.text)
not_seen.remove(gr_child.get('key'))
self.assertEqual(0, len(not_seen))
def test_volume_show_create_serializer(self):
serializer = volumes.VolumeTemplate()
raw_volume = dict(
id='vol_id',
status='vol_status',
size=1024,
availability_zone='vol_availability',
created_at=datetime.datetime.now(),
attachments=[dict(id='vol_id',
volume_id='vol_id',
server_id='instance_uuid',
device='/foo')],
display_name='vol_name',
display_description='vol_desc',
volume_type='vol_type',
snapshot_id='snap_id',
source_volid='source_volid',
metadata=dict(foo='bar',
baz='quux', ), )
text = serializer.serialize(dict(volume=raw_volume))
print text
tree = etree.fromstring(text)
self._verify_volume(raw_volume, tree)
def test_volume_index_detail_serializer(self):
serializer = volumes.VolumesTemplate()
raw_volumes = [dict(id='vol1_id',
status='vol1_status',
size=1024,
availability_zone='vol1_availability',
created_at=datetime.datetime.now(),
attachments=[dict(id='vol1_id',
volume_id='vol1_id',
server_id='instance_uuid',
device='/foo1')],
display_name='vol1_name',
display_description='vol1_desc',
volume_type='vol1_type',
snapshot_id='snap1_id',
source_volid=None,
metadata=dict(foo='vol1_foo',
bar='vol1_bar', ), ),
dict(id='vol2_id',
status='vol2_status',
size=1024,
availability_zone='vol2_availability',
created_at=datetime.datetime.now(),
attachments=[dict(id='vol2_id',
volume_id='vol2_id',
server_id='instance_uuid',
device='/foo2')],
display_name='vol2_name',
display_description='vol2_desc',
volume_type='vol2_type',
snapshot_id='snap2_id',
source_volid=None,
metadata=dict(foo='vol2_foo',
bar='vol2_bar', ), )]
text = serializer.serialize(dict(volumes=raw_volumes))
print text
tree = etree.fromstring(text)
self.assertEqual(NS + 'volumes', tree.tag)
self.assertEqual(len(raw_volumes), len(tree))
for idx, child in enumerate(tree):
self._verify_volume(raw_volumes[idx], child)
class TestVolumeCreateRequestXMLDeserializer(test.TestCase):
    """Exercises CreateDeserializer over progressively richer XML bodies."""

    def setUp(self):
        super(TestVolumeCreateRequestXMLDeserializer, self).setUp()
        self.deserializer = volumes.CreateDeserializer()

    def test_minimal_volume(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        size="1"></volume>"""
        request = self.deserializer.deserialize(self_request)
        expected = {"volume": {"size": "1", }, }
        self.assertEquals(request['body'], expected)

    def test_display_name(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        size="1"
        display_name="Volume-xml"></volume>"""
        request = self.deserializer.deserialize(self_request)
        expected = {
            "volume": {
                "size": "1",
                "display_name": "Volume-xml",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_display_description(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        size="1"
        display_name="Volume-xml"
        display_description="description"></volume>"""
        request = self.deserializer.deserialize(self_request)
        expected = {
            "volume": {
                "size": "1",
                "display_name": "Volume-xml",
                "display_description": "description",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_volume_type(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        size="1"
        display_name="Volume-xml"
        display_description="description"
        volume_type="289da7f8-6440-407c-9fb4-7db01ec49164"></volume>"""
        request = self.deserializer.deserialize(self_request)
        # Bug fix: the expected dict previously listed "display_name"
        # twice; the duplicate (shadowed) entry has been removed.
        expected = {
            "volume": {
                "size": "1",
                "display_name": "Volume-xml",
                "display_description": "description",
                "volume_type": "289da7f8-6440-407c-9fb4-7db01ec49164",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_availability_zone(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        size="1"
        display_name="Volume-xml"
        display_description="description"
        volume_type="289da7f8-6440-407c-9fb4-7db01ec49164"
        availability_zone="us-east1"></volume>"""
        request = self.deserializer.deserialize(self_request)
        expected = {
            "volume": {
                "size": "1",
                "display_name": "Volume-xml",
                "display_description": "description",
                "volume_type": "289da7f8-6440-407c-9fb4-7db01ec49164",
                "availability_zone": "us-east1",
            },
        }
        self.assertEquals(request['body'], expected)

    def test_metadata(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        display_name="Volume-xml"
        size="1">
<metadata><meta key="Type">work</meta></metadata></volume>"""
        request = self.deserializer.deserialize(self_request)
        expected = {
            "volume": {
                "display_name": "Volume-xml",
                "size": "1",
                "metadata": {
                    "Type": "work",
                },
            },
        }
        self.assertEquals(request['body'], expected)

    def test_full_volume(self):
        self_request = """
<volume xmlns="http://docs.openstack.org/compute/api/v1.1"
        size="1"
        display_name="Volume-xml"
        display_description="description"
        volume_type="289da7f8-6440-407c-9fb4-7db01ec49164"
        availability_zone="us-east1">
<metadata><meta key="Type">work</meta></metadata></volume>"""
        request = self.deserializer.deserialize(self_request)
        expected = {
            "volume": {
                "size": "1",
                "display_name": "Volume-xml",
                "display_description": "description",
                "volume_type": "289da7f8-6440-407c-9fb4-7db01ec49164",
                "availability_zone": "us-east1",
                "metadata": {
                    "Type": "work",
                },
            },
        }
        self.assertEquals(request['body'], expected)
class VolumesUnprocessableEntityTestCase(test.TestCase):
    """Tests of requests that must yield 422 Unprocessable Entity."""

    def setUp(self):
        super(VolumesUnprocessableEntityTestCase, self).setUp()
        self.ext_mgr = extensions.ExtensionManager()
        self.ext_mgr.extensions = {}
        self.controller = volumes.VolumeController(self.ext_mgr)

    def _unprocessable_volume_create(self, body):
        # POST the given body and expect a 422 from the controller.
        req = fakes.HTTPRequest.blank('/v2/fake/volumes')
        req.method = 'POST'
        self.assertRaises(webob.exc.HTTPUnprocessableEntity,
                          self.controller.create, req, body)

    def test_create_no_body(self):
        self._unprocessable_volume_create(body=None)

    def test_create_missing_volume(self):
        self._unprocessable_volume_create(body={'foo': {'a': 'b'}})

    def test_create_malformed_entity(self):
        self._unprocessable_volume_create(body={'volume': 'string'})
| tomasdubec/openstack-cinder | cinder/tests/api/v1/test_volumes.py | Python | apache-2.0 | 33,659 |
// Copyright 2018 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skylarkbuildapi.repository;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.skylarkinterface.SkylarkCallable;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModule;
import com.google.devtools.build.lib.skylarkinterface.SkylarkModuleCategory;
/** A Skylark structure to deliver information about the system we are running on. */
@SkylarkModule(
    name = "repository_os",
    category = SkylarkModuleCategory.NONE,
    doc = "Various data about the current platform Bazel is running on.")
public interface SkylarkOSApi {

  /** Returns the environment variables visible to the running Bazel process. */
  // Interface members are implicitly public; the redundant modifiers were removed.
  @SkylarkCallable(name = "environ", structField = true, doc = "The list of environment variables.")
  ImmutableMap<String, String> getEnvironmentVariables();

  /** Returns a string identifying the platform Bazel is running on. */
  @SkylarkCallable(
      name = "name",
      structField = true,
      doc = "A string identifying the current system Bazel is running on.")
  String getName();
}
| dropbox/bazel | src/main/java/com/google/devtools/build/lib/skylarkbuildapi/repository/SkylarkOSApi.java | Java | apache-2.0 | 1,570 |
package org.hl7.fhir.dstu3.model;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Tue, Dec 6, 2016 09:42-0500 for FHIR v1.8.0
import java.util.*;
import org.hl7.fhir.utilities.Utilities;
import org.hl7.fhir.dstu3.model.Enumerations.*;
import ca.uhn.fhir.model.api.annotation.ResourceDef;
import ca.uhn.fhir.model.api.annotation.SearchParamDefinition;
import ca.uhn.fhir.model.api.annotation.Child;
import ca.uhn.fhir.model.api.annotation.ChildOrder;
import ca.uhn.fhir.model.api.annotation.Description;
import ca.uhn.fhir.model.api.annotation.Block;
import org.hl7.fhir.instance.model.api.*;
import org.hl7.fhir.exceptions.FHIRException;
/**
* This is the base resource type for everything.
*/
public abstract class Resource extends BaseResource implements IAnyResource {
/**
* The logical id of the resource, as used in the URL for the resource. Once assigned, this value never changes.
*/
@Child(name = "id", type = {IdType.class}, order=0, min=0, max=1, modifier=false, summary=true)
@Description(shortDefinition="Logical id of this artifact", formalDefinition="The logical id of the resource, as used in the URL for the resource. Once assigned, this value never changes." )
protected IdType id;
/**
* The metadata about the resource. This is content that is maintained by the infrastructure. Changes to the content may not always be associated with version changes to the resource.
*/
@Child(name = "meta", type = {Meta.class}, order=1, min=0, max=1, modifier=false, summary=true)
@Description(shortDefinition="Metadata about the resource", formalDefinition="The metadata about the resource. This is content that is maintained by the infrastructure. Changes to the content may not always be associated with version changes to the resource." )
protected Meta meta;
/**
* A reference to a set of rules that were followed when the resource was constructed, and which must be understood when processing the content.
*/
@Child(name = "implicitRules", type = {UriType.class}, order=2, min=0, max=1, modifier=true, summary=true)
@Description(shortDefinition="A set of rules under which this content was created", formalDefinition="A reference to a set of rules that were followed when the resource was constructed, and which must be understood when processing the content." )
protected UriType implicitRules;
/**
* The base language in which the resource is written.
*/
@Child(name = "language", type = {CodeType.class}, order=3, min=0, max=1, modifier=false, summary=false)
@Description(shortDefinition="Language of the resource content", formalDefinition="The base language in which the resource is written." )
@ca.uhn.fhir.model.api.annotation.Binding(valueSet="http://hl7.org/fhir/ValueSet/languages")
protected CodeType language;
private static final long serialVersionUID = -559462759L;
/**
* Constructor
*/
// Default constructor; element fields are created lazily by the accessors.
public Resource() {
  super();
}
/**
* @return {@link #id} (The logical id of the resource, as used in the URL for the resource. Once assigned, this value never changes.). This is the underlying object with id, value and extensions. The accessor "getId" gives direct access to the value
*/
public IdType getIdElement() {
  // Lazily auto-create the element when Configuration permits it;
  // otherwise surface accidental access as an Error.
  if (this.id == null)
    if (Configuration.errorOnAutoCreate())
      throw new Error("Attempt to auto-create Resource.id");
    else if (Configuration.doAutoCreate())
      this.id = new IdType(); // bb
  return this.id;
}
// True when a non-empty logical-id element is present (no auto-create).
public boolean hasIdElement() {
  return this.id != null && !this.id.isEmpty();
}
// Same check as hasIdElement(); both forms are part of the generated API.
public boolean hasId() {
  return this.id != null && !this.id.isEmpty();
}
/**
* @param value {@link #id} (The logical id of the resource, as used in the URL for the resource. Once assigned, this value never changes.). This is the underlying object with id, value and extensions. The accessor "getId" gives direct access to the value
*/
/** Replaces the logical-id element wholesale; returns {@code this} for chaining. */
public Resource setIdElement(IdType value) {
  this.id = value;
  return this;
}
/**
* @return The logical id of the resource, as used in the URL for the resource. Once assigned, this value never changes.
*/
// Null-safe convenience accessor for the primitive value of the id element.
public String getId() {
  return this.id == null ? null : this.id.getValue();
}
/**
* @param value The logical id of the resource, as used in the URL for the resource. Once assigned, this value never changes.
*/
/**
 * Sets the logical id from a plain string; a null/empty value clears the
 * element entirely. Returns {@code this} for chaining.
 */
public Resource setId(String value) {
  if (Utilities.noString(value)) {
    this.id = null;
  } else {
    if (this.id == null) {
      this.id = new IdType();
    }
    this.id.setValue(value);
  }
  return this;
}
/**
* @return {@link #meta} (The metadata about the resource. This is content that is maintained by the infrastructure. Changes to the content may not always be associated with version changes to the resource.)
*/
public Meta getMeta() {
if (this.meta == null)
if (Configuration.errorOnAutoCreate())
throw new Error("Attempt to auto-create Resource.meta");
else if (Configuration.doAutoCreate())
this.meta = new Meta(); // cc
return this.meta;
}
public boolean hasMeta() {
return this.meta != null && !this.meta.isEmpty();
}
/**
* @param value {@link #meta} (The metadata about the resource. This is content that is maintained by the infrastructure. Changes to the content may not always be associated with version changes to the resource.)
*/
public Resource setMeta(Meta value) {
this.meta = value;
return this;
}
/**
* @return {@link #implicitRules} (A reference to a set of rules that were followed when the resource was constructed, and which must be understood when processing the content.). This is the underlying object with id, value and extensions. The accessor "getImplicitRules" gives direct access to the value
*/
public UriType getImplicitRulesElement() {
if (this.implicitRules == null)
if (Configuration.errorOnAutoCreate())
throw new Error("Attempt to auto-create Resource.implicitRules");
else if (Configuration.doAutoCreate())
this.implicitRules = new UriType(); // bb
return this.implicitRules;
}
public boolean hasImplicitRulesElement() {
return this.implicitRules != null && !this.implicitRules.isEmpty();
}
public boolean hasImplicitRules() {
return this.implicitRules != null && !this.implicitRules.isEmpty();
}
/**
* @param value {@link #implicitRules} (A reference to a set of rules that were followed when the resource was constructed, and which must be understood when processing the content.). This is the underlying object with id, value and extensions. The accessor "getImplicitRules" gives direct access to the value
*/
public Resource setImplicitRulesElement(UriType value) {
this.implicitRules = value;
return this;
}
/**
* @return A reference to a set of rules that were followed when the resource was constructed, and which must be understood when processing the content.
*/
public String getImplicitRules() {
return this.implicitRules == null ? null : this.implicitRules.getValue();
}
/**
* @param value A reference to a set of rules that were followed when the resource was constructed, and which must be understood when processing the content.
*/
public Resource setImplicitRules(String value) {
if (Utilities.noString(value))
this.implicitRules = null;
else {
if (this.implicitRules == null)
this.implicitRules = new UriType();
this.implicitRules.setValue(value);
}
return this;
}
/**
* @return {@link #language} (The base language in which the resource is written.). This is the underlying object with id, value and extensions. The accessor "getLanguage" gives direct access to the value
*/
public CodeType getLanguageElement() {
if (this.language == null)
if (Configuration.errorOnAutoCreate())
throw new Error("Attempt to auto-create Resource.language");
else if (Configuration.doAutoCreate())
this.language = new CodeType(); // bb
return this.language;
}
public boolean hasLanguageElement() {
return this.language != null && !this.language.isEmpty();
}
public boolean hasLanguage() {
return this.language != null && !this.language.isEmpty();
}
/**
* @param value {@link #language} (The base language in which the resource is written.). This is the underlying object with id, value and extensions. The accessor "getLanguage" gives direct access to the value
*/
public Resource setLanguageElement(CodeType value) {
this.language = value;
return this;
}
/**
* @return The base language in which the resource is written.
*/
public String getLanguage() {
return this.language == null ? null : this.language.getValue();
}
/**
* @param value The base language in which the resource is written.
*/
public Resource setLanguage(String value) {
if (Utilities.noString(value))
this.language = null;
else {
if (this.language == null)
this.language = new CodeType();
this.language.setValue(value);
}
return this;
}
protected void listChildren(List<Property> childrenList) {
childrenList.add(new Property("id", "id", "The logical id of the resource, as used in the URL for the resource. Once assigned, this value never changes.", 0, java.lang.Integer.MAX_VALUE, id));
childrenList.add(new Property("meta", "Meta", "The metadata about the resource. This is content that is maintained by the infrastructure. Changes to the content may not always be associated with version changes to the resource.", 0, java.lang.Integer.MAX_VALUE, meta));
childrenList.add(new Property("implicitRules", "uri", "A reference to a set of rules that were followed when the resource was constructed, and which must be understood when processing the content.", 0, java.lang.Integer.MAX_VALUE, implicitRules));
childrenList.add(new Property("language", "code", "The base language in which the resource is written.", 0, java.lang.Integer.MAX_VALUE, language));
}
@Override
public Base[] getProperty(int hash, String name, boolean checkValid) throws FHIRException {
switch (hash) {
case 3355: /*id*/ return this.id == null ? new Base[0] : new Base[] {this.id}; // IdType
case 3347973: /*meta*/ return this.meta == null ? new Base[0] : new Base[] {this.meta}; // Meta
case -961826286: /*implicitRules*/ return this.implicitRules == null ? new Base[0] : new Base[] {this.implicitRules}; // UriType
case -1613589672: /*language*/ return this.language == null ? new Base[0] : new Base[] {this.language}; // CodeType
default: return super.getProperty(hash, name, checkValid);
}
}
@Override
public void setProperty(int hash, String name, Base value) throws FHIRException {
switch (hash) {
case 3355: // id
this.id = castToId(value); // IdType
break;
case 3347973: // meta
this.meta = castToMeta(value); // Meta
break;
case -961826286: // implicitRules
this.implicitRules = castToUri(value); // UriType
break;
case -1613589672: // language
this.language = castToCode(value); // CodeType
break;
default: super.setProperty(hash, name, value);
}
}
@Override
public void setProperty(String name, Base value) throws FHIRException {
if (name.equals("id"))
this.id = castToId(value); // IdType
else if (name.equals("meta"))
this.meta = castToMeta(value); // Meta
else if (name.equals("implicitRules"))
this.implicitRules = castToUri(value); // UriType
else if (name.equals("language"))
this.language = castToCode(value); // CodeType
else
super.setProperty(name, value);
}
@Override
public Base makeProperty(int hash, String name) throws FHIRException {
switch (hash) {
case 3355: throw new FHIRException("Cannot make property id as it is not a complex type"); // IdType
case 3347973: return getMeta(); // Meta
case -961826286: throw new FHIRException("Cannot make property implicitRules as it is not a complex type"); // UriType
case -1613589672: throw new FHIRException("Cannot make property language as it is not a complex type"); // CodeType
default: return super.makeProperty(hash, name);
}
}
@Override
public Base addChild(String name) throws FHIRException {
if (name.equals("id")) {
throw new FHIRException("Cannot call addChild on a primitive type Resource.id");
}
else if (name.equals("meta")) {
this.meta = new Meta();
return this.meta;
}
else if (name.equals("implicitRules")) {
throw new FHIRException("Cannot call addChild on a primitive type Resource.implicitRules");
}
else if (name.equals("language")) {
throw new FHIRException("Cannot call addChild on a primitive type Resource.language");
}
else
return super.addChild(name);
}
public String fhirType() {
return "Resource";
}
public abstract Resource copy();
public void copyValues(Resource dst) {
dst.id = id == null ? null : id.copy();
dst.meta = meta == null ? null : meta.copy();
dst.implicitRules = implicitRules == null ? null : implicitRules.copy();
dst.language = language == null ? null : language.copy();
}
@Override
public boolean equalsDeep(Base other) {
if (!super.equalsDeep(other))
return false;
if (!(other instanceof Resource))
return false;
Resource o = (Resource) other;
return compareDeep(id, o.id, true) && compareDeep(meta, o.meta, true) && compareDeep(implicitRules, o.implicitRules, true)
&& compareDeep(language, o.language, true);
}
@Override
public boolean equalsShallow(Base other) {
if (!super.equalsShallow(other))
return false;
if (!(other instanceof Resource))
return false;
Resource o = (Resource) other;
return compareValues(id, o.id, true) && compareValues(implicitRules, o.implicitRules, true) && compareValues(language, o.language, true)
;
}
public boolean isEmpty() {
return super.isEmpty() && ca.uhn.fhir.util.ElementUtil.isEmpty(id, meta, implicitRules
, language);
}
@Override
public String getIdBase() {
return getId();
}
@Override
public void setIdBase(String value) {
setId(value);
}
public abstract ResourceType getResourceType();
}
| Gaduo/hapi-fhir | hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/model/Resource.java | Java | apache-2.0 | 17,114 |
//=====================================================================
//
//File: $RCSfile: SingleDimensionFixedArrayAssigmentTest_1_Generics.java,v $
//Version: $Revision: 1.5 $
//Modified: $Date: 2013/05/10 04:52:46 $
//
// Generated by: UnitTestGenerator.pl
// Version: 1.9
// Matrix: SingleDimensionFixedArrayAssigmentMatrix.txt
//
//(c) Copyright 2007-2014 by Mentor Graphics Corp. All rights reserved.
//
//=====================================================================
// Licensed under the Apache License, Version 2.0 (the "License"); you may not
// use this file except in compliance with the License. You may obtain a copy
// of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations under
// the License.
//=====================================================================
package com.mentor.nucleus.bp.als.oal.test;
import org.eclipse.ui.IEditorPart;
import antlr.RecognitionException;
import antlr.TokenStreamException;
import com.mentor.nucleus.bp.core.*;
import com.mentor.nucleus.bp.core.common.NonRootModelElement;
import com.mentor.nucleus.bp.test.common.*;
import com.mentor.nucleus.bp.ui.canvas.*;
import com.mentor.nucleus.bp.ui.canvas.test.*;
import com.mentor.nucleus.bp.ui.graphics.editor.GraphicalEditor;
import antlr.RecognitionException;
import antlr.TokenStreamException;
public class SingleDimensionFixedArrayAssigmentTest_1_Generics extends ArrayBaseTest {
private static boolean configured = false;
	/**
	 * Returns the name of the expected-results file for this test class.
	 * Delegates unchanged to the base class; overridden only so generated
	 * subclasses have a single hook point for customizing the result name.
	 */
	protected String getResultName() {
		return super.getResultName();
	}
	/**
	 * Constructs the test with the given JUnit test-method name.
	 * The fixed "APVT" argument is the project prefix passed to the
	 * {@code ArrayBaseTest} fixture.
	 *
	 * @param arg0 the name of the test method to run
	 */
	public SingleDimensionFixedArrayAssigmentTest_1_Generics(String arg0) {
		super("APVT", arg0);
	}
protected void setUp() throws Exception {
if (!configured) {
super.setUp();
configured = true;
}
}
protected void tearDown() throws Exception {
try {
super.tearDown();
OalParserTest_Generics.tearDownActionData();
} catch (RecognitionException re) {
// do nothing
} catch (TokenStreamException te) {
// do nothing
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV1D8).
*
*/
public void testT1RV1D3_T1LV1D8() {
test_id = getTestId("T1RV1D3", "T1LV1D8", "1");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV1D8");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV1D9).
*
*/
public void testT1RV1D3_T1LV1D9() {
test_id = getTestId("T1RV1D3", "T1LV1D9", "2");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV1D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100659374 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row (T1LV2D1).
// *
// */
// public void testT1RV1D3_T1LV2D1() {
// test_id = getTestId("T1RV1D3", "T1LV2D1", "3");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV2D1");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure4, checkResult_Failure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100659374 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row (T1LV2D2).
// *
// */
// public void testT1RV1D3_T1LV2D2() {
// test_id = getTestId("T1RV1D3", "T1LV2D2", "4");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV2D2");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure4, checkResult_Failure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV2D3).
*
*/
public void testT1RV1D3_T1LV2D3() {
test_id = getTestId("T1RV1D3", "T1LV2D3", "5");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV2D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV2D4).
*
*/
public void testT1RV1D3_T1LV2D4() {
test_id = getTestId("T1RV1D3", "T1LV2D4", "6");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV2D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV2D5).
*
*/
public void testT1RV1D3_T1LV2D5() {
test_id = getTestId("T1RV1D3", "T1LV2D5", "7");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV2D5");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV2D6).
*
*/
public void testT1RV1D3_T1LV2D6() {
test_id = getTestId("T1RV1D3", "T1LV2D6", "8");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV2D6");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV2D7).
*
*/
public void testT1RV1D3_T1LV2D7() {
test_id = getTestId("T1RV1D3", "T1LV2D7", "9");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV2D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV2D8).
*
*/
public void testT1RV1D3_T1LV2D8() {
test_id = getTestId("T1RV1D3", "T1LV2D8", "10");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV2D8");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV2D9).
*
*/
public void testT1RV1D3_T1LV2D9() {
test_id = getTestId("T1RV1D3", "T1LV2D9", "11");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV2D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100659374 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row (T1LV3D1).
// *
// */
// public void testT1RV1D3_T1LV3D1() {
// test_id = getTestId("T1RV1D3", "T1LV3D1", "12");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV3D1");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100659374 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row (T1LV3D2).
// *
// */
// public void testT1RV1D3_T1LV3D2() {
// test_id = getTestId("T1RV1D3", "T1LV3D2", "13");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV3D2");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV3D3).
*
*/
public void testT1RV1D3_T1LV3D3() {
test_id = getTestId("T1RV1D3", "T1LV3D3", "14");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV3D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV3D4).
*
*/
public void testT1RV1D3_T1LV3D4() {
test_id = getTestId("T1RV1D3", "T1LV3D4", "15");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV3D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV3D5).
*
*/
public void testT1RV1D3_T1LV3D5() {
test_id = getTestId("T1RV1D3", "T1LV3D5", "16");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV3D5");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV3D6).
*
*/
public void testT1RV1D3_T1LV3D6() {
test_id = getTestId("T1RV1D3", "T1LV3D6", "17");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV3D6");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV3D7).
*
*/
public void testT1RV1D3_T1LV3D7() {
test_id = getTestId("T1RV1D3", "T1LV3D7", "18");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV3D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV3D8).
*
*/
public void testT1RV1D3_T1LV3D8() {
test_id = getTestId("T1RV1D3", "T1LV3D8", "19");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV3D8");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV3D9).
*
*/
public void testT1RV1D3_T1LV3D9() {
test_id = getTestId("T1RV1D3", "T1LV3D9", "20");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV3D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100659374 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row (T1LV4D1).
// *
// */
// public void testT1RV1D3_T1LV4D1() {
// test_id = getTestId("T1RV1D3", "T1LV4D1", "21");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV4D1");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100659374 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row (T1LV4D2).
// *
// */
// public void testT1RV1D3_T1LV4D2() {
// test_id = getTestId("T1RV1D3", "T1LV4D2", "22");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV4D2");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV4D3).
*
*/
public void testT1RV1D3_T1LV4D3() {
test_id = getTestId("T1RV1D3", "T1LV4D3", "23");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV4D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV4D4).
*
*/
public void testT1RV1D3_T1LV4D4() {
test_id = getTestId("T1RV1D3", "T1LV4D4", "24");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV4D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV4D5).
*
*/
public void testT1RV1D3_T1LV4D5() {
test_id = getTestId("T1RV1D3", "T1LV4D5", "25");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV4D5");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV4D6).
*
*/
public void testT1RV1D3_T1LV4D6() {
test_id = getTestId("T1RV1D3", "T1LV4D6", "26");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV4D6");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV4D7).
*
*/
public void testT1RV1D3_T1LV4D7() {
test_id = getTestId("T1RV1D3", "T1LV4D7", "27");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV4D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV4D8).
*
*/
public void testT1RV1D3_T1LV4D8() {
test_id = getTestId("T1RV1D3", "T1LV4D8", "28");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV4D8");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV4D9).
*
*/
public void testT1RV1D3_T1LV4D9() {
test_id = getTestId("T1RV1D3", "T1LV4D9", "29");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV4D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100659374 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row (T1LV5D1).
// *
// */
// public void testT1RV1D3_T1LV5D1() {
// test_id = getTestId("T1RV1D3", "T1LV5D1", "30");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV5D1");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100659374 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row (T1LV5D2).
// *
// */
// public void testT1RV1D3_T1LV5D2() {
// test_id = getTestId("T1RV1D3", "T1LV5D2", "31");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV5D2");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV5D3).
*
*/
public void testT1RV1D3_T1LV5D3() {
test_id = getTestId("T1RV1D3", "T1LV5D3", "32");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV5D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV5D4).
*
*/
public void testT1RV1D3_T1LV5D4() {
test_id = getTestId("T1RV1D3", "T1LV5D4", "33");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV5D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV5D5).
*
*/
public void testT1RV1D3_T1LV5D5() {
test_id = getTestId("T1RV1D3", "T1LV5D5", "34");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV5D5");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV5D6).
*
*/
public void testT1RV1D3_T1LV5D6() {
test_id = getTestId("T1RV1D3", "T1LV5D6", "35");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV5D6");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV5D7).
*
*/
public void testT1RV1D3_T1LV5D7() {
test_id = getTestId("T1RV1D3", "T1LV5D7", "36");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV5D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV5D8).
*
*/
public void testT1RV1D3_T1LV5D8() {
test_id = getTestId("T1RV1D3", "T1LV5D8", "37");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV5D8");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV5D9).
*
*/
public void testT1RV1D3_T1LV5D9() {
test_id = getTestId("T1RV1D3", "T1LV5D9", "38");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV5D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100659374 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row (T1LV6D1).
// *
// */
// public void testT1RV1D3_T1LV6D1() {
// test_id = getTestId("T1RV1D3", "T1LV6D1", "39");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV6D1");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100659374 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row (T1LV6D2).
// *
// */
// public void testT1RV1D3_T1LV6D2() {
// test_id = getTestId("T1RV1D3", "T1LV6D2", "40");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV6D2");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV6D3).
*
*/
public void testT1RV1D3_T1LV6D3() {
test_id = getTestId("T1RV1D3", "T1LV6D3", "41");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV6D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV6D4).
*
*/
public void testT1RV1D3_T1LV6D4() {
test_id = getTestId("T1RV1D3", "T1LV6D4", "42");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV6D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV6D5).
*
*/
public void testT1RV1D3_T1LV6D5() {
test_id = getTestId("T1RV1D3", "T1LV6D5", "43");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV6D5");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV6D6).
*
*/
public void testT1RV1D3_T1LV6D6() {
test_id = getTestId("T1RV1D3", "T1LV6D6", "44");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV6D6");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV6D7).
*
*/
public void testT1RV1D3_T1LV6D7() {
test_id = getTestId("T1RV1D3", "T1LV6D7", "45");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV6D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV6D8).
*
*/
public void testT1RV1D3_T1LV6D8() {
test_id = getTestId("T1RV1D3", "T1LV6D8", "46");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV6D8");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV6D9).
*
*/
public void testT1RV1D3_T1LV6D9() {
test_id = getTestId("T1RV1D3", "T1LV6D9", "47");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV6D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100650072 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row (T1LV7D1).
// *
// */
// public void testT1RV1D3_T1LV7D1() {
// test_id = getTestId("T1RV1D3", "T1LV7D1", "48");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV7D1");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100650072 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row (T1LV7D2).
// *
// */
// public void testT1RV1D3_T1LV7D2() {
// test_id = getTestId("T1RV1D3", "T1LV7D2", "49");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV7D2");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV7D3).
*
*/
public void testT1RV1D3_T1LV7D3() {
test_id = getTestId("T1RV1D3", "T1LV7D3", "50");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV7D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV8D1).
*
*/
public void testT1RV1D3_T1LV8D1() {
test_id = getTestId("T1RV1D3", "T1LV8D1", "51");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV8D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure4,
checkResult_ReturnFailure4(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV8D2).
*
*/
public void testT1RV1D3_T1LV8D2() {
test_id = getTestId("T1RV1D3", "T1LV8D2", "52");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV8D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure4,
checkResult_ReturnFailure4(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV8D3).
*
*/
public void testT1RV1D3_T1LV8D3() {
test_id = getTestId("T1RV1D3", "T1LV8D3", "53");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV8D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV9D1).
*
*/
public void testT1RV1D3_T1LV9D1() {
test_id = getTestId("T1RV1D3", "T1LV9D1", "54");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV9D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure4,
checkResult_ReturnFailure4(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV9D2).
*
*/
public void testT1RV1D3_T1LV9D2() {
test_id = getTestId("T1RV1D3", "T1LV9D2", "55");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV9D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure4,
checkResult_ReturnFailure4(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row (T1LV9D3).
*
*/
public void testT1RV1D3_T1LV9D3() {
test_id = getTestId("T1RV1D3", "T1LV9D3", "56");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV9D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row
* (T1LV10D1).
*
*/
public void testT1RV1D3_T1LV10D1() {
test_id = getTestId("T1RV1D3", "T1LV10D1", "57");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV10D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure4,
checkResult_ReturnFailure4(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row
* (T1LV10D2).
*
*/
public void testT1RV1D3_T1LV10D2() {
test_id = getTestId("T1RV1D3", "T1LV10D2", "58");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV10D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure4,
checkResult_ReturnFailure4(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row
* (T1LV10D3).
*
*/
public void testT1RV1D3_T1LV10D3() {
test_id = getTestId("T1RV1D3", "T1LV10D3", "59");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV10D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row
* (T1LV11D1).
*
*/
public void testT1RV1D3_T1LV11D1() {
test_id = getTestId("T1RV1D3", "T1LV11D1", "60");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV11D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure4,
checkResult_ReturnFailure4(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row
* (T1LV11D2).
*
*/
public void testT1RV1D3_T1LV11D2() {
test_id = getTestId("T1RV1D3", "T1LV11D2", "61");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV11D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure4,
checkResult_ReturnFailure4(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D3) and row
* (T1LV11D3).
*
*/
public void testT1RV1D3_T1LV11D3() {
test_id = getTestId("T1RV1D3", "T1LV11D3", "62");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV11D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100650072 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row
// * (T1LV12D1).
// *
// */
// public void testT1RV1D3_T1LV12D1() {
// test_id = getTestId("T1RV1D3", "T1LV12D1", "63");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV12D1");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100650072 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row
// * (T1LV12D2).
// *
// */
// public void testT1RV1D3_T1LV12D2() {
// test_id = getTestId("T1RV1D3", "T1LV12D2", "64");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV12D2");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D3) and row
* (T1LV12D3).
*
*/
public void testT1RV1D3_T1LV12D3() {
test_id = getTestId("T1RV1D3", "T1LV12D3", "65");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV12D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100650072 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row
// * (T1LV13D1).
// *
// */
// public void testT1RV1D3_T1LV13D1() {
// test_id = getTestId("T1RV1D3", "T1LV13D1", "66");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV13D1");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100650072 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row
// * (T1LV13D2).
// *
// */
// public void testT1RV1D3_T1LV13D2() {
// test_id = getTestId("T1RV1D3", "T1LV13D2", "67");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV13D2");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D3) and row
* (T1LV13D3).
*
*/
public void testT1RV1D3_T1LV13D3() {
test_id = getTestId("T1RV1D3", "T1LV13D3", "68");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV13D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100650072 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row
// * (T1LV14D1).
// *
// */
// public void testT1RV1D3_T1LV14D1() {
// test_id = getTestId("T1RV1D3", "T1LV14D1", "69");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV14D1");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100650072 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row
// * (T1LV14D2).
// *
// */
// public void testT1RV1D3_T1LV14D2() {
// test_id = getTestId("T1RV1D3", "T1LV14D2", "70");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV14D2");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D3) and row
* (T1LV14D3).
*
*/
public void testT1RV1D3_T1LV14D3() {
test_id = getTestId("T1RV1D3", "T1LV14D3", "71");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV14D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100650072 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row
// * (T1LV15D1).
// *
// */
// public void testT1RV1D3_T1LV15D1() {
// test_id = getTestId("T1RV1D3", "T1LV15D1", "72");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV15D1");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100650072 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D3) and row
// * (T1LV15D2).
// *
// */
// public void testT1RV1D3_T1LV15D2() {
// test_id = getTestId("T1RV1D3", "T1LV15D2", "73");
//
// String src = selectTRVD("T1RV1D3");
//
// String dest = selectTLVD("T1LV15D2");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(ParamFailure4, checkResult_ParamFailure4(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D3) and row
* (T1LV15D3).
*
*/
public void testT1RV1D3_T1LV15D3() {
test_id = getTestId("T1RV1D3", "T1LV15D3", "74");
String src = selectTRVD("T1RV1D3");
String dest = selectTLVD("T1LV15D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Success, checkResult_Success(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV1D1).
*
*/
public void testT1RV1D4_T1LV1D1() {
test_id = getTestId("T1RV1D4", "T1LV1D1", "75");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV1D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV1D2).
*
*/
public void testT1RV1D4_T1LV1D2() {
test_id = getTestId("T1RV1D4", "T1LV1D2", "76");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV1D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV1D3).
*
*/
public void testT1RV1D4_T1LV1D3() {
test_id = getTestId("T1RV1D4", "T1LV1D3", "77");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV1D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV1D4).
*
*/
public void testT1RV1D4_T1LV1D4() {
test_id = getTestId("T1RV1D4", "T1LV1D4", "78");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV1D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure3, checkResult_Failure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV1D5).
// *
// */
// public void testT1RV1D4_T1LV1D5() {
// test_id = getTestId("T1RV1D4", "T1LV1D5", "79");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV1D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV1D6).
*
*/
public void testT1RV1D4_T1LV1D6() {
test_id = getTestId("T1RV1D4", "T1LV1D6", "80");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV1D6");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure6, checkResult_Failure6(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV1D7).
*
*/
public void testT1RV1D4_T1LV1D7() {
test_id = getTestId("T1RV1D4", "T1LV1D7", "81");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV1D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure3, checkResult_Failure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV1D8).
*
*/
public void testT1RV1D4_T1LV1D8() {
test_id = getTestId("T1RV1D4", "T1LV1D8", "82");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV1D8");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure6, checkResult_Failure6(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV1D9).
*
*/
public void testT1RV1D4_T1LV1D9() {
test_id = getTestId("T1RV1D4", "T1LV1D9", "83");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV1D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV2D1).
*
*/
public void testT1RV1D4_T1LV2D1() {
test_id = getTestId("T1RV1D4", "T1LV2D1", "84");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV2D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV2D2).
*
*/
public void testT1RV1D4_T1LV2D2() {
test_id = getTestId("T1RV1D4", "T1LV2D2", "85");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV2D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV2D3).
*
*/
public void testT1RV1D4_T1LV2D3() {
test_id = getTestId("T1RV1D4", "T1LV2D3", "86");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV2D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV2D4).
*
*/
public void testT1RV1D4_T1LV2D4() {
test_id = getTestId("T1RV1D4", "T1LV2D4", "87");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV2D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure3, checkResult_Failure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV2D5).
// *
// */
// public void testT1RV1D4_T1LV2D5() {
// test_id = getTestId("T1RV1D4", "T1LV2D5", "88");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV2D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV2D6).
*
*/
public void testT1RV1D4_T1LV2D6() {
test_id = getTestId("T1RV1D4", "T1LV2D6", "89");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV2D6");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure6, checkResult_Failure6(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV2D7).
*
*/
public void testT1RV1D4_T1LV2D7() {
test_id = getTestId("T1RV1D4", "T1LV2D7", "90");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV2D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure3, checkResult_Failure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV2D8).
*
*/
public void testT1RV1D4_T1LV2D8() {
test_id = getTestId("T1RV1D4", "T1LV2D8", "91");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV2D8");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure6, checkResult_Failure6(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV2D9).
*
*/
public void testT1RV1D4_T1LV2D9() {
test_id = getTestId("T1RV1D4", "T1LV2D9", "92");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV2D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV3D1).
*
*/
public void testT1RV1D4_T1LV3D1() {
test_id = getTestId("T1RV1D4", "T1LV3D1", "93");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV3D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV3D2).
*
*/
public void testT1RV1D4_T1LV3D2() {
test_id = getTestId("T1RV1D4", "T1LV3D2", "94");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV3D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV3D3).
*
*/
public void testT1RV1D4_T1LV3D3() {
test_id = getTestId("T1RV1D4", "T1LV3D3", "95");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV3D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV3D4).
*
*/
public void testT1RV1D4_T1LV3D4() {
test_id = getTestId("T1RV1D4", "T1LV3D4", "96");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV3D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure3, checkResult_ParamFailure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV3D5).
// *
// */
// public void testT1RV1D4_T1LV3D5() {
// test_id = getTestId("T1RV1D4", "T1LV3D5", "97");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV3D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100676237 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV3D6).
// *
// */
// public void testT1RV1D4_T1LV3D6() {
// test_id = getTestId("T1RV1D4", "T1LV3D6", "98");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV3D6");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure6, checkResult_Failure6(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV3D7).
*
*/
public void testT1RV1D4_T1LV3D7() {
test_id = getTestId("T1RV1D4", "T1LV3D7", "99");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV3D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure3, checkResult_ParamFailure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100676237 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV3D8).
// *
// */
// public void testT1RV1D4_T1LV3D8() {
// test_id = getTestId("T1RV1D4", "T1LV3D8", "100");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV3D8");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure6, checkResult_Failure6(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV3D9).
*
*/
public void testT1RV1D4_T1LV3D9() {
test_id = getTestId("T1RV1D4", "T1LV3D9", "101");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV3D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV4D1).
*
*/
public void testT1RV1D4_T1LV4D1() {
test_id = getTestId("T1RV1D4", "T1LV4D1", "102");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV4D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV4D2).
*
*/
public void testT1RV1D4_T1LV4D2() {
test_id = getTestId("T1RV1D4", "T1LV4D2", "103");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV4D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV4D3).
*
*/
public void testT1RV1D4_T1LV4D3() {
test_id = getTestId("T1RV1D4", "T1LV4D3", "104");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV4D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV4D4).
*
*/
public void testT1RV1D4_T1LV4D4() {
test_id = getTestId("T1RV1D4", "T1LV4D4", "105");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV4D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure3, checkResult_ParamFailure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV4D5).
// *
// */
// public void testT1RV1D4_T1LV4D5() {
// test_id = getTestId("T1RV1D4", "T1LV4D5", "106");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV4D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100676237 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV4D6).
// *
// */
// public void testT1RV1D4_T1LV4D6() {
// test_id = getTestId("T1RV1D4", "T1LV4D6", "107");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV4D6");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure6, checkResult_Failure6(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV4D7).
*
*/
public void testT1RV1D4_T1LV4D7() {
test_id = getTestId("T1RV1D4", "T1LV4D7", "108");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV4D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure3, checkResult_ParamFailure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100676237 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV4D8).
// *
// */
// public void testT1RV1D4_T1LV4D8() {
// test_id = getTestId("T1RV1D4", "T1LV4D8", "109");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV4D8");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure6, checkResult_Failure6(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV4D9).
*
*/
public void testT1RV1D4_T1LV4D9() {
test_id = getTestId("T1RV1D4", "T1LV4D9", "110");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV4D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV5D1).
*
*/
public void testT1RV1D4_T1LV5D1() {
test_id = getTestId("T1RV1D4", "T1LV5D1", "111");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV5D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV5D2).
*
*/
public void testT1RV1D4_T1LV5D2() {
test_id = getTestId("T1RV1D4", "T1LV5D2", "112");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV5D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV5D3).
*
*/
public void testT1RV1D4_T1LV5D3() {
test_id = getTestId("T1RV1D4", "T1LV5D3", "113");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV5D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV5D4).
*
*/
public void testT1RV1D4_T1LV5D4() {
test_id = getTestId("T1RV1D4", "T1LV5D4", "114");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV5D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure3, checkResult_ParamFailure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV5D5).
// *
// */
// public void testT1RV1D4_T1LV5D5() {
// test_id = getTestId("T1RV1D4", "T1LV5D5", "115");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV5D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100676237 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV5D6).
// *
// */
// public void testT1RV1D4_T1LV5D6() {
// test_id = getTestId("T1RV1D4", "T1LV5D6", "116");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV5D6");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure6, checkResult_Failure6(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV5D7).
*
*/
public void testT1RV1D4_T1LV5D7() {
test_id = getTestId("T1RV1D4", "T1LV5D7", "117");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV5D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure3, checkResult_ParamFailure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100676237 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV5D8).
// *
// */
// public void testT1RV1D4_T1LV5D8() {
// test_id = getTestId("T1RV1D4", "T1LV5D8", "118");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV5D8");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure6, checkResult_Failure6(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV5D9).
*
*/
public void testT1RV1D4_T1LV5D9() {
test_id = getTestId("T1RV1D4", "T1LV5D9", "119");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV5D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV6D1).
*
*/
public void testT1RV1D4_T1LV6D1() {
test_id = getTestId("T1RV1D4", "T1LV6D1", "120");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV6D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV6D2).
*
*/
public void testT1RV1D4_T1LV6D2() {
test_id = getTestId("T1RV1D4", "T1LV6D2", "121");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV6D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV6D3).
*
*/
public void testT1RV1D4_T1LV6D3() {
test_id = getTestId("T1RV1D4", "T1LV6D3", "122");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV6D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV6D4).
*
*/
public void testT1RV1D4_T1LV6D4() {
test_id = getTestId("T1RV1D4", "T1LV6D4", "123");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV6D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure3, checkResult_ParamFailure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV6D5).
// *
// */
// public void testT1RV1D4_T1LV6D5() {
// test_id = getTestId("T1RV1D4", "T1LV6D5", "124");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV6D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100676237 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV6D6).
// *
// */
// public void testT1RV1D4_T1LV6D6() {
// test_id = getTestId("T1RV1D4", "T1LV6D6", "125");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV6D6");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure6, checkResult_Failure6(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV6D7).
*
*/
public void testT1RV1D4_T1LV6D7() {
test_id = getTestId("T1RV1D4", "T1LV6D7", "126");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV6D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure3, checkResult_ParamFailure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100676237 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D4) and row (T1LV6D8).
// *
// */
// public void testT1RV1D4_T1LV6D8() {
// test_id = getTestId("T1RV1D4", "T1LV6D8", "127");
//
// String src = selectTRVD("T1RV1D4");
//
// String dest = selectTLVD("T1LV6D8");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure6, checkResult_Failure6(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV6D9).
*
*/
public void testT1RV1D4_T1LV6D9() {
test_id = getTestId("T1RV1D4", "T1LV6D9", "128");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV6D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV7D1).
*
*/
public void testT1RV1D4_T1LV7D1() {
test_id = getTestId("T1RV1D4", "T1LV7D1", "129");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV7D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV7D2).
*
*/
public void testT1RV1D4_T1LV7D2() {
test_id = getTestId("T1RV1D4", "T1LV7D2", "130");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV7D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV7D3).
*
*/
public void testT1RV1D4_T1LV7D3() {
test_id = getTestId("T1RV1D4", "T1LV7D3", "131");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV7D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV8D1).
*
*/
public void testT1RV1D4_T1LV8D1() {
test_id = getTestId("T1RV1D4", "T1LV8D1", "132");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV8D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure2,
checkResult_ReturnFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV8D2).
*
*/
public void testT1RV1D4_T1LV8D2() {
test_id = getTestId("T1RV1D4", "T1LV8D2", "133");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV8D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure2,
checkResult_ReturnFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV8D3).
*
*/
public void testT1RV1D4_T1LV8D3() {
test_id = getTestId("T1RV1D4", "T1LV8D3", "134");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV8D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure2,
checkResult_ReturnFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV9D1).
*
*/
public void testT1RV1D4_T1LV9D1() {
test_id = getTestId("T1RV1D4", "T1LV9D1", "135");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV9D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure2,
checkResult_ReturnFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV9D2).
*
*/
public void testT1RV1D4_T1LV9D2() {
test_id = getTestId("T1RV1D4", "T1LV9D2", "136");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV9D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure2,
checkResult_ReturnFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row (T1LV9D3).
*
*/
public void testT1RV1D4_T1LV9D3() {
test_id = getTestId("T1RV1D4", "T1LV9D3", "137");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV9D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure2,
checkResult_ReturnFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV10D1).
*
*/
public void testT1RV1D4_T1LV10D1() {
test_id = getTestId("T1RV1D4", "T1LV10D1", "138");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV10D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure2,
checkResult_ReturnFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV10D2).
*
*/
public void testT1RV1D4_T1LV10D2() {
test_id = getTestId("T1RV1D4", "T1LV10D2", "139");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV10D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure2,
checkResult_ReturnFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV10D3).
*
*/
public void testT1RV1D4_T1LV10D3() {
test_id = getTestId("T1RV1D4", "T1LV10D3", "140");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV10D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure2,
checkResult_ReturnFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV11D1).
*
*/
public void testT1RV1D4_T1LV11D1() {
test_id = getTestId("T1RV1D4", "T1LV11D1", "141");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV11D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure2,
checkResult_ReturnFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV11D2).
*
*/
public void testT1RV1D4_T1LV11D2() {
test_id = getTestId("T1RV1D4", "T1LV11D2", "142");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV11D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure2,
checkResult_ReturnFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV11D3).
*
*/
public void testT1RV1D4_T1LV11D3() {
test_id = getTestId("T1RV1D4", "T1LV11D3", "143");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV11D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ReturnFailure2,
checkResult_ReturnFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV12D1).
*
*/
public void testT1RV1D4_T1LV12D1() {
test_id = getTestId("T1RV1D4", "T1LV12D1", "144");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV12D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV12D2).
*
*/
public void testT1RV1D4_T1LV12D2() {
test_id = getTestId("T1RV1D4", "T1LV12D2", "145");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV12D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV12D3).
*
*/
public void testT1RV1D4_T1LV12D3() {
test_id = getTestId("T1RV1D4", "T1LV12D3", "146");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV12D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV13D1).
*
*/
public void testT1RV1D4_T1LV13D1() {
test_id = getTestId("T1RV1D4", "T1LV13D1", "147");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV13D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV13D2).
*
*/
public void testT1RV1D4_T1LV13D2() {
test_id = getTestId("T1RV1D4", "T1LV13D2", "148");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV13D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV13D3).
*
*/
public void testT1RV1D4_T1LV13D3() {
test_id = getTestId("T1RV1D4", "T1LV13D3", "149");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV13D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV14D1).
*
*/
public void testT1RV1D4_T1LV14D1() {
test_id = getTestId("T1RV1D4", "T1LV14D1", "150");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV14D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV14D2).
*
*/
public void testT1RV1D4_T1LV14D2() {
test_id = getTestId("T1RV1D4", "T1LV14D2", "151");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV14D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV14D3).
*
*/
public void testT1RV1D4_T1LV14D3() {
test_id = getTestId("T1RV1D4", "T1LV14D3", "152");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV14D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV15D1).
*
*/
public void testT1RV1D4_T1LV15D1() {
test_id = getTestId("T1RV1D4", "T1LV15D1", "153");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV15D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV15D2).
*
*/
public void testT1RV1D4_T1LV15D2() {
test_id = getTestId("T1RV1D4", "T1LV15D2", "154");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV15D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D4) and row
* (T1LV15D3).
*
*/
public void testT1RV1D4_T1LV15D3() {
test_id = getTestId("T1RV1D4", "T1LV15D3", "155");
String src = selectTRVD("T1RV1D4");
String dest = selectTLVD("T1LV15D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV1D1).
*
*/
public void testT1RV1D5_T1LV1D1() {
test_id = getTestId("T1RV1D5", "T1LV1D1", "156");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV1D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV1D2).
*
*/
public void testT1RV1D5_T1LV1D2() {
test_id = getTestId("T1RV1D5", "T1LV1D2", "157");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV1D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV1D3).
*
*/
public void testT1RV1D5_T1LV1D3() {
test_id = getTestId("T1RV1D5", "T1LV1D3", "158");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV1D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV1D4).
*
*/
public void testT1RV1D5_T1LV1D4() {
test_id = getTestId("T1RV1D5", "T1LV1D4", "159");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV1D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure3, checkResult_Failure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV1D5).
// *
// */
// public void testT1RV1D5_T1LV1D5() {
// test_id = getTestId("T1RV1D5", "T1LV1D5", "160");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV1D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV1D6).
// *
// */
// public void testT1RV1D5_T1LV1D6() {
// test_id = getTestId("T1RV1D5", "T1LV1D6", "161");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV1D6");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV1D7).
*
*/
public void testT1RV1D5_T1LV1D7() {
test_id = getTestId("T1RV1D5", "T1LV1D7", "162");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV1D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure3, checkResult_Failure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV1D8).
// *
// */
// public void testT1RV1D5_T1LV1D8() {
// test_id = getTestId("T1RV1D5", "T1LV1D8", "163");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV1D8");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV1D9).
*
*/
public void testT1RV1D5_T1LV1D9() {
test_id = getTestId("T1RV1D5", "T1LV1D9", "164");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV1D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV2D1).
*
*/
public void testT1RV1D5_T1LV2D1() {
test_id = getTestId("T1RV1D5", "T1LV2D1", "165");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV2D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV2D2).
*
*/
public void testT1RV1D5_T1LV2D2() {
test_id = getTestId("T1RV1D5", "T1LV2D2", "166");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV2D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV2D3).
*
*/
public void testT1RV1D5_T1LV2D3() {
test_id = getTestId("T1RV1D5", "T1LV2D3", "167");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV2D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV2D4).
*
*/
public void testT1RV1D5_T1LV2D4() {
test_id = getTestId("T1RV1D5", "T1LV2D4", "168");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV2D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure3, checkResult_Failure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV2D5).
// *
// */
// public void testT1RV1D5_T1LV2D5() {
// test_id = getTestId("T1RV1D5", "T1LV2D5", "169");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV2D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV2D6).
// *
// */
// public void testT1RV1D5_T1LV2D6() {
// test_id = getTestId("T1RV1D5", "T1LV2D6", "170");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV2D6");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV2D7).
*
*/
public void testT1RV1D5_T1LV2D7() {
test_id = getTestId("T1RV1D5", "T1LV2D7", "171");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV2D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure3, checkResult_Failure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV2D8).
// *
// */
// public void testT1RV1D5_T1LV2D8() {
// test_id = getTestId("T1RV1D5", "T1LV2D8", "172");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV2D8");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV2D9).
*
*/
public void testT1RV1D5_T1LV2D9() {
test_id = getTestId("T1RV1D5", "T1LV2D9", "173");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV2D9");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(Failure2, checkResult_Failure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV3D1).
*
*/
public void testT1RV1D5_T1LV3D1() {
test_id = getTestId("T1RV1D5", "T1LV3D1", "174");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV3D1");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV3D2).
*
*/
public void testT1RV1D5_T1LV3D2() {
test_id = getTestId("T1RV1D5", "T1LV3D2", "175");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV3D2");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV3D3).
*
*/
public void testT1RV1D5_T1LV3D3() {
test_id = getTestId("T1RV1D5", "T1LV3D3", "176");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV3D3");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV3D4).
*
*/
public void testT1RV1D5_T1LV3D4() {
test_id = getTestId("T1RV1D5", "T1LV3D4", "177");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV3D4");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure3, checkResult_ParamFailure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV3D5).
// *
// */
// public void testT1RV1D5_T1LV3D5() {
// test_id = getTestId("T1RV1D5", "T1LV3D5", "178");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV3D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV3D6).
// *
// */
// public void testT1RV1D5_T1LV3D6() {
// test_id = getTestId("T1RV1D5", "T1LV3D6", "179");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV3D6");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
* Perform the test for the given matrix column (T1RV1D5) and row (T1LV3D7).
*
*/
public void testT1RV1D5_T1LV3D7() {
test_id = getTestId("T1RV1D5", "T1LV3D7", "180");
String src = selectTRVD("T1RV1D5");
String dest = selectTLVD("T1LV3D7");
String result = ".";
try {
result = TRVD_TLVD_Action(src, dest);
} catch (RecognitionException e) {
e.printStackTrace();
} catch (TokenStreamException e) {
e.printStackTrace();
}
assertTrue(ParamFailure3, checkResult_ParamFailure3(src, dest, result));
GraphicalEditor editor = getActiveEditor();
if (editor != null) {
validateOrGenerateResults(editor, generateResults);
}
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV3D8).
// *
// */
// public void testT1RV1D5_T1LV3D8() {
// test_id = getTestId("T1RV1D5", "T1LV3D8", "181");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV3D8");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV3D9 and verify the outcome.
 */
public void testT1RV1D5_T1LV3D9() {
    test_id = getTestId("T1RV1D5", "T1LV3D9", "182");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV3D9");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV4D1 and verify the outcome.
 */
public void testT1RV1D5_T1LV4D1() {
    test_id = getTestId("T1RV1D5", "T1LV4D1", "183");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV4D1");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV4D2 and verify the outcome.
 */
public void testT1RV1D5_T1LV4D2() {
    test_id = getTestId("T1RV1D5", "T1LV4D2", "184");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV4D2");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV4D3 and verify the outcome.
 */
public void testT1RV1D5_T1LV4D3() {
    test_id = getTestId("T1RV1D5", "T1LV4D3", "185");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV4D3");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV4D4 and verify the outcome.
 */
public void testT1RV1D5_T1LV4D4() {
    test_id = getTestId("T1RV1D5", "T1LV4D4", "186");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV4D4");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure3, checkResult_ParamFailure3(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV4D5).
// *
// */
// public void testT1RV1D5_T1LV4D5() {
// test_id = getTestId("T1RV1D5", "T1LV4D5", "187");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV4D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV4D6).
// *
// */
// public void testT1RV1D5_T1LV4D6() {
// test_id = getTestId("T1RV1D5", "T1LV4D6", "188");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV4D6");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV4D7 and verify the outcome.
 */
public void testT1RV1D5_T1LV4D7() {
    test_id = getTestId("T1RV1D5", "T1LV4D7", "189");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV4D7");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure3, checkResult_ParamFailure3(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV4D8).
// *
// */
// public void testT1RV1D5_T1LV4D8() {
// test_id = getTestId("T1RV1D5", "T1LV4D8", "190");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV4D8");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV4D9 and verify the outcome.
 */
public void testT1RV1D5_T1LV4D9() {
    test_id = getTestId("T1RV1D5", "T1LV4D9", "191");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV4D9");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV5D1 and verify the outcome.
 */
public void testT1RV1D5_T1LV5D1() {
    test_id = getTestId("T1RV1D5", "T1LV5D1", "192");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV5D1");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV5D2 and verify the outcome.
 */
public void testT1RV1D5_T1LV5D2() {
    test_id = getTestId("T1RV1D5", "T1LV5D2", "193");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV5D2");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV5D3 and verify the outcome.
 */
public void testT1RV1D5_T1LV5D3() {
    test_id = getTestId("T1RV1D5", "T1LV5D3", "194");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV5D3");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV5D4 and verify the outcome.
 */
public void testT1RV1D5_T1LV5D4() {
    test_id = getTestId("T1RV1D5", "T1LV5D4", "195");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV5D4");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure3, checkResult_ParamFailure3(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV5D5).
// *
// */
// public void testT1RV1D5_T1LV5D5() {
// test_id = getTestId("T1RV1D5", "T1LV5D5", "196");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV5D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV5D6).
// *
// */
// public void testT1RV1D5_T1LV5D6() {
// test_id = getTestId("T1RV1D5", "T1LV5D6", "197");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV5D6");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV5D7 and verify the outcome.
 */
public void testT1RV1D5_T1LV5D7() {
    test_id = getTestId("T1RV1D5", "T1LV5D7", "198");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV5D7");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure3, checkResult_ParamFailure3(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV5D8).
// *
// */
// public void testT1RV1D5_T1LV5D8() {
// test_id = getTestId("T1RV1D5", "T1LV5D8", "199");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV5D8");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV5D9 and verify the outcome.
 */
public void testT1RV1D5_T1LV5D9() {
    test_id = getTestId("T1RV1D5", "T1LV5D9", "200");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV5D9");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV6D1 and verify the outcome.
 */
public void testT1RV1D5_T1LV6D1() {
    test_id = getTestId("T1RV1D5", "T1LV6D1", "201");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV6D1");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV6D2 and verify the outcome.
 */
public void testT1RV1D5_T1LV6D2() {
    test_id = getTestId("T1RV1D5", "T1LV6D2", "202");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV6D2");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV6D3 and verify the outcome.
 */
public void testT1RV1D5_T1LV6D3() {
    test_id = getTestId("T1RV1D5", "T1LV6D3", "203");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV6D3");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV6D4 and verify the outcome.
 */
public void testT1RV1D5_T1LV6D4() {
    test_id = getTestId("T1RV1D5", "T1LV6D4", "204");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV6D4");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure3, checkResult_ParamFailure3(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV6D5).
// *
// */
// public void testT1RV1D5_T1LV6D5() {
// test_id = getTestId("T1RV1D5", "T1LV6D5", "205");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV6D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV6D6).
// *
// */
// public void testT1RV1D5_T1LV6D6() {
// test_id = getTestId("T1RV1D5", "T1LV6D6", "206");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV6D6");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV6D7 and verify the outcome.
 */
public void testT1RV1D5_T1LV6D7() {
    test_id = getTestId("T1RV1D5", "T1LV6D7", "207");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV6D7");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure3, checkResult_ParamFailure3(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D5) and row (T1LV6D8).
// *
// */
// public void testT1RV1D5_T1LV6D8() {
// test_id = getTestId("T1RV1D5", "T1LV6D8", "208");
//
// String src = selectTRVD("T1RV1D5");
//
// String dest = selectTLVD("T1LV6D8");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV6D9 and verify the outcome.
 */
public void testT1RV1D5_T1LV6D9() {
    test_id = getTestId("T1RV1D5", "T1LV6D9", "209");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV6D9");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV7D1 and verify the outcome.
 */
public void testT1RV1D5_T1LV7D1() {
    test_id = getTestId("T1RV1D5", "T1LV7D1", "210");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV7D1");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV7D2 and verify the outcome.
 */
public void testT1RV1D5_T1LV7D2() {
    test_id = getTestId("T1RV1D5", "T1LV7D2", "211");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV7D2");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV7D3 and verify the outcome.
 */
public void testT1RV1D5_T1LV7D3() {
    test_id = getTestId("T1RV1D5", "T1LV7D3", "212");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV7D3");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV8D1 and verify the outcome.
 */
public void testT1RV1D5_T1LV8D1() {
    test_id = getTestId("T1RV1D5", "T1LV8D1", "213");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV8D1");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ReturnFailure2, checkResult_ReturnFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV8D2 and verify the outcome.
 */
public void testT1RV1D5_T1LV8D2() {
    test_id = getTestId("T1RV1D5", "T1LV8D2", "214");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV8D2");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ReturnFailure2, checkResult_ReturnFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV8D3 and verify the outcome.
 */
public void testT1RV1D5_T1LV8D3() {
    test_id = getTestId("T1RV1D5", "T1LV8D3", "215");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV8D3");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ReturnFailure2, checkResult_ReturnFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV9D1 and verify the outcome.
 */
public void testT1RV1D5_T1LV9D1() {
    test_id = getTestId("T1RV1D5", "T1LV9D1", "216");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV9D1");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ReturnFailure2, checkResult_ReturnFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV9D2 and verify the outcome.
 */
public void testT1RV1D5_T1LV9D2() {
    test_id = getTestId("T1RV1D5", "T1LV9D2", "217");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV9D2");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ReturnFailure2, checkResult_ReturnFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV9D3 and verify the outcome.
 */
public void testT1RV1D5_T1LV9D3() {
    test_id = getTestId("T1RV1D5", "T1LV9D3", "218");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV9D3");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ReturnFailure2, checkResult_ReturnFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV10D1 and verify the outcome.
 */
public void testT1RV1D5_T1LV10D1() {
    test_id = getTestId("T1RV1D5", "T1LV10D1", "219");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV10D1");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ReturnFailure2, checkResult_ReturnFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV10D2 and verify the outcome.
 */
public void testT1RV1D5_T1LV10D2() {
    test_id = getTestId("T1RV1D5", "T1LV10D2", "220");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV10D2");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ReturnFailure2, checkResult_ReturnFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV10D3 and verify the outcome.
 */
public void testT1RV1D5_T1LV10D3() {
    test_id = getTestId("T1RV1D5", "T1LV10D3", "221");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV10D3");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ReturnFailure2, checkResult_ReturnFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV11D1 and verify the outcome.
 */
public void testT1RV1D5_T1LV11D1() {
    test_id = getTestId("T1RV1D5", "T1LV11D1", "222");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV11D1");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ReturnFailure2, checkResult_ReturnFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV11D2 and verify the outcome.
 */
public void testT1RV1D5_T1LV11D2() {
    test_id = getTestId("T1RV1D5", "T1LV11D2", "223");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV11D2");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ReturnFailure2, checkResult_ReturnFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV11D3 and verify the outcome.
 */
public void testT1RV1D5_T1LV11D3() {
    test_id = getTestId("T1RV1D5", "T1LV11D3", "224");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV11D3");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ReturnFailure2, checkResult_ReturnFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV12D1 and verify the outcome.
 */
public void testT1RV1D5_T1LV12D1() {
    test_id = getTestId("T1RV1D5", "T1LV12D1", "225");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV12D1");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV12D2 and verify the outcome.
 */
public void testT1RV1D5_T1LV12D2() {
    test_id = getTestId("T1RV1D5", "T1LV12D2", "226");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV12D2");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV12D3 and verify the outcome.
 */
public void testT1RV1D5_T1LV12D3() {
    test_id = getTestId("T1RV1D5", "T1LV12D3", "227");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV12D3");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV13D1 and verify the outcome.
 */
public void testT1RV1D5_T1LV13D1() {
    test_id = getTestId("T1RV1D5", "T1LV13D1", "228");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV13D1");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV13D2 and verify the outcome.
 */
public void testT1RV1D5_T1LV13D2() {
    test_id = getTestId("T1RV1D5", "T1LV13D2", "229");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV13D2");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Matrix test: drag column T1RV1D5 onto row T1LV13D3 and verify the outcome.
 */
public void testT1RV1D5_T1LV13D3() {
    test_id = getTestId("T1RV1D5", "T1LV13D3", "230");
    final String source = selectTRVD("T1RV1D5");
    final String target = selectTLVD("T1LV13D3");
    String outcome = "."; // placeholder when the action throws before returning
    try {
        outcome = TRVD_TLVD_Action(source, target);
    } catch (RecognitionException re) {
        re.printStackTrace();
    } catch (TokenStreamException tse) {
        tse.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, target, outcome));
    // Validate (or regenerate) expected results against the active editor, if any.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D5) against row (T1LV14D1).
 */
public void testT1RV1D5_T1LV14D1() {
    test_id = getTestId("T1RV1D5", "T1LV14D1", "231");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D5");
    String destination = selectTLVD("T1LV14D1");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D5) against row (T1LV14D2).
 */
public void testT1RV1D5_T1LV14D2() {
    test_id = getTestId("T1RV1D5", "T1LV14D2", "232");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D5");
    String destination = selectTLVD("T1LV14D2");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D5) against row (T1LV14D3).
 */
public void testT1RV1D5_T1LV14D3() {
    test_id = getTestId("T1RV1D5", "T1LV14D3", "233");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D5");
    String destination = selectTLVD("T1LV14D3");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D5) against row (T1LV15D1).
 */
public void testT1RV1D5_T1LV15D1() {
    test_id = getTestId("T1RV1D5", "T1LV15D1", "234");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D5");
    String destination = selectTLVD("T1LV15D1");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D5) against row (T1LV15D2).
 */
public void testT1RV1D5_T1LV15D2() {
    test_id = getTestId("T1RV1D5", "T1LV15D2", "235");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D5");
    String destination = selectTLVD("T1LV15D2");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D5) against row (T1LV15D3).
 */
public void testT1RV1D5_T1LV15D3() {
    test_id = getTestId("T1RV1D5", "T1LV15D3", "236");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D5");
    String destination = selectTLVD("T1LV15D3");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(ParamFailure2, checkResult_ParamFailure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D6) against row (T1LV1D1).
 */
public void testT1RV1D6_T1LV1D1() {
    test_id = getTestId("T1RV1D6", "T1LV1D1", "237");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D6");
    String destination = selectTLVD("T1LV1D1");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(Failure2, checkResult_Failure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D6) against row (T1LV1D2).
 */
public void testT1RV1D6_T1LV1D2() {
    test_id = getTestId("T1RV1D6", "T1LV1D2", "238");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D6");
    String destination = selectTLVD("T1LV1D2");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(Failure2, checkResult_Failure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D6) against row (T1LV1D3).
 */
public void testT1RV1D6_T1LV1D3() {
    test_id = getTestId("T1RV1D6", "T1LV1D3", "239");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D6");
    String destination = selectTLVD("T1LV1D3");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(Failure2, checkResult_Failure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D6) against row (T1LV1D4).
 */
public void testT1RV1D6_T1LV1D4() {
    test_id = getTestId("T1RV1D6", "T1LV1D4", "240");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D6");
    String destination = selectTLVD("T1LV1D4");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(Failure3, checkResult_Failure3(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D6) and row (T1LV1D5).
// *
// */
// public void testT1RV1D6_T1LV1D5() {
// test_id = getTestId("T1RV1D6", "T1LV1D5", "241");
//
// String src = selectTRVD("T1RV1D6");
//
// String dest = selectTLVD("T1LV1D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
/**
 * Perform the test for matrix column (T1RV1D6) against row (T1LV1D6).
 */
public void testT1RV1D6_T1LV1D6() {
    test_id = getTestId("T1RV1D6", "T1LV1D6", "242");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D6");
    String destination = selectTLVD("T1LV1D6");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(Success, checkResult_Success(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D6) against row (T1LV1D7).
 */
public void testT1RV1D6_T1LV1D7() {
    test_id = getTestId("T1RV1D6", "T1LV1D7", "243");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D6");
    String destination = selectTLVD("T1LV1D7");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(Failure3, checkResult_Failure3(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D6) against row (T1LV1D8).
 */
public void testT1RV1D6_T1LV1D8() {
    test_id = getTestId("T1RV1D6", "T1LV1D8", "244");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D6");
    String destination = selectTLVD("T1LV1D8");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(Success, checkResult_Success(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D6) against row (T1LV1D9).
 */
public void testT1RV1D6_T1LV1D9() {
    test_id = getTestId("T1RV1D6", "T1LV1D9", "245");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D6");
    String destination = selectTLVD("T1LV1D9");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(Failure2, checkResult_Failure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D6) against row (T1LV2D1).
 */
public void testT1RV1D6_T1LV2D1() {
    test_id = getTestId("T1RV1D6", "T1LV2D1", "246");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D6");
    String destination = selectTLVD("T1LV2D1");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(Failure2, checkResult_Failure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D6) against row (T1LV2D2).
 */
public void testT1RV1D6_T1LV2D2() {
    test_id = getTestId("T1RV1D6", "T1LV2D2", "247");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D6");
    String destination = selectTLVD("T1LV2D2");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(Failure2, checkResult_Failure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D6) against row (T1LV2D3).
 */
public void testT1RV1D6_T1LV2D3() {
    test_id = getTestId("T1RV1D6", "T1LV2D3", "248");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D6");
    String destination = selectTLVD("T1LV2D3");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(Failure2, checkResult_Failure2(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
/**
 * Perform the test for matrix column (T1RV1D6) against row (T1LV2D4).
 */
public void testT1RV1D6_T1LV2D4() {
    test_id = getTestId("T1RV1D6", "T1LV2D4", "249");

    // Select the source (column) and destination (row) elements.
    String source = selectTRVD("T1RV1D6");
    String destination = selectTLVD("T1LV2D4");

    // "." marks "action produced no result".
    String outcome = ".";
    try {
        outcome = TRVD_TLVD_Action(source, destination);
    } catch (RecognitionException e) {
        e.printStackTrace();
    } catch (TokenStreamException e) {
        e.printStackTrace();
    }
    assertTrue(Failure3, checkResult_Failure3(source, destination, outcome));

    // Validate against (or regenerate) the stored expected results.
    GraphicalEditor activeEditor = getActiveEditor();
    if (activeEditor != null) {
        validateOrGenerateResults(activeEditor, generateResults);
    }
}
// TODO FIXME: This test must pass when CQ issue
// dts0100668874 is resolved.
// /**
// * Perform the test for the given matrix column (T1RV1D6) and row (T1LV2D5).
// *
// */
// public void testT1RV1D6_T1LV2D5() {
// test_id = getTestId("T1RV1D6", "T1LV2D5", "250");
//
// String src = selectTRVD("T1RV1D6");
//
// String dest = selectTLVD("T1LV2D5");
//
// String result = ".";
// try {
// result = TRVD_TLVD_Action(src, dest);
// } catch (RecognitionException e) {
// e.printStackTrace();
// } catch (TokenStreamException e) {
// e.printStackTrace();
// }
// assertTrue(Failure5, checkResult_Failure5(src, dest, result));
//
// GraphicalEditor editor = getActiveEditor();
// if (editor != null) {
// validateOrGenerateResults(editor, generateResults);
// }
// }
}
| HebaKhaled/bposs | src/com.mentor.nucleus.bp.als.oal.test/src/com/mentor/nucleus/bp/als/oal/test/SingleDimensionFixedArrayAssigmentTest_1_Generics.java | Java | apache-2.0 | 183,440 |
/**
* Copyright 2013 52°North Initiative for Geospatial Open Source Software GmbH
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.n52.sor.client;
import org.n52.sor.ISorRequest.SorMatchingType;
import org.n52.sor.OwsExceptionReport;
import org.n52.sor.PropertiesManager;
import org.n52.sor.util.XmlTools;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.x52North.sor.x031.GetMatchingDefinitionsRequestDocument;
import org.x52North.sor.x031.GetMatchingDefinitionsRequestDocument.GetMatchingDefinitionsRequest;
/**
* @author Jan Schulte, Daniel Nüst
*
*/
public class GetMatchingDefinitionsBean extends AbstractBean {
private static Logger log = LoggerFactory.getLogger(GetMatchingDefinitionsBean.class);
private String inputURI = "";
private String matchingTypeString;
private int searchDepth;
@Override
public void buildRequest() {
    // Create an empty request document and fill in the mandatory
    // service identification parameters.
    GetMatchingDefinitionsRequestDocument requestDoc = GetMatchingDefinitionsRequestDocument.Factory.newInstance();
    GetMatchingDefinitionsRequest request = requestDoc.addNewGetMatchingDefinitionsRequest();
    request.setService(PropertiesManager.getInstance().getService());
    request.setVersion(PropertiesManager.getInstance().getServiceVersion());

    // inputURI is mandatory: abort and show a user-facing message if missing.
    if (this.inputURI.isEmpty()) {
        this.requestString = "Please choose an input URI!";
        return;
    }
    request.setInputURI(this.inputURI);

    // matchingType: translate the UI string into the schema enumeration.
    try {
        request.setMatchingType(SorMatchingType.getSorMatchingType(this.matchingTypeString).getSchemaMatchingType());
    }
    catch (OwsExceptionReport e) {
        log.warn("Matching type NOT supported!");
        this.requestString = "The matching type is not supported!\n\n" + e.getDocument();
        // BUGFIX: without this return, the error message above was
        // silently overwritten by requestDoc.toString() below.
        return;
    }

    // searchDepth
    request.setSearchDepth(this.searchDepth);

    // Validation failures are logged only; the request is still produced
    // so the user can inspect (and correct) it.
    if ( !requestDoc.validate()) {
        log.warn("Request is NOT valid, service may return error!\n"
                + XmlTools.validateAndIterateErrors(requestDoc));
    }

    this.requestString = requestDoc.toString();
}
/** @return the URI of the input phenomenon definition */
public String getInputURI() {
    return this.inputURI;
}

/** @return the matching type as selected in the UI (string form) */
public String getMatchingTypeString() {
    return this.matchingTypeString;
}

/** @return the maximum ontology search depth for matching */
public int getSearchDepth() {
    return this.searchDepth;
}

/** @param inputURI the URI of the input phenomenon definition */
public void setInputURI(String inputURI) {
    this.inputURI = inputURI;
}

/** @param matchingTypeString the matching type as selected in the UI */
public void setMatchingTypeString(String matchingTypeString) {
    this.matchingTypeString = matchingTypeString;
}

/** @param searchDepth the maximum ontology search depth for matching */
public void setSearchDepth(int searchDepth) {
    this.searchDepth = searchDepth;
}
} | 52North/OpenSensorSearch | sor-common/src/main/java/org/n52/sor/client/GetMatchingDefinitionsBean.java | Java | apache-2.0 | 3,262 |
/*
* Copyright 2016 e-UCM (http://www.e-ucm.es/)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* This project has received funding from the European Union’s Horizon
* 2020 research and innovation programme under grant agreement No 644187.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0 (link is external)
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// Module-level helpers and constants for the /applications router.
var express = require('express'),
    authentication = require('../util/authentication'),
    router = express.Router(),
    async = require('async'),
    // Shared route pattern for the single-application endpoints.
    applicationIdRoute = '/:applicationId',
    // Mongoose projection string / field list that hides internal fields.
    unselectedFields = '-__v',
    removeFields = ['__v'];

// JSON-schema validator used to check request payloads.
var Validator = require('jsonschema').Validator,
    v = new Validator();

// Schema of a full application document (used by POST / and PUT /:applicationId).
var appSchema = {
    id: '/AppSchema',
    type: 'object',
    properties: {
        _id: {type: 'string'},
        name: { type: 'string'},
        prefix: { type: 'string'},
        host: { type: 'string'},
        owner: { type: 'string'},
        roles: {
            type: 'array',
            items: {
                type: 'object',
                properties: {
                    roles: {
                        // A role entry may name one role or several.
                        anyOf: [{
                            type: 'array',
                            items: {type: 'string'}
                        },{
                            type: 'string'
                        }]
                    },
                    allows: {
                        type: 'array',
                        items: {$ref: '/AllowsSchema'}
                    }
                }
            }
        },
        autoroles: {
            type: 'array',
            items: {type: 'string'}
        },
        routes: {
            type: 'array',
            items: {type: 'string'}
        },
        anonymous: {
            // Routes reachable without authentication; single string or list.
            anyOf: [{
                type: 'array',
                items: {type: 'string'}
            }, {
                type: 'string'
            }]
        },
        look: {
            type: 'array',
            items: {$ref: '/LookSchema'}
        }
    },
    additionalProperties: false
};

// Sub-schema: resources/permissions pair inside a role definition.
var allowsSchema = {
    id: '/AllowsSchema',
    type: 'object',
    properties: {
        resources: {
            type: 'array',
            items: {type: 'string'}
        },
        permissions: {
            type: 'array',
            items: {type: 'string'}
        }
    }
};

// Sub-schema: a 'look' entry granting per-user access to a URL.
var lookSchema = {
    id: '/LookSchema',
    type: 'object',
    properties: {
        url: { type: 'string'},
        key: { type: 'string'},
        methods: {
            type: 'array',
            items: {type: 'string'}
        },
        permissions: {type: 'object'}
    },
    additionalProperties: false
};

// Schema for the PUT /look/:prefix payload.
var putLookSchema = {
    id: '/PutLookSchema',
    type: 'object',
    properties: {
        key: {type: 'string'},
        users: {
            type: 'array',
            items: {type: 'string'}
        },
        resources: {
            type: 'array',
            items: {type: 'string'}
        },
        methods: {
            type: 'array',
            items: {type: 'string'}
        },
        url: {type: 'string'}
    },
    additionalProperties: false
};

// Register the sub-schemas so $ref resolution works during validation.
v.addSchema(allowsSchema, '/AllowsSchema');
v.addSchema(lookSchema, '/LookSchema');
v.addSchema(appSchema, '/AppSchema');
v.addSchema(putLookSchema, '/PutLookSchema');
/**
* @api {get} /applications Returns all the registered applications.
* @apiName GetApplications
* @apiGroup Applications
*
* @apiParam {String} [fields] The fields to be populated in the resulting objects.
* An empty string will return the complete document.
* @apiParam {String} [sort=_id] Place - before the field for a descending sort.
* @apiParam {Number} [limit=20]
* @apiParam {Number} [page=1]
*
* @apiPermission admin
*
* @apiParamExample {json} Request-Example:
* {
* "fields": "_id name prefix host anonymous timeCreated",
* "sort": "-name",
* "limit": 20,
* "page": 1
* }
*
* @apiParamExample {json} Request-Example:
* {
* "fields": "",
* "sort": "name",
* "limit": 20,
* "page": 1
* }
*
* @apiSuccess(200) Success.
*
* @apiSuccessExample Success-Response:
* HTTP/1.1 200 OK
* {
* "data": [
* {
* "_id": "559a447831b7acec185bf513",
* "name": "Gleaner App.",
* "prefix": "gleaner",
* "host": "localhost:3300",
* "owner": "root",
* "autoroles": [
* "student",
 *                          "teacher",
* "developer"
* ],
* "timeCreated": "2015-07-06T09:03:52.636Z",
* "routes": [
* "gleaner/games",
* "gleaner/activities",
* "gleaner/classes"
* ],
* "anonymous": [
* "/collector",
* "/env"
* ],
* "look":[
* {
* "url": "route/get",
* "permissions: {
* "user1: [
* "dashboard1",
* "dashboard2"
* ],
* "user2: [
* "dashboard1",
* "dashboard3"
* ]
* }
* }
* ]
* }],
* "pages": {
* "current": 1,
* "prev": 0,
* "hasPrev": false,
* "next": 2,
* "hasNext": false,
* "total": 1
* },
* "items": {
* "limit": 20,
* "begin": 1,
* "end": 1,
* "total": 1
* }
* }
*
*/
// GET /applications — paged list of all registered applications (admin only).
router.get('/', authentication.authorized, function (req, res, next) {
    // Paging / projection options are read from the request body
    // (NOTE(review): body on a GET is unusual, but it is the documented API).
    var fields = req.body.fields || '';
    var sort = req.body.sort || '_id';
    var limit = req.body.limit || 20;
    var page = req.body.page || 1;

    req.app.db.model('application').pagedFind({}, fields, removeFields, sort, limit, page, function (err, results) {
        if (err) {
            return next(err);
        }
        res.json(results);
    });
});
/**
* @api {post} /applications Register a new application, if an application with the same prefix already exists it will be overridden with the new values.
* @apiName PostApplications
* @apiGroup Applications
*
* @apiParam {String} prefix Application prefix.
* @apiParam {String} host Application host.
* @apiParam {String[]} [anonymous Express-like] routes for whom unidentified (anonymous) requests will be forwarded anyway.
* @apiParam {String[]} [autoroles] Roles that the application use.
* @apiParam {Object[]} [look] Allow access to routes for specific users. Key field identify specific field that the algorithm need look to
* allow the access. In the next example, the user1 can use the route POST "rout/get" to see results if the req.body
* contains the value "dashboard1" in "docs._id" field.
* "look":[{"url": "route/get",
* "permissions: { "user1: ["dashboard1"] },
* "key": "docs._id",
* "_id": "59ce615e3ef2df4d94f734fc",
* "methods": ["post"]}]
* @apiParam {String[]} [routes] All the applications routes that are not anonymous
* @apiParam {String} [owner] The (user) owner of the application
*
* @apiPermission admin
*
* @apiParamExample {json} Request-Example:
* {
* "name": "Gleaner",
* "prefix" : "gleaner",
* "host" : "localhost:3300",
* "autoroles": [
* "student",
 *          "teacher",
* "developer"
* ],
* "look":[
* {
* "url": "route/get",
* "permissions: {
* "user1: [
* "dashboard1",
* "dashboard2"
* ],
* "user2: [
* "dashboard1",
* "dashboard3"
* ]
* },
* "key": "docs._id",
* "_id": "59ce615e3ef2df4d94f734fc",
* "methods": [
* "post",
* "put"
* ]
* }
* ]
* "anonymous": [
* "/collector",
* "/env"
* ],
* "routes": [
* "gleaner/games",
* "gleaner/activities",
* "gleaner/classes"
* ],
* "owner": "root"
* }
*
* @apiSuccess(200) Success.
*
* @apiSuccessExample Success-Response:
* HTTP/1.1 200 OK
* {
* "_id": "559a447831b7acec185bf513",
* "prefix": "gleaner",
* "host": "localhost:3300",
* "anonymous": [
* "/collector",
* "/env"
* ],
* "timeCreated": "2015-07-06T09:03:52.636Z"
* }
*
* @apiError(400) PrefixRequired Prefix required!.
*
* @apiError(400) HostRequired Host required!.
*
*/
// POST /applications — register (or overwrite) an application (admin only).
router.post('/', authentication.authorized, function (req, res, next) {
    async.auto({
        // Step 1: prefix and host are mandatory, and the whole payload
        // must match the application JSON schema.
        validate: function (callback) {
            if (!req.body.prefix) {
                return callback(new Error('Prefix required!'));
            }
            if (!req.body.host) {
                return callback(new Error('Host required!'));
            }
            var validation = v.validate(req.body, appSchema);
            if (validation.errors && validation.errors.length > 0) {
                return callback(new Error('Bad format: ' + validation.errors[0]));
            }
            callback();
        },
        // Step 2: register the roles with the ACL; resources are prefixed
        // with the application prefix and the distinct routes collected.
        roles: ['validate', function (callback) {
            var rolesArray = req.body.roles;
            var routes = [];
            if (!rolesArray) {
                return callback(null, routes);
            }
            rolesArray.forEach(function (role) {
                role.allows.forEach(function (allow) {
                    var resources = allow.resources;
                    for (var i = 0; i < resources.length; i++) {
                        resources[i] = req.body.prefix + resources[i];
                        if (routes.indexOf(resources[i]) === -1) {
                            routes.push(resources[i]);
                        }
                    }
                });
            });
            req.app.acl.allow(rolesArray, function (err) {
                if (err) {
                    return callback(err);
                }
                callback(null, routes);
            });
        }],
        // Step 3: persist the application document itself.
        application: ['roles', function (callback, results) {
            req.app.db.model('application').create({
                name: req.body.name || '',
                prefix: req.body.prefix,
                host: req.body.host,
                autoroles: req.body.autoroles,
                look: req.body.look || [],
                anonymous: req.body.anonymous || [],
                routes: results.roles,
                owner: req.user.username
            }, callback);
        }]
    }, function (err, results) {
        if (err) {
            err.status = 400;
            return next(err);
        }
        res.json(results.application);
    });
});
/**
* @api {get} /applications/prefix/:prefix Gets the application information.
* @apiName GetApplication
* @apiGroup Applications
*
* @apiParam {String} applicationId Application id.
*
* @apiPermission admin
*
* @apiSuccess(200) Success.
*
* @apiSuccessExample Success-Response:
* HTTP/1.1 200 OK
* {
* "_id": "559a447831b7acec185bf513",
* "name": "My App Name",
* "prefix": "gleaner",
* "host": "localhost:3300",
* "anonymous": [],
* "timeCreated": "2015-07-06T09:03:52.636Z"
* }
*
* @apiError(400) ApplicationNotFound No application with the given user id exists.
*
*/
// GET /applications/prefix/:prefix — look an application up by its prefix.
router.get('/prefix/:prefix', authentication.authorized, function (req, res, next) {
    req.app.db.model('application').findByPrefix(req.params.prefix)
        .select(unselectedFields).exec(function (err, application) {
            if (err) {
                return next(err);
            }
            if (application) {
                return res.json(application);
            }
            err = new Error('No application with the given application prefix exists.');
            err.status = 400;
            next(err);
        });
});
// Returns true if obj is an array. Array.isArray is the standard,
// cross-realm-safe replacement for the Object.prototype.toString check.
function isArray(obj) {
    return Array.isArray(obj);
}
/**
* @api {get} /applications/:applicationId Gets the application information.
* @apiName GetApplication
* @apiGroup Applications
*
* @apiParam {String} applicationId Application id.
*
* @apiPermission admin
*
* @apiSuccess(200) Success.
*
* @apiSuccessExample Success-Response:
* HTTP/1.1 200 OK
* {
* "_id": "559a447831b7acec185bf513",
* "name": "My App Name",
* "prefix": "gleaner",
* "host": "localhost:3300",
* "anonymous": [],
* "timeCreated": "2015-07-06T09:03:52.636Z"
* }
*
* @apiError(400) ApplicationNotFound No application with the given user id exists.
*
*/
// GET /applications/:applicationId — fetch one application by id.
router.get(applicationIdRoute, authentication.authorized, function (req, res, next) {
    var applicationId = req.params.applicationId || '';
    req.app.db.model('application').findById(applicationId)
        .select(unselectedFields).exec(function (err, application) {
            if (err) {
                return next(err);
            }
            if (application) {
                return res.json(application);
            }
            err = new Error('No application with the given application id exists.');
            err.status = 400;
            next(err);
        });
});
/**
* @api {put} /applications/:applicationId Changes the application values.
* @apiName PutApplication
* @apiGroup Applications
*
* @apiParam {String} applicationId ApplicationId id.
* @apiParam {String} name The new name.
* @apiParam {String} prefix Application prefix.
* @apiParam {String} host Application host.
* @apiParam {String[]} [anonymous] Express-like routes for whom unidentified (anonymous) requests will be forwarded anyway.
* The routes from this array will be added only if they're not present yet.
* @apiParam {Object[]} [look] Allow access to routes for specific users.
* @apiParam {String[]} [routes] All the applications routes that are not anonymous
*
* @apiPermission admin
*
* @apiParamExample {json} Request-Example:
* {
* "name": "Gleaner App."
* }
*
* @apiSuccess(200) Success.
*
* @apiSuccessExample Success-Response:
* HTTP/1.1 200 OK
* {
* "_id": "559a447831b7acec185bf513",
* "name": "Gleaner App.",
* "prefix": "gleaner",
* "host": "localhost:3300",
* "anonymous": [],
* "timeCreated": "2015-07-06T09:03:52.636Z"
* }
*
* @apiError(400) InvalidApplicationId You must provide a valid application id.
*
* @apiError(400) ApplicationNotFound No application with the given application id exists.
*
*/
// PUT /applications/:applicationId — update an application owned by the
// requesting user. Only whitelisted fields are modified.
router.put(applicationIdRoute, authentication.authorized, function (req, res, next) {
    if (!req.params.applicationId) {
        var err = new Error('You must provide a valid application id');
        err.status = 400;
        return next(err);
    }

    // The payload must match the application JSON schema.
    var validationObj = v.validate(req.body, appSchema);
    if (validationObj.errors && validationObj.errors.length > 0) {
        var errVal = new Error('Bad format: ' + validationObj.errors[0]);
        errVal.status = 400;
        return next(errVal);
    }

    var applicationId = req.params.applicationId || '';
    // Only the owner may modify the application.
    var query = {
        _id: applicationId,
        owner: req.user.username
    };
    var update = {
        $set: {}
    };
    if (req.body.name) {
        update.$set.name = req.body.name;
    }
    if (req.body.prefix) {
        update.$set.prefix = req.body.prefix;
    }
    if (req.body.host) {
        update.$set.host = req.body.host;
    }
    // BUGFIX: build $addToSet incrementally. Previously each branch assigned
    // a fresh object to update.$addToSet, so supplying both 'look' and
    // 'anonymous' in one request silently dropped the 'look' additions.
    if (isArray(req.body.look)) {
        update.$addToSet = update.$addToSet || {};
        update.$addToSet.look = {$each: req.body.look.filter(Boolean)};
    }
    if (isArray(req.body.anonymous)) {
        update.$addToSet = update.$addToSet || {};
        update.$addToSet.anonymous = {$each: req.body.anonymous.filter(Boolean)};
    }
    var options = {
        new: true,
        /*
         Since Mongoose 4.0.0 we can run validators
         (e.g. isURL validator for the host attribute of ApplicationSchema --> /schema/application)
         when performing updates with the following option.
         More info. can be found here http://mongoosejs.com/docs/validation.html
         */
        runValidators: true
    };
    req.app.db.model('application').findOneAndUpdate(query, update, options)
        .select(unselectedFields).exec(function (err, application) {
            if (err) {
                err.status = 403;
                return next(err);
            }
            if (!application) {
                err = new Error('No application with the given application id exists or ' +
                    'you don\'t have permission to modify the given application.');
                err.status = 400;
                return next(err);
            }
            res.json(application);
        });
});
/**
* @api {delete} /applications/:applicationId Removes the application.
* @apiName DeleteApplication
* @apiGroup Applications
*
* @apiParam {String} applicationId ApplicationId id.
*
* @apiPermission admin
*
* @apiSuccess(200) Success.
*
* @apiSuccessExample Success-Response:
* HTTP/1.1 200 OK
* {
* "message": "Success."
* }
*
* @apiError(400) ApplicationNotFound No application with the given application id exists.
*
*/
// DELETE /applications/:applicationId — remove an application owned by the
// requesting user and unregister its ACL resources.
router.delete(applicationIdRoute, authentication.authorized, function (req, res, next) {
    var applicationId = req.params.applicationId || '';
    var query = {
        _id: applicationId,
        owner: req.user.username
    };
    req.app.db.model('application').findOneAndRemove(query, function (err, app) {
        if (err) {
            return next(err);
        }
        if (!app) {
            err = new Error('No application with the given application id exists.');
            err.status = 400;
            return next(err);
        }
        // Drop every route the application had registered with the ACL.
        app.routes.forEach(function (route) {
            req.app.acl.removeResource(route);
        });
        res.sendDefaultSuccessMessage();
    });
});
/**
* @api {put} /applications/look/:prefix Changes the application look field.
* @apiName PutApplicationLook
* @apiGroup Applications
*
* @apiParam {String} prefix Application prefix.
* @apiParam {String} key Field name to check in the body of the request.
* @apiParam {String} user The user that have access to the URL.
* @apiParam {String[]} resources The value of the key field that can use the user in the URL route.
* @apiParam {String[]} methods URL methods allowed.
* @apiParam {String} url
*
* @apiPermission admin
*
* @apiParamExample {json} Request-Example (Single-User):
* {
* "key": "_id",
* "user": "dev",
* "resources": ["id1"],
* "methods": ["post", "put"],
* "url": "/url/*"
* }
* @apiParamExample {json} Request-Example (Multiple-User):
* {
* "key": "_id",
* "users": ["u1", "u2", "u3"],
* "resources": ["id1"],
* "methods": ["post", "put"],
* "url": "/url/*"
* }
*
* @apiSuccess(200) Success.
*
* @apiSuccessExample Success-Response:
* HTTP/1.1 200 OK
* {
* "_id": "559a447831b7acec185bf513",
* "name": "Gleaner App.",
* "prefix": "gleaner",
* "host": "localhost:3300",
* "anonymous": [],
* "look":[{
* "key":"_id",
* "permissions":{
* "dev":["id1","id2"]
* },
* "methods":["post","put"],
* "url":"/url/*"
* }],
* "timeCreated": "2015-07-06T09:03:52.636Z"
* }
*
* @apiError(400) InvalidApplicationId You must provide a valid application name.
*
* @apiError(400) ApplicationNotFound No application with the given application id exists.
*
*/
// Adds or extends "look" permission entries on an application identified by
// its prefix. A look entry pairs a URL pattern with a document key and a
// per-user map of permitted resource values. Admin-only (authentication.authorized).
router.put('/look/:prefix', authentication.authorized, function (req, res, next) {
    req.app.db.model('application').findByPrefix(req.params.prefix, function (err, results) {
        if (err) {
            return next(err);
        }
        // Validate the request body against the JSON schema before touching the DB.
        var validationObj = v.validate(req.body, putLookSchema);
        if (validationObj.errors && validationObj.errors.length > 0) {
            var errVal = new Error('Bad format: ' + validationObj.errors[0]);
            errVal.status = 400;
            return next(errVal);
        }
        // Accept either a single 'user' or an array 'users'; merge into one list.
        var users = [];
        if (req.body.user) {
            users.push(req.body.user);
        }
        if (req.body.users) {
            users = users.concat(req.body.users);
        }
        var applicationId = results._id;
        var query = {
            _id: applicationId
        };
        // existKey: a look entry for this URL (with the same key) already exists.
        // addNewUser: users with no permissions yet for that entry ($set path).
        // updateUser: users that already have permissions there ($addToSet path).
        var existKey = false;
        var addNewUser = [];
        var updateUser = [];
        var error;
        if (results.look) {
            results.look.forEach(function (lookObj) {
                if (lookObj.url === req.body.url) {
                    if (lookObj.key === req.body.key) {
                        if (lookObj.permissions) {
                            users.forEach(function(user) {
                                if (!lookObj.permissions[user]) {
                                    addNewUser.push(user);
                                }else {
                                    updateUser.push(user);
                                }
                            });
                        }
                        existKey = true;
                    } else {
                        // Same URL registered under a different key is rejected.
                        error = new Error('URL registered but with a different key!');
                        error.status = 400;
                    }
                }
            });
        }
        if (error) {
            return next(error);
        }
        var update = {};
        if (!existKey) {
            // No entry for this URL yet: push a brand-new look object with the
            // requested resources granted to every listed user.
            var objToAdd = {
                key: req.body.key,
                permissions: {},
                methods: req.body.methods,
                url: req.body.url
            };
            users.forEach(function(user) {
                objToAdd.permissions[user] = req.body.resources;
            });
            update = {
                $push: {
                }
            };
            update.$push.look = objToAdd;
        } else {
            // Entry exists: target it with the positional '$' operator by
            // matching 'look.url' in the query.
            query['look.url'] = req.body.url;
            if (updateUser.length !== 0) {
                // Existing users: append resources without duplicating ($addToSet/$each).
                update.$addToSet = {};
                updateUser.forEach(function(user) {
                    var resultField = 'look.$.permissions.' + user;
                    update.$addToSet[resultField] = { $each: req.body.resources };
                });
            }
            if (addNewUser.length !== 0) {
                // New users for this entry: create their permissions array outright.
                update.$set = {};
                addNewUser.forEach(function(user) {
                    var updateProp = 'look.$.permissions.' + user;
                    update.$set[updateProp] = req.body.resources;
                });
            }
        }
        var options = {
            new: true,
            /*
             Since Mongoose 4.0.0 we can run validators
             (e.g. isURL validator for the host attribute of ApplicationSchema --> /schema/application)
             when performing updates with the following option.
             More info. can be found here http://mongoosejs.com/docs/validation.html
             */
            runValidators: true
        };
        req.app.db.model('application').findOneAndUpdate(query, update, options).select(unselectedFields).exec(function (err, application) {
            if (err) {
                err.status = 403;
                return next(err);
            }
            if (!application) {
                err = new Error('No application with the given application id exists or ' +
                    'you don\'t have permission to modify the given application.');
                err.status = 400;
                return next(err);
            }
            // Respond with the updated look configuration only.
            res.json(application.look);
        });
    });
});
module.exports = router; | e-ucm/a2 | routes/applications.js | JavaScript | apache-2.0 | 25,370 |
//===--- IRGenSIL.cpp - Swift Per-Function IR Generation ------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// This file implements basic setup and teardown for the class which
// performs IR generation for function bodies.
//
//===----------------------------------------------------------------------===//
#define DEBUG_TYPE "irgensil"
#include "llvm/IR/Function.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/InlineAsm.h"
#include "llvm/IR/Intrinsics.h"
#include "llvm/ADT/MapVector.h"
#include "llvm/ADT/SmallBitVector.h"
#include "llvm/ADT/TinyPtrVector.h"
#include "llvm/Support/Debug.h"
#include "clang/AST/ASTContext.h"
#include "clang/Basic/TargetInfo.h"
#include "swift/Basic/Range.h"
#include "swift/Basic/STLExtras.h"
#include "swift/AST/ASTContext.h"
#include "swift/AST/IRGenOptions.h"
#include "swift/AST/Pattern.h"
#include "swift/AST/ParameterList.h"
#include "swift/AST/SubstitutionMap.h"
#include "swift/AST/Types.h"
#include "swift/SIL/Dominance.h"
#include "swift/SIL/PrettyStackTrace.h"
#include "swift/SIL/SILDebugScope.h"
#include "swift/SIL/SILDeclRef.h"
#include "swift/SIL/SILLinkage.h"
#include "swift/SIL/SILModule.h"
#include "swift/SIL/SILType.h"
#include "swift/SIL/SILVisitor.h"
#include "swift/SIL/InstructionUtils.h"
#include "clang/CodeGen/CodeGenABITypes.h"
#include "CallEmission.h"
#include "Explosion.h"
#include "GenArchetype.h"
#include "GenBuiltin.h"
#include "GenCall.h"
#include "GenCast.h"
#include "GenClass.h"
#include "GenConstant.h"
#include "GenEnum.h"
#include "GenExistential.h"
#include "GenFunc.h"
#include "GenHeap.h"
#include "GenMeta.h"
#include "GenObjC.h"
#include "GenOpaque.h"
#include "GenPoly.h"
#include "GenProto.h"
#include "GenStruct.h"
#include "GenTuple.h"
#include "GenType.h"
#include "IRGenDebugInfo.h"
#include "IRGenModule.h"
#include "NativeConventionSchema.h"
#include "ReferenceTypeInfo.h"
#include "WeakTypeInfo.h"
using namespace swift;
using namespace irgen;
namespace {
class LoweredValue;
/// Represents a statically-known function as a SIL thin function value.
class StaticFunction {
  /// The IR function symbol; callable directly, without dynamic dispatch.
  llvm::Function *Function;
  /// Foreign calling-convention details, if the function is imported.
  ForeignFunctionInfo ForeignInfo;
  /// The function's native SIL representation.
  SILFunctionTypeRepresentation Rep;
public:
  StaticFunction(llvm::Function *fn, ForeignFunctionInfo foreign,
                 SILFunctionTypeRepresentation representation)
      : Function(fn), ForeignInfo(foreign), Rep(representation) {}

  llvm::Function *getFunction() const { return Function; }
  SILFunctionTypeRepresentation getRepresentation() const { return Rep; }
  const ForeignFunctionInfo &getForeignInfo() const { return ForeignInfo; }

  /// Produce the single llvm::Value used when this function appears in an
  /// Explosion.
  llvm::Value *getExplosionValue(IRGenFunction &IGF) const;
};
/// Represents a SIL value lowered to IR, in one of these forms:
/// - an Address, corresponding to a SIL address value;
/// - an Explosion of (unmanaged) Values, corresponding to a SIL "register"; or
/// - a CallEmission for a partially-applied curried function or method.
class LoweredValue {
public:
  enum class Kind {
    /// The first two LoweredValue kinds correspond to a SIL address value.
    ///
    /// The LoweredValue of an existential alloc_stack keeps an owning container
    /// in addition to the address of the allocated buffer.
    /// Depending on the allocated type, the container may be equal to the
    /// buffer itself (for types with known sizes) or it may be the address
    /// of a fixed-size container which points to the heap-allocated buffer.
    /// In this case the address-part may be null, which means that the buffer
    /// is not allocated yet.
    ContainedAddress,
    /// The LoweredValue of a resilient, generic, or loadable typed alloc_stack
    /// keeps an optional stackrestore point in addition to the address of the
    /// allocated buffer. For all other address values the stackrestore point is
    /// just null.
    /// If the stackrestore point is set (currently, this might happen for
    /// opaque types: generic and resilient) the deallocation of the stack must
    /// reset the stack pointer to this point.
    Address,
    /// The following kinds correspond to SIL non-address values.
    Value_First,
    /// A normal value, represented as an exploded array of llvm Values.
    Explosion = Value_First,
    /// A @box together with the address of the box value.
    BoxWithAddress,
    /// A value that represents a statically-known function symbol that
    /// can be called directly, represented as a StaticFunction.
    StaticFunction,
    /// A value that represents an Objective-C method that must be called with
    /// a form of objc_msgSend.
    ObjCMethod,
    Value_Last = ObjCMethod,
  };
  /// Discriminator selecting which union member below is active.
  Kind kind;
private:
  using ExplosionVector = SmallVector<llvm::Value *, 4>;
  // Manually managed tagged union: every constructor placement-constructs
  // exactly one member, and the destructor/move operations dispatch on
  // 'kind' to destroy/move the matching member.
  union {
    ContainedAddress containedAddress;
    StackAddress address;
    OwnedAddress boxWithAddress;
    struct {
      ExplosionVector values;
    } explosion;
    StaticFunction staticFunction;
    ObjCMethod objcMethod;
  };
public:
  /// Create an address value without a stack restore point.
  LoweredValue(const Address &address)
    : kind(Kind::Address), address(address)
  {}
  /// Create an address value with an optional stack restore point.
  LoweredValue(const StackAddress &address)
    : kind(Kind::Address), address(address)
  {}
  enum ContainerForUnallocatedAddress_t { ContainerForUnallocatedAddress };
  /// Create an address value for an alloc_stack, consisting of a container and
  /// a not yet allocated buffer.
  LoweredValue(const Address &container, ContainerForUnallocatedAddress_t)
    : kind(Kind::ContainedAddress), containedAddress(container, Address())
  {}
  /// Create an address value for an alloc_stack, consisting of a container and
  /// the address of the allocated buffer.
  LoweredValue(const ContainedAddress &address)
    : kind(Kind::ContainedAddress), containedAddress(address)
  {}
  LoweredValue(StaticFunction &&staticFunction)
    : kind(Kind::StaticFunction), staticFunction(std::move(staticFunction))
  {}
  LoweredValue(ObjCMethod &&objcMethod)
    : kind(Kind::ObjCMethod), objcMethod(std::move(objcMethod))
  {}
  /// Create an exploded value; claims (consumes) all elements of \p e.
  LoweredValue(Explosion &e)
    : kind(Kind::Explosion), explosion{{}} {
    auto Elts = e.claimAll();
    explosion.values.append(Elts.begin(), Elts.end());
  }
  LoweredValue(const OwnedAddress &boxWithAddress)
    : kind(Kind::BoxWithAddress), boxWithAddress(boxWithAddress)
  {}
  /// Move constructor: placement-constructs the active member from \p lv.
  LoweredValue(LoweredValue &&lv)
    : kind(lv.kind)
  {
    switch (kind) {
    case Kind::ContainedAddress:
      ::new (&containedAddress) ContainedAddress(std::move(lv.containedAddress));
      break;
    case Kind::Address:
      ::new (&address) StackAddress(std::move(lv.address));
      break;
    case Kind::Explosion:
      ::new (&explosion.values) ExplosionVector(std::move(lv.explosion.values));
      break;
    case Kind::BoxWithAddress:
      ::new (&boxWithAddress) OwnedAddress(std::move(lv.boxWithAddress));
      break;
    case Kind::StaticFunction:
      ::new (&staticFunction) StaticFunction(std::move(lv.staticFunction));
      break;
    case Kind::ObjCMethod:
      ::new (&objcMethod) ObjCMethod(std::move(lv.objcMethod));
      break;
    }
  }
  /// Move assignment via destroy + placement move-construct; self-assignment
  /// is forbidden (asserted) because the destructor would wipe the source.
  LoweredValue &operator=(LoweredValue &&lv) {
    assert(this != &lv);
    this->~LoweredValue();
    ::new (this) LoweredValue(std::move(lv));
    return *this;
  }
  bool isAddress() const {
    return kind == Kind::Address && address.getAddress().isValid();
  }
  bool isUnallocatedAddressInBuffer() const {
    return kind == Kind::ContainedAddress &&
           !containedAddress.getAddress().isValid();
  }
  bool isValue() const {
    return kind >= Kind::Value_First && kind <= Kind::Value_Last;
  }
  bool isBoxWithAddress() const {
    return kind == Kind::BoxWithAddress;
  }
  Address getAddress() const {
    assert(isAddress() && "not an allocated address");
    return address.getAddress();
  }
  StackAddress getStackAddress() const {
    assert(isAddress() && "not an allocated address");
    return address;
  }
  Address getContainerOfAddress() const {
    assert(kind == Kind::ContainedAddress);
    assert(containedAddress.getContainer().isValid() && "address has no container");
    return containedAddress.getContainer();
  }
  Address getAddressInContainer() const {
    assert(kind == Kind::ContainedAddress);
    assert(containedAddress.getContainer().isValid() &&
           "address has no container");
    return containedAddress.getAddress();
  }
  /// Append the value's scalar components to \p ex.
  void getExplosion(IRGenFunction &IGF, Explosion &ex) const;
  Explosion getExplosion(IRGenFunction &IGF) const {
    Explosion e;
    getExplosion(IGF, e);
    return e;
  }
  Address getAddressOfBox() const {
    assert(kind == Kind::BoxWithAddress);
    return boxWithAddress.getAddress();
  }
  /// Return the value's single scalar component; asserts elsewhere if the
  /// explosion has more than one element.
  llvm::Value *getSingletonExplosion(IRGenFunction &IGF) const;
  const StaticFunction &getStaticFunction() const {
    assert(kind == Kind::StaticFunction && "not a static function");
    return staticFunction;
  }
  const ObjCMethod &getObjCMethod() const {
    assert(kind == Kind::ObjCMethod && "not an objc method");
    return objcMethod;
  }
  /// Destroy the active union member, dispatching on 'kind'.
  ~LoweredValue() {
    switch (kind) {
    case Kind::Address:
      address.~StackAddress();
      break;
    case Kind::ContainedAddress:
      containedAddress.~ContainedAddress();
      break;
    case Kind::Explosion:
      explosion.values.~ExplosionVector();
      break;
    case Kind::BoxWithAddress:
      boxWithAddress.~OwnedAddress();
      break;
    case Kind::StaticFunction:
      staticFunction.~StaticFunction();
      break;
    case Kind::ObjCMethod:
      objcMethod.~ObjCMethod();
      break;
    }
  }
};
// Per-block list of LLVM phi nodes, one per SIL basic-block argument.
using PHINodeVector = llvm::TinyPtrVector<llvm::PHINode*>;

/// Represents a lowered SIL basic block. This keeps track
/// of SIL branch arguments so that they can be lowered to LLVM phi nodes.
struct LoweredBB {
  llvm::BasicBlock *bb;
  PHINodeVector phis;

  LoweredBB() = default;
  explicit LoweredBB(llvm::BasicBlock *bb, PHINodeVector &&phis)
    : bb(bb), phis(std::move(phis))
  {}
};
/// Visits a SIL Function and generates LLVM IR.
class IRGenSILFunction :
public IRGenFunction, public SILInstructionVisitor<IRGenSILFunction>
{
public:
  /// Maps each SIL value to its lowered IR form.
  llvm::DenseMap<SILValue, LoweredValue> LoweredValues;
  /// Cache of per-type lowered forms for SILUndef values.
  llvm::DenseMap<SILType, LoweredValue> LoweredUndefs;

  /// All alloc_ref instructions which allocate the object on the stack.
  llvm::SmallPtrSet<SILInstruction *, 8> StackAllocs;

  /// With closure captures it is actually possible to have two function
  /// arguments that both have the same name. Until this is fixed, we need to
  /// also hash the ArgNo here.
  typedef std::pair<unsigned, std::pair<const SILDebugScope *, StringRef>>
    StackSlotKey;
  /// Keeps track of the mapping of source variables to -O0 shadow copy allocas.
  llvm::SmallDenseMap<StackSlotKey, Address, 8> ShadowStackSlots;
  /// Synthesized names for variables without a usable source name.
  llvm::SmallDenseMap<Decl *, SmallString<4>, 8> AnonymousVariables;
  /// To avoid inserting elements into ValueDomPoints twice.
  llvm::SmallDenseSet<llvm::Instruction *, 8> ValueVariables;
  /// Holds the DominancePoint of values that are storage for a source variable.
  SmallVector<std::pair<llvm::Instruction *, DominancePoint>, 8> ValueDomPoints;
  unsigned NumAnonVars = 0;
  unsigned NumCondFails = 0;

  /// Accumulative amount of allocated bytes on the stack. Used to limit the
  /// size for stack promoted objects.
  /// We calculate it on demand, so that we don't have to do it if the
  /// function does not have any stack promoted allocations.
  int EstimatedStackSize = -1;

  llvm::MapVector<SILBasicBlock *, LoweredBB> LoweredBBs;

  // Destination basic blocks for condfail traps.
  llvm::SmallVector<llvm::BasicBlock *, 8> FailBBs;

  /// The SIL function currently being emitted.
  SILFunction *CurSILFn;
  /// Address of the sret buffer when the function returns indirectly.
  Address IndirectReturn;

  // A cached dominance analysis.
  std::unique_ptr<DominanceInfo> Dominance;

  IRGenSILFunction(IRGenModule &IGM, SILFunction *f);
  ~IRGenSILFunction();

  /// Generate IR for the SIL Function.
  void emitSILFunction();

  /// Calculates EstimatedStackSize.
  void estimateStackSize();
  /// Record the lowered form of a SIL value; each value may be set only once.
  void setLoweredValue(SILValue v, LoweredValue &&lv) {
    auto inserted = LoweredValues.insert({v, std::move(lv)});
    assert(inserted.second && "already had lowered value for sil value?!");
    (void)inserted;
  }

  /// Create a new Address corresponding to the given SIL address value.
  void setLoweredAddress(SILValue v, const Address &address) {
    assert(v->getType().isAddress() && "address for non-address value?!");
    setLoweredValue(v, address);
  }

  /// Like setLoweredAddress, but also carries an optional stack restore point.
  void setLoweredStackAddress(SILValue v, const StackAddress &address) {
    assert(v->getType().isAddress() && "address for non-address value?!");
    setLoweredValue(v, address);
  }

  /// Record an alloc_stack buffer whose payload has not been allocated yet;
  /// only the owning container is known at this point.
  void setContainerOfUnallocatedAddress(SILValue v,
                                        const Address &buffer) {
    assert(v->getType().isAddress() && "address for non-address value?!");
    setLoweredValue(v,
      LoweredValue(buffer, LoweredValue::ContainerForUnallocatedAddress));
  }

  /// Fill in the buffer address for a previously-recorded unallocated
  /// contained address (see setContainerOfUnallocatedAddress).
  void overwriteAllocatedAddress(SILValue v, const Address &address) {
    assert(v->getType().isAddress() && "address for non-address value?!");
    auto it = LoweredValues.find(v);
    assert(it != LoweredValues.end() && "no existing entry for overwrite?");
    assert(it->second.isUnallocatedAddressInBuffer() &&
           "not an unallocated address");
    it->second = ContainedAddress(it->second.getContainerOfAddress(), address);
  }

  void setAllocatedAddressForBuffer(SILValue v, const Address &allocedAddress);

  /// Create a new Explosion corresponding to the given SIL value.
  void setLoweredExplosion(SILValue v, Explosion &e) {
    assert(v->getType().isObject() && "explosion for address value?!");
    setLoweredValue(v, LoweredValue(e));
  }

  /// Record a @box value together with the address of its payload.
  void setLoweredBox(SILValue v, const OwnedAddress &box) {
    assert(v->getType().isObject() && "box for address value?!");
    setLoweredValue(v, LoweredValue(box));
  }

  /// Create a new StaticFunction corresponding to the given SIL value.
  void setLoweredStaticFunction(SILValue v,
                                llvm::Function *f,
                                SILFunctionTypeRepresentation rep,
                                ForeignFunctionInfo foreignInfo) {
    assert(v->getType().isObject() && "function for address value?!");
    assert(v->getType().is<SILFunctionType>() &&
           "function for non-function value?!");
    setLoweredValue(v, StaticFunction{f, foreignInfo, rep});
  }

  /// Create a new Objective-C method corresponding to the given SIL value.
  void setLoweredObjCMethod(SILValue v, SILDeclRef method) {
    assert(v->getType().isObject() && "function for address value?!");
    assert(v->getType().is<SILFunctionType>() &&
           "function for non-function value?!");
    setLoweredValue(v, ObjCMethod{method, SILType(), false});
  }

  /// Create a new Objective-C method corresponding to the given SIL value that
  /// starts its search from the given search type.
  ///
  /// Unlike \c setLoweredObjCMethod, which finds the method in the actual
  /// runtime type of the object, this routine starts at the static type of the
  /// object and searches up the class hierarchy (toward superclasses).
  ///
  /// \param searchType The class from which the Objective-C runtime will start
  /// its search for a method.
  ///
  /// \param startAtSuper Whether we want to start at the superclass of the
  /// static type (vs. the static type itself).
  void setLoweredObjCMethodBounded(SILValue v, SILDeclRef method,
                                   SILType searchType, bool startAtSuper) {
    assert(v->getType().isObject() && "function for address value?!");
    assert(v->getType().is<SILFunctionType>() &&
           "function for non-function value?!");
    setLoweredValue(v, ObjCMethod{method, searchType, startAtSuper});
  }
  /// Return (creating and caching on first use) the lowered form of a
  /// SILUndef of type \p t: an undef pointer for address types, or an
  /// explosion of undef scalars for object types.
  LoweredValue &getUndefLoweredValue(SILType t) {
    auto found = LoweredUndefs.find(t);
    if (found != LoweredUndefs.end())
      return found->second;

    auto &ti = getTypeInfo(t);
    switch (t.getCategory()) {
    case SILValueCategory::Address: {
      Address undefAddr = ti.getAddressForPointer(
                  llvm::UndefValue::get(ti.getStorageType()->getPointerTo()));
      LoweredUndefs.insert({t, LoweredValue(undefAddr)});
      break;
    }

    case SILValueCategory::Object: {
      // Build one undef scalar per element of the type's explosion schema.
      auto schema = ti.getSchema();
      Explosion e;
      for (auto &elt : schema) {
        assert(!elt.isAggregate()
               && "non-scalar element in loadable type schema?!");
        e.add(llvm::UndefValue::get(elt.getScalarType()));
      }
      LoweredUndefs.insert({t, LoweredValue(e)});
      break;
    }
    }

    // Re-find after insertion; the switch above inserted exactly one entry.
    found = LoweredUndefs.find(t);
    assert(found != LoweredUndefs.end());
    return found->second;
  }
  /// Get the LoweredValue corresponding to the given SIL value, which must
  /// have been lowered.
  LoweredValue &getLoweredValue(SILValue v) {
    if (isa<SILUndef>(v))
      return getUndefLoweredValue(v->getType());

    auto foundValue = LoweredValues.find(v);
    assert(foundValue != LoweredValues.end() &&
           "no lowered explosion for sil value!");
    return foundValue->second;
  }

  /// Get the Address of a SIL value of address type, which must have been
  /// lowered.
  Address getLoweredAddress(SILValue v) {
    // Plain addresses and contained (alloc_stack) addresses both qualify.
    if (getLoweredValue(v).kind == LoweredValue::Kind::Address)
      return getLoweredValue(v).getAddress();
    else
      return getLoweredValue(v).getAddressInContainer();
  }

  StackAddress getLoweredStackAddress(SILValue v) {
    return getLoweredValue(v).getStackAddress();
  }

  /// Add the unmanaged LLVM values lowered from a SIL value to an explosion.
  void getLoweredExplosion(SILValue v, Explosion &e) {
    getLoweredValue(v).getExplosion(*this, e);
  }
  /// Create an Explosion containing the unmanaged LLVM values lowered from a
  /// SIL value.
  Explosion getLoweredExplosion(SILValue v) {
    return getLoweredValue(v).getExplosion(*this);
  }

  /// Return the single member of the lowered explosion for the
  /// given SIL value.
  llvm::Value *getLoweredSingletonExplosion(SILValue v) {
    return getLoweredValue(v).getSingletonExplosion(*this);
  }

  LoweredBB &getLoweredBB(SILBasicBlock *bb) {
    auto foundBB = LoweredBBs.find(bb);
    assert(foundBB != LoweredBBs.end() && "no llvm bb for sil bb?!");
    return foundBB->second;
  }
StringRef getOrCreateAnonymousVarName(VarDecl *Decl) {
llvm::SmallString<4> &Name = AnonymousVariables[Decl];
if (Name.empty()) {
{
llvm::raw_svector_ostream S(Name);
S << '_' << NumAnonVars++;
}
AnonymousVariables.insert({Decl, Name});
}
return Name;
}
template <class DebugVarCarryingInst>
StringRef getVarName(DebugVarCarryingInst *i) {
StringRef Name = i->getVarInfo().Name;
// The $match variables generated by the type checker are not
// guaranteed to be unique within their scope, but they have
// unique VarDecls.
if ((Name.empty() || Name == "$match") && i->getDecl())
return getOrCreateAnonymousVarName(i->getDecl());
return Name;
}
  /// At -Onone, forcibly keep all LLVM values that are tracked by
  /// debug variables alive by inserting an empty inline assembler
  /// expression depending on the value in the blocks dominated by the
  /// value.
  void emitDebugVariableRangeExtension(const SILBasicBlock *CurBB) {
    if (IGM.IRGen.Opts.Optimize)
      return;
    for (auto &Variable : ValueDomPoints) {
      auto VarDominancePoint = Variable.second;
      llvm::Value *Storage = Variable.first;
      if (getActiveDominancePoint() == VarDominancePoint ||
          isActiveDominancePointDominatedBy(VarDominancePoint)) {
        // NOTE(review): ArgTys is a single pointer assigned via list-init;
        // passing it to FunctionType::get relies on ArrayRef's one-element
        // implicit constructor -- confirm this is intentional vs. an array.
        llvm::Type *ArgTys;
        auto *Ty = Storage->getType();
        // Vectors, Pointers and Floats are expected to fit into a register.
        if (Ty->isPointerTy() || Ty->isFloatingPointTy() || Ty->isVectorTy())
          ArgTys = { Ty };
        else {
          // If this is not a scalar or vector type, we can't handle it.
          if (isa<llvm::CompositeType>(Ty))
            continue;
          // The storage is guaranteed to be no larger than the register width.
          // Extend the storage so it would fit into a register.
          llvm::Type *IntTy;
          switch (IGM.getClangASTContext().getTargetInfo().getRegisterWidth()) {
          case 64: IntTy = IGM.Int64Ty; break;
          case 32: IntTy = IGM.Int32Ty; break;
          default: llvm_unreachable("unsupported register width");
          }
          ArgTys = { IntTy };
          Storage = Builder.CreateZExtOrBitCast(Storage, IntTy);
        }
        // Emit an empty inline assembler expression depending on the register.
        auto *AsmFnTy = llvm::FunctionType::get(IGM.VoidTy, ArgTys, false);
        auto *InlineAsm = llvm::InlineAsm::get(AsmFnTy, "", "r", true);
        Builder.CreateCall(InlineAsm, Storage);
        // Propagate the dbg.value intrinsics into the later basic blocks.  Note
        // that this shouldn't be necessary. LiveDebugValues should be doing
        // this but can't in general because it currently only tracks register
        // locations.
        llvm::Instruction *Value = Variable.first;
        auto It = llvm::BasicBlock::iterator(Value);
        auto *BB = Value->getParent();
        auto *CurBB = Builder.GetInsertBlock();
        if (BB != CurBB)
          // Copy every dbg.value describing this storage into the current
          // block; they are expected to appear contiguously after the value.
          for (auto I = std::next(It), E = BB->end(); I != E; ++I) {
            auto *DVI = dyn_cast<llvm::DbgValueInst>(I);
            if (DVI && DVI->getValue() == Value)
              IGM.DebugInfo->getBuilder().insertDbgValueIntrinsic(
                  DVI->getValue(), 0, DVI->getVariable(), DVI->getExpression(),
                  DVI->getDebugLoc(), &*CurBB->getFirstInsertionPt());
            else
              // Found all dbg.value intrinsics describing this location.
              break;
          }
      }
    }
  }
/// Account for bugs in LLVM.
///
/// - The LLVM type legalizer currently doesn't update debug
/// intrinsics when a large value is split up into smaller
/// pieces. Note that this heuristic as a bit too conservative
/// on 32-bit targets as it will also fire for doubles.
///
/// - CodeGen Prepare may drop dbg.values pointing to PHI instruction.
bool needsShadowCopy(llvm::Value *Storage) {
return (IGM.DataLayout.getTypeSizeInBits(Storage->getType()) >
IGM.getClangASTContext().getTargetInfo().getRegisterWidth()) ||
isa<llvm::PHINode>(Storage);
}
  /// At -Onone, emit a shadow copy of an Address in an alloca, so the
  /// register allocator doesn't elide the dbg.value intrinsic when
  /// register pressure is high. There is a trade-off to this: With
  /// shadow copies, we lose the precise lifetime.
  llvm::Value *emitShadowCopy(llvm::Value *Storage,
                              const SILDebugScope *Scope,
                              StringRef Name, unsigned ArgNo,
                              Alignment Align = Alignment(0)) {
    auto Ty = Storage->getType();
    // Never emit shadow copies when optimizing, or if already on the stack.
    if (IGM.IRGen.Opts.Optimize ||
        isa<llvm::AllocaInst>(Storage) ||
        isa<llvm::UndefValue>(Storage) ||
        Ty == IGM.RefCountedPtrTy) // No debug info is emitted for refcounts.
      return Storage;

    // Function arguments (ArgNo != 0) always get a shadow copy; only local
    // variables (ArgNo == 0) may use debug value range extension instead.
    if (ArgNo == 0)
      // Otherwise only if debug value range extension is not feasible.
      if (!needsShadowCopy(Storage)) {
        // Mark for debug value range extension unless this is a constant.
        if (auto *Value = dyn_cast<llvm::Instruction>(Storage))
          if (ValueVariables.insert(Value).second)
            ValueDomPoints.push_back({Value, getActiveDominancePoint()});
        return Storage;
      }

    if (Align.isZero())
      Align = IGM.getPointerAlignment();

    // Reuse one stack slot per (ArgNo, scope, name) triple across emissions.
    auto &Alloca = ShadowStackSlots[{ArgNo, {Scope, Name}}];
    if (!Alloca.isValid())
      Alloca = createAlloca(Ty, Align, Name+".addr");

    ArtificialLocation AutoRestore(getDebugScope(), IGM.DebugInfo, Builder);
    Builder.CreateStore(Storage, Alloca.getAddress(), Align);
    return Alloca.getAddress();
  }
llvm::Value *emitShadowCopy(Address Storage, const SILDebugScope *Scope,
StringRef Name, unsigned ArgNo) {
return emitShadowCopy(Storage.getAddress(), Scope, Name, ArgNo,
Storage.getAlignment());
}
  /// Shadow-copy a multi-element explosion: at -O0, spill all elements into a
  /// single aggregate alloca and emit its address into \p copy; otherwise
  /// pass the values through unchanged.
  void emitShadowCopy(ArrayRef<llvm::Value *> vals, const SILDebugScope *Scope,
                      StringRef Name, unsigned ArgNo,
                      llvm::SmallVectorImpl<llvm::Value *> &copy) {
    // Only do this at -O0.
    if (IGM.IRGen.Opts.Optimize) {
      copy.append(vals.begin(), vals.end());
      return;
    }

    // Single or empty values.
    if (vals.size() <= 1) {
      for (auto val : vals)
        copy.push_back(emitShadowCopy(val, Scope, Name, ArgNo));
      return;
    }

    // Create a single aggregate alloca for explosions.
    // TODO: why are we doing this instead of using the TypeInfo?
    llvm::StructType *aggregateType = [&] {
      SmallVector<llvm::Type *, 8> eltTypes;
      for (auto val : vals)
        eltTypes.push_back(val->getType());
      return llvm::StructType::get(IGM.LLVMContext, eltTypes);
    }();

    auto layout = IGM.DataLayout.getStructLayout(aggregateType);
    Alignment align(layout->getAlignment());

    auto alloca = createAlloca(aggregateType, align, Name + ".debug");
    ArtificialLocation AutoRestore(getDebugScope(), IGM.DebugInfo, Builder);
    // Store each element at its layout-computed offset within the aggregate.
    size_t i = 0;
    for (auto val : vals) {
      auto addr = Builder.CreateStructGEP(alloca, i,
                                          Size(layout->getElementOffset(i)));
      Builder.CreateStore(val, addr);
      i++;
    }
    copy.push_back(alloca.getAddress());
  }
/// Determine whether a generic variable has been inlined.
static bool isInlinedGeneric(VarDecl *VarDecl, const SILDebugScope *DS) {
if (!DS->InlinedCallSite)
return false;
if (VarDecl->hasType())
return VarDecl->getType()->hasArchetype();
return VarDecl->getInterfaceType()->hasTypeParameter();
}
  /// Emit debug info for a function argument or a local variable.
  ///
  /// \param Storage   Lowered storage (value or address) of the variable.
  /// \param ArgNo     1-based argument index, or 0 for a local variable.
  template <typename StorageType>
  void emitDebugVariableDeclaration(StorageType Storage,
                                    DebugTypeInfo Ty,
                                    SILType SILTy,
                                    const SILDebugScope *DS,
                                    VarDecl *VarDecl,
                                    StringRef Name,
                                    unsigned ArgNo = 0,
                                    IndirectionKind Indirection = DirectValue) {
    // Force all archetypes referenced by the type to be bound by this point.
    // TODO: just make sure that we have a path to them that the debug info
    //       can follow.

    // FIXME: The debug info type of all inlined instances of a variable must be
    // the same as the type of the abstract variable.
    if (isInlinedGeneric(VarDecl, DS))
      return;

    auto runtimeTy = getRuntimeReifiedType(IGM,
                                           Ty.getType()->getCanonicalType());
    if (!IGM.IRGen.Opts.Optimize && runtimeTy->hasArchetype())
      runtimeTy.visit([&](CanType t) {
        if (auto archetype = dyn_cast<ArchetypeType>(t))
          emitTypeMetadataRef(archetype);
      });

    assert(IGM.DebugInfo && "debug info not enabled");
    if (ArgNo) {
      // Arguments are emitted under a prologue location so the debugger
      // attributes them to the function entry.
      PrologueLocation AutoRestore(IGM.DebugInfo, Builder);
      IGM.DebugInfo->emitVariableDeclaration(Builder, Storage, Ty, DS, VarDecl,
                                             Name, ArgNo, Indirection);
    } else
      IGM.DebugInfo->emitVariableDeclaration(Builder, Storage, Ty, DS, VarDecl,
                                             Name, 0, Indirection);
  }
void emitFailBB() {
if (!FailBBs.empty()) {
// Move the trap basic blocks to the end of the function.
for (auto *FailBB : FailBBs) {
auto &BlockList = CurFn->getBasicBlockList();
BlockList.splice(BlockList.end(), BlockList, FailBB);
}
}
}
  //===--------------------------------------------------------------------===//
  // SIL instruction lowering
  //===--------------------------------------------------------------------===//

  void visitSILBasicBlock(SILBasicBlock *BB);

  void emitErrorResultVar(SILResultInfo ErrorInfo, DebugValueInst *DbgValue);
  void emitDebugInfoForAllocStack(AllocStackInst *i, const TypeInfo &type,
                                  llvm::Value *addr);
  // Allocation, application, and reference instructions.
  void visitAllocStackInst(AllocStackInst *i);
  void visitAllocRefInst(AllocRefInst *i);
  void visitAllocRefDynamicInst(AllocRefDynamicInst *i);
  void visitAllocBoxInst(AllocBoxInst *i);

  void visitProjectBoxInst(ProjectBoxInst *i);

  void visitApplyInst(ApplyInst *i);
  void visitTryApplyInst(TryApplyInst *i);
  void visitFullApplySite(FullApplySite i);
  void visitPartialApplyInst(PartialApplyInst *i);
  void visitBuiltinInst(BuiltinInst *i);

  void visitFunctionRefInst(FunctionRefInst *i);
  void visitAllocGlobalInst(AllocGlobalInst *i);
  void visitGlobalAddrInst(GlobalAddrInst *i);

  void visitIntegerLiteralInst(IntegerLiteralInst *i);
  void visitFloatLiteralInst(FloatLiteralInst *i);
  void visitStringLiteralInst(StringLiteralInst *i);

  void visitLoadInst(LoadInst *i);
  void visitStoreInst(StoreInst *i);
  // The following instructions only occur in raw SIL and must have been
  // eliminated before IRGen runs on canonical SIL.
  void visitAssignInst(AssignInst *i) {
    llvm_unreachable("assign is not valid in canonical SIL");
  }
  void visitMarkUninitializedInst(MarkUninitializedInst *i) {
    llvm_unreachable("mark_uninitialized is not valid in canonical SIL");
  }
  void visitMarkUninitializedBehaviorInst(MarkUninitializedBehaviorInst *i) {
    llvm_unreachable("mark_uninitialized_behavior is not valid in canonical SIL");
  }
  void visitMarkFunctionEscapeInst(MarkFunctionEscapeInst *i) {
    llvm_unreachable("mark_function_escape is not valid in canonical SIL");
  }
  void visitLoadBorrowInst(LoadBorrowInst *i) {
    llvm_unreachable("unimplemented");
  }
  void visitDebugValueInst(DebugValueInst *i);
  void visitDebugValueAddrInst(DebugValueAddrInst *i);
  void visitLoadWeakInst(LoadWeakInst *i);
  void visitStoreWeakInst(StoreWeakInst *i);
  void visitRetainValueInst(RetainValueInst *i);
  void visitCopyValueInst(CopyValueInst *i);
  void visitCopyUnownedValueInst(CopyUnownedValueInst *i) {
    llvm_unreachable("unimplemented");
  }
  void visitReleaseValueInst(ReleaseValueInst *i);
  void visitDestroyValueInst(DestroyValueInst *i);
  void visitAutoreleaseValueInst(AutoreleaseValueInst *i);
  void visitSetDeallocatingInst(SetDeallocatingInst *i);
  void visitStructInst(StructInst *i);
  void visitTupleInst(TupleInst *i);
  void visitEnumInst(EnumInst *i);
  void visitInitEnumDataAddrInst(InitEnumDataAddrInst *i);
  void visitSelectEnumInst(SelectEnumInst *i);
  void visitSelectEnumAddrInst(SelectEnumAddrInst *i);
  void visitSelectValueInst(SelectValueInst *i);
  void visitUncheckedEnumDataInst(UncheckedEnumDataInst *i);
  void visitUncheckedTakeEnumDataAddrInst(UncheckedTakeEnumDataAddrInst *i);
  void visitInjectEnumAddrInst(InjectEnumAddrInst *i);
  void visitObjCProtocolInst(ObjCProtocolInst *i);
  void visitMetatypeInst(MetatypeInst *i);
  void visitValueMetatypeInst(ValueMetatypeInst *i);
  void visitExistentialMetatypeInst(ExistentialMetatypeInst *i);
  void visitTupleExtractInst(TupleExtractInst *i);
  void visitTupleElementAddrInst(TupleElementAddrInst *i);
  void visitStructExtractInst(StructExtractInst *i);
  void visitStructElementAddrInst(StructElementAddrInst *i);
  void visitRefElementAddrInst(RefElementAddrInst *i);
  void visitRefTailAddrInst(RefTailAddrInst *i);

  // Method lookup instructions.
  void visitClassMethodInst(ClassMethodInst *i);
  void visitSuperMethodInst(SuperMethodInst *i);
  void visitWitnessMethodInst(WitnessMethodInst *i);
  void visitDynamicMethodInst(DynamicMethodInst *i);

  void visitAllocValueBufferInst(AllocValueBufferInst *i);
  void visitProjectValueBufferInst(ProjectValueBufferInst *i);
  void visitDeallocValueBufferInst(DeallocValueBufferInst *i);

  // Existential instructions.
  void visitOpenExistentialAddrInst(OpenExistentialAddrInst *i);
  void visitOpenExistentialMetatypeInst(OpenExistentialMetatypeInst *i);
  void visitOpenExistentialRefInst(OpenExistentialRefInst *i);
  void visitOpenExistentialOpaqueInst(OpenExistentialOpaqueInst *i);
  void visitInitExistentialAddrInst(InitExistentialAddrInst *i);
  void visitInitExistentialOpaqueInst(InitExistentialOpaqueInst *i);
  void visitInitExistentialMetatypeInst(InitExistentialMetatypeInst *i);
  void visitInitExistentialRefInst(InitExistentialRefInst *i);
  void visitDeinitExistentialAddrInst(DeinitExistentialAddrInst *i);
  void visitDeinitExistentialOpaqueInst(DeinitExistentialOpaqueInst *i);

  void visitAllocExistentialBoxInst(AllocExistentialBoxInst *i);
  void visitOpenExistentialBoxInst(OpenExistentialBoxInst *i);
  void visitProjectExistentialBoxInst(ProjectExistentialBoxInst *i);
  void visitDeallocExistentialBoxInst(DeallocExistentialBoxInst *i);

  void visitProjectBlockStorageInst(ProjectBlockStorageInst *i);
  void visitInitBlockStorageHeaderInst(InitBlockStorageHeaderInst *i);

  void visitFixLifetimeInst(FixLifetimeInst *i);
  void visitEndLifetimeInst(EndLifetimeInst *i) {
    llvm_unreachable("unimplemented");
  }
  void
  visitUncheckedOwnershipConversionInst(UncheckedOwnershipConversionInst *i) {
    llvm_unreachable("unimplemented");
  }
  void visitBeginBorrowInst(BeginBorrowInst *i) {
    llvm_unreachable("unimplemented");
  }
  void visitEndBorrowInst(EndBorrowInst *i) {
    llvm_unreachable("unimplemented");
  }
  void visitEndBorrowArgumentInst(EndBorrowArgumentInst *i) {
    llvm_unreachable("unimplemented");
  }
  void visitStoreBorrowInst(StoreBorrowInst *i) {
    llvm_unreachable("unimplemented");
  }
  void visitBeginAccessInst(BeginAccessInst *i);
  void visitEndAccessInst(EndAccessInst *i);
  void visitUnmanagedRetainValueInst(UnmanagedRetainValueInst *i) {
    llvm_unreachable("unimplemented");
  }
  void visitUnmanagedReleaseValueInst(UnmanagedReleaseValueInst *i) {
    llvm_unreachable("unimplemented");
  }
  void visitUnmanagedAutoreleaseValueInst(UnmanagedAutoreleaseValueInst *i) {
    llvm_unreachable("unimplemented");
  }
  void visitMarkDependenceInst(MarkDependenceInst *i);
  void visitCopyBlockInst(CopyBlockInst *i);
  void visitStrongPinInst(StrongPinInst *i);
  void visitStrongUnpinInst(StrongUnpinInst *i);
  void visitStrongRetainInst(StrongRetainInst *i);
  void visitStrongReleaseInst(StrongReleaseInst *i);
  void visitStrongRetainUnownedInst(StrongRetainUnownedInst *i);
void visitUnownedRetainInst(UnownedRetainInst *i);
void visitUnownedReleaseInst(UnownedReleaseInst *i);
void visitLoadUnownedInst(LoadUnownedInst *i);
void visitStoreUnownedInst(StoreUnownedInst *i);
void visitIsUniqueInst(IsUniqueInst *i);
void visitIsUniqueOrPinnedInst(IsUniqueOrPinnedInst *i);
void visitDeallocStackInst(DeallocStackInst *i);
void visitDeallocBoxInst(DeallocBoxInst *i);
void visitDeallocRefInst(DeallocRefInst *i);
void visitDeallocPartialRefInst(DeallocPartialRefInst *i);
void visitCopyAddrInst(CopyAddrInst *i);
void visitDestroyAddrInst(DestroyAddrInst *i);
void visitBindMemoryInst(BindMemoryInst *i);
void visitCondFailInst(CondFailInst *i);
void visitConvertFunctionInst(ConvertFunctionInst *i);
void visitThinFunctionToPointerInst(ThinFunctionToPointerInst *i);
void visitPointerToThinFunctionInst(PointerToThinFunctionInst *i);
void visitUpcastInst(UpcastInst *i);
void visitAddressToPointerInst(AddressToPointerInst *i);
void visitPointerToAddressInst(PointerToAddressInst *i);
void visitUncheckedRefCastInst(UncheckedRefCastInst *i);
void visitUncheckedRefCastAddrInst(UncheckedRefCastAddrInst *i);
void visitUncheckedAddrCastInst(UncheckedAddrCastInst *i);
void visitUncheckedTrivialBitCastInst(UncheckedTrivialBitCastInst *i);
void visitUncheckedBitwiseCastInst(UncheckedBitwiseCastInst *i);
void visitRefToRawPointerInst(RefToRawPointerInst *i);
void visitRawPointerToRefInst(RawPointerToRefInst *i);
void visitRefToUnownedInst(RefToUnownedInst *i);
void visitUnownedToRefInst(UnownedToRefInst *i);
void visitRefToUnmanagedInst(RefToUnmanagedInst *i);
void visitUnmanagedToRefInst(UnmanagedToRefInst *i);
void visitThinToThickFunctionInst(ThinToThickFunctionInst *i);
void visitThickToObjCMetatypeInst(ThickToObjCMetatypeInst *i);
void visitObjCToThickMetatypeInst(ObjCToThickMetatypeInst *i);
void visitUnconditionalCheckedCastInst(UnconditionalCheckedCastInst *i);
void visitUnconditionalCheckedCastAddrInst(UnconditionalCheckedCastAddrInst *i);
void
visitUnconditionalCheckedCastValueInst(UnconditionalCheckedCastValueInst *i);
void visitObjCMetatypeToObjectInst(ObjCMetatypeToObjectInst *i);
void visitObjCExistentialMetatypeToObjectInst(
ObjCExistentialMetatypeToObjectInst *i);
void visitRefToBridgeObjectInst(RefToBridgeObjectInst *i);
void visitBridgeObjectToRefInst(BridgeObjectToRefInst *i);
void visitBridgeObjectToWordInst(BridgeObjectToWordInst *i);
void visitIsNonnullInst(IsNonnullInst *i);
void visitIndexAddrInst(IndexAddrInst *i);
void visitTailAddrInst(TailAddrInst *i);
void visitIndexRawPointerInst(IndexRawPointerInst *i);
void visitUnreachableInst(UnreachableInst *i);
void visitBranchInst(BranchInst *i);
void visitCondBranchInst(CondBranchInst *i);
void visitReturnInst(ReturnInst *i);
void visitThrowInst(ThrowInst *i);
void visitSwitchValueInst(SwitchValueInst *i);
void visitSwitchEnumInst(SwitchEnumInst *i);
void visitSwitchEnumAddrInst(SwitchEnumAddrInst *i);
void visitDynamicMethodBranchInst(DynamicMethodBranchInst *i);
void visitCheckedCastBranchInst(CheckedCastBranchInst *i);
void visitCheckedCastValueBranchInst(CheckedCastValueBranchInst *i);
void visitCheckedCastAddrBranchInst(CheckedCastAddrBranchInst *i);
};
} // end anonymous namespace
/// Return this static function's address as an opaque i8* value, suitable
/// for storing into an explosion.
llvm::Value *StaticFunction::getExplosionValue(IRGenFunction &IGF) const {
  // Erase the concrete llvm::Function type down to i8*.
  llvm::Value *fnPtr = Function;
  return IGF.Builder.CreateBitCast(fnPtr, IGF.IGM.Int8PtrTy);
}
/// Append this lowered value's scalar representation to \p ex.
/// Address-like kinds have no scalar form and are a client error here.
void LoweredValue::getExplosion(IRGenFunction &IGF, Explosion &ex) const {
  switch (kind) {
  case Kind::Explosion:
    // Forward every stored scalar unchanged.
    for (llvm::Value *v : explosion.values)
      ex.add(v);
    return;

  case Kind::BoxWithAddress:
    // The scalar form of a box is its owning reference.
    ex.add(boxWithAddress.getOwner());
    return;

  case Kind::StaticFunction:
    ex.add(staticFunction.getExplosionValue(IGF));
    return;

  case Kind::ObjCMethod:
    ex.add(objcMethod.getExplosionValue(IGF));
    return;

  case Kind::Address:
  case Kind::ContainedAddress:
    llvm_unreachable("not a value");
  }
}
/// Return the unique scalar representing this lowered value.
/// Asserts if the value explodes to anything other than exactly one scalar.
llvm::Value *LoweredValue::getSingletonExplosion(IRGenFunction &IGF) const {
  switch (kind) {
  case Kind::Explosion:
    // A stored explosion must consist of a single scalar to be a singleton.
    assert(explosion.values.size() == 1);
    return explosion.values[0];

  case Kind::BoxWithAddress:
    return boxWithAddress.getOwner();

  case Kind::StaticFunction:
    return staticFunction.getExplosionValue(IGF);

  case Kind::ObjCMethod:
    return objcMethod.getExplosionValue(IGF);

  case Kind::Address:
  case Kind::ContainedAddress:
    llvm_unreachable("not a value");
  }
  llvm_unreachable("bad lowered value kind!");
}
/// Set up IR generation state for emitting the body of \p f into the
/// llvm::Function obtained via getAddrOfSILFunction (ForDefinition), and
/// apply any sanitizer attributes requested by the IRGen options.
IRGenSILFunction::IRGenSILFunction(IRGenModule &IGM,
                                   SILFunction *f)
    : IRGenFunction(IGM, IGM.getAddrOfSILFunction(f, ForDefinition),
                    f->getDebugScope(), f->getLocation()),
      CurSILFn(f) {
  // Apply sanitizer attributes to the function.
  // TODO: Check if the function is ASan black listed either in the external
  // file or via annotations.
  if (IGM.IRGen.Opts.Sanitize == SanitizerKind::Address)
    CurFn->addFnAttr(llvm::Attribute::SanitizeAddress);
  if (IGM.IRGen.Opts.Sanitize == SanitizerKind::Thread) {
    // A null DeclContext is tolerated here; only genuine destructors get
    // the relaxed TSan attribute.
    if (dyn_cast_or_null<DestructorDecl>(f->getDeclContext()))
      // Do not report races in deinit and anything called from it
      // because TSan does not observe synchronization between retain
      // count dropping to '0' and the object deinitialization.
      CurFn->addFnAttr("sanitize_thread_no_checking_at_run_time");
    else
      CurFn->addFnAttr(llvm::Attribute::SanitizeThread);
  }
}
/// Finish the function: flush the shared failure block, if any, and (in
/// debug builds) dump the generated IR.  By this point every basic block
/// must already have been terminated.
IRGenSILFunction::~IRGenSILFunction() {
  assert(Builder.hasPostTerminatorIP() && "did not terminate BB?!");
  // Emit the fail BB if we have one.
  if (!FailBBs.empty())
    emitFailBB();
  DEBUG(CurFn->print(llvm::dbgs()));
}
/// Create the φ-nodes needed to carry a value of \p type into a basic block
/// with \p predecessors incoming edges, appending them to \p phis.
///
/// Address-typed values get a single pointer φ; object values get one φ per
/// element of their maximal explosion schema.
template <typename ValueVector>
static void emitPHINodesForType(IRGenSILFunction &IGF, SILType type,
                                const TypeInfo &ti, unsigned predecessors,
                                ValueVector &phis) {
  if (type.isAddress()) {
    llvm::Type *ptrTy = ti.getStorageType()->getPointerTo();
    phis.push_back(IGF.Builder.CreatePHI(ptrTy, predecessors));
    return;
  }

  // PHIs are always emitted with maximal explosion.
  ExplosionSchema schema = ti.getSchema();
  for (auto &elt : schema) {
    llvm::Type *eltTy;
    if (elt.isScalar())
      eltTy = elt.getScalarType();
    else
      eltTy = elt.getAggregateType()->getPointerTo();
    phis.push_back(IGF.Builder.CreatePHI(eltTy, predecessors));
  }
}
/// Create φ-nodes for every SIL argument of \p silBB inside the LLVM block
/// \p llBB and bind each argument to its nodes (as a lowered address or a
/// lowered explosion).  Returns the nodes in argument order so branch
/// emission can later add the incoming values per edge.
static PHINodeVector
emitPHINodesForBBArgs(IRGenSILFunction &IGF,
                      SILBasicBlock *silBB,
                      llvm::BasicBlock *llBB) {
  PHINodeVector phis;
  // Each φ-node reserves one slot per incoming CFG edge.
  unsigned predecessors = std::distance(silBB->pred_begin(), silBB->pred_end());
  IGF.Builder.SetInsertPoint(llBB);
  if (IGF.IGM.DebugInfo) {
    // Use the location of the first instruction in the basic block
    // for the φ-nodes.
    if (!silBB->empty()) {
      SILInstruction &I = *silBB->begin();
      auto DS = I.getDebugScope();
      assert(DS);
      IGF.IGM.DebugInfo->setCurrentLoc(IGF.Builder, DS, I.getLoc());
    }
  }

  for (SILArgument *arg : make_range(silBB->args_begin(), silBB->args_end())) {
    // Remember where this argument's nodes start within 'phis' so the
    // explosion below can collect exactly the nodes just created.
    size_t first = phis.size();

    const TypeInfo &ti = IGF.getTypeInfo(arg->getType());

    emitPHINodesForType(IGF, arg->getType(), ti, predecessors, phis);
    if (arg->getType().isAddress()) {
      // An address argument is carried by the single pointer φ just added.
      IGF.setLoweredAddress(arg,
                            ti.getAddressForPointer(phis.back()));
    } else {
      // An object argument is carried by all of the φ-nodes added for it.
      Explosion argValue;
      for (llvm::PHINode *phi :
             swift::make_range(phis.begin() + first, phis.end()))
        argValue.add(phi);
      IGF.setLoweredExplosion(arg, argValue);
    }
  }

  // Since we return to the entry of the function, reset the location.
  if (IGF.IGM.DebugInfo)
    IGF.IGM.DebugInfo->clearLoc(IGF.Builder);

  return phis;
}
static void addIncomingExplosionToPHINodes(IRGenSILFunction &IGF,
LoweredBB &lbb,
unsigned &phiIndex,
Explosion &argValue);
// TODO: Handle this during SIL AddressLowering.
/// Bind the indirect-result entry parameters of the current function.
///
/// First claims one pointer from \p params if the formally-direct SIL result
/// must still be returned indirectly at the IR level (as decided by
/// \p requiresIndirectResult), storing it as IGF.IndirectReturn.  Then binds
/// one entry-block argument per formal indirect SIL result.  Returns the
/// remaining (non-result) entry-block arguments for the caller to bind.
static ArrayRef<SILArgument*> emitEntryPointIndirectReturn(
    IRGenSILFunction &IGF,
    SILBasicBlock *entry,
    Explosion &params,
    CanSILFunctionType funcTy,
    llvm::function_ref<bool(SILType)> requiresIndirectResult) {
  // Map an indirect return for a type SIL considers loadable but still
  // requires an indirect return at the IR level.
  SILFunctionConventions fnConv(funcTy, IGF.getSILModule());
  // The result type must be mapped into the function's generic context
  // before asking the TypeInfo about it.
  SILType directResultType =
      IGF.CurSILFn->mapTypeIntoContext(fnConv.getSILResultType());
  if (requiresIndirectResult(directResultType)) {
    auto &retTI = IGF.IGM.getTypeInfo(directResultType);
    IGF.IndirectReturn = retTI.getAddressForPointer(params.claimNext());
  }

  auto bbargs = entry->getArguments();

  // Map the indirect returns if present.  They occupy the leading entry
  // block arguments, in order.
  unsigned numIndirectResults = fnConv.getNumIndirectSILResults();
  for (unsigned i = 0; i != numIndirectResults; ++i) {
    SILArgument *ret = bbargs[i];
    auto &retTI = IGF.IGM.getTypeInfo(ret->getType());
    IGF.setLoweredAddress(ret, retTI.getAddressForPointer(params.claimNext()));
  }

  return bbargs.slice(numIndirectResults);
}
/// Bind a single SIL entry-block parameter \p param to values claimed from
/// \p allParamValues, either as a lowered explosion (object types) or a
/// lowered address (address types).  Claims exactly the IR values that the
/// native calling convention assigned to this parameter, in order.
static void bindParameter(IRGenSILFunction &IGF,
                          SILArgument *param,
                          Explosion &allParamValues) {
  // Pull out the parameter value and its formal type.
  auto &paramTI = IGF.getTypeInfo(param->getType());

  // If the SIL parameter isn't passed indirectly, we need to map it
  // to an explosion.
  if (param->getType().isObject()) {
    Explosion paramValues;
    auto &loadableTI = cast<LoadableTypeInfo>(paramTI);
    // If the explosion must be passed indirectly, load the value from the
    // indirect address.
    auto &nativeSchema = paramTI.nativeParameterValueSchema(IGF.IGM);
    if (nativeSchema.requiresIndirect()) {
      Address paramAddr
        = loadableTI.getAddressForPointer(allParamValues.claimNext());
      loadableTI.loadAsTake(IGF, paramAddr, paramValues);
    } else {
      if (!nativeSchema.empty()) {
        // Otherwise, we map from the native convention to the type's explosion
        // schema.
        Explosion nativeParam;
        allParamValues.transferInto(nativeParam, nativeSchema.size());
        paramValues = nativeSchema.mapFromNative(IGF.IGM, IGF, nativeParam,
                                                 param->getType());
      } else {
        // An empty native schema must correspond to an empty explosion
        // schema; nothing to claim.
        assert(paramTI.getSchema().empty());
      }
    }
    IGF.setLoweredExplosion(param, paramValues);
    return;
  }

  // Okay, the type is passed indirectly in SIL, so we need to map
  // it to an address.
  // FIXME: that doesn't mean we should physically pass it
  // indirectly at this resilience expansion. An @in or @in_guaranteed parameter
  // could be passed by value in the right resilience domain.
  Address paramAddr
    = paramTI.getAddressForPointer(allParamValues.claimNext());
  IGF.setLoweredAddress(param, paramAddr);
}
/// Emit entry point arguments for a SILFunction with the Swift calling
/// convention.
///
/// Claims values from \p allParamValues in the exact order the native CC
/// lays them out: indirect results first, then (from the end) the trailing
/// witness metadata, the error slot, and the self/context argument, then the
/// remaining formal parameters, and finally any polymorphic bindings.  The
/// claim order is load-bearing; do not reorder the takeLast calls.
static void emitEntryPointArgumentsNativeCC(IRGenSILFunction &IGF,
                                            SILBasicBlock *entry,
                                            Explosion &allParamValues) {
  auto funcTy = IGF.CurSILFn->getLoweredFunctionType();

  // Map the indirect return if present.
  ArrayRef<SILArgument *> params = emitEntryPointIndirectReturn(
      IGF, entry, allParamValues, funcTy, [&](SILType retType) -> bool {
        auto &schema =
            IGF.IGM.getTypeInfo(retType).nativeReturnValueSchema(IGF.IGM);
        return schema.requiresIndirect();
      });

  // The witness method CC passes Self as a final argument.
  WitnessMetadata witnessMetadata;
  if (funcTy->getRepresentation() == SILFunctionTypeRepresentation::WitnessMethod) {
    collectTrailingWitnessMetadata(IGF, *IGF.CurSILFn, allParamValues,
                                   witnessMetadata);
  }

  // Bind the error result by popping it off the parameter list.
  if (funcTy->hasErrorResult()) {
    IGF.setErrorResultSlot(allParamValues.takeLast());
  }

  // The 'self' argument might be in the context position, which is
  // now the end of the parameter list. Bind it now.
  if (funcTy->hasSelfParam() &&
      isSelfContextParameter(funcTy->getSelfParameter())) {
    SILArgument *selfParam = params.back();
    params = params.drop_back();

    Explosion selfTemp;
    selfTemp.add(allParamValues.takeLast());
    bindParameter(IGF, selfParam, selfTemp);

  // Even if we don't have a 'self', if we have an error result, we
  // should have a placeholder argument here.
  } else if (funcTy->hasErrorResult() ||
             funcTy->getRepresentation() == SILFunctionTypeRepresentation::Thick)
  {
    // The context pointer is only claimed to keep the explosion balanced;
    // the assert checks it really is the refcounted context slot.
    llvm::Value *contextPtr = allParamValues.takeLast(); (void) contextPtr;
    assert(contextPtr->getType() == IGF.IGM.RefCountedPtrTy);
  }

  // Map the remaining SIL parameters to LLVM parameters.
  for (SILArgument *param : params) {
    bindParameter(IGF, param, allParamValues);
  }

  // Bind polymorphic arguments. This can only be done after binding
  // all the value parameters.
  if (hasPolymorphicParameters(funcTy)) {
    emitPolymorphicParameters(IGF, *IGF.CurSILFn, allParamValues,
                              &witnessMetadata,
                              [&](unsigned paramIndex) -> llvm::Value* {
      SILValue parameter =
          IGF.CurSILFn->getArgumentsWithoutIndirectResults()[paramIndex];
      return IGF.getLoweredSingletonExplosion(parameter);
    });
  }

  assert(allParamValues.empty() && "didn't claim all parameters!");
}
/// Emit entry point arguments for the parameters of a C function, or the
/// method parameters of an ObjC method.
///
/// Uses the Clang-lowered foreign function info to decide how each argument
/// arrives, handles the ObjC implicit self/_cmd pair, and finally binds any
/// polymorphic bindings (needed even for non-polymorphic functions because
/// of imported Objective-C generics).
static void emitEntryPointArgumentsCOrObjC(IRGenSILFunction &IGF,
                                           SILBasicBlock *entry,
                                           Explosion &params,
                                           CanSILFunctionType funcTy) {
  // First, lower the method type.
  ForeignFunctionInfo foreignInfo = IGF.IGM.getForeignFunctionInfo(funcTy);
  assert(foreignInfo.ClangInfo);
  auto &FI = *foreignInfo.ClangInfo;

  // Okay, start processing the parameters explosion.
  // First, claim all the indirect results.
  ArrayRef<SILArgument*> args
    = emitEntryPointIndirectReturn(IGF, entry, params, funcTy,
      [&](SILType directResultType) -> bool {
        // Clang, not Swift, decides indirectness for a foreign return.
        return FI.getReturnInfo().isIndirect();
      });

  // Index into the Clang argument types, which may lead the SIL args by
  // the implicit self/_cmd pair for ObjC methods.
  unsigned nextArgTyIdx = 0;

  // Handle the arguments of an ObjC method.
  if (IGF.CurSILFn->getRepresentation() ==
        SILFunctionTypeRepresentation::ObjCMethod) {
    // Claim the self argument from the end of the formal arguments.
    SILArgument *selfArg = args.back();
    args = args.slice(0, args.size() - 1);

    // Set the lowered explosion for the self argument.
    auto &selfTI = cast<LoadableTypeInfo>(IGF.getTypeInfo(selfArg->getType()));
    auto selfSchema = selfTI.getSchema();
    assert(selfSchema.size() == 1 && "Expected self to be a single element!");

    auto *selfValue = params.claimNext();
    auto *bodyType = selfSchema.begin()->getScalarType();
    // The ABI-level self type may differ from the body's storage type.
    if (selfValue->getType() != bodyType)
      selfValue = IGF.coerceValue(selfValue, bodyType, IGF.IGM.DataLayout);

    Explosion self;
    self.add(selfValue);
    IGF.setLoweredExplosion(selfArg, self);

    // Discard the implicit _cmd argument.
    params.claimNext();

    // We've handled the self and _cmd arguments, so when we deal with
    // generating explosions for the remaining arguments we can skip
    // these.
    nextArgTyIdx = 2;
  }

  assert(args.size() == (FI.arg_size() - nextArgTyIdx) &&
         "Number of arguments not equal to number of argument types!");

  // Generate lowered explosions for each explicit argument.
  for (auto i : indices(args)) {
    SILArgument *arg = args[i];
    auto argTyIdx = i + nextArgTyIdx;
    auto &argTI = IGF.getTypeInfo(arg->getType());

    // Bitcast indirect argument pointers to the right storage type.
    if (arg->getType().isAddress()) {
      llvm::Value *ptr = params.claimNext();
      ptr = IGF.Builder.CreateBitCast(ptr,
                                      argTI.getStorageType()->getPointerTo());
      IGF.setLoweredAddress(arg, Address(ptr, argTI.getBestKnownAlignment()));
      continue;
    }

    auto &loadableArgTI = cast<LoadableTypeInfo>(argTI);
    Explosion argExplosion;
    emitForeignParameter(IGF, params, foreignInfo, argTyIdx,
                         arg->getType(), loadableArgTI, argExplosion);
    IGF.setLoweredExplosion(arg, argExplosion);
  }

  assert(params.empty() && "didn't claim all parameters!");

  // emitPolymorphicParameters() may create function calls, so we need
  // to initialize the debug location here.
  ArtificialLocation Loc(IGF.getDebugScope(), IGF.IGM.DebugInfo, IGF.Builder);

  // Bind polymorphic arguments. This can only be done after binding
  // all the value parameters, and must be done even for non-polymorphic
  // functions because of imported Objective-C generics.
  emitPolymorphicParameters(
      IGF, *IGF.CurSILFn, params, nullptr,
      [&](unsigned paramIndex) -> llvm::Value * {
        SILValue parameter = entry->getArguments()[paramIndex];
        return IGF.getLoweredSingletonExplosion(parameter);
      });
}
/// Get metadata for the dynamic Self type if we have it, and register it with
/// the IRGenFunction so dynamic-Self lookups can find it.
static void emitLocalSelfMetadata(IRGenSILFunction &IGF) {
  if (!IGF.CurSILFn->hasSelfMetadataParam())
    return;

  const SILArgument *selfArg = IGF.CurSILFn->getSelfMetadataArgument();
  auto metaTy =
      dyn_cast<MetatypeType>(selfArg->getType().getSwiftRValueType());

  // A non-metatype self parameter is an object reference; otherwise the
  // metatype's representation tells us which flavor of metadata we hold.
  IRGenFunction::LocalSelfKind selfKind;
  if (!metaTy) {
    selfKind = IRGenFunction::ObjectReference;
  } else {
    switch (metaTy->getRepresentation()) {
    case MetatypeRepresentation::Thin:
      llvm_unreachable("class metatypes are never thin");
    case MetatypeRepresentation::Thick:
      selfKind = IRGenFunction::SwiftMetatype;
      break;
    case MetatypeRepresentation::ObjC:
      selfKind = IRGenFunction::ObjCMetatype;
      break;
    }
  }

  llvm::Value *selfValue = IGF.getLoweredExplosion(selfArg).claimNext();
  IGF.setLocalSelfMetadata(selfValue, selfKind);
}
/// Emit the definition for the given SIL constant.
void IRGenModule::emitSILFunction(SILFunction *f) {
  // Declarations have no body to emit.
  if (f->isExternalDeclaration())
    return;

  PrettyStackTraceSILFunction stackTrace("emitting IR", f);
  IRGenSILFunction emission(*this, f);
  emission.emitSILFunction();
}
/// Emit the entire body of the current SIL function: create LLVM blocks and
/// φ-nodes, bind the entry-point parameters per calling convention, then
/// emit every reachable block in an order that guarantees dominators are
/// emitted before the blocks they dominate.
void IRGenSILFunction::emitSILFunction() {
  DEBUG(llvm::dbgs() << "emitting SIL function: ";
        CurSILFn->printName(llvm::dbgs());
        llvm::dbgs() << '\n';
        CurSILFn->print(llvm::dbgs()));

  assert(!CurSILFn->empty() && "function has no basic blocks?!");

  // Configure the dominance resolver.
  // TODO: consider re-using a dom analysis from the PassManager
  // TODO: consider using a cheaper analysis at -O0
  setDominanceResolver([](IRGenFunction &IGF_,
                          DominancePoint activePoint,
                          DominancePoint dominatingPoint) -> bool {
    IRGenSILFunction &IGF = static_cast<IRGenSILFunction&>(IGF_);
    // Build the dominance tree lazily, only if a query is ever made.
    if (!IGF.Dominance) {
      IGF.Dominance.reset(new DominanceInfo(IGF.CurSILFn));
    }
    return IGF.Dominance->dominates(dominatingPoint.as<SILBasicBlock>(),
                                    activePoint.as<SILBasicBlock>());
  });

  if (IGM.DebugInfo)
    IGM.DebugInfo->emitFunction(*CurSILFn, CurFn);

  // Map the entry bb.  The entry LLVM block already exists (it was created
  // with the function), so it carries no φ-nodes.
  LoweredBBs[&*CurSILFn->begin()] = LoweredBB(&*CurFn->begin(), {});
  // Create LLVM basic blocks for the other bbs.
  for (auto bi = std::next(CurSILFn->begin()), be = CurSILFn->end(); bi != be;
       ++bi) {
    // FIXME: Use the SIL basic block's name.
    llvm::BasicBlock *llBB = llvm::BasicBlock::Create(IGM.getLLVMContext());
    auto phis = emitPHINodesForBBArgs(*this, &*bi, llBB);
    // Append in source order so primary blocks keep the SIL layout.
    CurFn->getBasicBlockList().push_back(llBB);
    LoweredBBs[&*bi] = LoweredBB(llBB, std::move(phis));
  }

  auto entry = LoweredBBs.begin();
  Builder.SetInsertPoint(entry->second.bb);

  // Map the LLVM arguments to arguments on the entry point BB.
  Explosion params = collectParameters();
  auto funcTy = CurSILFn->getLoweredFunctionType();

  switch (funcTy->getLanguage()) {
  case SILFunctionLanguage::Swift:
    emitEntryPointArgumentsNativeCC(*this, entry->first, params);
    break;
  case SILFunctionLanguage::C:
    emitEntryPointArgumentsCOrObjC(*this, entry->first, params, funcTy);
    break;
  }
  emitLocalSelfMetadata(*this);

  assert(params.empty() && "did not map all llvm params to SIL params?!");

  // It's really nice to be able to assume that we've already emitted
  // all the values from dominating blocks --- it makes simple
  // peepholing more powerful and allows us to avoid the need for
  // nasty "forward-declared" values. We can do this by emitting
  // blocks using a simple walk through the successor graph.
  //
  // We do want to preserve the original source order, but that's done
  // by having previously added all the primary blocks to the LLVM
  // function in their original order. As long as any secondary
  // blocks are inserted after the current IP instead of at the end
  // of the function, we're fine.

  // Invariant: for every block in the work queue, we have visited all
  // of its dominators.
  llvm::SmallPtrSet<SILBasicBlock*, 8> visitedBlocks;
  SmallVector<SILBasicBlock*, 8> workQueue; // really a stack

  // Queue up the entry block, for which the invariant trivially holds.
  visitedBlocks.insert(&*CurSILFn->begin());
  workQueue.push_back(&*CurSILFn->begin());

  while (!workQueue.empty()) {
    auto bb = workQueue.pop_back_val();

    // Emit the block.
    visitSILBasicBlock(bb);

#ifndef NDEBUG
    // Assert that the current IR IP (if valid) is immediately prior
    // to the initial IR block for the next primary SIL block.
    // It's not semantically necessary to preserve SIL block order,
    // but we really should.
    if (auto curBB = Builder.GetInsertBlock()) {
      auto next = std::next(SILFunction::iterator(bb));
      if (next != CurSILFn->end()) {
        auto nextBB = LoweredBBs[&*next].bb;
        assert(&*std::next(curBB->getIterator()) == nextBB &&
               "lost source SIL order?");
      }
    }
#endif

    // The immediate dominator of a successor of this block needn't be
    // this block, but it has to be something which dominates this
    // block. In either case, we've visited it.
    //
    // Therefore the invariant holds of all the successors, and we can
    // queue them up if we haven't already visited them.
    for (auto *succBB : bb->getSuccessorBlocks()) {
      if (visitedBlocks.insert(succBB).second)
        workQueue.push_back(succBB);
    }
  }

  // If there are dead blocks in the SIL function, we might have left
  // invalid blocks in the IR. Do another pass and kill them off.
  for (SILBasicBlock &bb : *CurSILFn)
    if (!visitedBlocks.count(&bb))
      LoweredBBs[&bb].bb->eraseFromParent();
}
/// Compute (once, lazily) a conservative estimate of this function's stack
/// usage by summing the static sizes of all alloc_stack allocations.
/// Allocations whose size is not statically known contribute nothing.
void IRGenSILFunction::estimateStackSize() {
  // Already computed and cached?
  if (EstimatedStackSize >= 0)
    return;

  // TODO: as soon as we generate alloca instructions with accurate lifetimes
  // we should also do a better stack size calculation here. Currently we
  // add all stack sizes even if life ranges do not overlap.

  // Start from zero: the negative "not yet computed" sentinel must not leak
  // into the sum, and a function with no allocations must still cache a
  // non-negative result so we don't recompute on every call.
  EstimatedStackSize = 0;

  for (SILBasicBlock &BB : *CurSILFn) {
    for (SILInstruction &I : BB) {
      if (auto *ASI = dyn_cast<AllocStackInst>(&I)) {
        const TypeInfo &type = getTypeInfo(ASI->getElementType());
        if (llvm::Constant *SizeConst = type.getStaticSize(IGM)) {
          auto *SizeInt = cast<llvm::ConstantInt>(SizeConst);
          EstimatedStackSize += (int)SizeInt->getSExtValue();
        }
      }
    }
  }
}
/// Emit the instructions of one SIL basic block into its pre-created LLVM
/// block, maintaining the debug-location state machine that coalesces
/// trailing cleanup instructions onto the terminator's location.
void IRGenSILFunction::visitSILBasicBlock(SILBasicBlock *BB) {
  // Insert into the lowered basic block.
  llvm::BasicBlock *llBB = getLoweredBB(BB).bb;
  Builder.SetInsertPoint(llBB);

  bool InEntryBlock = BB->pred_empty();

  // Set this block as the dominance point. This implicitly communicates
  // with the dominance resolver configured in emitSILFunction.
  DominanceScope dominance(*this, InEntryBlock ? DominancePoint::universal()
                                               : DominancePoint(BB));

  // The basic blocks are visited in a random order. Reset the debug location.
  std::unique_ptr<AutoRestoreLocation> ScopedLoc;
  if (InEntryBlock)
    ScopedLoc = llvm::make_unique<PrologueLocation>(IGM.DebugInfo, Builder);
  else
    ScopedLoc = llvm::make_unique<ArtificialLocation>(
        CurSILFn->getDebugScope(), IGM.DebugInfo, Builder);

  // Generate the body.
  // State for the cleanup-location heuristic below:
  //  - InCleanupBlock: currently inside a run of CleanupLocation insts.
  //  - KeepCurrentLocation: the cleanup run does NOT end the block, so keep
  //    the previous location instead of jumping to the terminator's.
  bool InCleanupBlock = false;
  bool KeepCurrentLocation = false;

  for (auto InsnIter = BB->begin(); InsnIter != BB->end(); ++InsnIter) {
    auto &I = *InsnIter;
    if (IGM.DebugInfo) {
      // Set the debug info location for I, if applicable.
      SILLocation ILoc = I.getLoc();
      auto DS = I.getDebugScope();
      // Handle cleanup locations.
      if (ILoc.is<CleanupLocation>()) {
        // Cleanup locations point to the decl of the value that is
        // being destroyed (for diagnostic generation). As far as
        // the linetable is concerned, cleanups at the end of a
        // lexical scope should point to the cleanup location, which
        // is the location of the last instruction in the basic block.
        if (!InCleanupBlock) {
          InCleanupBlock = true;
          // Scan ahead to see if this is the final cleanup block in
          // this basic block.
          auto It = InsnIter;
          do ++It; while (It != BB->end() &&
                          It->getLoc().is<CleanupLocation>());
          // We are still in the middle of a basic block?
          if (It != BB->end() && !isa<TermInst>(It))
            KeepCurrentLocation = true;
        }

        // Assign the cleanup location to this instruction.
        if (!KeepCurrentLocation) {
          assert(BB->getTerminator());
          ILoc = BB->getTerminator()->getLoc();
          DS = BB->getTerminator()->getDebugScope();
        }
      } else if (InCleanupBlock) {
        // The cleanup run ended before the terminator; reset the state.
        KeepCurrentLocation = false;
        InCleanupBlock = false;
      }

      // Until SILDebugScopes are properly serialized, bare functions
      // are allowed to not have a scope.
      if (!DS) {
        if (CurSILFn->isBare())
          DS = CurSILFn->getDebugScope();
        assert(maybeScopeless(I) && "instruction has location, but no scope");
      }

      // Set the builder's debug location.
      if (DS && !KeepCurrentLocation)
        IGM.DebugInfo->setCurrentLoc(Builder, DS, ILoc);
      else
        // Use an artificial (line 0) location.
        IGM.DebugInfo->setCurrentLoc(Builder, DS);

      if (isa<TermInst>(&I))
        emitDebugVariableRangeExtension(BB);
    }
    visit(&I);
  }

  assert(Builder.hasPostTerminatorIP() && "SIL bb did not terminate block?!");
}
/// Lower a function_ref to the address of the referenced llvm::Function.
void IRGenSILFunction::visitFunctionRefInst(FunctionRefInst *i) {
  SILFunction *refFn = i->getReferencedFunction();
  llvm::Function *fnAddr = IGM.getAddrOfSILFunction(refFn, NotForDefinition);
  auto foreign = IGM.getForeignFunctionInfo(refFn->getLoweredFunctionType());

  // Record the constant together with its calling convention as a
  // StaticFunction so later uses can avoid bitcasting or thunking when
  // they don't need to.
  setLoweredStaticFunction(i, fnAddr, refFn->getRepresentation(), foreign);
}
/// Lower alloc_global: dynamically allocate storage for a global whose size
/// is not fixed across all resilience domains that can see it.
void IRGenSILFunction::visitAllocGlobalInst(AllocGlobalInst *i) {
  SILGlobalVariable *global = i->getReferencedGlobal();
  SILType loweredTy = global->getLoweredType();
  auto &ti = getTypeInfo(loweredTy);

  // If the global is fixed-size in all resilience domains that can see it,
  // we allocated storage for it statically, and there's nothing to do.
  auto expansion = IGM.getResilienceExpansionForLayout(global);
  if (ti.isFixedSize(expansion))
    return;

  // Otherwise, the static storage for the global consists of a fixed-size
  // buffer that must be allocated into here.
  Address addr = IGM.getAddrOfSILGlobalVariable(global, ti, NotForDefinition);
  if (getSILModule().getOptions().UseCOWExistentials)
    emitAllocateValueInBuffer(*this, loweredTy, addr);
  else
    (void) ti.allocateBuffer(*this, addr, loweredTy);
}
/// Lower global_addr: produce the address of a SIL global variable,
/// projecting out of its fixed-size buffer when the type is not fixed-size
/// in every resilience domain that can see it.
void IRGenSILFunction::visitGlobalAddrInst(GlobalAddrInst *i) {
  SILGlobalVariable *var = i->getReferencedGlobal();
  SILType loweredTy = var->getLoweredType();
  assert(loweredTy == i->getType().getObjectType());
  auto &ti = getTypeInfo(loweredTy);

  auto expansion = IGM.getResilienceExpansionForLayout(var);

  // If the variable is empty in all resilience domains that can see it,
  // don't actually emit a symbol for the global at all, just return undef.
  if (ti.isKnownEmpty(expansion)) {
    setLoweredAddress(i, ti.getUndefAddress());
    return;
  }

  Address addr = IGM.getAddrOfSILGlobalVariable(var, ti,
                                                NotForDefinition);

  // If the global is fixed-size in all resilience domains that can see it,
  // we allocated storage for it statically, and there's nothing to do.
  if (ti.isFixedSize(expansion)) {
    setLoweredAddress(i, addr);
    return;
  }

  // Otherwise, the static storage for the global consists of a fixed-size
  // buffer; project it.
  if (getSILModule().getOptions().UseCOWExistentials) {
    addr = emitProjectValueInBuffer(*this, loweredTy, addr);
  } else {
    addr = ti.projectBuffer(*this, addr, loweredTy);
  }
  setLoweredAddress(i, addr);
}
/// Lower a metatype instruction to a (possibly empty) metatype reference.
void IRGenSILFunction::visitMetatypeInst(swift::MetatypeInst *i) {
  Explosion result;
  emitMetatypeRef(*this, i->getType().castTo<MetatypeType>(), result);
  setLoweredExplosion(i, result);
}
/// Produce the class-reference base value for \p v, loading it when the
/// value is lowered as an address, or claiming its single scalar otherwise.
static llvm::Value *getClassBaseValue(IRGenSILFunction &IGF,
                                      SILValue v) {
  if (!v->getType().isAddress()) {
    Explosion e = IGF.getLoweredExplosion(v);
    return e.claimNext();
  }

  auto addr = IGF.getLoweredAddress(v);
  return IGF.Builder.CreateLoad(addr);
}
/// Produce metadata describing the dynamic type of a class instance, in the
/// requested metatype representation.
static llvm::Value *getClassMetatype(IRGenFunction &IGF,
                                     llvm::Value *baseValue,
                                     MetatypeRepresentation repr,
                                     SILType instanceType) {
  switch (repr) {
  case MetatypeRepresentation::Thick:
    // Swift metatype: the full type metadata of the dynamic type.
    return emitDynamicTypeOfHeapObject(IGF, baseValue, instanceType);
  case MetatypeRepresentation::ObjC:
    // ObjC metatype: the heap (class) metadata.
    return emitHeapMetadataRefForHeapObject(IGF, baseValue, instanceType);
  case MetatypeRepresentation::Thin:
    llvm_unreachable("Class metatypes are never thin");
  }
  llvm_unreachable("Not a valid MetatypeRepresentation.");
}
/// Lower value_metatype: compute the dynamic metatype of the operand value.
/// Thin metatypes are empty; class and class-bounded-archetype operands use
/// their heap metadata; opaque archetypes use the value's stored metadata;
/// everything else is a statically-known metatype reference.
void IRGenSILFunction::visitValueMetatypeInst(swift::ValueMetatypeInst *i) {
  SILType instanceTy = i->getOperand()->getType();
  auto metaTy = i->getType().castTo<MetatypeType>();

  if (metaTy->getRepresentation() == MetatypeRepresentation::Thin) {
    // Thin metatypes carry no runtime data at all.
    Explosion empty;
    setLoweredExplosion(i, empty);
    return;
  }

  Explosion e;

  if (instanceTy.getClassOrBoundGenericClass()) {
    e.add(getClassMetatype(*this,
                           getClassBaseValue(*this, i->getOperand()),
                           metaTy->getRepresentation(), instanceTy));
  } else if (auto arch = instanceTy.getAs<ArchetypeType>()) {
    if (arch->requiresClass()) {
      // Class-bounded archetypes behave like class references here.
      e.add(getClassMetatype(*this,
                             getClassBaseValue(*this, i->getOperand()),
                             metaTy->getRepresentation(), instanceTy));
    } else {
      // Opaque archetype: read the dynamic type out of the value buffer.
      Address base = getLoweredAddress(i->getOperand());
      e.add(emitDynamicTypeOfOpaqueArchetype(*this, base,
                                             i->getOperand()->getType()));
      // FIXME: We need to convert this back to an ObjC class for an
      // ObjC metatype representation.
      if (metaTy->getRepresentation() == MetatypeRepresentation::ObjC)
        unimplemented(i->getLoc().getSourceLoc(),
                      "objc metatype of non-class-bounded archetype");
    }
  } else {
    // Static case: the metatype is known at compile time.
    emitMetatypeRef(*this, metaTy, e);
  }

  setLoweredExplosion(i, e);
}
/// Lower existential_metatype: extract the dynamic metatype from an
/// existential value, dispatching on the operand's preferred existential
/// representation (metatype, class, boxed, or opaque).
void IRGenSILFunction::visitExistentialMetatypeInst(
                                              swift::ExistentialMetatypeInst *i) {
  Explosion result;
  SILValue op = i->getOperand();
  SILType opType = op->getType();

  switch (opType.getPreferredExistentialRepresentation(IGM.getSILModule())) {
  case ExistentialRepresentation::Metatype: {
    Explosion existential = getLoweredExplosion(op);
    emitMetatypeOfMetatype(*this, existential, opType, result);
    break;
  }
  case ExistentialRepresentation::Class: {
    Explosion existential = getLoweredExplosion(op);
    emitMetatypeOfClassExistential(*this, existential, i->getType(),
                                   opType, result);
    break;
  }
  case ExistentialRepresentation::Boxed: {
    Explosion existential = getLoweredExplosion(op);
    emitMetatypeOfBoxedExistential(*this, existential, opType, result);
    break;
  }
  case ExistentialRepresentation::Opaque: {
    // Opaque existentials are address-only; read metadata from the buffer.
    Address existential = getLoweredAddress(op);
    emitMetatypeOfOpaqueExistential(*this, existential, opType, result);
    break;
  }
  case ExistentialRepresentation::None:
    llvm_unreachable("Bad existential representation");
  }

  setLoweredExplosion(i, result);
}
/// Add a single call argument \p arg, whose formal parameter type is
/// \p paramType, to the outgoing explosion \p out, translating between
/// substituted and unsubstituted representations when they differ.
static void emitApplyArgument(IRGenSILFunction &IGF,
                              SILValue arg,
                              SILType paramType,
                              Explosion &out) {
  // A type difference indicates a substitution is in play.
  bool isSubstituted = (arg->getType() != paramType);

  // For indirect arguments, we just need to pass a pointer.
  if (paramType.isAddress()) {
    // This address is of the substituted type.
    auto addr = IGF.getLoweredAddress(arg);

    // If a substitution is in play, just bitcast the address.
    if (isSubstituted) {
      auto origType = IGF.IGM.getStoragePointerType(paramType);
      addr = IGF.Builder.CreateBitCast(addr, origType);
    }

    out.add(addr.getAddress());
    return;
  }

  // Otherwise, it's an explosion, which we may need to translate,
  // both in terms of explosion level and substitution levels.
  assert(arg->getType().isObject());

  // Fast path: avoid an unnecessary temporary explosion.
  if (!isSubstituted) {
    IGF.getLoweredExplosion(arg, out);
    return;
  }

  // Re-emit the substituted value as its unsubstituted (abstract) form.
  Explosion temp = IGF.getLoweredExplosion(arg);
  reemitAsUnsubstituted(IGF, paramType, arg->getType(),
                        temp, out);
}
/// Translate a metatype value into the ObjC Class pointer form expected as
/// the 'self' of an Objective-C class message send.
static llvm::Value *getObjCClassForValue(IRGenSILFunction &IGF,
                                         llvm::Value *selfValue,
                                         CanAnyMetatypeType selfType) {
  auto repr = selfType->getRepresentation();

  // An ObjC metatype value is already in the right form.
  if (repr == swift::MetatypeRepresentation::ObjC)
    return selfValue;

  // A thick Swift metatype is mapped to the class heap metadata, which
  // serves as the Class for an ObjC-compatible type.
  if (repr == swift::MetatypeRepresentation::Thick)
    return emitClassHeapMetadataRefForMetatype(IGF, selfValue,
                                               selfType.getInstanceType());

  assert(repr == swift::MetatypeRepresentation::Thin &&
         "bad metatype representation");
  llvm_unreachable("Cannot convert Thin metatype to ObjC metatype");
}
/// Compute the Self witness table to pass to a @convention(witness_method)
/// callee.  With abstract Self the table is recovered from the call's
/// substitutions; with concrete Self a null placeholder is passed, since
/// such witnesses must not touch the witness-table parameter.
static llvm::Value *emitWitnessTableForLoweredCallee(IRGenSILFunction &IGF,
                                              CanSILFunctionType origCalleeType,
                                              SubstitutionList subs) {
  auto &M = *IGF.getSwiftModule();
  llvm::Value *wtable;

  if (auto *proto = origCalleeType->getDefaultWitnessMethodProtocol(M)) {
    // The generic signature for a witness method with abstract Self must
    // have exactly one protocol requirement.
    //
    // We recover the witness table from the substitution that was used to
    // produce the substituted callee type.
    auto subMap = origCalleeType->getGenericSignature()
      ->getSubstitutionMap(subs);
    auto origSelfType = proto->getSelfInterfaceType()->getCanonicalType();
    auto substSelfType = origSelfType.subst(subMap)->getCanonicalType();
    auto conformance = *subMap.lookupConformance(origSelfType, proto);

    llvm::Value *argMetadata = IGF.emitTypeMetadataRef(substSelfType);
    wtable = emitWitnessTableRef(IGF, substSelfType, &argMetadata,
                                 conformance);
  } else {
    // Otherwise, we have no way of knowing the original protocol or
    // conformance, since the witness has a concrete self type.
    //
    // Protocol witnesses for concrete types are thus not allowed to touch
    // the witness table; they already know all the witnesses, and we can't
    // say who they are.
    wtable = llvm::ConstantPointerNull::get(IGF.IGM.WitnessTablePtrTy);
  }
  assert(wtable->getType() == IGF.IGM.WitnessTablePtrTy);
  return wtable;
}
/// Build a CallEmission for a callee that has already been lowered to \p lv.
/// Resolves the raw function pointer, the context value ('self' or a thick
/// closure context), foreign-call info, and (for witness methods) the Self
/// witness table.  ObjC methods take a separate root-call path that also
/// appends the message-send implicit arguments to \p args.
static CallEmission getCallEmissionForLoweredValue(IRGenSILFunction &IGF,
                                         CanSILFunctionType origCalleeType,
                                         CanSILFunctionType substCalleeType,
                                         const LoweredValue &lv,
                                         llvm::Value *selfValue,
                                         SubstitutionList substitutions,
                                         WitnessMetadata *witnessMetadata,
                                         Explosion &args) {
  llvm::Value *calleeFn, *calleeData;
  ForeignFunctionInfo foreignInfo;

  switch (lv.kind) {
  case LoweredValue::Kind::StaticFunction:
    calleeFn = lv.getStaticFunction().getFunction();
    calleeData = selfValue;
    foreignInfo = lv.getStaticFunction().getForeignInfo();

    // Witness methods additionally carry their Self witness table out of
    // band in the witness metadata.
    if (origCalleeType->getRepresentation()
          == SILFunctionType::Representation::WitnessMethod) {
      llvm::Value *wtable = emitWitnessTableForLoweredCallee(
          IGF, origCalleeType, substitutions);
      witnessMetadata->SelfWitnessTable = wtable;
    }
    break;

  case LoweredValue::Kind::ObjCMethod: {
    assert(selfValue);
    auto &objcMethod = lv.getObjCMethod();
    ObjCMessageKind kind = objcMethod.getMessageKind();

    CallEmission emission =
      prepareObjCMethodRootCall(IGF, objcMethod.getMethod(),
                                origCalleeType, substCalleeType,
                                substitutions, kind);

    // Convert a metatype 'self' argument to the ObjC Class pointer.
    // FIXME: Should be represented in SIL.
    if (auto metatype = dyn_cast<AnyMetatypeType>(
          origCalleeType->getSelfParameter().getType())) {
      selfValue = getObjCClassForValue(IGF, selfValue, metatype);
    }

    addObjCMethodCallImplicitArguments(IGF, args, objcMethod.getMethod(),
                                       selfValue,
                                       objcMethod.getSearchType());
    // ObjC calls return here directly; the common tail below is bypassed.
    return emission;
  }

  case LoweredValue::Kind::Explosion: {
    switch (origCalleeType->getRepresentation()) {
    case SILFunctionType::Representation::Block: {
      assert(!selfValue && "block function with self?");

      // Grab the block pointer and make it the first physical argument.
      llvm::Value *blockPtr = lv.getSingletonExplosion(IGF);
      blockPtr = IGF.Builder.CreateBitCast(blockPtr, IGF.IGM.ObjCBlockPtrTy);
      args.add(blockPtr);

      // Extract the invocation pointer for blocks.
      llvm::Value *invokeAddr = IGF.Builder.CreateStructGEP(
          /*Ty=*/nullptr, blockPtr, 3);
      calleeFn = IGF.Builder.CreateLoad(invokeAddr, IGF.IGM.getPointerAlignment());
      calleeData = nullptr;
      break;
    }

    case SILFunctionType::Representation::Thin:
    case SILFunctionType::Representation::CFunctionPointer:
    case SILFunctionType::Representation::Method:
    case SILFunctionType::Representation::Closure:
    case SILFunctionType::Representation::ObjCMethod:
    case SILFunctionType::Representation::WitnessMethod:
    case SILFunctionType::Representation::Thick: {
      Explosion calleeValues = lv.getExplosion(IGF);
      calleeFn = calleeValues.claimNext();

      if (origCalleeType->getRepresentation()
            == SILFunctionType::Representation::WitnessMethod) {
        witnessMetadata->SelfWitnessTable = emitWitnessTableForLoweredCallee(
            IGF, origCalleeType, substitutions);
      }

      if (origCalleeType->getRepresentation()
            == SILFunctionType::Representation::Thick) {
        // @convention(thick) callees are exploded as a pair
        // consisting of the function and the self value.
        assert(!selfValue);
        calleeData = calleeValues.claimNext();
      } else {
        calleeData = selfValue;
      }
      break;
    }
    }

    // Cast the callee pointer to the right function type.
    llvm::AttributeSet attrs;
    llvm::FunctionType *fnTy =
      IGF.IGM.getFunctionType(origCalleeType, attrs, &foreignInfo);
    calleeFn = IGF.Builder.CreateBitCast(calleeFn, fnTy->getPointerTo());
    break;
  }

  case LoweredValue::Kind::BoxWithAddress:
    llvm_unreachable("@box isn't a valid callee");
  case LoweredValue::Kind::ContainedAddress:
  case LoweredValue::Kind::Address:
    llvm_unreachable("sil address isn't a valid callee");
  }

  Callee callee = Callee::forKnownFunction(origCalleeType, substCalleeType,
                                           substitutions, calleeFn, calleeData,
                                           foreignInfo);
  CallEmission callEmission(IGF, callee);
  // Calls issued from thunks are never inlined.
  if (IGF.CurSILFn->isThunk())
    callEmission.addAttribute(llvm::AttributeSet::FunctionIndex, llvm::Attribute::NoInline);

  return callEmission;
}
/// Lower a builtin instruction by exploding its operands and delegating to
/// the builtin call emitter.
void IRGenSILFunction::visitBuiltinInst(swift::BuiltinInst *i) {
  Explosion llOperands;
  // Builtin arguments are never passed at a substituted abstraction level,
  // so each operand's own SIL type serves as its parameter type.
  for (auto operand : i->getArguments())
    emitApplyArgument(*this, operand, operand->getType(), llOperands);

  Explosion out;
  emitBuiltinCall(*this, i->getName(), i->getType(),
                  llOperands, out, i->getSubstitutions());
  setLoweredExplosion(i, out);
}
/// apply is lowered uniformly with try_apply via visitFullApplySite.
void IRGenSILFunction::visitApplyInst(swift::ApplyInst *i) {
  visitFullApplySite(i);
}
/// try_apply is lowered uniformly with apply via visitFullApplySite.
void IRGenSILFunction::visitTryApplyInst(swift::TryApplyInst *i) {
  visitFullApplySite(i);
}
/// Shared lowering for apply and try_apply: set up the callee-specific call,
/// pass the SIL arguments (plus generic arguments and the 'self' context if
/// needed), then either bind the result directly (apply) or branch on the
/// out-of-band error value (try_apply).
void IRGenSILFunction::visitFullApplySite(FullApplySite site) {
  const LoweredValue &calleeLV = getLoweredValue(site.getCallee());

  auto origCalleeType = site.getOrigCalleeType();
  auto substCalleeType = site.getSubstCalleeType();

  auto args = site.getArguments();
  SILFunctionConventions origConv(origCalleeType, getSILModule());
  assert(origConv.getNumSILArguments() == args.size());

  // Extract 'self' if it needs to be passed as the context parameter.
  llvm::Value *selfValue = nullptr;
  if (origCalleeType->hasSelfParam() &&
      isSelfContextParameter(origCalleeType->getSelfParameter())) {
    SILValue selfArg = args.back();
    args = args.drop_back();

    if (selfArg->getType().isObject()) {
      selfValue = getLoweredSingletonExplosion(selfArg);
    } else {
      selfValue = getLoweredAddress(selfArg).getAddress();
    }
  }

  Explosion llArgs;
  WitnessMetadata witnessMetadata;
  CallEmission emission =
    getCallEmissionForLoweredValue(*this, origCalleeType, substCalleeType,
                                   calleeLV, selfValue, site.getSubstitutions(),
                                   &witnessMetadata, llArgs);

  // Lower the arguments and return value in the callee's generic context.
  GenericContextScope scope(IGM, origCalleeType->getGenericSignature());

  // Lower the SIL arguments to IR arguments.

  // Turn the formal SIL parameters into IR-gen things.
  for (auto index : indices(args)) {
    emitApplyArgument(*this, args[index], origConv.getSILArgumentType(index),
                      llArgs);
  }

  // Pass the generic arguments.
  if (hasPolymorphicParameters(origCalleeType)) {
    SubstitutionMap subMap;
    if (auto genericSig = origCalleeType->getGenericSignature())
      subMap = genericSig->getSubstitutionMap(site.getSubstitutions());
    emitPolymorphicArguments(*this, origCalleeType, substCalleeType,
                             subMap, &witnessMetadata, llArgs);
  }

  // Add all those arguments.
  emission.setArgs(llArgs, &witnessMetadata);

  SILInstruction *i = site.getInstruction();

  Explosion result;
  emission.emitToExplosion(result);

  if (isa<ApplyInst>(i)) {
    setLoweredExplosion(i, result);
  } else {
    auto tryApplyInst = cast<TryApplyInst>(i);

    // Load the error value.
    SILFunctionConventions substConv(substCalleeType, getSILModule());
    SILType errorType = substConv.getSILErrorType();
    Address errorSlot = getErrorResultSlot(errorType);
    auto errorValue = Builder.CreateLoad(errorSlot);

    auto &normalDest = getLoweredBB(tryApplyInst->getNormalBB());
    auto &errorDest = getLoweredBB(tryApplyInst->getErrorBB());

    // Zero the error slot to maintain the invariant that it always
    // contains null. This will frequently become a dead store.
    auto nullError = llvm::Constant::getNullValue(errorValue->getType());
    if (!tryApplyInst->getErrorBB()->getSinglePredecessorBlock()) {
      // Only do that here if we can't move the store to the error block.
      // See below.
      Builder.CreateStore(nullError, errorSlot);
    }

    // If the error value is non-null, branch to the error destination.
    auto hasError = Builder.CreateICmpNE(errorValue, nullError);
    Builder.CreateCondBr(hasError, errorDest.bb, normalDest.bb);

    // Set up the PHI nodes on the normal edge.
    unsigned firstIndex = 0;
    addIncomingExplosionToPHINodes(*this, normalDest, firstIndex, result);
    assert(firstIndex == normalDest.phis.size());

    // Set up the PHI nodes on the error edge.
    assert(errorDest.phis.size() == 1);
    errorDest.phis[0]->addIncoming(errorValue, Builder.GetInsertBlock());

    if (tryApplyInst->getErrorBB()->getSinglePredecessorBlock()) {
      // Zeroing out the error slot only in the error block increases the chance
      // that it will become a dead store.
      auto origBB = Builder.GetInsertBlock();
      Builder.SetInsertPoint(errorDest.bb);
      Builder.CreateStore(nullError, errorSlot);
      Builder.SetInsertPoint(origBB);
    }
  }
}
/// If the value is a @convention(witness_method) function, the context
/// is the witness table that must be passed to the call.
///
/// \param v A value of possibly-polymorphic SILFunctionType.
/// \param subs This is the set of substitutions that we are going to be
/// applying to 'v'.
/// \return the raw function pointer, the context value to capture (null if
/// there is none), and the callee's lowered function type.
static std::tuple<llvm::Value*, llvm::Value*, CanSILFunctionType>
getPartialApplicationFunction(IRGenSILFunction &IGF, SILValue v,
                              SubstitutionList subs) {
  LoweredValue &lv = IGF.getLoweredValue(v);
  auto fnType = v->getType().castTo<SILFunctionType>();

  switch (lv.kind) {
  case LoweredValue::Kind::ContainedAddress:
  case LoweredValue::Kind::Address:
    llvm_unreachable("can't partially apply an address");
  case LoweredValue::Kind::BoxWithAddress:
    llvm_unreachable("can't partially apply a @box");
  case LoweredValue::Kind::ObjCMethod:
    llvm_unreachable("objc method partial application shouldn't get here");

  case LoweredValue::Kind::StaticFunction: {
    llvm::Value *context = nullptr;
    switch (lv.getStaticFunction().getRepresentation()) {
    case SILFunctionTypeRepresentation::CFunctionPointer:
    case SILFunctionTypeRepresentation::Block:
    case SILFunctionTypeRepresentation::ObjCMethod:
      assert(false && "partial_apply of foreign functions not implemented");
      break;

    case SILFunctionTypeRepresentation::WitnessMethod:
      // The captured context of a witness method is its Self witness table.
      context = emitWitnessTableForLoweredCallee(IGF, fnType, subs);
      break;

    case SILFunctionTypeRepresentation::Thick:
    case SILFunctionTypeRepresentation::Thin:
    case SILFunctionTypeRepresentation::Method:
    case SILFunctionTypeRepresentation::Closure:
      break;
    }
    return std::make_tuple(lv.getStaticFunction().getFunction(),
                           context, v->getType().castTo<SILFunctionType>());
  }
  case LoweredValue::Kind::Explosion: {
    Explosion ex = lv.getExplosion(IGF);
    llvm::Value *fn = ex.claimNext();
    llvm::Value *context = nullptr;

    switch (fnType->getRepresentation()) {
    case SILFunctionType::Representation::Thin:
    case SILFunctionType::Representation::Method:
    case SILFunctionType::Representation::Closure:
    case SILFunctionType::Representation::ObjCMethod:
      break;
    case SILFunctionType::Representation::WitnessMethod:
      context = emitWitnessTableForLoweredCallee(IGF, fnType, subs);
      break;
    case SILFunctionType::Representation::CFunctionPointer:
      break;
    case SILFunctionType::Representation::Thick:
      // A thick function value explodes as (function pointer, context).
      context = ex.claimNext();
      break;
    case SILFunctionType::Representation::Block:
      llvm_unreachable("partial application of block not implemented");
    }
    return std::make_tuple(fn, context, fnType);
  }
  }
  llvm_unreachable("Not a valid SILFunctionType.");
}
/// Lower partial_apply by emitting a closure thunk plus captured context.
/// Objective-C methods take a separate path because there is no function
/// pointer to capture; everything else routes through
/// emitFunctionPartialApplication.
void IRGenSILFunction::visitPartialApplyInst(swift::PartialApplyInst *i) {
  SILValue v(i);

  // NB: We collect the arguments under the substituted type.
  auto args = i->getArguments();
  auto params = i->getSubstCalleeType()->getParameters();
  // Only the tail of the parameter list corresponds to the applied args.
  params = params.slice(params.size() - args.size(), args.size());

  Explosion llArgs;

  {
    // Lower the parameters in the callee's generic context.
    GenericContextScope scope(IGM, i->getOrigCalleeType()->getGenericSignature());
    for (auto index : indices(args)) {
      assert(args[index]->getType() == IGM.silConv.getSILType(params[index]));
      emitApplyArgument(*this, args[index],
                        IGM.silConv.getSILType(params[index]), llArgs);
    }
  }

  auto &lv = getLoweredValue(i->getCallee());
  if (lv.kind == LoweredValue::Kind::ObjCMethod) {
    // Objective-C partial applications require a different path. There's no
    // actual function pointer to capture, and we semantically can't cache
    // dispatch, so we need to perform the message send in the partial
    // application thunk.
    auto &objcMethod = lv.getObjCMethod();
    assert(i->getArguments().size() == 1 &&
           "only partial application of objc method to self implemented");
    assert(llArgs.size() == 1 &&
           "objc partial_apply argument is not a single retainable pointer?!");
    llvm::Value *selfVal = llArgs.claimNext();

    Explosion function;
    emitObjCPartialApplication(*this,
                               objcMethod,
                               i->getOrigCalleeType(),
                               i->getType().castTo<SILFunctionType>(),
                               selfVal,
                               i->getArguments()[0]->getType(),
                               function);
    setLoweredExplosion(i, function);
    return;
  }

  // Get the function value.
  llvm::Value *calleeFn = nullptr;
  llvm::Value *innerContext = nullptr;
  CanSILFunctionType origCalleeTy;

  std::tie(calleeFn, innerContext, origCalleeTy)
    = getPartialApplicationFunction(*this, i->getCallee(),
                                    i->getSubstitutions());

  // Create the thunk and function value.
  Explosion function;
  emitFunctionPartialApplication(*this, *CurSILFn,
                                 calleeFn, innerContext, llArgs,
                                 params, i->getSubstitutions(),
                                 origCalleeTy, i->getSubstCalleeType(),
                                 i->getType().castTo<SILFunctionType>(),
                                 function);
  setLoweredExplosion(v, function);
}
/// Lower integer_literal to a single constant-integer explosion.
void IRGenSILFunction::visitIntegerLiteralInst(swift::IntegerLiteralInst *i) {
  Explosion out;
  out.add(emitConstantInt(IGM, i));
  setLoweredExplosion(i, out);
}
/// Lower float_literal to a single constant floating-point explosion.
void IRGenSILFunction::visitFloatLiteralInst(swift::FloatLiteralInst *i) {
  Explosion out;
  out.add(emitConstantFP(IGM, i));
  setLoweredExplosion(i, out);
}
/// Lower string_literal to the address of its constant data, or — for an
/// ObjC selector literal — to a load of the selector reference.
void IRGenSILFunction::visitStringLiteralInst(swift::StringLiteralInst *i) {
  bool isSelector =
      i->getEncoding() == swift::StringLiteralInst::Encoding::ObjCSelector;
  llvm::Value *addr = isSelector
                          ? emitObjCSelectorRefLoad(i->getValue())
                          : emitAddrOfConstantString(IGM, i);

  Explosion out;
  out.add(addr);
  setLoweredExplosion(i, out);
}
/// unreachable lowers directly to an LLVM unreachable terminator.
void IRGenSILFunction::visitUnreachableInst(swift::UnreachableInst *i) {
  Builder.CreateUnreachable();
}
/// Emit the epilogue return of \p result, honoring the IR-level return
/// convention: store through the sret slot when the return is indirect,
/// otherwise return the scalar values directly.
static void emitReturnInst(IRGenSILFunction &IGF,
                           SILType resultTy,
                           Explosion &result) {
  // The invariant on the out-parameter is that it's always zeroed, so
  // there's nothing to do here.

  // Even if SIL has a direct return, the IR-level calling convention may
  // require an indirect return.
  if (IGF.IndirectReturn.isValid()) {
    auto &retTI = cast<LoadableTypeInfo>(IGF.getTypeInfo(resultTy));
    retTI.initialize(IGF, result, IGF.IndirectReturn);
    IGF.Builder.CreateRetVoid();
  } else {
    auto funcLang = IGF.CurSILFn->getLoweredFunctionType()->getLanguage();
    auto swiftCCReturn = funcLang == SILFunctionLanguage::Swift;
    // Parenthesized: without the parentheses, `&&` binds tighter than `||`,
    // so the message string attaches only to the second disjunct
    // (-Wparentheses) instead of documenting the whole condition.
    assert((swiftCCReturn || funcLang == SILFunctionLanguage::C) &&
           "Need to handle all cases");
    IGF.emitScalarReturn(resultTy, result, swiftCCReturn);
  }
}
/// Lower 'return', first autoreleasing the value when the function's single
/// direct result uses the Autoreleased convention.
void IRGenSILFunction::visitReturnInst(swift::ReturnInst *i) {
  Explosion result = getLoweredExplosion(i->getOperand());

  // Implicitly autorelease the return value if the function's result
  // convention is autoreleased.
  auto fnConv = CurSILFn->getConventions();
  if (fnConv.getNumDirectSILResults() == 1
      && (fnConv.getDirectSILResults().begin()->getConvention()
          == ResultConvention::Autoreleased)) {
    Explosion temp;
    temp.add(emitObjCAutoreleaseReturnValue(*this, result.claimNext()));
    result = std::move(temp);
  }

  emitReturnInst(*this, i->getOperand()->getType(), result);
}
/// Lower 'throw': write the error into the caller's error slot, then return
/// normally with an undefined return value.
void IRGenSILFunction::visitThrowInst(swift::ThrowInst *i) {
  // Store the exception into the out-of-band error slot.
  llvm::Value *error = getLoweredSingletonExplosion(i->getOperand());
  Builder.CreateStore(error, getCallerErrorResultSlot());

  // Emit an ordinary return, leaving the declared return value undefined.
  auto *fnTy = cast<llvm::FunctionType>(
      CurFn->getType()->getPointerElementType());
  llvm::Type *retTy = fnTy->getReturnType();
  if (!retTy->isVoidTy()) {
    Builder.CreateRet(llvm::UndefValue::get(retTy));
  } else {
    Builder.CreateRetVoid();
  }
}
/// Populate \p dests with (case value, LLVM block) pairs for a switch_value
/// and return the lowered default block, or null if the instruction has no
/// default.
static llvm::BasicBlock *emitBBMapForSwitchValue(
                 IRGenSILFunction &IGF,
                 SmallVectorImpl<std::pair<SILValue, llvm::BasicBlock*>> &dests,
                 SwitchValueInst *inst) {
  unsigned numCases = inst->getNumCases();
  for (unsigned idx = 0; idx != numCases; ++idx) {
    auto caseEntry = inst->getCase(idx);
    dests.push_back({caseEntry.first, IGF.getLoweredBB(caseEntry.second).bb});
  }
  return inst->hasDefault() ? IGF.getLoweredBB(inst->getDefaultBB()).bb
                            : nullptr;
}
/// Map a switch_value case operand (an integer_literal) to its LLVM
/// constant-integer value.
static llvm::ConstantInt *
getSwitchCaseValue(IRGenFunction &IGF, SILValue val) {
  auto *literal = dyn_cast<IntegerLiteralInst>(val);
  if (!literal)
    llvm_unreachable("Switch value cases should be integers");
  return dyn_cast<llvm::ConstantInt>(emitConstantInt(IGF.IGM, literal));
}
/// Emit the control-flow dispatch for switch_value.  Integer operands lower
/// to an LLVM switch; function-typed operands lower to a chain of
/// pointer-equality tests.
static void
emitSwitchValueDispatch(IRGenSILFunction &IGF,
                        SILType ty,
                        Explosion &value,
                        ArrayRef<std::pair<SILValue, llvm::BasicBlock*>> dests,
                        llvm::BasicBlock *defaultDest) {
  // Create an unreachable block for the default if the original SIL
  // instruction had none.
  bool unreachableDefault = false;
  if (!defaultDest) {
    unreachableDefault = true;
    defaultDest = llvm::BasicBlock::Create(IGF.IGM.getLLVMContext());
  }

  if (ty.is<BuiltinIntegerType>()) {
    // Integer case: a plain LLVM switch.
    auto *discriminator = value.claimNext();
    auto *i = IGF.Builder.CreateSwitch(discriminator, defaultDest,
                                       dests.size());
    for (auto &dest : dests)
      i->addCase(getSwitchCaseValue(IGF, dest.first), dest.second);
  } else {
    // Get the value we're testing, which is a function.
    llvm::Value *val;
    llvm::BasicBlock *nextTest = nullptr;
    if (ty.is<SILFunctionType>()) {
      val = value.claimNext();   // Function pointer.
      //values.claimNext();         // Ignore the data pointer.
    } else {
      llvm_unreachable("switch_value operand has an unknown type");
    }

    for (int i = 0, e = dests.size(); i < e; ++i) {
      auto casePair = dests[i];
      llvm::Value *caseval;
      auto casevalue = IGF.getLoweredExplosion(casePair.first);
      if (casePair.first->getType().is<SILFunctionType>()) {
        caseval = casevalue.claimNext();   // Function pointer.
        //values.claimNext();         // Ignore the data pointer.
      } else {
        llvm_unreachable("switch_value operand has an unknown type");
      }

      // Compare operand with a case tag value.
      llvm::Value *cond = IGF.Builder.CreateICmp(llvm::CmpInst::ICMP_EQ,
                                                 val, caseval);

      // The last test can branch straight to the (real) default on failure;
      // earlier tests fall through to a fresh "next-test" block.
      if (i == e -1 && !unreachableDefault) {
        nextTest = nullptr;
        IGF.Builder.CreateCondBr(cond, casePair.second, defaultDest);
      } else {
        nextTest = IGF.createBasicBlock("next-test");
        IGF.Builder.CreateCondBr(cond, casePair.second, nextTest);
        IGF.Builder.emitBlock(nextTest);
        IGF.Builder.SetInsertPoint(nextTest);
      }
    }

    if (nextTest) {
      // A trailing test block remains only when the default is synthetic
      // (unreachable); terminate it by branching there.
      IGF.Builder.CreateBr(defaultDest);
    }
  }

  if (unreachableDefault) {
    IGF.Builder.emitBlock(defaultDest);
    IGF.Builder.CreateUnreachable();
  }
}
/// Lower switch_value: build the case-to-block map, then emit the dispatch.
void IRGenSILFunction::visitSwitchValueInst(SwitchValueInst *inst) {
  Explosion operand = getLoweredExplosion(inst->getOperand());

  // Translate the SIL successors to their LLVM counterparts.
  SmallVector<std::pair<SILValue, llvm::BasicBlock*>, 4> caseDests;
  llvm::BasicBlock *defaultDest =
      emitBBMapForSwitchValue(*this, caseDests, inst);

  emitSwitchValueDispatch(*this, inst->getOperand()->getType(),
                          operand, caseDests, defaultDest);
}
// Bind an incoming explosion value to an explosion of LLVM phi node(s).
static void addIncomingExplosionToPHINodes(IRGenSILFunction &IGF,
                                           ArrayRef<llvm::Value*> phis,
                                           Explosion &argValue) {
  llvm::BasicBlock *predBB = IGF.Builder.GetInsertBlock();
  unsigned nextPhi = 0;
  for (; !argValue.empty(); ++nextPhi)
    cast<llvm::PHINode>(phis[nextPhi])
      ->addIncoming(argValue.claimNext(), predBB);
  assert(nextPhi == phis.size() && "explosion doesn't match number of phis");
}
// Bind an incoming explosion value to a SILArgument's LLVM phi node(s),
// advancing phiIndex past the consumed nodes.
static void addIncomingExplosionToPHINodes(IRGenSILFunction &IGF,
                                           LoweredBB &lbb,
                                           unsigned &phiIndex,
                                           Explosion &argValue) {
  llvm::BasicBlock *predBB = IGF.Builder.GetInsertBlock();
  for (; !argValue.empty(); ++phiIndex)
    lbb.phis[phiIndex]->addIncoming(argValue.claimNext(), predBB);
}
// Bind an incoming address value to a SILArgument's LLVM phi node(s).
static void addIncomingAddressToPHINodes(IRGenSILFunction &IGF,
                                         ArrayRef<llvm::Value*> phis,
                                         Address argValue) {
  assert(phis.size() == 1 && "more than one phi for address?!");
  auto *phi = cast<llvm::PHINode>(phis.front());
  phi->addIncoming(argValue.getAddress(), IGF.Builder.GetInsertBlock());
}
// Bind an incoming address value to a SILArgument's LLVM phi node(s),
// advancing phiIndex past the consumed node.
static void addIncomingAddressToPHINodes(IRGenSILFunction &IGF,
                                         LoweredBB &lbb,
                                         unsigned &phiIndex,
                                         Address argValue) {
  auto *predBB = IGF.Builder.GetInsertBlock();
  lbb.phis[phiIndex]->addIncoming(argValue.getAddress(), predBB);
  ++phiIndex;
}
// Add branch arguments to destination phi nodes.
static void addIncomingSILArgumentsToPHINodes(IRGenSILFunction &IGF,
                                              LoweredBB &lbb,
                                              OperandValueArrayRef args) {
  unsigned nextPhi = 0;
  for (SILValue arg : args) {
    const LoweredValue &lowered = IGF.getLoweredValue(arg);
    if (lowered.isAddress()) {
      // Address-typed arguments feed a single pointer phi.
      addIncomingAddressToPHINodes(IGF, lbb, nextPhi, lowered.getAddress());
    } else {
      // Loadable arguments feed one phi per exploded scalar.
      Explosion exploded = lowered.getExplosion(IGF);
      addIncomingExplosionToPHINodes(IGF, lbb, nextPhi, exploded);
    }
  }
}
/// Build the (case decl, LLVM block) map for a switch_enum[_addr], creating
/// an intermediate waypoint block for any destination that takes arguments,
/// and return the lowered default block (or null if there is none).
static llvm::BasicBlock *emitBBMapForSwitchEnum(
         IRGenSILFunction &IGF,
         SmallVectorImpl<std::pair<EnumElementDecl*, llvm::BasicBlock*>> &dests,
         SwitchEnumInstBase *inst) {
  for (unsigned idx = 0, end = inst->getNumCases(); idx != end; ++idx) {
    auto caseEntry = inst->getCase(idx);

    // If the destination BB accepts the case argument, set up a waypoint BB so
    // we can feed the values into the argument's PHI node(s).
    //
    // FIXME: This is cheesy when the destination BB has only the switch
    // as a predecessor.
    llvm::BasicBlock *target =
        caseEntry.second->args_empty()
            ? IGF.getLoweredBB(caseEntry.second).bb
            : llvm::BasicBlock::Create(IGF.IGM.getLLVMContext());
    dests.push_back({caseEntry.first, target});
  }

  return inst->hasDefault() ? IGF.getLoweredBB(inst->getDefaultBB()).bb
                            : nullptr;
}
/// Lower switch_enum: dispatch on a loadable enum's tag, routing payload-
/// carrying cases through waypoint blocks that project the payload into the
/// destination's phi nodes.
void IRGenSILFunction::visitSwitchEnumInst(SwitchEnumInst *inst) {
  Explosion value = getLoweredExplosion(inst->getOperand());

  // Map the SIL dest bbs to their LLVM bbs.
  SmallVector<std::pair<EnumElementDecl*, llvm::BasicBlock*>, 4> dests;
  llvm::BasicBlock *defaultDest
    = emitBBMapForSwitchEnum(*this, dests, inst);

  // Emit the dispatch.
  auto &EIS = getEnumImplStrategy(IGM, inst->getOperand()->getType());
  EIS.emitValueSwitch(*this, value, dests, defaultDest);

  // Bind arguments for cases that want them.
  for (unsigned i = 0, e = inst->getNumCases(); i < e; ++i) {
    auto casePair = inst->getCase(i);
    if (!casePair.second->args_empty()) {
      // This case was routed through a waypoint block; emit the payload
      // projection there and forward it to the real destination's phis.
      auto waypointBB = dests[i].second;
      auto &destLBB = getLoweredBB(casePair.second);

      Builder.emitBlock(waypointBB);

      Explosion inValue = getLoweredExplosion(inst->getOperand());
      Explosion projected;
      emitProjectLoadableEnum(*this, inst->getOperand()->getType(),
                              inValue, casePair.first, projected);

      unsigned phiIndex = 0;
      addIncomingExplosionToPHINodes(*this, destLBB, phiIndex, projected);

      Builder.CreateBr(destLBB.bb);
    }
  }
}
/// Lower switch_enum_addr: dispatch on an address-only enum's stored tag.
void
IRGenSILFunction::visitSwitchEnumAddrInst(SwitchEnumAddrInst *inst) {
  Address enumAddr = getLoweredAddress(inst->getOperand());

  // Translate the SIL successors to their LLVM counterparts.
  SmallVector<std::pair<EnumElementDecl*, llvm::BasicBlock*>, 4> caseDests;
  llvm::BasicBlock *defaultDest =
      emitBBMapForSwitchEnum(*this, caseDests, inst);

  // Dispatch on the tag in memory.
  emitSwitchAddressOnlyEnumDispatch(*this, inst->getOperand()->getType(),
                                    enumAddr, caseDests, defaultDest);
}
// FIXME: We could lower select_enum directly to LLVM select in a lot of cases.
// For now, just emit a switch and phi nodes, like a chump.
//
// Builds one destination block per case (each feeding its result value into
// phi nodes in a shared continuation block) and returns the continuation
// block.  \p resultPHI receives the phi nodes and \p defaultBB is set to the
// default destination (or null).  The insert point is restored to the
// original block so the caller can emit the dispatch itself.
template<class C, class T>
static llvm::BasicBlock *
emitBBMapForSelect(IRGenSILFunction &IGF,
                   Explosion &resultPHI,
                   SmallVectorImpl<std::pair<T, llvm::BasicBlock*>> &BBs,
                   llvm::BasicBlock *&defaultBB,
                   SelectInstBase<C, T> *inst) {
  auto origBB = IGF.Builder.GetInsertBlock();

  // Set up a continuation BB and phi nodes to receive the result value.
  llvm::BasicBlock *contBB = IGF.createBasicBlock("select_enum");
  IGF.Builder.SetInsertPoint(contBB);

  // Emit an explosion of phi node(s) to receive the value.
  SmallVector<llvm::Value*, 4> phis;
  auto &ti = IGF.getTypeInfo(inst->getType());
  emitPHINodesForType(IGF, inst->getType(), ti,
                      inst->getNumCases() + inst->hasDefault(),
                      phis);
  resultPHI.add(phis);

  IGF.Builder.SetInsertPoint(origBB);

  // Feed one select operand's lowered value into the phi nodes from the
  // current insert block.
  auto addIncoming = [&](SILValue value) {
    if (value->getType().isAddress()) {
      addIncomingAddressToPHINodes(IGF, resultPHI.getAll(),
                                   IGF.getLoweredAddress(value));
    } else {
      Explosion ex = IGF.getLoweredExplosion(value);
      addIncomingExplosionToPHINodes(IGF, resultPHI.getAll(), ex);
    }
  };

  for (unsigned i = 0, e = inst->getNumCases(); i < e; ++i) {
    auto casePair = inst->getCase(i);

    // Create a basic block destination for this case.
    llvm::BasicBlock *destBB = IGF.createBasicBlock("");
    IGF.Builder.emitBlock(destBB);

    // Feed the corresponding result into the phi nodes.
    addIncoming(casePair.second);

    // Jump immediately to the continuation.
    IGF.Builder.CreateBr(contBB);
    BBs.push_back(std::make_pair(casePair.first, destBB));
  }

  if (inst->hasDefault()) {
    defaultBB = IGF.createBasicBlock("");
    IGF.Builder.emitBlock(defaultBB);
    addIncoming(inst->getDefaultResult());
    IGF.Builder.CreateBr(contBB);
  } else {
    defaultBB = nullptr;
  }

  IGF.Builder.emitBlock(contBB);

  IGF.Builder.SetInsertPoint(origBB);
  return contBB;
}
// Try to map the value of a select_enum directly to an int type with a simple
// cast from the tag value to the result type. Optionally also by adding a
// constant offset.
// This is useful, e.g. for rawValue or hashValue of C-like enums.
//
// Returns null when the mapping does not apply: a default case exists, the
// result is not a single integer scalar, or the case values are not
// "tag + common constant offset".
static llvm::Value *
mapTriviallyToInt(IRGenSILFunction &IGF, const EnumImplStrategy &EIS, SelectEnumInst *inst) {

  // All cases must be covered
  if (inst->hasDefault())
    return nullptr;

  auto &ti = IGF.getTypeInfo(inst->getType());
  ExplosionSchema schema = ti.getSchema();

  // Check if the select_enum's result is a single integer scalar.
  if (schema.size() != 1)
    return nullptr;

  if (!schema[0].isScalar())
    return nullptr;

  llvm::Type *type = schema[0].getScalarType();
  llvm::IntegerType *resultType = dyn_cast<llvm::IntegerType>(type);
  if (!resultType)
    return nullptr;

  // Check if the case values directly map to the tag values, maybe with a
  // constant offset.
  APInt commonOffset;
  bool offsetValid = false;

  for (unsigned i = 0, e = inst->getNumCases(); i < e; ++i) {
    auto casePair = inst->getCase(i);

    // A negative discriminator index means the case has no simple tag.
    int64_t index = EIS.getDiscriminatorIndex(casePair.first);
    if (index < 0)
      return nullptr;

    IntegerLiteralInst *intLit = dyn_cast<IntegerLiteralInst>(casePair.second);
    if (!intLit)
      return nullptr;

    APInt caseValue = intLit->getValue();
    APInt offset = caseValue - index;
    if (offsetValid) {
      // Every case must agree on the same tag-to-value offset.
      if (offset != commonOffset)
        return nullptr;
    } else {
      commonOffset = offset;
      offsetValid = true;
    }
  }

  // Ask the enum implementation strategy to extract the enum tag as an integer
  // value.
  Explosion enumValue = IGF.getLoweredExplosion(inst->getEnumOperand());
  llvm::Value *result = EIS.emitExtractDiscriminator(IGF, enumValue);
  if (!result) {
    // No direct tag extraction available; drain the claimed explosion.
    (void)enumValue.claimAll();
    return nullptr;
  }

  // Cast to the result type.
  result = IGF.Builder.CreateIntCast(result, resultType, false);
  if (commonOffset != 0) {
    // The offset, if any.
    auto *offsetConst = llvm::ConstantInt::get(resultType, commonOffset);
    result = IGF.Builder.CreateAdd(result, offsetConst);
  }
  return result;
}
/// Wrap a select instruction's result explosion as a LoweredValue, treating
/// an address-typed result as a single pointer value.
template <class C, class T>
static LoweredValue
getLoweredValueForSelect(IRGenSILFunction &IGF,
                         Explosion &result, SelectInstBase<C, T> *inst) {
  SILType resultTy = inst->getType();
  if (!resultTy.isAddress())
    return LoweredValue(result);

  // FIXME: Loses potentially better alignment info we might have.
  auto align = IGF.getTypeInfo(resultTy).getBestKnownAlignment();
  return LoweredValue(Address(result.claimNext(), align));
}
/// Emit the result of a select_enum[_addr] that tests exactly one case
/// (one case plus a default, or two cases without a default), given
/// \p isTrue, the i1 indicating whether the tested case matched.
static void emitSingleEnumMemberSelectResult(IRGenSILFunction &IGF,
                                             SelectEnumInstBase *inst,
                                             llvm::Value *isTrue,
                                             Explosion &result) {
  assert((inst->getNumCases() == 1 && inst->hasDefault()) ||
         (inst->getNumCases() == 2 && !inst->hasDefault()));

  // Collect the lowered scalar values of one select operand: an address-typed
  // operand contributes its single address, a loadable one its explosion.
  // (Shared by the true and false operands below.)
  auto collectValues = [&](SILValue value,
                           SmallVectorImpl<llvm::Value*> &out) {
    if (value->getType().isAddress()) {
      out.push_back(IGF.getLoweredAddress(value).getAddress());
    } else {
      Explosion ex = IGF.getLoweredExplosion(value);
      while (!ex.empty())
        out.push_back(ex.claimNext());
    }
  };

  // Extract the true values.
  SmallVector<llvm::Value*, 4> TrueValues;
  collectValues(inst->getCase(0).second, TrueValues);

  // Extract the false values.
  auto falseValue =
    inst->hasDefault() ? inst->getDefaultResult() : inst->getCase(1).second;
  SmallVector<llvm::Value*, 4> FalseValues;
  collectValues(falseValue, FalseValues);

  assert(TrueValues.size() == FalseValues.size() &&
         "explosions didn't produce same element count?");
  for (unsigned i = 0, e = FalseValues.size(); i != e; ++i) {
    auto *TV = TrueValues[i], *FV = FalseValues[i];
    // It is pretty common to select between zero and 1 as the result of the
    // select.  Instead of emitting an obviously dumb select, emit nothing or
    // a zext.
    if (auto *TC = dyn_cast<llvm::ConstantInt>(TV))
      if (auto *FC = dyn_cast<llvm::ConstantInt>(FV))
        if (TC->isOne() && FC->isZero()) {
          result.add(IGF.Builder.CreateZExtOrBitCast(isTrue, TV->getType()));
          continue;
        }
    result.add(IGF.Builder.CreateSelect(isTrue, TV, FV));
  }
}
/// Lower select_enum, preferring (in order): a direct tag-to-integer
/// mapping, a single-case conditional select, and finally a full switch
/// with phi nodes.
void IRGenSILFunction::visitSelectEnumInst(SelectEnumInst *inst) {
  auto &EIS = getEnumImplStrategy(IGM, inst->getEnumOperand()->getType());
  Explosion result;

  if (llvm::Value *R = mapTriviallyToInt(*this, EIS, inst)) {
    // The case values are the enum tags plus a constant offset; no branch
    // needed at all.
    result.add(R);
  } else if ((inst->getNumCases() == 1 && inst->hasDefault()) ||
             (inst->getNumCases() == 2 && !inst->hasDefault())) {
    // If this is testing for one case, do simpler codegen.  This is
    // particularly common when testing optionals.
    Explosion value = getLoweredExplosion(inst->getEnumOperand());
    auto isTrue = EIS.emitValueCaseTest(*this, value, inst->getCase(0).first);
    emitSingleEnumMemberSelectResult(*this, inst, isTrue, result);
  } else {
    Explosion value = getLoweredExplosion(inst->getEnumOperand());

    // Map the SIL dest bbs to their LLVM bbs.
    SmallVector<std::pair<EnumElementDecl*, llvm::BasicBlock*>, 4> dests;
    llvm::BasicBlock *defaultDest;
    llvm::BasicBlock *contBB
      = emitBBMapForSelect(*this, result, dests, defaultDest, inst);

    // Emit the dispatch.
    EIS.emitValueSwitch(*this, value, dests, defaultDest);

    // emitBBMapForSelectEnum set up a continuation block and phi nodes to
    // receive the result.
    Builder.SetInsertPoint(contBB);
  }

  setLoweredValue(inst,
                  getLoweredValueForSelect(*this, result, inst));
}
/// Lower select_enum_addr: like select_enum, but the enum operand is
/// inspected in memory (address-only) rather than as a loadable value.
void IRGenSILFunction::visitSelectEnumAddrInst(SelectEnumAddrInst *inst) {
  Address value = getLoweredAddress(inst->getEnumOperand());
  Explosion result;
  if ((inst->getNumCases() == 1 && inst->hasDefault()) ||
      (inst->getNumCases() == 2 && !inst->hasDefault())) {
    auto &EIS = getEnumImplStrategy(IGM, inst->getEnumOperand()->getType());
    // If this is testing for one case, do simpler codegen. This is
    // particularly common when testing optionals.
    auto isTrue = EIS.emitIndirectCaseTest(*this,
                                           inst->getEnumOperand()->getType(),
                                           value, inst->getCase(0).first);
    emitSingleEnumMemberSelectResult(*this, inst, isTrue, result);
  } else {
    // Map the SIL dest bbs to their LLVM bbs.
    SmallVector<std::pair<EnumElementDecl*, llvm::BasicBlock*>, 4> dests;
    llvm::BasicBlock *defaultDest;
    llvm::BasicBlock *contBB
      = emitBBMapForSelect(*this, result, dests, defaultDest, inst);
    // Emit the dispatch.
    emitSwitchAddressOnlyEnumDispatch(*this, inst->getEnumOperand()->getType(),
                                      value, dests, defaultDest);
    // emitBBMapForSelect set up a phi node to receive the result.
    Builder.SetInsertPoint(contBB);
  }
  setLoweredValue(inst,
                  getLoweredValueForSelect(*this, result, inst));
}
/// Lower select_value: dispatch on a value, merging per-case results in a
/// continuation block's phi nodes.
void IRGenSILFunction::visitSelectValueInst(SelectValueInst *inst) {
  Explosion discriminator = getLoweredExplosion(inst->getOperand());

  // Build the per-case LLVM destination blocks plus a default.
  SmallVector<std::pair<SILValue, llvm::BasicBlock*>, 4> caseDests;
  llvm::BasicBlock *defaultDest;
  Explosion result;
  llvm::BasicBlock *contBB =
      emitBBMapForSelect(*this, result, caseDests, defaultDest, inst);

  // Branch on the operand value.
  emitSwitchValueDispatch(*this, inst->getOperand()->getType(), discriminator,
                          caseDests, defaultDest);

  // The continuation block's phis receive the selected result.
  Builder.SetInsertPoint(contBB);
  setLoweredValue(inst, getLoweredValueForSelect(*this, result, inst));
}
/// Lower dynamic_method_br: emit an ObjC respondsToSelector: query on the
/// operand and branch to hasMethodBB when the object answers the selector,
/// otherwise to noMethodBB.
void IRGenSILFunction::visitDynamicMethodBranchInst(DynamicMethodBranchInst *i){
  LoweredBB &hasMethodBB = getLoweredBB(i->getHasMethodBB());
  LoweredBB &noMethodBB = getLoweredBB(i->getNoMethodBB());
  // Emit the respondsToSelector: call.
  StringRef selector;
  llvm::SmallString<64> selectorBuffer;
  if (auto fnDecl = dyn_cast<FuncDecl>(i->getMember().getDecl()))
    selector = fnDecl->getObjCSelector().getString(selectorBuffer);
  else if (auto var = dyn_cast<AbstractStorageDecl>(i->getMember().getDecl()))
    selector = var->getObjCGetterSelector().getString(selectorBuffer);
  else
    llvm_unreachable("Unhandled dynamic method branch query");
  llvm::Value *object = getLoweredExplosion(i->getOperand()).claimNext();
  if (object->getType() != IGM.ObjCPtrTy)
    object = Builder.CreateBitCast(object, IGM.ObjCPtrTy);
  llvm::Value *loadSel = emitObjCSelectorRefLoad(selector);
  llvm::Value *respondsToSelector
    = emitObjCSelectorRefLoad("respondsToSelector:");
  llvm::Constant *messenger = IGM.getObjCMsgSendFn();
  // Cast the messenger to an i1 (id, SEL, SEL) function pointer for this
  // particular query.
  llvm::Type *argTys[] = {
    IGM.ObjCPtrTy,
    IGM.Int8PtrTy,
    IGM.Int8PtrTy,
  };
  auto respondsToSelectorTy = llvm::FunctionType::get(IGM.Int1Ty,
                                                      argTys,
                                                      /*isVarArg*/ false)
    ->getPointerTo();
  messenger = llvm::ConstantExpr::getBitCast(messenger,
                                             respondsToSelectorTy);
  llvm::CallInst *call = Builder.CreateCall(messenger,
                                        {object, respondsToSelector, loadSel});
  call->setDoesNotThrow();
  // FIXME: Assume (probably safely) that the hasMethodBB has only us as a
  // predecessor, and cannibalize its bb argument so we can represent it as an
  // ObjCMethod lowered value. This is hella gross but saves us having to
  // implement ObjCMethod-to-Explosion lowering and creating a thunk we don't
  // want.
  assert(std::next(i->getHasMethodBB()->pred_begin())
           == i->getHasMethodBB()->pred_end()
         && "lowering dynamic_method_br with multiple preds for destination "
            "not implemented");
  // Kill the existing lowered value for the bb arg and its phi nodes.
  SILValue methodArg = i->getHasMethodBB()->args_begin()[0];
  Explosion formerLLArg = getLoweredExplosion(methodArg);
  for (llvm::Value *val : formerLLArg.claimAll()) {
    auto phi = cast<llvm::PHINode>(val);
    assert(phi->getNumIncomingValues() == 0 && "phi already used");
    phi->removeFromParent();
    delete phi;
  }
  LoweredValues.erase(methodArg);
  // Replace the lowered value with an ObjCMethod lowering.
  setLoweredObjCMethod(methodArg, i->getMember());
  // Create the branch.
  Builder.CreateCondBr(call, hasMethodBB.bb, noMethodBB.bb);
}
/// Lower an unconditional SIL branch: feed the branch arguments into the
/// destination block's phi nodes, then jump.
void IRGenSILFunction::visitBranchInst(swift::BranchInst *i) {
  LoweredBB &destBB = getLoweredBB(i->getDestBB());
  addIncomingSILArgumentsToPHINodes(*this, destBB, i->getArgs());
  Builder.CreateBr(destBB.bb);
}
/// Lower a conditional SIL branch: route each destination's branch
/// arguments into its phi nodes and emit the conditional jump.
void IRGenSILFunction::visitCondBranchInst(swift::CondBranchInst *i) {
  llvm::Value *cond = getLoweredExplosion(i->getCondition()).claimNext();
  LoweredBB &onTrue = getLoweredBB(i->getTrueBB());
  LoweredBB &onFalse = getLoweredBB(i->getFalseBB());
  addIncomingSILArgumentsToPHINodes(*this, onTrue, i->getTrueArgs());
  addIncomingSILArgumentsToPHINodes(*this, onFalse, i->getFalseArgs());
  Builder.CreateCondBr(cond, onTrue.bb, onFalse.bb);
}
/// Lower retain_value: copy the operand to bump its reference counts and
/// discard the copied values.
void IRGenSILFunction::visitRetainValueInst(swift::RetainValueInst *i) {
  auto atomicity = i->isAtomic() ? irgen::Atomicity::Atomic
                                 : irgen::Atomicity::NonAtomic;
  Explosion src = getLoweredExplosion(i->getOperand());
  Explosion copied;
  cast<LoadableTypeInfo>(getTypeInfo(i->getOperand()->getType()))
      .copy(*this, src, copied, atomicity);
  // The copied values themselves are unused; only the retain matters.
  (void)copied.claimAll();
}
/// Lower copy_value: produce an independently-owned copy of the operand.
void IRGenSILFunction::visitCopyValueInst(swift::CopyValueInst *i) {
  auto &ti = cast<LoadableTypeInfo>(getTypeInfo(i->getOperand()->getType()));
  Explosion src = getLoweredExplosion(i->getOperand());
  Explosion copied;
  ti.copy(*this, src, copied, getDefaultAtomicity());
  setLoweredExplosion(i, copied);
}
// TODO: Implement this more generally for arbitrary values. Currently the
// SIL verifier restricts it to single-refcounted-pointer types.
void IRGenSILFunction::visitAutoreleaseValueInst(swift::AutoreleaseValueInst *i)
{
  // Hand the single refcounted pointer to the ObjC autorelease entry point.
  Explosion operand = getLoweredExplosion(i->getOperand());
  emitObjCAutoreleaseCall(operand.claimNext());
}
/// Lower set_deallocating.  For stack-promoted objects the marker can be
/// omitted entirely when no side-effecting code can observe the
/// RC_DEALLOCATING_FLAG before the matching dealloc_ref.
void IRGenSILFunction::visitSetDeallocatingInst(SetDeallocatingInst *i) {
  auto *ARI = dyn_cast<AllocRefInst>(i->getOperand());
  if (ARI && StackAllocs.count(ARI)) {
    // A small peep-hole optimization: If the operand is allocated on stack and
    // there is no "significant" code between the set_deallocating and the final
    // dealloc_ref, the set_deallocating is not required.
    //   %0 = alloc_ref [stack]
    //     ...
    //   set_deallocating %0 // not needed
    //     // code which does not depend on the RC_DEALLOCATING_FLAG flag.
    //   dealloc_ref %0      // not needed (stems from the inlined deallocator)
    //     ...
    //   dealloc_ref [stack] %0
    SILBasicBlock::iterator Iter(i);
    SILBasicBlock::iterator End = i->getParent()->end();
    // Scan forward within the block for the matching dealloc_ref.
    for (++Iter; Iter != End; ++Iter) {
      SILInstruction *I = &*Iter;
      if (auto *DRI = dyn_cast<DeallocRefInst>(I)) {
        if (DRI->getOperand() == ARI) {
          // The set_deallocating is followed by a dealloc_ref -> we can ignore
          // it.
          return;
        }
      }
      // Assume that any instruction with side-effects may depend on the
      // RC_DEALLOCATING_FLAG flag.
      if (I->mayHaveSideEffects())
        break;
    }
  }
  // Fall through: emit the runtime call that marks the object deallocating.
  Explosion lowered = getLoweredExplosion(i->getOperand());
  emitNativeSetDeallocating(lowered.claimNext());
}
/// Lower release_value: consume the operand, dropping its reference counts.
void IRGenSILFunction::visitReleaseValueInst(swift::ReleaseValueInst *i) {
  auto atomicity = i->isAtomic() ? irgen::Atomicity::Atomic
                                 : irgen::Atomicity::NonAtomic;
  Explosion operand = getLoweredExplosion(i->getOperand());
  cast<LoadableTypeInfo>(getTypeInfo(i->getOperand()->getType()))
      .consume(*this, operand, atomicity);
}
/// Lower destroy_value: consume the operand with default atomicity.
void IRGenSILFunction::visitDestroyValueInst(swift::DestroyValueInst *i) {
  Explosion operand = getLoweredExplosion(i->getOperand());
  cast<LoadableTypeInfo>(getTypeInfo(i->getOperand()->getType()))
      .consume(*this, operand, getDefaultAtomicity());
}
/// Lower struct: the struct's explosion is just its members' explosions
/// concatenated in order.
void IRGenSILFunction::visitStructInst(swift::StructInst *i) {
  Explosion assembled;
  for (SILValue member : i->getElements())
    assembled.add(getLoweredExplosion(member).claimAll());
  setLoweredExplosion(i, assembled);
}
/// Lower tuple: concatenate the element explosions in order.
void IRGenSILFunction::visitTupleInst(swift::TupleInst *i) {
  Explosion assembled;
  for (SILValue member : i->getElements())
    assembled.add(getLoweredExplosion(member).claimAll());
  setLoweredExplosion(i, assembled);
}
/// Lower enum: inject the (possibly empty) payload into the given case of a
/// loadable enum.
void IRGenSILFunction::visitEnumInst(swift::EnumInst *i) {
  Explosion payload = i->hasOperand() ? getLoweredExplosion(i->getOperand())
                                      : Explosion();
  Explosion injected;
  emitInjectLoadableEnum(*this, i->getType(), i->getElement(), payload,
                         injected);
  setLoweredExplosion(i, injected);
}
/// Lower init_enum_data_addr: project the payload area of an enum in memory
/// for a subsequent initializing store.
void IRGenSILFunction::visitInitEnumDataAddrInst(swift::InitEnumDataAddrInst *i) {
  Address payloadAddr = emitProjectEnumAddressForStore(
      *this, i->getOperand()->getType(), getLoweredAddress(i->getOperand()),
      i->getElement());
  setLoweredAddress(i, payloadAddr);
}
/// Lower unchecked_enum_data: project the payload out of a loadable enum
/// without checking the tag.
void IRGenSILFunction::visitUncheckedEnumDataInst(swift::UncheckedEnumDataInst *i) {
  Explosion enumValue = getLoweredExplosion(i->getOperand());
  Explosion payload;
  emitProjectLoadableEnum(*this, i->getOperand()->getType(), enumValue,
                          i->getElement(), payload);
  setLoweredExplosion(i, payload);
}
/// Lower unchecked_take_enum_data_addr: destructively project the payload
/// address of an enum in memory.
void IRGenSILFunction::visitUncheckedTakeEnumDataAddrInst(swift::UncheckedTakeEnumDataAddrInst *i) {
  Address payloadAddr = emitDestructiveProjectEnumAddressForLoad(
      *this, i->getOperand()->getType(), getLoweredAddress(i->getOperand()),
      i->getElement());
  setLoweredAddress(i, payloadAddr);
}
/// Lower inject_enum_addr: write the case tag into the enum value in memory.
void IRGenSILFunction::visitInjectEnumAddrInst(swift::InjectEnumAddrInst *i) {
  emitStoreEnumTagToAddress(*this, i->getOperand()->getType(),
                            getLoweredAddress(i->getOperand()),
                            i->getElement());
}
/// Lower tuple_extract: copy one element's values out of the tuple's
/// explosion.
void IRGenSILFunction::visitTupleExtractInst(swift::TupleExtractInst *i) {
  SILType tupleTy = i->getOperand()->getType();
  Explosion whole = getLoweredExplosion(i->getOperand());
  Explosion element;
  projectTupleElementFromExplosion(*this,
                                   tupleTy,
                                   whole,
                                   i->getFieldNo(),
                                   element);
  // Discard whatever the projection left unclaimed.
  (void)whole.claimAll();
  setLoweredExplosion(i, element);
}
/// Lower tuple_element_addr: compute the element's address in memory.
void IRGenSILFunction::visitTupleElementAddrInst(swift::TupleElementAddrInst *i)
{
  Address elementAddr = projectTupleElementAddress(
      *this, getLoweredAddress(i->getOperand()), i->getOperand()->getType(),
      i->getFieldNo());
  setLoweredAddress(i, elementAddr);
}
/// Lower struct_extract: copy one physical member out of the struct's
/// explosion.
void IRGenSILFunction::visitStructExtractInst(swift::StructExtractInst *i) {
  SILType structTy = i->getOperand()->getType();
  Explosion whole = getLoweredExplosion(i->getOperand());
  Explosion member;
  projectPhysicalStructMemberFromExplosion(*this,
                                           structTy,
                                           whole,
                                           i->getField(),
                                           member);
  // Discard whatever the projection left unclaimed.
  (void)whole.claimAll();
  setLoweredExplosion(i, member);
}
/// Lower struct_element_addr: compute the stored property's address.
void IRGenSILFunction::visitStructElementAddrInst(
    swift::StructElementAddrInst *i) {
  Address memberAddr = projectPhysicalStructMemberAddress(
      *this, getLoweredAddress(i->getOperand()), i->getOperand()->getType(),
      i->getField());
  setLoweredAddress(i, memberAddr);
}
/// Lower ref_element_addr: compute the address of a stored property inside a
/// class instance.
void IRGenSILFunction::visitRefElementAddrInst(swift::RefElementAddrInst *i) {
  Explosion operand = getLoweredExplosion(i->getOperand());
  llvm::Value *instance = operand.claimNext();
  SILType instanceTy = i->getOperand()->getType();
  Address fieldAddr = projectPhysicalClassMemberAddress(*this,
                                                        instance,
                                                        instanceTy,
                                                        i->getType(),
                                                        i->getField())
                          .getAddress();
  setLoweredAddress(i, fieldAddr);
}
/// Lower ref_tail_addr: compute the address of a class instance's
/// tail-allocated storage.
void IRGenSILFunction::visitRefTailAddrInst(RefTailAddrInst *i) {
  SILValue ref = i->getOperand();
  llvm::Value *instance = getLoweredExplosion(ref).claimNext();
  setLoweredAddress(i, emitTailProjection(*this, instance, ref->getType(),
                                          i->getTailType()));
}
/// Return true if loads from \p v are known never to observe a different
/// value — currently only addresses rooted in an invariant
/// pointer_to_address qualify.
static bool isInvariantAddress(SILValue v) {
  auto root = getUnderlyingAddressRoot(v);
  if (auto *p2a = dyn_cast<PointerToAddressInst>(root))
    return p2a->isInvariant();
  // TODO: We could be more aggressive about considering addresses based on
  // `let` variables as invariant when the type of the address is known not to
  // have any sharably-mutable interior storage (in other words, no weak refs,
  // atomics, etc.)
  return false;
}
/// Lower a SIL load.  [take]/trivial/unqualified loads move the bits out of
/// memory; [copy] additionally retains the loaded value.  Loads from
/// provably invariant addresses are flagged via setInvariantLoad.
void IRGenSILFunction::visitLoadInst(swift::LoadInst *i) {
  Explosion lowered;
  Address source = getLoweredAddress(i->getOperand());
  SILType objType = i->getType().getObjectType();
  const auto &typeInfo = cast<LoadableTypeInfo>(getTypeInfo(objType));
  switch (i->getOwnershipQualifier()) {
  case LoadOwnershipQualifier::Unqualified:
  case LoadOwnershipQualifier::Trivial:
  case LoadOwnershipQualifier::Take:
    typeInfo.loadAsTake(*this, source, lowered);
    break;
  case LoadOwnershipQualifier::Copy:
    typeInfo.loadAsCopy(*this, source, lowered);
    break;
  }
  if (isInvariantAddress(i->getOperand())) {
    // It'd be better to push this down into `loadAs` methods, perhaps...
    for (auto value : lowered.getAll())
      if (auto load = dyn_cast<llvm::LoadInst>(value))
        setInvariantLoad(load);
  }
  setLoweredExplosion(i, lowered);
}
/// Lower a SIL store.  [assign] must destroy the existing value first;
/// every other qualifier initializes uninitialized memory.
void IRGenSILFunction::visitStoreInst(swift::StoreInst *i) {
  SILType objType = i->getSrc()->getType().getObjectType();
  const auto &ti = cast<LoadableTypeInfo>(getTypeInfo(objType));
  Explosion src = getLoweredExplosion(i->getSrc());
  Address destAddr = getLoweredAddress(i->getDest());
  if (i->getOwnershipQualifier() == StoreOwnershipQualifier::Assign)
    ti.assign(*this, src, destAddr);
  else
    ti.initialize(*this, src, destAddr);
}
/// Emit the artificial error result argument.
///
/// Makes a shadow copy of the error-result slot and declares it to the
/// debugger as an artificial, indirect variable.
void IRGenSILFunction::emitErrorResultVar(SILResultInfo ErrorInfo,
                                          DebugValueInst *DbgValue) {
  // We don't need a shadow error variable for debugging on ABI's that return
  // swifterror in a register.
  if (IGM.IsSwiftErrorInRegister)
    return;
  auto ErrorResultSlot = getErrorResultSlot(IGM.silConv.getSILType(ErrorInfo));
  SILDebugVariable Var = DbgValue->getVarInfo();
  auto Storage = emitShadowCopy(ErrorResultSlot.getAddress(), getDebugScope(),
                                Var.Name, Var.ArgNo);
  DebugTypeInfo DTI(nullptr, nullptr, ErrorInfo.getType(),
                    ErrorResultSlot->getType(), IGM.getPointerSize(),
                    IGM.getPointerAlignment());
  IGM.DebugInfo->emitVariableDeclaration(Builder, Storage, DTI, getDebugScope(),
                                         nullptr, Var.Name, Var.ArgNo,
                                         IndirectValue, ArtificialValue);
}
/// Lower debug_value: emit debug info for an SSA value, via a shadow stack
/// slot at -Onone.  A SILUndef operand only matters for the artificial
/// "$error" variable.
void IRGenSILFunction::visitDebugValueInst(DebugValueInst *i) {
  if (!IGM.DebugInfo)
    return;
  auto SILVal = i->getOperand();
  if (isa<SILUndef>(SILVal)) {
    // We cannot track the location of inlined error arguments because it has no
    // representation in SIL.
    if (!i->getDebugScope()->InlinedCallSite &&
        i->getVarInfo().Name == "$error") {
      auto funcTy = CurSILFn->getLoweredFunctionType();
      emitErrorResultVar(funcTy->getErrorResult(), i);
    }
    return;
  }
  StringRef Name = getVarName(i);
  DebugTypeInfo DbgTy;
  SILType SILTy = SILVal->getType();
  auto RealTy = SILVal->getType().getSwiftRValueType();
  if (VarDecl *Decl = i->getDecl()) {
    // Ordinary local variable: derive the debug type from its declaration.
    DbgTy = DebugTypeInfo::getLocalVariable(
        CurSILFn->getDeclContext(), CurSILFn->getGenericEnvironment(), Decl,
        RealTy, getTypeInfo(SILVal->getType()), /*Unwrap=*/false);
  } else if (i->getFunction()->isBare() &&
             !SILTy.hasArchetype() && !Name.empty()) {
    // Preliminary support for .sil debug information.
    DbgTy = DebugTypeInfo::getFromTypeInfo(CurSILFn->getDeclContext(),
                                           CurSILFn->getGenericEnvironment(),
                                           RealTy, getTypeInfo(SILTy));
  } else
    return;
  // Put the value into a stack slot at -Onone.
  llvm::SmallVector<llvm::Value *, 8> Copy;
  Explosion e = getLoweredExplosion(SILVal);
  unsigned ArgNo = i->getVarInfo().ArgNo;
  emitShadowCopy(e.claimAll(), i->getDebugScope(), Name, ArgNo, Copy);
  emitDebugVariableDeclaration(Copy, DbgTy, SILTy, i->getDebugScope(),
                               i->getDecl(), Name, ArgNo);
}
/// Lower debug_value_addr: emit debug info for a variable that lives at the
/// given address.  Only declared variables (with a VarDecl) are described.
void IRGenSILFunction::visitDebugValueAddrInst(DebugValueAddrInst *i) {
  if (!IGM.DebugInfo)
    return;
  VarDecl *Decl = i->getDecl();
  if (!Decl)
    return;
  auto SILVal = i->getOperand();
  if (isa<SILUndef>(SILVal))
    return;
  StringRef Name = getVarName(i);
  auto Addr = getLoweredAddress(SILVal).getAddress();
  SILType SILTy = SILVal->getType();
  auto RealType = SILTy.getSwiftRValueType();
  // Address-typed values are described as inout for debug-info purposes.
  if (SILTy.isAddress())
    RealType = CanInOutType::get(RealType);
  // Unwrap implicitly indirect types and types that are passed by
  // reference only at the SIL level and below.
  //
  // FIXME: Should this check if the lowered SILType is address only
  // instead? Otherwise optionals of archetypes etc will still have
  // 'Unwrap' set to false.
  bool Unwrap =
      i->getVarInfo().Constant ||
      SILTy.is<ArchetypeType>();
  auto DbgTy = DebugTypeInfo::getLocalVariable(
      CurSILFn->getDeclContext(), CurSILFn->getGenericEnvironment(), Decl,
      RealType, getTypeInfo(SILVal->getType()), Unwrap);
  // Put the value's address into a stack slot at -Onone and emit a debug
  // intrinsic.
  unsigned ArgNo = i->getVarInfo().ArgNo;
  emitDebugVariableDeclaration(
      emitShadowCopy(Addr, i->getDebugScope(), Name, ArgNo), DbgTy,
      i->getType(), i->getDebugScope(), Decl, Name, ArgNo,
      DbgTy.isImplicitlyIndirect() ? DirectValue : IndirectValue);
}
/// Lower load_weak: load a strong reference out of a @weak slot, taking the
/// slot's ownership when [take] is set.
void IRGenSILFunction::visitLoadWeakInst(swift::LoadWeakInst *i) {
  auto &weakTI = cast<WeakTypeInfo>(getTypeInfo(i->getOperand()->getType()));
  Address src = getLoweredAddress(i->getOperand());
  Explosion loaded;
  if (i->isTake())
    weakTI.weakTakeStrong(*this, src, loaded);
  else
    weakTI.weakLoadStrong(*this, src, loaded);
  setLoweredExplosion(i, loaded);
}
/// Lower store_weak: store a strong reference into a @weak slot,
/// initializing or reassigning the destination.
void IRGenSILFunction::visitStoreWeakInst(swift::StoreWeakInst *i) {
  auto &weakTI = cast<WeakTypeInfo>(getTypeInfo(i->getDest()->getType()));
  Explosion src = getLoweredExplosion(i->getSrc());
  Address destAddr = getLoweredAddress(i->getDest());
  if (i->isInitializationOfDest())
    weakTI.weakInit(*this, src, destAddr);
  else
    weakTI.weakAssign(*this, src, destAddr);
}
/// Lower fix_lifetime, which keeps a value alive up to this point.
void IRGenSILFunction::visitFixLifetimeInst(swift::FixLifetimeInst *i) {
  SILType operandTy = i->getOperand()->getType();
  if (operandTy.isAddress()) {
    // Just pass in the address to fix lifetime if we have one. We will not do
    // anything to it so nothing bad should happen.
    emitFixLifetime(getLoweredAddress(i->getOperand()).getAddress());
    return;
  }
  // Loadable value: let the type info fix each component's lifetime.
  Explosion operand = getLoweredExplosion(i->getOperand());
  cast<LoadableTypeInfo>(getTypeInfo(operandTy)).fixLifetime(*this, operand);
}
/// Lower mark_dependence.  The dependency only constrains SIL-level
/// optimization; at the IR level we simply forward the value operand.
void IRGenSILFunction::visitMarkDependenceInst(swift::MarkDependenceInst *i) {
  SILValue forwarded = i->getValue();
  if (forwarded->getType().isAddress()) {
    setLoweredAddress(i, getLoweredAddress(forwarded));
    return;
  }
  Explosion values = getLoweredExplosion(forwarded);
  setLoweredExplosion(i, values);
}
/// Lower copy_block: emit the block-copy runtime call on the operand.
void IRGenSILFunction::visitCopyBlockInst(CopyBlockInst *i) {
  Explosion operand = getLoweredExplosion(i->getOperand());
  Explosion copied;
  copied.add(emitBlockCopyCall(operand.claimNext()));
  setLoweredExplosion(i, copied);
}
/// Lower strong_pin: attempt to pin the object and yield the pin handle.
void IRGenSILFunction::visitStrongPinInst(swift::StrongPinInst *i) {
  auto atomicity = i->isAtomic() ? irgen::Atomicity::Atomic
                                 : irgen::Atomicity::NonAtomic;
  Explosion operand = getLoweredExplosion(i->getOperand());
  llvm::Value *handle = emitNativeTryPin(operand.claimNext(), atomicity);
  Explosion out;
  out.add(handle);
  setLoweredExplosion(i, out);
}
/// Lower strong_unpin: release the pin handle produced by strong_pin.
void IRGenSILFunction::visitStrongUnpinInst(swift::StrongUnpinInst *i) {
  auto atomicity = i->isAtomic() ? irgen::Atomicity::Atomic
                                 : irgen::Atomicity::NonAtomic;
  Explosion operand = getLoweredExplosion(i->getOperand());
  emitNativeUnpin(operand.claimNext(), atomicity);
}
/// Lower strong_retain via the operand's reference type info.
void IRGenSILFunction::visitStrongRetainInst(swift::StrongRetainInst *i) {
  auto atomicity = i->isAtomic() ? irgen::Atomicity::Atomic
                                 : irgen::Atomicity::NonAtomic;
  auto &refTI =
      cast<ReferenceTypeInfo>(getTypeInfo(i->getOperand()->getType()));
  Explosion operand = getLoweredExplosion(i->getOperand());
  refTI.strongRetain(*this, operand, atomicity);
}
/// Lower strong_release via the operand's reference type info.
void IRGenSILFunction::visitStrongReleaseInst(swift::StrongReleaseInst *i) {
  auto atomicity = i->isAtomic() ? irgen::Atomicity::Atomic
                                 : irgen::Atomicity::NonAtomic;
  auto &refTI =
      cast<ReferenceTypeInfo>(getTypeInfo(i->getOperand()->getType()));
  Explosion operand = getLoweredExplosion(i->getOperand());
  refTI.strongRelease(*this, operand, atomicity);
}
/// Given a SILType which is a ReferenceStorageType, return the type
/// info for the underlying reference type.
static const ReferenceTypeInfo &getReferentTypeInfo(IRGenFunction &IGF,
                                                    SILType silType) {
  auto referentTy = silType.castTo<ReferenceStorageType>().getReferentType();
  return cast<ReferenceTypeInfo>(IGF.getTypeInfoForLowered(referentTy));
}
/// Lower strong_retain_unowned via the referent's type info.
void IRGenSILFunction::
visitStrongRetainUnownedInst(swift::StrongRetainUnownedInst *i) {
  auto atomicity = i->isAtomic() ? irgen::Atomicity::Atomic
                                 : irgen::Atomicity::NonAtomic;
  auto &ti = getReferentTypeInfo(*this, i->getOperand()->getType());
  Explosion operand = getLoweredExplosion(i->getOperand());
  ti.strongRetainUnowned(*this, operand, atomicity);
}
/// Lower unowned_retain via the referent's type info.
void IRGenSILFunction::visitUnownedRetainInst(swift::UnownedRetainInst *i) {
  auto atomicity = i->isAtomic() ? irgen::Atomicity::Atomic
                                 : irgen::Atomicity::NonAtomic;
  auto &ti = getReferentTypeInfo(*this, i->getOperand()->getType());
  Explosion operand = getLoweredExplosion(i->getOperand());
  ti.unownedRetain(*this, operand, atomicity);
}
/// Lower unowned_release via the referent's type info.
void IRGenSILFunction::visitUnownedReleaseInst(swift::UnownedReleaseInst *i) {
  auto atomicity = i->isAtomic() ? irgen::Atomicity::Atomic
                                 : irgen::Atomicity::NonAtomic;
  auto &ti = getReferentTypeInfo(*this, i->getOperand()->getType());
  Explosion operand = getLoweredExplosion(i->getOperand());
  ti.unownedRelease(*this, operand, atomicity);
}
/// Lower load_unowned: load a strong reference out of an @unowned slot,
/// taking the slot's ownership when [take] is set.
void IRGenSILFunction::visitLoadUnownedInst(swift::LoadUnownedInst *i) {
  auto &ti = getReferentTypeInfo(*this, i->getOperand()->getType());
  Address src = getLoweredAddress(i->getOperand());
  Explosion loaded;
  if (i->isTake())
    ti.unownedTakeStrong(*this, src, loaded);
  else
    ti.unownedLoadStrong(*this, src, loaded);
  setLoweredExplosion(i, loaded);
}
/// Lower store_unowned: store a strong reference into an @unowned slot,
/// initializing or reassigning the destination.
void IRGenSILFunction::visitStoreUnownedInst(swift::StoreUnownedInst *i) {
  auto &ti = getReferentTypeInfo(*this, i->getDest()->getType());
  Explosion src = getLoweredExplosion(i->getSrc());
  Address destAddr = getLoweredAddress(i->getDest());
  if (i->isInitializationOfDest())
    ti.unownedInit(*this, src, destAddr);
  else
    ti.unownedAssign(*this, src, destAddr);
}
/// Return true if \p silType (looking through one level of optionality) is a
/// type with reference semantics: a class, class existential, or builtin
/// object/bridge/unknown-object type.
static bool hasReferenceSemantics(IRGenSILFunction &IGF,
                                  SILType silType) {
  auto rvalueTy = silType.getSwiftRValueType();
  auto wrappedTy = rvalueTy->getAnyOptionalObjectType();
  auto ty = wrappedTy ? wrappedTy : rvalueTy;
  if (ty->mayHaveSuperclass() || ty->isClassExistentialType())
    return true;
  return ty->is<BuiltinNativeObjectType>()
      || ty->is<BuiltinBridgeObjectType>()
      || ty->is<BuiltinUnknownObjectType>();
}
/// Emit a uniqueness check on \p operand (optionally also accepting pinned
/// objects when \p checkPinned is set).  Operands without reference
/// semantics trap and yield an undef i1.
static llvm::Value *emitIsUnique(IRGenSILFunction &IGF, SILValue operand,
                                 SourceLoc loc, bool checkPinned) {
  if (!hasReferenceSemantics(IGF, operand->getType())) {
    // Not a refcounted type: emit a trap and produce an arbitrary result.
    llvm::Function *trapIntrinsic = llvm::Intrinsic::getDeclaration(
        &IGF.IGM.Module, llvm::Intrinsic::ID::trap);
    IGF.Builder.CreateCall(trapIntrinsic, {});
    return llvm::UndefValue::get(IGF.IGM.Int1Ty);
  }
  auto &operTI = cast<LoadableTypeInfo>(IGF.getTypeInfo(operand->getType()));
  LoadedRef ref =
    operTI.loadRefcountedPtr(IGF, loc, IGF.getLoweredAddress(operand));
  return
    IGF.emitIsUniqueCall(ref.getValue(), loc, ref.isNonNull(), checkPinned);
}
/// Lower is_unique: a plain (non-pinned) uniqueness check.
void IRGenSILFunction::visitIsUniqueInst(swift::IsUniqueInst *i) {
  Explosion out;
  out.add(emitIsUnique(*this, i->getOperand(), i->getLoc().getSourceLoc(),
                       /*checkPinned*/ false));
  setLoweredExplosion(i, out);
}
/// Lower is_unique_or_pinned: a uniqueness check that also accepts pinned
/// objects.
void IRGenSILFunction::
visitIsUniqueOrPinnedInst(swift::IsUniqueOrPinnedInst *i) {
  Explosion out;
  out.add(emitIsUnique(*this, i->getOperand(), i->getLoc().getSourceLoc(),
                       /*checkPinned*/ true));
  setLoweredExplosion(i, out);
}
/// Try to defer allocating a (non-fixed-size) value inside a fixed-size
/// buffer until the copy_addr [init] that initializes it.  Returns true if
/// the initialization was deferred; the alloc is then recorded as an
/// unallocated address within \p fixedSizeBuffer.
static bool tryDeferFixedSizeBufferInitialization(IRGenSILFunction &IGF,
                                                  SILInstruction *allocInst,
                                                  const TypeInfo &ti,
                                                  Address fixedSizeBuffer,
                                                  const llvm::Twine &name) {
  // There's no point in doing this for fixed-sized types, since we'll allocate
  // an appropriately-sized buffer for them statically.
  if (ti.isFixedSize())
    return false;
  // TODO: More interesting dominance analysis could be done here to see
  // if the alloc_stack is dominated by copy_addrs into it on all paths.
  // For now, check only that the copy_addr is the first use within the same
  // block.
  for (auto ii = std::next(allocInst->getIterator()),
            ie = std::prev(allocInst->getParent()->end());
       ii != ie; ++ii) {
    auto *inst = &*ii;
    // Does this instruction use the allocation? If not, continue.
    auto Ops = inst->getAllOperands();
    if (std::none_of(Ops.begin(), Ops.end(),
                     [allocInst](const Operand &Op) {
                       return Op.get() == allocInst;
                     }))
      continue;
    // Is this a copy?
    auto *copy = dyn_cast<swift::CopyAddrInst>(inst);
    if (!copy)
      return false;
    // Destination must be the allocation.
    if (copy->getDest() != SILValue(allocInst))
      return false;
    // Copy must be an initialization.
    if (!copy->isInitializationOfDest())
      return false;
    // We can defer to this initialization. Allocate the fixed-size buffer
    // now, but don't allocate the value inside it.
    if (!fixedSizeBuffer.getAddress()) {
      fixedSizeBuffer = IGF.createFixedSizeBufferAlloca(name);
      IGF.Builder.CreateLifetimeStart(fixedSizeBuffer,
                                      getFixedBufferSize(IGF.IGM));
    }
    IGF.setContainerOfUnallocatedAddress(allocInst, fixedSizeBuffer);
    return true;
  }
  return false;
}
/// Emit debug info for the variable (if any) declared by an alloc_stack.
/// Compiler-generated patterns are skipped, except optional bindings.
void IRGenSILFunction::emitDebugInfoForAllocStack(AllocStackInst *i,
                                                  const TypeInfo &type,
                                                  llvm::Value *addr) {
  VarDecl *Decl = i->getDecl();
  if (IGM.DebugInfo && Decl) {
    // Ignore compiler-generated patterns but not optional bindings.
    if (auto *Pattern = Decl->getParentPattern())
      if (Pattern->isImplicit() &&
          Pattern->getKind() != PatternKind::OptionalSome)
        return;
    SILType SILTy = i->getType();
    auto RealType = SILTy.getSwiftRValueType();
    auto DbgTy = DebugTypeInfo::getLocalVariable(
        CurSILFn->getDeclContext(), CurSILFn->getGenericEnvironment(), Decl,
        RealType, type, false);
    StringRef Name = getVarName(i);
    if (auto DS = i->getDebugScope())
      emitDebugVariableDeclaration(addr, DbgTy, SILTy, DS, Decl, Name,
                                   i->getVarInfo().ArgNo);
  }
}
/// Lower alloc_stack by asking the element's type info for stack storage.
/// Whether the alloc sits in the entry block is passed along so the
/// TypeInfo can special-case entry-block allocations.
void IRGenSILFunction::visitAllocStackInst(swift::AllocStackInst *i) {
  const TypeInfo &type = getTypeInfo(i->getElementType());
  // Derive name from SIL location.
  VarDecl *Decl = i->getDecl();
  StringRef dbgname;
# ifndef NDEBUG
  // If this is a DEBUG build, use pretty names for the LLVM IR.
  dbgname = getVarName(i);
# endif
  (void) Decl;
  bool isEntryBlock =
      i->getParentBlock() == i->getFunction()->getEntryBlock();
  auto addr =
      type.allocateStack(*this, i->getElementType(), isEntryBlock, dbgname);
  emitDebugInfoForAllocStack(i, type, addr.getAddress().getAddress());
  setLoweredStackAddress(i, addr);
}
/// Pair each of an allocation's tail-allocated element types with its
/// lowered element count, for use by the class-allocation emitters.
static void
buildTailArrays(IRGenSILFunction &IGF,
                SmallVectorImpl<std::pair<SILType, llvm::Value *>> &TailArrays,
                AllocRefInstBase *ARI) {
  auto types = ARI->getTailAllocatedTypes();
  auto counts = ARI->getTailAllocatedCounts();
  for (unsigned idx = 0, numTypes = types.size(); idx != numTypes; ++idx) {
    Explosion count = IGF.getLoweredExplosion(counts[idx].get());
    TailArrays.push_back({types[idx], count.claimNext()});
  }
}
/// Lower alloc_ref.  When the instruction permits it and the remaining
/// stack-promotion budget allows, the object may be placed on the stack;
/// a non-negative StackAllocSize after emitClassAllocation indicates this
/// happened (NOTE(review): emitClassAllocation appears to update the
/// passed-in size — confirm against its declaration).
void IRGenSILFunction::visitAllocRefInst(swift::AllocRefInst *i) {
  int StackAllocSize = -1;
  if (i->canAllocOnStack()) {
    estimateStackSize();
    // Is there enough space for stack allocation?
    StackAllocSize = IGM.IRGen.Opts.StackPromotionSizeLimit - EstimatedStackSize;
  }
  SmallVector<std::pair<SILType, llvm::Value *>, 4> TailArrays;
  buildTailArrays(*this, TailArrays, i);
  llvm::Value *alloced = emitClassAllocation(*this, i->getType(), i->isObjC(),
                                             StackAllocSize, TailArrays);
  if (StackAllocSize >= 0) {
    // Remember that this alloc_ref allocates the object on the stack.
    StackAllocs.insert(i);
    EstimatedStackSize += StackAllocSize;
  }
  Explosion e;
  e.add(alloced);
  setLoweredExplosion(i, e);
}
/// Lower alloc_ref_dynamic: allocate a class instance using run-time
/// metadata supplied by the metatype operand.
void IRGenSILFunction::visitAllocRefDynamicInst(swift::AllocRefDynamicInst *i) {
  SmallVector<std::pair<SILType, llvm::Value *>, 4> tailArrays;
  buildTailArrays(*this, tailArrays, i);
  Explosion metadata = getLoweredExplosion(i->getMetatypeOperand());
  llvm::Value *metadataValue = metadata.claimNext();
  Explosion out;
  out.add(emitClassAllocationDynamic(*this, metadataValue, i->getType(),
                                     i->isObjC(), tailArrays));
  setLoweredExplosion(i, out);
}
/// Lower dealloc_stack: let the type info undo its allocateStack.
void IRGenSILFunction::visitDeallocStackInst(swift::DeallocStackInst *i) {
  auto allocType = i->getOperand()->getType();
  StackAddress stackAddr = getLoweredStackAddress(i->getOperand());
  getTypeInfo(allocType).deallocateStack(*this, stackAddr, allocType);
}
/// Lower dealloc_ref.  A plain dealloc_ref of a stack-promoted object is
/// dropped; a dealloc_ref [stack] ends the lifetime of the stack storage
/// (or verifies it when stack-promotion checks are enabled).
void IRGenSILFunction::visitDeallocRefInst(swift::DeallocRefInst *i) {
  // Lower the operand.
  Explosion self = getLoweredExplosion(i->getOperand());
  auto selfValue = self.claimNext();
  auto *ARI = dyn_cast<AllocRefInst>(i->getOperand());
  if (!i->canAllocOnStack()) {
    if (ARI && StackAllocs.count(ARI)) {
      // We can ignore dealloc_refs (without [stack]) for stack allocated
      // objects.
      //
      //   %0 = alloc_ref [stack]
      //     ...
      //   dealloc_ref %0     // not needed (stems from the inlined deallocator)
      //     ...
      //   dealloc_ref [stack] %0
      return;
    }
    auto classType = i->getOperand()->getType();
    emitClassDeallocation(*this, classType, selfValue);
    return;
  }
  // It's a dealloc_ref [stack]. Even if the alloc_ref did not allocate the
  // object on the stack, we don't have to deallocate it, because it is
  // deallocated in the final release.
  assert(ARI->canAllocOnStack());
  if (StackAllocs.count(ARI)) {
    if (IGM.IRGen.Opts.EmitStackPromotionChecks) {
      // Insert a runtime check that the object really reached end-of-life.
      selfValue = Builder.CreateBitCast(selfValue, IGM.RefCountedPtrTy);
      emitVerifyEndOfLifetimeCall(selfValue);
    } else {
      // This has two purposes:
      // 1. Tell LLVM the lifetime of the allocated stack memory.
      // 2. Avoid tail-call optimization which may convert the call to the final
      //    release to a jump, which is done after the stack frame is
      //    destructed.
      Builder.CreateLifetimeEnd(selfValue);
    }
  }
}
/// Lower dealloc_partial_ref: deallocate a partially-initialized class
/// instance, passing along the supplied metadata.
void IRGenSILFunction::visitDeallocPartialRefInst(swift::DeallocPartialRefInst *i) {
  Explosion instance = getLoweredExplosion(i->getInstance());
  llvm::Value *selfValue = instance.claimNext();
  Explosion metatype = getLoweredExplosion(i->getMetatype());
  llvm::Value *metadataValue = metatype.claimNext();
  emitPartialClassDeallocation(*this, i->getInstance()->getType(), selfValue,
                               metadataValue);
}
/// Lower dealloc_box: free the box's heap allocation.
void IRGenSILFunction::visitDeallocBoxInst(swift::DeallocBoxInst *i) {
  Explosion operand = getLoweredExplosion(i->getOperand());
  llvm::Value *boxOwner = operand.claimNext();
  emitDeallocateBox(*this, boxOwner,
                    i->getOperand()->getType().castTo<SILBoxType>());
}
/// Lower alloc_box (currently restricted to single-field boxes): allocate
/// the box and, when debug info is enabled and a declaration is attached,
/// describe the boxed variable to the debugger.
void IRGenSILFunction::visitAllocBoxInst(swift::AllocBoxInst *i) {
  assert(i->getBoxType()->getLayout()->getFields().size() == 1
         && "multi field boxes not implemented yet");
  const TypeInfo &type = getTypeInfo(i->getBoxType()
                                      ->getFieldType(IGM.getSILModule(), 0));
  // Derive name from SIL location.
  VarDecl *Decl = i->getDecl();
  StringRef Name = getVarName(i);
  StringRef DbgName =
# ifndef NDEBUG
    // If this is a DEBUG build, use pretty names for the LLVM IR.
    Name;
# else
    "";
# endif
  auto boxTy = i->getType().castTo<SILBoxType>();
  OwnedAddress boxWithAddr = emitAllocateBox(*this, boxTy,
                                             CurSILFn->getGenericEnvironment(),
                                             DbgName);
  setLoweredBox(i, boxWithAddr);
  if (IGM.DebugInfo && Decl) {
    // FIXME: This is a workaround to not produce local variables for
    // capture list arguments like "[weak self]". The better solution
    // would be to require all variables to be described with a
    // SILDebugValue(Addr) and then not describe capture list
    // arguments.
    if (Name == IGM.Context.Id_self.str())
      return;
    assert(i->getBoxType()->getLayout()->getFields().size() == 1
           && "box for a local variable should only have one field");
    auto SILTy = i->getBoxType()->getFieldType(IGM.getSILModule(), 0);
    auto RealType = SILTy.getSwiftRValueType();
    // Address-typed fields are described as inout for debug-info purposes.
    if (SILTy.isAddress())
      RealType = CanInOutType::get(RealType);
    auto DbgTy = DebugTypeInfo::getLocalVariable(
        CurSILFn->getDeclContext(), CurSILFn->getGenericEnvironment(), Decl,
        RealType, type, /*Unwrap=*/false);
    if (isInlinedGeneric(Decl, i->getDebugScope()))
      return;
    IGM.DebugInfo->emitVariableDeclaration(
        Builder,
        emitShadowCopy(boxWithAddr.getAddress(), i->getDebugScope(), Name, 0),
        DbgTy, i->getDebugScope(), Decl, Name, 0,
        DbgTy.isImplicitlyIndirect() ? DirectValue : IndirectValue);
  }
}
/// Lower project_box to the address of the box's contained value.
void IRGenSILFunction::visitProjectBoxInst(swift::ProjectBoxInst *i) {
  auto boxTy = i->getOperand()->getType().castTo<SILBoxType>();
  const LoweredValue &val = getLoweredValue(i->getOperand());
  if (val.isBoxWithAddress()) {
    // The operand is an alloc_box. We can directly reuse the address.
    setLoweredAddress(i, val.getAddressOfBox());
  } else {
    // The slow-path: we have to emit code to get from the box to its
    // value address.
    Explosion box = val.getExplosion(*this);
    auto addr = emitProjectBox(*this, box.claimNext(), boxTy);
    setLoweredAddress(i, addr);
  }
}
/// Emit whatever runtime work is needed to begin a formal access.
/// Static and unsafe enforcement require no code; dynamic enforcement is
/// not implemented yet (falls through as a no-op — see TODO).
static void emitBeginAccess(IRGenSILFunction &IGF, BeginAccessInst *access,
                            Address addr) {
  switch (access->getEnforcement()) {
  case SILAccessEnforcement::Unknown:
    llvm_unreachable("unknown access enforcement in IRGen!");
  case SILAccessEnforcement::Static:
  case SILAccessEnforcement::Unsafe:
    // nothing to do
    return;
  case SILAccessEnforcement::Dynamic:
    // TODO
    return;
  }
  llvm_unreachable("bad access enforcement");
}

/// Emit whatever runtime work is needed to end a formal access.
/// Mirrors emitBeginAccess: only dynamic enforcement would emit code, and
/// that is still a TODO.
static void emitEndAccess(IRGenSILFunction &IGF, BeginAccessInst *access) {
  switch (access->getEnforcement()) {
  case SILAccessEnforcement::Unknown:
    llvm_unreachable("unknown access enforcement in IRGen!");
  case SILAccessEnforcement::Static:
  case SILAccessEnforcement::Unsafe:
    // nothing to do
    return;
  case SILAccessEnforcement::Dynamic:
    // TODO
    return;
  }
  llvm_unreachable("bad access enforcement");
}

/// begin_access: emit enforcement (if any) and forward the operand address
/// as the result of the instruction.
void IRGenSILFunction::visitBeginAccessInst(BeginAccessInst *i) {
  Address addr = getLoweredAddress(i->getOperand());
  emitBeginAccess(*this, i, addr);
  setLoweredAddress(i, addr);
}

/// end_access: emit the matching end-of-access work for the paired
/// begin_access.
void IRGenSILFunction::visitEndAccessInst(EndAccessInst *i) {
  emitEndAccess(*this, i->getBeginAccess());
}
/// convert_function is purely a type-system operation: the IR-level
/// representation does not change, so forward the operand's lowered value.
void IRGenSILFunction::visitConvertFunctionInst(swift::ConvertFunctionInst *i) {
  Explosion operandValue = getLoweredExplosion(i->getOperand());
  setLoweredExplosion(i, operandValue);
}
/// thin_function_to_pointer: a thin function value is a bare function
/// pointer, so just reinterpret it as an i8*.
void IRGenSILFunction::visitThinFunctionToPointerInst(
                                          swift::ThinFunctionToPointerInst *i) {
  Explosion operand = getLoweredExplosion(i->getOperand());
  Explosion result;
  result.add(Builder.CreateBitCast(operand.claimNext(), IGM.Int8PtrTy));
  setLoweredExplosion(i, result);
}
/// pointer_to_thin_function: reinterpret a raw i8* as a bare function
/// pointer value.
void IRGenSILFunction::visitPointerToThinFunctionInst(
                                          swift::PointerToThinFunctionInst *i) {
  Explosion operand = getLoweredExplosion(i->getOperand());
  Explosion result;
  result.add(Builder.CreateBitCast(operand.claimNext(), IGM.FunctionPtrTy));
  setLoweredExplosion(i, result);
}
/// address_to_pointer: expose a lowered address as a Builtin.RawPointer
/// (i8*), inserting a bitcast only when the pointee type differs.
void IRGenSILFunction::visitAddressToPointerInst(swift::AddressToPointerInst *i)
{
  llvm::Value *rawPtr = getLoweredAddress(i->getOperand()).getAddress();
  if (rawPtr->getType() != IGM.Int8PtrTy)
    rawPtr = Builder.CreateBitCast(rawPtr, IGM.Int8PtrTy);
  Explosion result;
  result.add(rawPtr);
  setLoweredExplosion(i, result);
}
// Ignores the isStrict flag because Swift TBAA is not lowered into LLVM IR.
/// pointer_to_address: reinterpret a Builtin.RawPointer as a typed address
/// of the result type's storage.
void IRGenSILFunction::visitPointerToAddressInst(swift::PointerToAddressInst *i)
{
  Explosion operand = getLoweredExplosion(i->getOperand());
  llvm::Value *rawPtr = operand.claimNext();
  auto &addrTI = getTypeInfo(i->getType());
  rawPtr = Builder.CreateBitCast(rawPtr,
                                 addrTI.getStorageType()->getPointerTo());
  setLoweredAddress(i, addrTI.getAddressForPointer(rawPtr));
}
/// Shared helper for scalar pointer-reinterpret casts (unchecked_ref_cast,
/// ref_to_raw_pointer, raw_pointer_to_ref): take the leading scalar of the
/// source explosion, cast it to the destination's single-scalar schema type,
/// and bind it as the lowered value of `dest`.
static void emitPointerCastInst(IRGenSILFunction &IGF,
                                SILValue src,
                                SILValue dest,
                                const TypeInfo &ti) {
  Explosion from = IGF.getLoweredExplosion(src);
  llvm::Value *ptrValue = from.claimNext();
  // The input may have witness tables or other additional data, but the class
  // reference is always first.
  (void)from.claimAll();
  auto schema = ti.getSchema();
  assert(schema.size() == 1
         && schema[0].isScalar()
         && "pointer schema is not a single scalar?!");
  auto castToType = schema[0].getScalarType();
  // A retainable pointer representation may be wrapped in an optional, so we
  // need to provide inttoptr/ptrtoint in addition to bitcast.
  ptrValue = IGF.Builder.CreateBitOrPointerCast(ptrValue, castToType);
  Explosion to;
  to.add(ptrValue);
  IGF.setLoweredExplosion(dest, to);
}

/// unchecked_ref_cast: reinterpret one reference type as another without a
/// runtime check.
void IRGenSILFunction::visitUncheckedRefCastInst(
                                             swift::UncheckedRefCastInst *i) {
  auto &ti = getTypeInfo(i->getType());
  emitPointerCastInst(*this, i->getOperand(), i, ti);
}
// TODO: Although runtime checks are not required, we get them anyway when
// asking the runtime to perform this cast. If this is a performance impact, we
// can add a CheckedCastMode::Unchecked.
/// unchecked_ref_cast_addr: address-to-address reference cast, currently
/// emitted via the unconditional checked-cast runtime path (see TODO above).
void IRGenSILFunction::
visitUncheckedRefCastAddrInst(swift::UncheckedRefCastAddrInst *i) {
  Address dest = getLoweredAddress(i->getDest());
  Address src = getLoweredAddress(i->getSrc());
  emitCheckedCast(*this, src, i->getSourceType(), dest, i->getTargetType(),
                  i->getConsumptionKind(), CheckedCastMode::Unconditional);
}

/// unchecked_addr_cast: reinterpret an address as an address of a different
/// type by bitcasting the pointer; no runtime work.
void IRGenSILFunction::visitUncheckedAddrCastInst(
                                             swift::UncheckedAddrCastInst *i) {
  auto addr = getLoweredAddress(i->getOperand());
  auto &ti = getTypeInfo(i->getType());
  auto result = Builder.CreateBitCast(addr,ti.getStorageType()->getPointerTo());
  setLoweredAddress(i, result);
}
/// Whether two LLVM types can be treated as interchangeable for a bitwise
/// value transfer: either the same type, or two struct types with identical
/// layout.
static bool isStructurallySame(const llvm::Type *T1, const llvm::Type *T2) {
  if (T1 == T2)
    return true;
  auto *S1 = dyn_cast<llvm::StructType>(T1);
  auto *S2 = dyn_cast<llvm::StructType>(T2);
  if (S1 && S2)
    return S1->isLayoutIdentical(const_cast<llvm::StructType *>(S2));
  return false;
}
// Emit a trap in the event a type does not match expected layout constraints.
//
// We can hit this case in specialized functions even for correct user code.
// If the user dynamically checks for correct type sizes in the generic
// function, a specialized function can contain the (not executed) bitcast
// with mismatching fixed sizes.
// Usually llvm can eliminate this code again because the user's safety
// check should be constant foldable on llvm level.
static void emitTrapAndUndefValue(IRGenSILFunction &IGF,
                                  Explosion &in,
                                  Explosion &out,
                                  const LoadableTypeInfo &outTI) {
  // Branch into a fresh block that traps; the trap block is also recorded in
  // FailBBs.
  llvm::BasicBlock *failBB =
    llvm::BasicBlock::Create(IGF.IGM.getLLVMContext());
  IGF.Builder.CreateBr(failBB);
  IGF.FailBBs.push_back(failBB);
  IGF.Builder.emitBlock(failBB);
  llvm::Function *trapIntrinsic = llvm::Intrinsic::getDeclaration(
      &IGF.IGM.Module, llvm::Intrinsic::ID::trap);
  IGF.Builder.CreateCall(trapIntrinsic, {});
  IGF.Builder.CreateUnreachable();
  // Continue emission in an (unreachable) continuation block so callers can
  // keep producing IR; hand back undef of the expected schema.
  llvm::BasicBlock *contBB = llvm::BasicBlock::Create(IGF.IGM.getLLVMContext());
  IGF.Builder.emitBlock(contBB);
  (void)in.claimAll();
  for (auto schema : outTI.getSchema())
    out.add(llvm::UndefValue::get(schema.getScalarType()));
}
/// Bit-cast an exploded value from one loadable type to another.
/// Fast path: if both are bitwise-takable and structurally identical at the
/// LLVM level, forward the explosion elements directly. Otherwise, round-trip
/// through a stack slot: store as the source type, reload as the destination
/// type.
static void emitUncheckedValueBitCast(IRGenSILFunction &IGF,
                                      SourceLoc loc,
                                      Explosion &in,
                                      const LoadableTypeInfo &inTI,
                                      Explosion &out,
                                      const LoadableTypeInfo &outTI) {
  // If the transfer is doable bitwise, and if the elements of the explosion are
  // the same type, then just transfer the elements.
  if (inTI.isBitwiseTakable(ResilienceExpansion::Maximal) &&
      outTI.isBitwiseTakable(ResilienceExpansion::Maximal) &&
      isStructurallySame(inTI.getStorageType(), outTI.getStorageType())) {
    in.transferInto(out, in.size());
    return;
  }
  // TODO: We could do bitcasts entirely in the value domain in some cases, but
  // for simplicity, let's just always go through the stack for now.
  // Create the allocation.
  // The slot is aligned for whichever of the two types is stricter.
  auto inStorage = IGF.createAlloca(inTI.getStorageType(),
                                  std::max(inTI.getFixedAlignment(),
                                           outTI.getFixedAlignment()),
                                  "bitcast");
  auto maxSize = std::max(inTI.getFixedSize(), outTI.getFixedSize());
  IGF.Builder.CreateLifetimeStart(inStorage, maxSize);
  // Store the 'in' value.
  inTI.initialize(IGF, in, inStorage);
  // Load the 'out' value as the destination type.
  auto outStorage = IGF.Builder.CreateBitCast(inStorage,
                                        outTI.getStorageType()->getPointerTo());
  outTI.loadAsTake(IGF, outStorage, out);
  IGF.Builder.CreateLifetimeEnd(inStorage, maxSize);
  return;
}

/// Value-level bitwise cast with a size sanity check: if the source is
/// smaller than the destination the cast cannot be valid, so emit a trap and
/// produce undef (see emitTrapAndUndefValue for why this can appear in
/// correct specialized code).
static void emitValueBitwiseCast(IRGenSILFunction &IGF,
                                 SourceLoc loc,
                                 Explosion &in,
                                 const LoadableTypeInfo &inTI,
                                 Explosion &out,
                                 const LoadableTypeInfo &outTI) {
  // Unfortunately, we can't check this invariant until we get to IRGen, since
  // the AST and SIL don't know anything about type layout.
  if (inTI.getFixedSize() < outTI.getFixedSize()) {
    emitTrapAndUndefValue(IGF, in, out, outTI);
    return;
  }
  emitUncheckedValueBitCast(IGF, loc, in, inTI, out, outTI);
}
/// unchecked_trivial_bit_cast: bitwise value cast between trivial types.
void IRGenSILFunction::visitUncheckedTrivialBitCastInst(
                                      swift::UncheckedTrivialBitCastInst *i) {
  Explosion in = getLoweredExplosion(i->getOperand());
  Explosion out;

  emitValueBitwiseCast(*this, i->getLoc().getSourceLoc(),
            in,  cast<LoadableTypeInfo>(getTypeInfo(i->getOperand()->getType())),
            out, cast<LoadableTypeInfo>(getTypeInfo(i->getType())));

  setLoweredExplosion(i, out);
}

/// unchecked_bitwise_cast: same lowering as the trivial variant; ownership
/// differences are handled at the SIL level, not here.
void IRGenSILFunction::
visitUncheckedBitwiseCastInst(swift::UncheckedBitwiseCastInst *i) {
  Explosion in = getLoweredExplosion(i->getOperand());
  Explosion out;

  emitValueBitwiseCast(*this, i->getLoc().getSourceLoc(),
            in,  cast<LoadableTypeInfo>(getTypeInfo(i->getOperand()->getType())),
            out, cast<LoadableTypeInfo>(getTypeInfo(i->getType())));

  setLoweredExplosion(i, out);
}

/// ref_to_raw_pointer: reinterpret a reference as Builtin.RawPointer.
void IRGenSILFunction::visitRefToRawPointerInst(
                                             swift::RefToRawPointerInst *i) {
  auto &ti = getTypeInfo(i->getType());
  emitPointerCastInst(*this, i->getOperand(), i, ti);
}

/// raw_pointer_to_ref: reinterpret Builtin.RawPointer as a reference.
void IRGenSILFunction::visitRawPointerToRefInst(swift::RawPointerToRefInst *i) {
  auto &ti = getTypeInfo(i->getType());
  emitPointerCastInst(*this, i->getOperand(), i, ti);
}
// SIL scalar conversions which never change the IR type.
// FIXME: Except for optionals, which get bit-packed into an integer.
/// Forward `input`'s lowered value to `result`. When the storage types match
/// this is a pure forward; otherwise each scalar is converted with
/// inttoptr/ptrtoint as dictated by the result schema (the optional
/// bit-packing case mentioned above).
static void trivialRefConversion(IRGenSILFunction &IGF,
                                 SILValue input,
                                 SILValue result) {
  Explosion temp = IGF.getLoweredExplosion(input);
  auto &inputTI = IGF.getTypeInfo(input->getType());
  auto &resultTI = IGF.getTypeInfo(result->getType());
  
  // If the types are the same, forward the existing value.
  if (inputTI.getStorageType() == resultTI.getStorageType()) {
    IGF.setLoweredExplosion(result, temp);
    return;
  }

  auto schema = resultTI.getSchema();
  Explosion out;

  for (auto schemaElt : schema) {
    auto resultTy = schemaElt.getScalarType();

    llvm::Value *value = temp.claimNext();
    if (value->getType() == resultTy) {
      // Nothing to do.  This happens with the unowned conversions.
    } else if (resultTy->isPointerTy()) {
      value = IGF.Builder.CreateIntToPtr(value, resultTy);
    } else {
      value = IGF.Builder.CreatePtrToInt(value, resultTy);
    }

    out.add(value);
  }
  
  IGF.setLoweredExplosion(result, out);
}

// SIL scalar conversions which never change the IR type.
// FIXME: Except for optionals, which get bit-packed into an integer.
// Each expansion defines visit<KIND>Inst as a thin wrapper over
// trivialRefConversion.
#define NOOP_CONVERSION(KIND)                                     \
void IRGenSILFunction::visit##KIND##Inst(swift::KIND##Inst *i) {  \
  ::trivialRefConversion(*this, i->getOperand(), i); \
}
NOOP_CONVERSION(UnownedToRef)
NOOP_CONVERSION(RefToUnowned)
NOOP_CONVERSION(UnmanagedToRef)
NOOP_CONVERSION(RefToUnmanaged)
#undef NOOP_CONVERSION
/// thin_to_thick_function: a thick function is the pair (function pointer,
/// context); pair the incoming thin pointer with a null refcounted context.
void IRGenSILFunction::visitThinToThickFunctionInst(
                                            swift::ThinToThickFunctionInst *i) {
  Explosion fnValue = getLoweredExplosion(i->getOperand());
  Explosion thick;
  thick.add(fnValue.claimNext());
  thick.add(IGM.RefCountedNull);
  setLoweredExplosion(i, thick);
}
/// thick_to_objc_metatype: convert a Swift metatype value to an ObjC class
/// reference for the same instance type.
void IRGenSILFunction::visitThickToObjCMetatypeInst(ThickToObjCMetatypeInst *i){
  Explosion from = getLoweredExplosion(i->getOperand());
  llvm::Value *swiftMeta = from.claimNext();
  CanType instanceType(i->getType().castTo<AnyMetatypeType>().getInstanceType());
  Explosion to;
  llvm::Value *classPtr =
    emitClassHeapMetadataRefForMetatype(*this, swiftMeta, instanceType);
  to.add(Builder.CreateBitCast(classPtr, IGM.ObjCClassPtrTy));
  setLoweredExplosion(i, to);
}

/// objc_to_thick_metatype: convert an ObjC class reference back to a Swift
/// metatype by fetching the class's metadata.
void IRGenSILFunction::visitObjCToThickMetatypeInst(
                         ObjCToThickMetatypeInst *i) {
  Explosion from = getLoweredExplosion(i->getOperand());
  llvm::Value *classPtr = from.claimNext();
  
  // Fetch the metadata for that class.
  Explosion to;
  auto metadata = emitObjCMetadataRefForMetadata(*this, classPtr);
  to.add(metadata);
  setLoweredExplosion(i, to);  
}
/// unconditional_checked_cast: scalar checked cast that traps on failure.
void IRGenSILFunction::visitUnconditionalCheckedCastInst(
                                       swift::UnconditionalCheckedCastInst *i) {
  Explosion value = getLoweredExplosion(i->getOperand());
  Explosion ex;
  emitScalarCheckedCast(*this, value, i->getOperand()->getType(), i->getType(),
                        CheckedCastMode::Unconditional, ex);
  setLoweredExplosion(i, ex);
}
/// objc_metatype_to_object: an @objc metatype is already an ObjC object, so
/// only a bitcast to the destination representation is needed.
void IRGenSILFunction::visitObjCMetatypeToObjectInst(
                                                  ObjCMetatypeToObjectInst *i){
  // Bitcast the @objc metatype reference, which is already an ObjC object, to
  // the destination type.
  Explosion from = getLoweredExplosion(i->getOperand());
  llvm::Value *value = from.claimNext();
  value = Builder.CreateBitCast(value, IGM.UnknownRefCountedPtrTy);
  Explosion to;
  to.add(value);
  setLoweredExplosion(i, to);
}

/// objc_existential_metatype_to_object: same as above, but the existential
/// explosion may also carry witness tables, which are simply discarded.
void IRGenSILFunction::visitObjCExistentialMetatypeToObjectInst(
                                       ObjCExistentialMetatypeToObjectInst *i){
  // Bitcast the @objc metatype reference, which is already an ObjC object, to
  // the destination type. The metatype may carry additional witness tables we
  // can drop.
  Explosion from = getLoweredExplosion(i->getOperand());
  llvm::Value *value = from.claimNext();
  (void)from.claimAll();
  value = Builder.CreateBitCast(value, IGM.UnknownRefCountedPtrTy);
  Explosion to;
  to.add(value);
  setLoweredExplosion(i, to);
}

/// objc_protocol: materialize a reference to an ObjC protocol object, cast
/// to the instruction's storage type.
void IRGenSILFunction::visitObjCProtocolInst(ObjCProtocolInst *i) {
  // Get the protocol reference.
  llvm::Value *protoRef = emitReferenceToObjCProtocol(*this, i->getProtocol());
  // Bitcast it to the class reference type.
  protoRef = Builder.CreateBitCast(protoRef,
                                   getTypeInfo(i->getType()).getStorageType());
  Explosion ex;
  ex.add(protoRef);
  setLoweredExplosion(i, ex);
}
/// ref_to_bridge_object: fold the given tag/spare bits into the reference's
/// pointer representation to form a Builtin.BridgeObject value.
void IRGenSILFunction::visitRefToBridgeObjectInst(
                                                swift::RefToBridgeObjectInst *i) {
  Explosion refValues = getLoweredExplosion(i->getConverted());
  Explosion bitValues = getLoweredExplosion(i->getBitsOperand());

  // Mask the bits into the pointer representation.
  llvm::Value *refWord = Builder.CreatePtrToInt(refValues.claimNext(),
                                                IGM.SizeTy);
  llvm::Value *combined = Builder.CreateOr(refWord, bitValues.claimNext());
  combined = Builder.CreateIntToPtr(combined, IGM.BridgeObjectPtrTy);

  Explosion result;
  result.add(combined);
  setLoweredExplosion(i, result);
}
/// bridge_object_to_ref: recover a plain reference from a BridgeObject.
/// When ObjC tagged pointers are possible for the destination class, branch:
/// tagged values pass through verbatim, untagged values get the heap-object
/// spare bits masked off; the two paths rejoin via a phi.
void IRGenSILFunction::visitBridgeObjectToRefInst(
                                              swift::BridgeObjectToRefInst *i) {
  Explosion boEx = getLoweredExplosion(i->getConverted());
  llvm::Value *bo = boEx.claimNext();
  Explosion resultEx;
  
  auto &refTI = getTypeInfo(i->getType());
  llvm::Type *refType = refTI.getSchema()[0].getScalarType();
  
  // If the value is an ObjC tagged pointer, pass it through verbatim.
  llvm::BasicBlock *taggedCont = nullptr,
    *tagged = nullptr,
    *notTagged = nullptr;
  llvm::Value *taggedRef = nullptr;
  llvm::Value *boBits = nullptr;
  
  ClassDecl *Cl = i->getType().getClassOrBoundGenericClass();
  // Only emit the tagged-pointer check when the target platform has tagged
  // pointers and the destination class isn't known to exclude them.
  if (IGM.TargetInfo.hasObjCTaggedPointers() &&
      (!Cl || !isKnownNotTaggedPointer(IGM, Cl))) {
    boBits = Builder.CreatePtrToInt(bo, IGM.SizeTy);
    APInt maskValue = IGM.TargetInfo.ObjCPointerReservedBits.asAPInt();
    llvm::Value *mask = llvm::ConstantInt::get(IGM.getLLVMContext(), maskValue);
    llvm::Value *reserved = Builder.CreateAnd(boBits, mask);
    // Any reserved bit set means "tagged pointer".
    llvm::Value *cond = Builder.CreateICmpEQ(reserved,
                                         llvm::ConstantInt::get(IGM.SizeTy, 0));
    tagged = createBasicBlock("tagged-pointer"),
    notTagged = createBasicBlock("not-tagged-pointer");
    taggedCont = createBasicBlock("tagged-cont");
    
    Builder.CreateCondBr(cond, notTagged, tagged);
    
    Builder.emitBlock(tagged);
    taggedRef = Builder.CreateBitCast(bo, refType);
    Builder.CreateBr(taggedCont);
    
    // If it's not a tagged pointer, mask off the spare bits.
    Builder.emitBlock(notTagged);
  }

  // Mask off the spare bits (if they exist).
  auto &spareBits = IGM.getHeapObjectSpareBits();
  llvm::Value *result;
  if (spareBits.any()) {
    APInt maskValue = ~spareBits.asAPInt();
    
    if (!boBits)
      boBits = Builder.CreatePtrToInt(bo, IGM.SizeTy);
    
    llvm::Value *mask = llvm::ConstantInt::get(IGM.getLLVMContext(), maskValue);
    llvm::Value *masked = Builder.CreateAnd(boBits, mask);
    result = Builder.CreateIntToPtr(masked, refType);
  } else {
    result = Builder.CreateBitCast(bo, refType);
  }
  
  // Join the tagged and untagged paths (only if the tagged check was
  // emitted).
  if (taggedCont) {
    Builder.CreateBr(taggedCont);
    
    Builder.emitBlock(taggedCont);
    
    auto phi = Builder.CreatePHI(refType, 2);
    phi->addIncoming(taggedRef, tagged);
    phi->addIncoming(result, notTagged);
    
    result = phi;
  }
  
  resultEx.add(result);
  setLoweredExplosion(i, resultEx);
}
/// bridge_object_to_word: a BridgeObject is pointer-sized; expose its raw
/// bits as a Builtin.Word.
void IRGenSILFunction::visitBridgeObjectToWordInst(
                                               swift::BridgeObjectToWordInst *i) {
  Explosion operand = getLoweredExplosion(i->getConverted());
  Explosion result;
  result.add(Builder.CreatePtrToInt(operand.claimNext(), IGM.SizeTy));
  setLoweredExplosion(i, result);
}
/// unconditional_checked_cast_addr: address-to-address checked cast that
/// traps on failure.
void IRGenSILFunction::visitUnconditionalCheckedCastAddrInst(
                                   swift::UnconditionalCheckedCastAddrInst *i) {
  Address dest = getLoweredAddress(i->getDest());
  Address src = getLoweredAddress(i->getSrc());
  emitCheckedCast(*this, src, i->getSourceType(), dest, i->getTargetType(),
                  i->getConsumptionKind(), CheckedCastMode::Unconditional);
}

/// Opaque-value variant: not supported by this IRGen pipeline.
void IRGenSILFunction::visitUnconditionalCheckedCastValueInst(
    swift::UnconditionalCheckedCastValueInst *i) {
  llvm_unreachable("unsupported instruction during IRGen");
}

/// Opaque-value variant: not supported by this IRGen pipeline.
void IRGenSILFunction::visitCheckedCastValueBranchInst(
    swift::CheckedCastValueBranchInst *i) {
  llvm_unreachable("unsupported instruction during IRGen");
}
/// checked_cast_br: conditional scalar cast with a branch on success.
/// Exact casts compare class identity; other casts go through the scalar
/// checked-cast path and test the result against null. On success, the cast
/// value feeds the destination block's phi nodes.
void IRGenSILFunction::visitCheckedCastBranchInst(
                                              swift::CheckedCastBranchInst *i) {
  SILType destTy = i->getCastType();
  FailableCastResult castResult;
  Explosion ex;
  if (i->isExact()) {
    auto operand = i->getOperand();
    Explosion source = getLoweredExplosion(operand);
    castResult = emitClassIdenticalCast(*this, source.claimNext(),
                                        operand->getType(), destTy);
  } else {
    Explosion value = getLoweredExplosion(i->getOperand());
    emitScalarCheckedCast(*this, value, i->getOperand()->getType(),
                          i->getCastType(), CheckedCastMode::Conditional, ex);
    auto val = ex.claimNext();
    castResult.casted = val;
    llvm::Value *nil =
      llvm::ConstantPointerNull::get(cast<llvm::PointerType>(val->getType()));
    castResult.succeeded = Builder.CreateICmpNE(val, nil);
  }
  
  // Branch on the success of the cast.
  // All cast operations currently return null on failure.

  auto &successBB = getLoweredBB(i->getSuccessBB());
  llvm::Type *toTy = IGM.getTypeInfo(destTy).getStorageType();
  if (toTy->isPointerTy())
    castResult.casted = Builder.CreateBitCast(castResult.casted, toTy);

  Builder.CreateCondBr(castResult.succeeded,
                       successBB.bb,
                       getLoweredBB(i->getFailureBB()).bb);
  
  // Feed the cast result into the nonnull branch.
  unsigned phiIndex = 0;
  Explosion ex2;
  ex2.add(castResult.casted);
  // Any remaining elements of the cast result (beyond the leading scalar)
  // are forwarded as well.
  ex2.add(ex.claimAll());
  addIncomingExplosionToPHINodes(*this, successBB, phiIndex, ex2);
}
/// checked_cast_addr_br: conditional address-to-address cast; branch on the
/// i1 success flag returned by the runtime cast.
void IRGenSILFunction::visitCheckedCastAddrBranchInst(
                                          swift::CheckedCastAddrBranchInst *i) {
  Address dest = getLoweredAddress(i->getDest());
  Address src = getLoweredAddress(i->getSrc());
  llvm::Value *castSucceeded =
    emitCheckedCast(*this, src, i->getSourceType(), dest, i->getTargetType(),
                    i->getConsumptionKind(), CheckedCastMode::Conditional);
  Builder.CreateCondBr(castSucceeded,
                       getLoweredBB(i->getSuccessBB()).bb,
                       getLoweredBB(i->getFailureBB()).bb);
}
/// is_nonnull: test whether the operand's pointer representation is non-null.
/// The pointer is extracted differently depending on whether the operand is
/// a function value (use its function pointer), an address, or an ordinary
/// scalar.
void IRGenSILFunction::visitIsNonnullInst(swift::IsNonnullInst *i) {
  // Get the value we're testing, which may be a function, an address or an
  // instance pointer.
  llvm::Value *val;
  const LoweredValue &lv = getLoweredValue(i->getOperand());
  
  if (i->getOperand()->getType().is<SILFunctionType>()) {
    Explosion values = lv.getExplosion(*this);
    val = values.claimNext();   // Function pointer.
    values.claimNext();         // Ignore the data pointer.
  } else if (lv.isAddress()) {
    val = lv.getAddress().getAddress();
  } else {
    Explosion values = lv.getExplosion(*this);
    val = values.claimNext();
  }
  
  // Check that the result isn't null.
  auto *valTy = cast<llvm::PointerType>(val->getType());
  llvm::Value *result = Builder.CreateICmp(llvm::CmpInst::ICMP_NE,
                                    val, llvm::ConstantPointerNull::get(valTy));
  
  Explosion out;
  out.add(result);
  setLoweredExplosion(i, out);
}
/// upcast: convert to a superclass type. Representation is unchanged, so
/// this is a bitcast — of the pointer for address operands, of the single
/// scalar for class values.
void IRGenSILFunction::visitUpcastInst(swift::UpcastInst *i) {
  auto toTy = getTypeInfo(i->getType()).getSchema()[0].getScalarType();

  // If we have an address, just bitcast, don't explode.
  if (i->getOperand()->getType().isAddress()) {
    Address fromAddr = getLoweredAddress(i->getOperand());
    llvm::Value *toValue = Builder.CreateBitCast(
      fromAddr.getAddress(), toTy->getPointerTo());
    Address Addr(toValue, fromAddr.getAlignment());
    setLoweredAddress(i, Addr);
    return;
  }

  Explosion from = getLoweredExplosion(i->getOperand());
  Explosion to;
  assert(from.size() == 1 && "class should explode to single value");
  llvm::Value *fromValue = from.claimNext();
  to.add(Builder.CreateBitCast(fromValue, toTy));
  setLoweredExplosion(i, to);
}
/// index_addr: advance the base address by `index` elements of the base's
/// element type, using the type's array-indexing strategy.
void IRGenSILFunction::visitIndexAddrInst(swift::IndexAddrInst *i) {
  Address baseAddr = getLoweredAddress(i->getBase());
  Explosion indexEx = getLoweredExplosion(i->getIndex());
  llvm::Value *indexValue = indexEx.claimNext();

  SILType elementTy = i->getBase()->getType();
  const TypeInfo &elementTI = getTypeInfo(elementTy);
  setLoweredAddress(
      i, elementTI.indexArray(*this, baseAddr, indexValue, elementTy));
}
/// tail_addr: compute the address of a tail-allocated array following
/// `index` elements of the base type: index past the base elements, round up
/// to the tail type's alignment, then cast to the tail element pointer type.
void IRGenSILFunction::visitTailAddrInst(swift::TailAddrInst *i) {
  Address base = getLoweredAddress(i->getBase());
  Explosion indexValues = getLoweredExplosion(i->getIndex());
  llvm::Value *index = indexValues.claimNext();

  SILType baseTy = i->getBase()->getType();
  const TypeInfo &baseTI = getTypeInfo(baseTy);

  Address dest = baseTI.indexArray(*this, base, index, baseTy);
  const TypeInfo &TailTI = getTypeInfo(i->getTailType());
  dest = TailTI.roundUpToTypeAlignment(*this, dest, i->getTailType());
  llvm::Type *destType = TailTI.getStorageType()->getPointerTo();
  dest = Builder.CreateBitCast(dest, destType);
  setLoweredAddress(i, dest);
}
/// index_raw_pointer: byte-wise pointer arithmetic — offset the base raw
/// pointer by the given index.
void IRGenSILFunction::visitIndexRawPointerInst(swift::IndexRawPointerInst *i) {
  Explosion baseEx = getLoweredExplosion(i->getBase());
  llvm::Value *basePtr = baseEx.claimNext();
  Explosion indexEx = getLoweredExplosion(i->getIndex());
  llvm::Value *offset = indexEx.claimNext();

  // We don't expose a non-inbounds GEP operation.
  Explosion result;
  result.add(Builder.CreateInBoundsGEP(basePtr, offset));
  setLoweredExplosion(i, result);
}
/// alloc_value_buffer: allocate storage for a value inside a fixed-size
/// value buffer; path depends on the UseCOWExistentials option.
void IRGenSILFunction::visitAllocValueBufferInst(
                                          swift::AllocValueBufferInst *i) {
  Address buffer = getLoweredAddress(i->getOperand());
  auto valueType = i->getValueType();
  Address value;
  if (getSILModule().getOptions().UseCOWExistentials) {
    value = emitAllocateValueInBuffer(*this, valueType, buffer);
  } else {
    value = getTypeInfo(valueType).allocateBuffer(*this, buffer, valueType);
  }
  setLoweredAddress(i, value);
}

/// project_value_buffer: produce the address of the value held in a value
/// buffer (without allocating).
void IRGenSILFunction::visitProjectValueBufferInst(
                                          swift::ProjectValueBufferInst *i) {
  Address buffer = getLoweredAddress(i->getOperand());
  auto valueType = i->getValueType();
  Address value;
  if (getSILModule().getOptions().UseCOWExistentials) {
    value = emitProjectValueInBuffer(*this, valueType, buffer);
  } else {
    value = getTypeInfo(valueType).projectBuffer(*this, buffer, valueType);
  }
  setLoweredAddress(i, value);
}

/// dealloc_value_buffer: release the storage previously allocated in a value
/// buffer (value itself must already be destroyed).
void IRGenSILFunction::visitDeallocValueBufferInst(
                                          swift::DeallocValueBufferInst *i) {
  Address buffer = getLoweredAddress(i->getOperand());
  auto valueType = i->getValueType();
  if (getSILModule().getOptions().UseCOWExistentials) {
    emitDeallocateValueInBuffer(*this, valueType, buffer);
    return;
  }
  getTypeInfo(valueType).deallocateBuffer(*this, buffer, valueType);
}
/// init_existential_addr: initialize an opaque existential container and
/// produce the address where the concrete value should be stored. With COW
/// existentials the value lives in a boxed buffer; otherwise it goes into
/// the container's fixed-size buffer (possibly deferred to a later
/// copy_addr).
void IRGenSILFunction::visitInitExistentialAddrInst(swift::InitExistentialAddrInst *i) {
  Address container = getLoweredAddress(i->getOperand());
  SILType destType = i->getOperand()->getType();
  // Sets up the existential's type metadata and witness tables.
  Address buffer = emitOpaqueExistentialContainerInit(*this,
                                                container,
                                                destType,
                                                i->getFormalConcreteType(),
                                                i->getLoweredConcreteType(),
                                                i->getConformances());
  auto srcType = i->getLoweredConcreteType();
  auto &srcTI = getTypeInfo(srcType);

  // Allocate a COW box for the value if necessary.
  if (getSILModule().getOptions().UseCOWExistentials) {
    auto *genericEnv = CurSILFn->getGenericEnvironment();
    setLoweredAddress(i, emitAllocateBoxedOpaqueExistentialBuffer(
                             *this, destType, srcType, container, genericEnv));
    return;
  }

  // See if we can defer initialization of the buffer to a copy_addr into it.
  if (tryDeferFixedSizeBufferInitialization(*this, i, srcTI, buffer, ""))
    return;
  
  // Allocate in the destination fixed-size buffer.
  Address address = srcTI.allocateBuffer(*this, buffer, srcType);
  setLoweredAddress(i, address);
}

/// Opaque-value variant: not supported by this IRGen pipeline.
void IRGenSILFunction::visitInitExistentialOpaqueInst(
    swift::InitExistentialOpaqueInst *i) {
  llvm_unreachable("unsupported instruction during IRGen");
}
/// init_existential_metatype: wrap a metatype value into an existential
/// metatype container (metatype + witness tables).
void IRGenSILFunction::visitInitExistentialMetatypeInst(
                                              InitExistentialMetatypeInst *i) {
  Explosion metatype = getLoweredExplosion(i->getOperand());
  Explosion result;
  emitExistentialMetatypeContainer(*this,
                                   result, i->getType(),
                                   metatype.claimNext(),
                                   i->getOperand()->getType(),
                                   i->getConformances());
  setLoweredExplosion(i, result);
}

/// init_existential_ref: wrap a class reference into a class-bounded
/// existential container (reference + witness tables).
void IRGenSILFunction::visitInitExistentialRefInst(InitExistentialRefInst *i) {
  Explosion instance = getLoweredExplosion(i->getOperand());
  Explosion result;
  emitClassExistentialContainer(*this,
                               result, i->getType(),
                               instance.claimNext(),
                               i->getFormalConcreteType(),
                               i->getOperand()->getType(),
                               i->getConformances());
  setLoweredExplosion(i, result);
}
/// deinit_existential_addr: tear down an opaque existential container whose
/// value was already taken; with COW existentials this deallocates the boxed
/// buffer instead.
void IRGenSILFunction::visitDeinitExistentialAddrInst(
                                              swift::DeinitExistentialAddrInst *i) {
  Address container = getLoweredAddress(i->getOperand());

  // Deallocate the COW box for the value if necessary.
  if (getSILModule().getOptions().UseCOWExistentials) {
    emitDeallocateBoxedOpaqueExistentialBuffer(
        *this, i->getOperand()->getType(), container);
    return;
  }

  emitOpaqueExistentialContainerDeinit(*this, container,
                                       i->getOperand()->getType());
}

/// Opaque-value variant: not supported by this IRGen pipeline.
void IRGenSILFunction::visitDeinitExistentialOpaqueInst(
    swift::DeinitExistentialOpaqueInst *i) {
  llvm_unreachable("unsupported instruction during IRGen");
}
/// open_existential_addr: project the address of the concrete value stored
/// in an opaque existential, binding the opened archetype. The COW path may
/// copy the boxed value first depending on the access kind.
void IRGenSILFunction::visitOpenExistentialAddrInst(OpenExistentialAddrInst *i) {
  SILType baseTy = i->getOperand()->getType();
  Address base = getLoweredAddress(i->getOperand());

  auto openedArchetype = cast<ArchetypeType>(
                           i->getType().getSwiftRValueType());

  // Insert a copy of the boxed value for COW semantics if necessary.
  if (getSILModule().getOptions().UseCOWExistentials) {
    auto accessKind = i->getAccessKind();
    Address object = emitOpaqueBoxedExistentialProjection(
        *this, accessKind, base, baseTy, openedArchetype);

    setLoweredAddress(i, object);
    return;
  }

  Address object = emitOpaqueExistentialProjection(*this, base, baseTy,
                                                   openedArchetype);

  setLoweredAddress(i, object);
}

/// open_existential_ref: project the class instance out of a class-bounded
/// existential, binding the opened archetype.
void IRGenSILFunction::visitOpenExistentialRefInst(OpenExistentialRefInst *i) {

  SILType baseTy = i->getOperand()->getType();
  Explosion base = getLoweredExplosion(i->getOperand());
  auto openedArchetype = cast<ArchetypeType>(
                           i->getType().getSwiftRValueType());

  Explosion result;
  llvm::Value *instance
    = emitClassExistentialProjection(*this, base, baseTy,
                                     openedArchetype);
  result.add(instance);
  setLoweredExplosion(i, result);
}

/// open_existential_metatype: project the metatype out of an existential
/// metatype value.
void IRGenSILFunction::visitOpenExistentialMetatypeInst(
                                              OpenExistentialMetatypeInst *i) {
  SILType baseTy = i->getOperand()->getType();
  Explosion base = getLoweredExplosion(i->getOperand());
  auto openedTy = i->getType().getSwiftRValueType();

  llvm::Value *metatype =
    emitExistentialMetatypeProjection(*this, base, baseTy, openedTy);
  Explosion result;
  result.add(metatype);
  setLoweredExplosion(i, result);
}

/// Opaque-value variant: not supported by this IRGen pipeline.
void IRGenSILFunction::visitOpenExistentialOpaqueInst(
    OpenExistentialOpaqueInst *i) {
  llvm_unreachable("unsupported instruction during IRGen");
}
/// project_block_storage: address of the capture area inside ObjC block
/// storage.
void IRGenSILFunction::visitProjectBlockStorageInst(ProjectBlockStorageInst *i){
  // TODO
  Address block = getLoweredAddress(i->getOperand());
  Address capture = projectBlockStorageCapture(*this, block,
                       i->getOperand()->getType().castTo<SILBlockStorageType>());
  
  setLoweredAddress(i, capture);
}

/// init_block_storage_header: fill in the ObjC block header (isa, flags,
/// invoke function, descriptor) over the given block storage, and produce
/// the storage address cast to an ObjC block pointer as the result value.
/// Only statically-known invoke functions are supported.
void IRGenSILFunction::visitInitBlockStorageHeaderInst(
                                               InitBlockStorageHeaderInst *i) {
  auto addr = getLoweredAddress(i->getBlockStorage());
  
  // We currently only support static invoke functions.
  auto &invokeVal = getLoweredValue(i->getInvokeFunction());
  llvm::Function *invokeFn = nullptr;
  ForeignFunctionInfo foreignInfo;
  if (invokeVal.kind != LoweredValue::Kind::StaticFunction) {
    IGM.unimplemented(i->getLoc().getSourceLoc(),
                      "non-static block invoke function");
  } else {
    invokeFn = invokeVal.getStaticFunction().getFunction();
    foreignInfo = invokeVal.getStaticFunction().getForeignInfo();
  }

  assert(foreignInfo.ClangInfo && "no clang info for block function?");
  
  // Initialize the header.
  emitBlockHeader(*this, addr,
          i->getBlockStorage()->getType().castTo<SILBlockStorageType>(),
          invokeFn, i->getInvokeFunction()->getType().castTo<SILFunctionType>(),
          foreignInfo);
  
  // Cast the storage to the block type to produce the result value.
  llvm::Value *asBlock = Builder.CreateBitCast(addr.getAddress(),
                                               IGM.ObjCBlockPtrTy);
  Explosion e;
  e.add(asBlock);
  setLoweredExplosion(i, e);
}
/// alloc_existential_box: heap-allocate a boxed existential container and
/// record it as a box-with-address (owner + value address).
void IRGenSILFunction::visitAllocExistentialBoxInst(AllocExistentialBoxInst *i){
  OwnedAddress boxWithAddr =
    emitBoxedExistentialContainerAllocation(*this, i->getExistentialType(),
                                            i->getFormalConcreteType(),
                                            i->getConformances());
  setLoweredBox(i, boxWithAddr);
}

/// dealloc_existential_box: free a boxed existential whose contained value
/// was never initialized (or already destroyed).
void IRGenSILFunction::visitDeallocExistentialBoxInst(
                                                DeallocExistentialBoxInst *i) {
  Explosion box = getLoweredExplosion(i->getOperand());
  emitBoxedExistentialContainerDeallocation(*this, box,
                                            i->getOperand()->getType(),
                                            i->getConcreteType());
}

/// open_existential_box: project the value address out of a boxed
/// existential, binding the opened archetype.
void IRGenSILFunction::visitOpenExistentialBoxInst(OpenExistentialBoxInst *i) {
  Explosion box = getLoweredExplosion(i->getOperand());
  auto openedArchetype = cast<ArchetypeType>(i->getType().getSwiftRValueType());

  auto addr = emitOpenExistentialBox(*this, box, i->getOperand()->getType(),
                                     openedArchetype);
  setLoweredAddress(i, addr);
}
/// project_existential_box: address of the value inside a boxed existential.
/// Reuses the known address when the operand came from alloc_existential_box;
/// otherwise projects it from the box reference.
void
IRGenSILFunction::visitProjectExistentialBoxInst(ProjectExistentialBoxInst *i) {
  const LoweredValue &val = getLoweredValue(i->getOperand());
  if (val.isBoxWithAddress()) {
    // The operand is an alloc_existential_box.
    // We can directly reuse the address.
    setLoweredAddress(i, val.getAddressOfBox());
  } else {
    Explosion box = getLoweredExplosion(i->getOperand());
    auto caddr = emitBoxedExistentialProjection(*this, box,
                                                i->getOperand()->getType(),
                                                i->getType().getSwiftRValueType());
    setLoweredAddress(i, caddr.getAddress());
  }
}

/// dynamic_method: record the ObjC member so the eventual apply is lowered
/// as an objc_msgSend; no IR is emitted here.
void IRGenSILFunction::visitDynamicMethodInst(DynamicMethodInst *i) {
  assert(i->getMember().isForeign && "dynamic_method requires [objc] method");
  setLoweredObjCMethod(i, i->getMember());
  return;
}
/// witness_method: look up a protocol requirement's implementation. Foreign
/// (ObjC) members are recorded for msgSend dispatch at the apply site;
/// native members are fetched from the witness table.
void IRGenSILFunction::visitWitnessMethodInst(swift::WitnessMethodInst *i) {
  // For Objective-C classes we need to arrange for a msgSend
  // to happen when the method is called.
  if (i->getMember().isForeign) {
    setLoweredObjCMethod(i, i->getMember());
    return;
  }

  CanType baseTy = i->getLookupType();
  ProtocolConformanceRef conformance = i->getConformance();
  SILDeclRef member = i->getMember();

  // It would be nice if this weren't discarded.
  llvm::Value *baseMetadataCache = nullptr;

  Explosion lowered;
  emitWitnessMethodValue(*this, baseTy, &baseMetadataCache,
                         member, conformance, lowered);
  
  setLoweredExplosion(i, lowered);
}
/// Record the now-allocated address for a value whose buffer initialization
/// was deferred, and emit the pending debug info for alloc_stack values.
void IRGenSILFunction::setAllocatedAddressForBuffer(SILValue v,
                                                const Address &allocedAddress) {
  overwriteAllocatedAddress(v, allocedAddress);

  // Emit the debug info for the variable if any.
  if (auto allocStack = dyn_cast<AllocStackInst>(v)) {
    emitDebugInfoForAllocStack(allocStack, getTypeInfo(v->getType()),
                               allocedAddress.getAddress());
  }
}
/// copy_addr: copy or take a value between addresses, dispatching on the
/// instruction's [take]/[initialization] flags. Also completes any deferred
/// fixed-size-buffer initialization left by init_existential_addr.
void IRGenSILFunction::visitCopyAddrInst(swift::CopyAddrInst *i) {
  SILType addrTy = i->getSrc()->getType();
  const TypeInfo &addrTI = getTypeInfo(addrTy);
  Address src = getLoweredAddress(i->getSrc());
  // See whether we have a deferred fixed-size buffer initialization.
  auto &loweredDest = getLoweredValue(i->getDest());
  if (loweredDest.isUnallocatedAddressInBuffer()) {
    // We should never have a deferred initialization with COW existentials.
    assert(!getSILModule().getOptions().UseCOWExistentials &&
           "Should never have an unallocated buffer and COW existentials");
    assert(i->isInitializationOfDest()
           && "need to initialize an unallocated buffer");
    Address cont = loweredDest.getContainerOfAddress();
    // initializeBufferWith{Take,Copy} allocate the buffer and return the
    // value address; record it for later uses of the destination.
    if (i->isTakeOfSrc()) {
      Address addr = addrTI.initializeBufferWithTake(*this, cont, src, addrTy);
      setAllocatedAddressForBuffer(i->getDest(), addr);
    } else {
      Address addr = addrTI.initializeBufferWithCopy(*this, cont, src, addrTy);
      setAllocatedAddressForBuffer(i->getDest(), addr);
    }
  } else {
    Address dest = loweredDest.getAddress();
    // Four-way dispatch: init vs. assign × take vs. copy.
    if (i->isInitializationOfDest()) {
      if (i->isTakeOfSrc()) {
        addrTI.initializeWithTake(*this, dest, src, addrTy);
      } else {
        addrTI.initializeWithCopy(*this, dest, src, addrTy);
      }
    } else {
      if (i->isTakeOfSrc()) {
        addrTI.assignWithTake(*this, dest, src, addrTy);
      } else {
        addrTI.assignWithCopy(*this, dest, src, addrTy);
      }
    }
  }
}
// This is a no-op because we do not lower Swift TBAA info to LLVM IR, and it
// does not produce any values.
// NOTE(review): bind_memory appears to exist only for SIL-level typed-memory
// bookkeeping — nothing needs to be emitted here.
void IRGenSILFunction::visitBindMemoryInst(swift::BindMemoryInst *) {}
// Lower destroy_addr: destroy the value at the operand address in place via
// the type-specific destroy entry point.
void IRGenSILFunction::visitDestroyAddrInst(swift::DestroyAddrInst *i) {
  SILType addrTy = i->getOperand()->getType();
  const TypeInfo &addrTI = getTypeInfo(addrTy);
  // Destroy through the TypeInfo for the operand's type.
  Address base = getLoweredAddress(i->getOperand());
  addrTI.destroy(*this, base, addrTy);
}
// Lower cond_fail: branch to a per-instruction trap block when the i1
// condition is true, otherwise continue.
void IRGenSILFunction::visitCondFailInst(swift::CondFailInst *i) {
  Explosion e = getLoweredExplosion(i->getOperand());
  llvm::Value *cond = e.claimNext();
  // Emit individual fail blocks so that we can map the failure back to a source
  // line.
  llvm::BasicBlock *failBB = llvm::BasicBlock::Create(IGM.getLLVMContext());
  llvm::BasicBlock *contBB = llvm::BasicBlock::Create(IGM.getLLVMContext());
  // True means the failure condition fired.
  Builder.CreateCondBr(cond, failBB, contBB);
  Builder.emitBlock(failBB);
  if (IGM.IRGen.Opts.Optimize) {
    // Emit unique side-effecting inline asm calls in order to eliminate
    // the possibility that an LLVM optimization or code generation pass
    // will merge these blocks back together again. We emit an empty asm
    // string with the side-effect flag set, and with a unique integer
    // argument for each cond_fail we see in the function.
    llvm::IntegerType *asmArgTy = IGM.Int32Ty;
    llvm::Type *argTys = { asmArgTy };
    llvm::FunctionType *asmFnTy =
      llvm::FunctionType::get(IGM.VoidTy, argTys, false /* = isVarArg */);
    llvm::InlineAsm *inlineAsm =
      llvm::InlineAsm::get(asmFnTy, "", "n", true /* = SideEffects */);
    Builder.CreateCall(inlineAsm,
                       llvm::ConstantInt::get(asmArgTy, NumCondFails++));
  }
  // Emit the trap instruction.
  llvm::Function *trapIntrinsic =
      llvm::Intrinsic::getDeclaration(&IGM.Module, llvm::Intrinsic::ID::trap);
  Builder.CreateCall(trapIntrinsic, {});
  Builder.CreateUnreachable();
  Builder.emitBlock(contBB);
  // Remember the fail block; presumably consumed when finalizing the
  // function — TODO(review): confirm against FailBBs' uses.
  FailBBs.push_back(failBB);
}
// Lower super_method: fetch the method entry from the superclass vtable
// (or arrange an Objective-C super dispatch for foreign members).
void IRGenSILFunction::visitSuperMethodInst(swift::SuperMethodInst *i) {
  if (i->getMember().isForeign) {
    // Foreign member: record the method bounded by the operand's static type,
    // starting the lookup at the superclass.
    setLoweredObjCMethodBounded(i, i->getMember(),
                                i->getOperand()->getType(),
                                /*startAtSuper=*/true);
    return;
  }
  auto base = getLoweredExplosion(i->getOperand());
  auto baseType = i->getOperand()->getType();
  llvm::Value *baseValue = base.claimNext();
  auto method = i->getMember();
  auto methodType = i->getType().castTo<SILFunctionType>();
  // Native member: load the function pointer through the super vtable.
  llvm::Value *fnValue = emitVirtualMethodValue(*this, baseValue,
                                                baseType,
                                                method, methodType,
                                                /*useSuperVTable*/ true);
  fnValue = Builder.CreateBitCast(fnValue, IGM.Int8PtrTy);
  Explosion e;
  e.add(fnValue);
  setLoweredExplosion(i, e);
}
// Lower class_method: for foreign members arrange an objc_msgSend at the
// call site; for native Swift classes load the entry from the vtable.
void IRGenSILFunction::visitClassMethodInst(swift::ClassMethodInst *i) {
  // For Objective-C classes we need to arrange for a msgSend
  // to happen when the method is called.
  if (i->getMember().isForeign) {
    setLoweredObjCMethod(i, i->getMember());
    return;
  }
  Explosion baseExplosion = getLoweredExplosion(i->getOperand());
  llvm::Value *base = baseExplosion.claimNext();
  SILDeclRef member = i->getMember();
  auto fnType = i->getType().castTo<SILFunctionType>();
  // For Swift classes, get the method implementation from the vtable.
  // FIXME: better explosion kind, map as static.
  llvm::Value *fn = emitVirtualMethodValue(*this, base,
                                           i->getOperand()->getType(),
                                           member, fnType,
                                           /*useSuperVTable*/ false);
  Explosion result;
  result.add(Builder.CreateBitCast(fn, IGM.Int8PtrTy));
  setLoweredExplosion(i, result);
}
// For each SIL global variable that carries a static initializer, lower the
// initializer's constant value and install it on the corresponding LLVM
// global in this module.
//
// Fix: the local 'StaticInitializers' SmallVector was declared but never
// used; it has been removed.
void IRGenModule::emitSILStaticInitializers() {
  for (SILGlobalVariable &Global : getSILModule().getSILGlobals()) {
    if (!Global.getInitializer())
      continue;
    auto *IRGlobal =
        Module.getGlobalVariable(Global.getName(), true /* = AllowLocal */);
    // A check for multi-threaded compilation: Is this the llvm module where the
    // global is defined and not only referenced (or not referenced at all).
    if (!IRGlobal || !IRGlobal->hasInitializer())
      continue;
    auto *InitValue = Global.getValueOfStaticInitializer();
    // Set the IR global's initializer to the constant for this SIL
    // struct.
    if (auto *SI = dyn_cast<StructInst>(InitValue)) {
      IRGlobal->setInitializer(emitConstantStruct(*this, SI));
      continue;
    }
    // Otherwise the initializer must be a tuple.
    auto *TI = cast<TupleInst>(InitValue);
    IRGlobal->setInitializer(emitConstantTuple(*this, TI));
  }
}
| codestergit/swift | lib/IRGen/IRGenSIL.cpp | C++ | apache-2.0 | 187,807 |
/*
* Copyright (c) 2005-2014, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.event.input.adaptor.kafka.internal.ds;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.osgi.service.component.ComponentContext;
import org.wso2.carbon.event.input.adaptor.core.InputEventAdaptorFactory;
import org.wso2.carbon.event.input.adaptor.kafka.KafkaEventAdaptorFactory;
import org.wso2.carbon.utils.ConfigurationContextService;
/**
* @scr.component name="input.Kafka.EventAdaptorService.component" immediate="true"
* @scr.reference name="configurationcontext.service"
* interface="org.wso2.carbon.utils.ConfigurationContextService" cardinality="0..1"
* policy="dynamic" bind="setConfigurationContextService" unbind="unsetConfigurationContextService"
*/
public class ConsumerKafkaServiceDS {
    private static final Log log = LogFactory.getLog(ConsumerKafkaServiceDS.class);
    /**
     * Activates this OSGi declarative-services component: registers a
     * {@link KafkaEventAdaptorFactory} under the
     * {@link InputEventAdaptorFactory} service name so the event-input-adaptor
     * core can discover the Kafka consumer adaptor.
     *
     * @param context the DS component context supplying the bundle context
     */
    protected void activate(ComponentContext context) {
        try {
            InputEventAdaptorFactory testInEventAdaptorFactory = new KafkaEventAdaptorFactory();
            context.getBundleContext().registerService(InputEventAdaptorFactory.class.getName(), testInEventAdaptorFactory, null);
            log.info("Successfully deployed the KafkaConsumer input event adaptor service");
        } catch (RuntimeException e) {
            log.error("Can not create the KafkaConsumer input event adaptor service ", e);
        }
    }
    // Bind method wired by the @scr.reference annotation in the class javadoc.
    protected void setConfigurationContextService(
            ConfigurationContextService configurationContextService) {
        KafkaEventAdaptorServiceHolder.registerConfigurationContextService(configurationContextService);
    }
    // Unbind counterpart of setConfigurationContextService.
    protected void unsetConfigurationContextService(
            ConfigurationContextService configurationContextService) {
        KafkaEventAdaptorServiceHolder.unregisterConfigurationContextService(configurationContextService);
    }
}
| malakasilva/carbon-event-processing | components/adaptors/event-input-adaptor/org.wso2.carbon.event.input.adaptor.kafka/src/main/java/org/wso2/carbon/event/input/adaptor/kafka/internal/ds/ConsumerKafkaServiceDS.java | Java | apache-2.0 | 2,647 |
package com.carrotsearch.examples.randomizedrunner;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import com.carrotsearch.randomizedtesting.RandomizedRunner;
/**
 * This is a test-based tutorial introducing randomized JUnit testing using
 * {@link RandomizedRunner}. Follow the test cases in alphabetical order.
*
* <p>One way to start using {@link RandomizedRunner} is to declare
* your suite class as being executed by {@link RandomizedRunner} (using
* {@link RunWith} annotation). The {@link #success()} method doesn't do anything
 * useful but runs under {@link RandomizedRunner}. We know this for sure because we
 * can make a hook (<code>before()</code>) method private
 * (normal JUnit doesn't allow this).
*/
@RunWith(RandomizedRunner.class)
public class Test001SimpleUseCase {
  // RandomizedRunner accepts private hook methods; the stock JUnit runner
  // would reject a non-public @Before method.
  @Before
  private void before() {
    // This won't work under the default JUnit runner.
  }
  /** Intentionally empty: merely proves the suite runs under RandomizedRunner. */
  @Test
  public void success() {
    // Do nothing.
  }
}
| randomizedtesting/randomizedtesting | examples/maven/src/main/java/com/carrotsearch/examples/randomizedrunner/Test001SimpleUseCase.java | Java | apache-2.0 | 997 |
/**
* Copyright (c), Data Geekery GmbH, contact@datageekery.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jooq.lambda;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.util.function.Predicate;
import org.jooq.lambda.function.Functions;
import org.junit.Test;
/**
* @author Lukas Eder
*/
public class PredicateTests {

    /**
     * Exercises {@code Functions.not}, {@code Functions.and} and
     * {@code Functions.or} over simple divisibility predicates.
     */
    @Test
    public void testPredicates() {
        Predicate<Integer> even = n -> n % 2 == 0;
        Predicate<Integer> threes = n -> n % 3 == 0;

        // Sanity-check the base predicate.
        assertTrue(even.test(0));
        assertFalse(even.test(1));

        // Negation.
        Predicate<Integer> odd = Functions.not(even);
        assertFalse(odd.test(0));
        assertTrue(odd.test(1));

        // Conjunction: true only for multiples of six.
        Predicate<Integer> bySix = Functions.and(even, threes);
        assertTrue(bySix.test(0));
        assertFalse(bySix.test(1));
        assertFalse(bySix.test(2));
        assertFalse(bySix.test(3));
        assertFalse(bySix.test(4));
        assertFalse(bySix.test(5));
        assertTrue(bySix.test(6));

        // Disjunction: true for multiples of two or three.
        Predicate<Integer> byTwoOrThree = Functions.or(even, threes);
        assertTrue(byTwoOrThree.test(0));
        assertFalse(byTwoOrThree.test(1));
        assertTrue(byTwoOrThree.test(2));
        assertTrue(byTwoOrThree.test(3));
        assertTrue(byTwoOrThree.test(4));
        assertFalse(byTwoOrThree.test(5));
        assertTrue(byTwoOrThree.test(6));
    }
}
| jOOQ/jOOL | jOOL/src/test/java/org/jooq/lambda/PredicateTests.java | Java | apache-2.0 | 2,035 |
/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.config;
import com.thoughtworks.go.config.validation.FilePathTypeValidator;
import com.thoughtworks.go.domain.ArtifactType;
import com.thoughtworks.go.plugin.access.artifact.ArtifactMetadataStore;
import com.thoughtworks.go.plugin.api.info.PluginDescriptor;
import com.thoughtworks.go.plugin.domain.artifact.ArtifactPluginInfo;
import com.thoughtworks.go.plugin.domain.common.Metadata;
import com.thoughtworks.go.plugin.domain.common.PluggableInstanceSettings;
import com.thoughtworks.go.plugin.domain.common.PluginConfiguration;
import com.thoughtworks.go.security.CryptoException;
import com.thoughtworks.go.security.GoCipher;
import org.hamcrest.Matchers;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static com.thoughtworks.go.config.BuildArtifactConfig.DEST;
import static com.thoughtworks.go.config.BuildArtifactConfig.SRC;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Unit tests for {@code ArtifactTypeConfigs}: population from request
 * attribute maps ({@code setConfigAttributes}), duplicate/path validation,
 * and the filtering/lookup helpers for built-in vs. pluggable artifacts.
 */
public class ArtifactTypeConfigsTest {
    // add() accepts duplicates; validate() is expected to flag them later.
    @Test
    public void shouldAddDuplicatedArtifactSoThatValidationKicksIn() throws Exception {
        final ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
        assertThat(artifactTypeConfigs.size(), is(0));
        artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
        artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
        assertThat(artifactTypeConfigs.size(), is(2));
    }
    // setConfigAttributes builds typed configs from attribute maps, keyed by
    // the "artifactTypeValue" display name.
    @Test
    public void shouldLoadArtifactPlans() {
        HashMap<String, String> artifactPlan1 = new HashMap<>();
        artifactPlan1.put(SRC, "blah");
        artifactPlan1.put(DEST, "something");
        artifactPlan1.put("artifactTypeValue", TestArtifactConfig.TEST_PLAN_DISPLAY_NAME);
        HashMap<String, String> artifactPlan2 = new HashMap<>();
        artifactPlan2.put(SRC, "blah2");
        artifactPlan2.put(DEST, "something2");
        artifactPlan2.put("artifactTypeValue", BuildArtifactConfig.ARTIFACT_PLAN_DISPLAY_NAME);
        List<HashMap> artifactPlansList = new ArrayList<>();
        artifactPlansList.add(artifactPlan1);
        artifactPlansList.add(artifactPlan2);
        ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
        artifactTypeConfigs.setConfigAttributes(artifactPlansList);
        assertThat(artifactTypeConfigs.size(), is(2));
        TestArtifactConfig plan = new TestArtifactConfig();
        plan.setSource("blah");
        plan.setDestination("something");
        assertThat(artifactTypeConfigs.get(0), is(plan));
        assertThat(artifactTypeConfigs.get(1), is(new BuildArtifactConfig("blah2", "something2")));
    }
    // Entries whose source AND destination are empty are silently dropped.
    @Test
    public void setConfigAttributes_shouldIgnoreEmptySourceAndDest() {
        HashMap<String, String> artifactPlan1 = new HashMap<>();
        artifactPlan1.put(SRC, "blah");
        artifactPlan1.put(DEST, "something");
        artifactPlan1.put("artifactTypeValue", TestArtifactConfig.TEST_PLAN_DISPLAY_NAME);
        HashMap<String, String> artifactPlan2 = new HashMap<>();
        artifactPlan2.put(SRC, "blah2");
        artifactPlan2.put(DEST, "something2");
        artifactPlan2.put("artifactTypeValue", BuildArtifactConfig.ARTIFACT_PLAN_DISPLAY_NAME);
        HashMap<String, String> artifactPlan3 = new HashMap<>();
        artifactPlan3.put(SRC, "");
        artifactPlan3.put(DEST, "");
        artifactPlan3.put("artifactTypeValue", BuildArtifactConfig.ARTIFACT_PLAN_DISPLAY_NAME);
        List<HashMap> artifactPlansList = new ArrayList<>();
        artifactPlansList.add(artifactPlan1);
        artifactPlansList.add(artifactPlan3);
        artifactPlansList.add(artifactPlan2);
        ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
        artifactTypeConfigs.setConfigAttributes(artifactPlansList);
        assertThat(artifactTypeConfigs.size(), is(2));
        TestArtifactConfig plan = new TestArtifactConfig();
        plan.setSource("blah");
        plan.setDestination("something");
        assertThat(artifactTypeConfigs.get(0), is(plan));
        assertThat(artifactTypeConfigs.get(1), is(new BuildArtifactConfig("blah2", "something2")));
    }
    // With a plugin id present, values are taken as plain text; metadata for
    // the plugin comes from the ArtifactMetadataStore set up via mocks.
    @Test
    public void setConfigAttributes_shouldSetExternalArtifactWithPlainTextValuesIfPluginIdIsProvided() {
        ArtifactPluginInfo artifactPluginInfo = mock(ArtifactPluginInfo.class);
        PluginDescriptor pluginDescriptor = mock(PluginDescriptor.class);
        when(artifactPluginInfo.getDescriptor()).thenReturn(pluginDescriptor);
        when(pluginDescriptor.id()).thenReturn("cd.go.artifact.foo");
        PluginConfiguration image = new PluginConfiguration("Image", new Metadata(true, true));
        PluginConfiguration tag = new PluginConfiguration("Tag", new Metadata(true, false));
        ArrayList<PluginConfiguration> pluginMetadata = new ArrayList<>();
        pluginMetadata.add(image);
        pluginMetadata.add(tag);
        when(artifactPluginInfo.getArtifactConfigSettings()).thenReturn(new PluggableInstanceSettings(pluginMetadata));
        ArtifactMetadataStore.instance().setPluginInfo(artifactPluginInfo);
        HashMap<Object, Object> configurationMap1 = new HashMap<>();
        configurationMap1.put("Image", "gocd/gocd-server");
        configurationMap1.put("Tag", "v18.6.0");
        HashMap<String, Object> artifactPlan1 = new HashMap<>();
        artifactPlan1.put("artifactTypeValue", "Pluggable Artifact");
        artifactPlan1.put("id", "artifactId");
        artifactPlan1.put("storeId", "storeId");
        artifactPlan1.put("pluginId", "cd.go.artifact.foo");
        artifactPlan1.put("configuration", configurationMap1);
        List<Map> artifactPlansList = new ArrayList<>();
        artifactPlansList.add(artifactPlan1);
        ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
        artifactTypeConfigs.setConfigAttributes(artifactPlansList);
        assertThat(artifactTypeConfigs.size(), is(1));
        PluggableArtifactConfig artifactConfig = (PluggableArtifactConfig) artifactTypeConfigs.get(0);
        assertThat(artifactConfig.getArtifactType(), is(ArtifactType.external));
        assertThat(artifactConfig.getId(), is("artifactId"));
        assertThat(artifactConfig.getStoreId(), is("storeId"));
        assertThat(artifactConfig.getConfiguration().getProperty("Image").isSecure(), is(false));
    }
    // Without a plugin id, pre-encrypted values are carried over untouched.
    @Test
    public void setConfigAttributes_shouldSetConfigurationAsIsIfPluginIdIsBlank() throws CryptoException {
        HashMap<Object, Object> imageMap = new HashMap<>();
        imageMap.put("value", new GoCipher().encrypt("some-encrypted-value"));
        imageMap.put("isSecure", "true");
        HashMap<Object, Object> tagMap = new HashMap<>();
        tagMap.put("value", "18.6.0");
        tagMap.put("isSecure", "false");
        HashMap<Object, Object> configurationMap1 = new HashMap<>();
        configurationMap1.put("Image", imageMap);
        configurationMap1.put("Tag", tagMap);
        HashMap<String, Object> artifactPlan1 = new HashMap<>();
        artifactPlan1.put("artifactTypeValue", "Pluggable Artifact");
        artifactPlan1.put("id", "artifactId");
        artifactPlan1.put("storeId", "storeId");
        artifactPlan1.put("pluginId", "");
        artifactPlan1.put("configuration", configurationMap1);
        List<Map> artifactPlansList = new ArrayList<>();
        artifactPlansList.add(artifactPlan1);
        ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
        artifactTypeConfigs.setConfigAttributes(artifactPlansList);
        assertThat(artifactTypeConfigs.size(), is(1));
        PluggableArtifactConfig artifactConfig = (PluggableArtifactConfig) artifactTypeConfigs.get(0);
        assertThat(artifactConfig.getArtifactType(), is(ArtifactType.external));
        assertThat(artifactConfig.getId(), is("artifactId"));
        assertThat(artifactConfig.getStoreId(), is("storeId"));
        assertThat(artifactConfig.getConfiguration().getProperty("Image").getValue(), is("some-encrypted-value"));
        assertThat(artifactConfig.getConfiguration().getProperty("Tag").getValue(), is("18.6.0"));
    }
    // Passing null attributes clears the collection.
    @Test
    public void shouldClearAllArtifactsWhenTheMapIsNull() {
        ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
        artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
        artifactTypeConfigs.setConfigAttributes(null);
        assertThat(artifactTypeConfigs.size(), is(0));
    }
    // validateTree flags both duplicate src/dest pairs and illegal dest paths.
    @Test
    public void shouldValidateTree() {
        ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs();
        artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
        artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
        artifactTypeConfigs.add(new BuildArtifactConfig("src", "../a"));
        artifactTypeConfigs.validateTree(null);
        assertThat(artifactTypeConfigs.get(0).errors().on(BuiltinArtifactConfig.DEST), is("Duplicate artifacts defined."));
        assertThat(artifactTypeConfigs.get(0).errors().on(BuiltinArtifactConfig.SRC), is("Duplicate artifacts defined."));
        assertThat(artifactTypeConfigs.get(1).errors().on(BuiltinArtifactConfig.DEST), is("Duplicate artifacts defined."));
        assertThat(artifactTypeConfigs.get(1).errors().on(BuiltinArtifactConfig.SRC), is("Duplicate artifacts defined."));
        assertThat(artifactTypeConfigs.get(2).errors().on(BuiltinArtifactConfig.DEST), is("Invalid destination path. Destination path should match the pattern " + FilePathTypeValidator.PATH_PATTERN));
    }
    // validate() marks every member of a duplicate group, not just the extras.
    @Test
    public void shouldErrorOutWhenDuplicateArtifactConfigExists() {
        final ArtifactTypeConfigs artifactTypeConfigs = new ArtifactTypeConfigs(new BuildArtifactConfig("src", "dest"));
        artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
        artifactTypeConfigs.add(new BuildArtifactConfig("src", "dest"));
        artifactTypeConfigs.validate(null);
        assertFalse(artifactTypeConfigs.get(0).errors().isEmpty());
        assertThat(artifactTypeConfigs.get(0).errors().on(BuiltinArtifactConfig.SRC), Matchers.is("Duplicate artifacts defined."));
        assertThat(artifactTypeConfigs.get(0).errors().on(BuiltinArtifactConfig.DEST), Matchers.is("Duplicate artifacts defined."));
        assertFalse(artifactTypeConfigs.get(1).errors().isEmpty());
        assertThat(artifactTypeConfigs.get(1).errors().on(BuiltinArtifactConfig.SRC), Matchers.is("Duplicate artifacts defined."));
        assertThat(artifactTypeConfigs.get(1).errors().on(BuiltinArtifactConfig.DEST), Matchers.is("Duplicate artifacts defined."));
        assertFalse(artifactTypeConfigs.get(2).errors().isEmpty());
        assertThat(artifactTypeConfigs.get(2).errors().on(BuiltinArtifactConfig.SRC), Matchers.is("Duplicate artifacts defined."));
        assertThat(artifactTypeConfigs.get(2).errors().on(BuiltinArtifactConfig.DEST), Matchers.is("Duplicate artifacts defined."));
    }
    // Filtering: only the built-in (non-pluggable) configs are returned.
    @Test
    public void getArtifactConfigs_shouldReturnBuiltinArtifactConfigs() {
        ArtifactTypeConfigs allConfigs = new ArtifactTypeConfigs();
        allConfigs.add(new BuildArtifactConfig("src", "dest"));
        allConfigs.add(new BuildArtifactConfig("java", null));
        allConfigs.add(new PluggableArtifactConfig("s3", "cd.go.s3"));
        allConfigs.add(new PluggableArtifactConfig("docker", "cd.go.docker"));
        final List<BuiltinArtifactConfig> artifactConfigs = allConfigs.getBuiltInArtifactConfigs();
        assertThat(artifactConfigs, hasSize(2));
        assertThat(artifactConfigs, containsInAnyOrder(
                new BuildArtifactConfig("src", "dest"),
                new BuildArtifactConfig("java", null)
        ));
    }
    // Filtering: only the pluggable configs are returned.
    @Test
    public void getPluggableArtifactConfigs_shouldReturnPluggableArtifactConfigs() {
        ArtifactTypeConfigs allConfigs = new ArtifactTypeConfigs();
        allConfigs.add(new BuildArtifactConfig("src", "dest"));
        allConfigs.add(new BuildArtifactConfig("java", null));
        allConfigs.add(new PluggableArtifactConfig("s3", "cd.go.s3"));
        allConfigs.add(new PluggableArtifactConfig("docker", "cd.go.docker"));
        final List<PluggableArtifactConfig> artifactConfigs = allConfigs.getPluggableArtifactConfigs();
        assertThat(artifactConfigs, hasSize(2));
        assertThat(artifactConfigs, containsInAnyOrder(
                new PluggableArtifactConfig("s3", "cd.go.s3"),
                new PluggableArtifactConfig("docker", "cd.go.docker")
        ));
    }
    // Lookup by artifact id: hit.
    @Test
    public void findByArtifactId_shouldReturnPluggableArtifactConfigs() {
        ArtifactTypeConfigs allConfigs = new ArtifactTypeConfigs();
        allConfigs.add(new PluggableArtifactConfig("s3", "cd.go.s3"));
        allConfigs.add(new PluggableArtifactConfig("docker", "cd.go.docker"));
        final PluggableArtifactConfig s3 = allConfigs.findByArtifactId("s3");
        assertThat(s3, is(new PluggableArtifactConfig("s3", "cd.go.s3")));
    }
    // Lookup by artifact id: miss returns null.
    @Test
    public void findByArtifactId_shouldReturnNullWhenPluggableArtifactConfigNotExistWithGivenId() {
        ArtifactTypeConfigs allConfigs = new ArtifactTypeConfigs();
        allConfigs.add(new PluggableArtifactConfig("s3", "cd.go.s3"));
        allConfigs.add(new PluggableArtifactConfig("docker", "cd.go.docker"));
        final PluggableArtifactConfig s3 = allConfigs.findByArtifactId("foo");
        assertNull(s3);
    }
}
| gocd/gocd | config/config-api/src/test/java/com/thoughtworks/go/config/ArtifactTypeConfigsTest.java | Java | apache-2.0 | 14,363 |
<?php
/*
* This file is part of the Doctrine\OrientDB package.
*
* (c) Alessandro Nadalin <alessandro.nadalin@gmail.com>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/**
* Class Updates
*
* @package Doctrine\OrientDB
* @subpackage Formatter
* @author Alessandro Nadalin <alessandro.nadalin@gmail.com>
*/
namespace Doctrine\OrientDB\Query\Formatter\Query;
use Doctrine\OrientDB\Query\Formatter\Query;
class Updates extends Query implements TokenInterface
{
    /**
     * Formats an associative array of field => value pairs into the
     * SET-clause fragment of an OrientDB UPDATE statement,
     * e.g. ' name = "foo", count = 3'.
     *
     * Field names are sanitized with stripNonSQLCharacters(); fields whose
     * sanitized name is empty are skipped. NULL, booleans, numbers and
     * arrays (e.g. lists of RIDs, joined verbatim and unquoted) are rendered
     * as literals; everything else is double-quoted and slash-escaped.
     *
     * Fix: the previous implementation trimmed the trailing comma with
     * substr($string, 0, strlen($string) - 1), which returns FALSE instead
     * of '' on PHP < 8 when no valid field remains. Joining with implode()
     * always yields a string.
     *
     * @param  array $values
     * @return string the formatted assignments, or '' when none remain
     */
    public static function format(array $values)
    {
        $assignments = array();

        foreach ($values as $key => $value) {
            $key = self::stripNonSQLCharacters($key);

            if (!$key) {
                continue;
            }

            if ($value === null) {
                $value = 'NULL';
            } elseif (is_bool($value)) {
                $value = $value ? 'TRUE' : 'FALSE';
            } elseif (is_array($value)) {
                // Array elements are joined verbatim, unquoted (RID lists).
                $value = '[' . implode(',', $value) . ']';
            } elseif (!is_int($value) && !is_float($value)) {
                // Strings (and anything else) are quoted and slash-escaped;
                // ints and floats pass through untouched.
                $value = '"' . addslashes($value) . '"';
            }

            $assignments[] = " $key = $value";
        }

        return implode(',', $assignments);
    }
}
| spartaksun/orientdb-query | src/Formatter/Query/Updates.php | PHP | apache-2.0 | 1,400 |
/*
* Copyright 2015-2021 The OpenZipkin Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/* eslint-disable no-shadow */
import { faPlus } from '@fortawesome/free-solid-svg-icons';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import {
Box,
Button,
Theme,
createStyles,
makeStyles,
} from '@material-ui/core';
import React, { useCallback, useEffect, useRef, useState } from 'react';
import { connect } from 'react-redux';
import { ThunkDispatch } from 'redux-thunk';
import Criterion, { newCriterion } from '../Criterion';
import CriterionBox from './CriterionBox';
import { loadAutocompleteValues } from '../../../slices/autocompleteValuesSlice';
import { loadRemoteServices } from '../../../slices/remoteServicesSlice';
import { loadSpans } from '../../../slices/spansSlice';
import { RootState } from '../../../store';
// Styles for the trailing "+" button that appends a new, empty criterion.
const useStyles = makeStyles((theme: Theme) =>
  createStyles({
    addButton: {
      height: 40,
      width: 40,
      minWidth: 40,
      color: theme.palette.common.white,
    },
  }),
);
type SearchBarProps = {
  // Runs the trace search with the current criteria.
  searchTraces: () => void;
  // The criteria list is controlled by the parent; onChange reports edits.
  criteria: Criterion[];
  onChange: (criteria: Criterion[]) => void;
  // Suggestion data and loading flags (mapped from the Redux store).
  serviceNames: string[];
  isLoadingServiceNames: boolean;
  spanNames: string[];
  isLoadingSpanNames: boolean;
  remoteServiceNames: string[];
  isLoadingRemoteServiceNames: boolean;
  autocompleteKeys: string[];
  autocompleteValues: string[];
  isLoadingAutocompleteValues: boolean;
  // Thunks that refresh the suggestion lists (mapped from dispatch).
  loadRemoteServices: (serviceName: string) => void;
  loadSpans: (serviceName: string) => void;
  loadAutocompleteValues: (autocompleteKey: string) => void;
};
// Renders the row of editable criterion chips plus the "+" button, and wires
// keyboard handling so that pressing Enter (while no criterion is focused)
// triggers the search.
export const SearchBarImpl: React.FC<SearchBarProps> = ({
  searchTraces,
  criteria,
  onChange,
  serviceNames,
  isLoadingServiceNames,
  spanNames,
  isLoadingSpanNames,
  remoteServiceNames,
  isLoadingRemoteServiceNames,
  autocompleteKeys,
  autocompleteValues,
  isLoadingAutocompleteValues,
  loadRemoteServices,
  loadSpans,
  loadAutocompleteValues,
}) => {
  const classes = useStyles();
  // criterionIndex is the index of the criterion currently being edited.
  // If the value is -1, there is no criterion being edited.
  const [criterionIndex, setCriterionIndex] = useState(-1);
  const handleCriterionFocus = (index: number) => {
    setCriterionIndex(index);
  };
  // Replace the criterion at `index` with its edited value.
  const handleCriterionChange = (index: number, criterion: Criterion) => {
    const newCriteria = [...criteria];
    newCriteria[index] = criterion;
    onChange(newCriteria);
  };
  const handleCriterionBlur = () => {
    setCriterionIndex(-1);
  };
  // Remove the criterion at `index` and clear the editing state.
  const handleCriterionDelete = (index: number) => {
    const newCriteria = criteria.filter((_, i) => i !== index);
    onChange(newCriteria);
    setCriterionIndex(-1);
  };
  // When a criterion's value is committed: if it was the last one, append a
  // fresh empty criterion and move focus to it; otherwise just drop focus.
  const handleCriterionDecide = (index: number) => {
    if (index === criteria.length - 1) {
      const newCriteria = [...criteria];
      newCriteria.push(newCriterion('', ''));
      onChange(newCriteria);
      const nextCriterionIndex = criteria.length;
      setCriterionIndex(nextCriterionIndex);
    } else {
      setCriterionIndex(-1);
    }
  };
  // The "+" button appends an empty criterion and focuses it.
  const handleAddButtonClick = useCallback(() => {
    const newCriteria = [...criteria];
    newCriteria.push(newCriterion('', ''));
    onChange(newCriteria);
    const nextCriterionIndex = criteria.length;
    setCriterionIndex(nextCriterionIndex);
  }, [criteria, onChange]);
  // When the selected serviceName changes, refresh the span and
  // remote-service suggestion lists for that service.
  const prevServiceName = useRef('');
  useEffect(() => {
    const criterion = criteria.find(
      // eslint-disable-next-line no-shadow
      (criterion) => criterion.key === 'serviceName',
    );
    const serviceName = criterion ? criterion.value : '';
    if (serviceName !== prevServiceName.current) {
      prevServiceName.current = serviceName;
      loadSpans(serviceName);
      loadRemoteServices(serviceName);
    }
  }, [criteria, loadSpans, loadRemoteServices]);
  // Search for traces if not all criterions are in focus
  // and the Enter key is pressed.
  // Use ref to use the latest criterionIndex state in the callback.
  const isFocusedRef = useRef(false);
  isFocusedRef.current = criterionIndex !== -1;
  const handleKeyDown = useCallback(
    (event: KeyboardEvent) => {
      // Use setTimeout to ensure that the callback is called
      // after the criterionIndex has been updated.
      setTimeout(() => {
        if (!document.activeElement) {
          return;
        }
        if (
          !isFocusedRef.current &&
          document.activeElement.tagName === 'BODY' &&
          event.key === 'Enter'
        ) {
          searchTraces();
        }
      }, 0); // Maybe 0 is enough.
    },
    [searchTraces],
  );
  // Global listener: registered on mount, removed on unmount (and whenever
  // handleKeyDown changes identity).
  useEffect(() => {
    window.addEventListener('keydown', handleKeyDown);
    return () => {
      window.removeEventListener('keydown', handleKeyDown);
    };
  }, [handleKeyDown]);
  return (
    <Box
      minHeight={60}
      display="flex"
      alignItems="center"
      pr={2}
      pl={2}
      pt={1}
      pb={1}
      borderRadius={3}
      bgcolor="background.paper"
      flexWrap="wrap"
      borderColor="grey.400"
      border={1}
    >
      {criteria.map((criterion, index) => (
        <CriterionBox
          key={criterion.id}
          criteria={criteria}
          criterion={criterion}
          criterionIndex={index}
          serviceNames={serviceNames}
          remoteServiceNames={remoteServiceNames}
          spanNames={spanNames}
          autocompleteKeys={autocompleteKeys}
          autocompleteValues={autocompleteValues}
          isLoadingServiceNames={isLoadingServiceNames}
          isLoadingRemoteServiceNames={isLoadingRemoteServiceNames}
          isLoadingSpanNames={isLoadingSpanNames}
          isLoadingAutocompleteValues={isLoadingAutocompleteValues}
          isFocused={index === criterionIndex}
          onFocus={handleCriterionFocus}
          onBlur={handleCriterionBlur}
          onDecide={handleCriterionDecide}
          onChange={handleCriterionChange}
          onDelete={handleCriterionDelete}
          loadAutocompleteValues={loadAutocompleteValues}
        />
      ))}
      <Button
        color="secondary"
        variant="contained"
        onClick={handleAddButtonClick}
        className={classes.addButton}
        data-testid="add-button"
      >
        <FontAwesomeIcon icon={faPlus} size="lg" />
      </Button>
    </Box>
  );
};
// For unit testing, `connect` is easier to use than
// useSelector or useDispatch hooks.
// Expose the suggestion slices of the store (values + loading flags) as props.
const mapStateToProps = (state: RootState) => ({
  serviceNames: state.services.services,
  isLoadingServiceNames: state.services.isLoading,
  spanNames: state.spans.spans,
  isLoadingSpanNames: state.spans.isLoading,
  remoteServiceNames: state.remoteServices.remoteServices,
  isLoadingRemoteServiceNames: state.remoteServices.isLoading,
  autocompleteKeys: state.autocompleteKeys.autocompleteKeys,
  autocompleteValues: state.autocompleteValues.autocompleteValues,
  isLoadingAutocompleteValues: state.autocompleteValues.isLoading,
});
// TODO: Give the appropriate type to ThunkDispatch after TypeScriptizing all action creators.
// Wraps the thunk action creators so the SearchBar can request loading of
// remote services / spans for a service, and autocomplete values for a key.
const mapDispatchToProps = (
  dispatch: ThunkDispatch<RootState, undefined, any>,
) => ({
  // Fetch the remote services called by `serviceName`.
  loadRemoteServices: (serviceName: string) => {
    dispatch(loadRemoteServices(serviceName));
  },
  // Fetch the span names recorded by `serviceName`.
  loadSpans: (serviceName: string) => {
    dispatch(loadSpans(serviceName));
  },
  // Fetch the candidate values for the given autocomplete (tag) key.
  loadAutocompleteValues: (autocompleteKey: string) => {
    dispatch(loadAutocompleteValues(autocompleteKey));
  },
});
export default connect(mapStateToProps, mapDispatchToProps)(SearchBarImpl);
| openzipkin/zipkin | zipkin-lens/src/components/DiscoverPage/SearchBar/SearchBar.tsx | TypeScript | apache-2.0 | 8,051 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.ml.action;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction;
import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction.Request;
import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction.Response;
import org.elasticsearch.xpack.core.ml.job.messages.Messages;
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper;
import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
 * Transport action backing the Get Trained Models API.  Execution happens in
 * two phases: first the requested resource-id expression is expanded into the
 * set of matching model ids, then the matching model configurations are
 * fetched from the {@link TrainedModelProvider} and returned.
 */
public class TransportGetTrainedModelsAction extends HandledTransportAction<Request, Response> {
    // Persistence layer used both to expand id expressions and to load configs.
    private final TrainedModelProvider provider;
    @Inject
    public TransportGetTrainedModelsAction(TransportService transportService,
                                           ActionFilters actionFilters,
                                           TrainedModelProvider trainedModelProvider) {
        super(GetTrainedModelsAction.NAME, transportService, actionFilters, GetTrainedModelsAction.Request::new);
        this.provider = trainedModelProvider;
    }
    @Override
    protected void doExecute(Task task, Request request, ActionListener<Response> listener) {
        Response.Builder responseBuilder = Response.builder();
        // Phase 2: invoked with (totalHitCount, expandedIds) once id expansion
        // completes; loads the configs and completes the response.
        ActionListener<Tuple<Long, Set<String>>> idExpansionListener = ActionListener.wrap(
            totalAndIds -> {
                responseBuilder.setTotalCount(totalAndIds.v1());
                // Nothing matched: respond with just the total count.
                if (totalAndIds.v2().isEmpty()) {
                    listener.onResponse(responseBuilder.build());
                    return;
                }
                // Full model definitions are large, so requesting the
                // definition is only allowed for a single model at a time.
                if (request.getIncludes().isIncludeModelDefinition() && totalAndIds.v2().size() > 1) {
                    listener.onFailure(
                        ExceptionsHelper.badRequestException(Messages.INFERENCE_TOO_MANY_DEFINITIONS_REQUESTED)
                    );
                    return;
                }
                if (request.getIncludes().isIncludeModelDefinition()) {
                    // Exactly one id (checked above): fetch it with its definition.
                    provider.getTrainedModel(
                        totalAndIds.v2().iterator().next(),
                        request.getIncludes(),
                        ActionListener.wrap(
                            config -> listener.onResponse(responseBuilder.setModels(Collections.singletonList(config)).build()),
                            listener::onFailure
                        )
                    );
                } else {
                    // Bulk fetch of the (definition-less) configurations.
                    provider.getTrainedModels(
                        totalAndIds.v2(),
                        request.getIncludes(),
                        request.isAllowNoResources(),
                        ActionListener.wrap(
                            configs -> listener.onResponse(responseBuilder.setModels(configs).build()),
                            listener::onFailure
                        )
                    );
                }
            },
            listener::onFailure
        );
        // Phase 1: expand the id expression (may contain wildcards/tags).
        provider.expandIds(request.getResourceId(),
            request.isAllowNoResources(),
            request.getPageParams(),
            new HashSet<>(request.getTags()),
            idExpansionListener);
    }
}
| nknize/elasticsearch | x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java | Java | apache-2.0 | 3,814 |
// Copyright 2004, 2005, 2006 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.ioc.test.internal.services;
/**
 * Test fixture interface for Tapestry IoC's pipeline tests: a filter that
 * wraps calls to a {@link MiddleService}.  What an implementation does with
 * the next service and the buffer is defined by the individual test fixtures.
 */
public interface MiddleFilter
{
    /**
     * Processes one step of the pipeline.
     *
     * @param count   count value passed along the pipeline
     * @param ch      character value passed along the pipeline
     * @param service the next {@link MiddleService} in the chain (the filter
     *                may delegate to it)
     * @param buffer  accumulator that pipeline output is written into
     */
    public void execute(int count, char ch, MiddleService service, StringBuilder buffer);
}
| apache/tapestry-5 | tapestry-ioc/src/test/java/org/apache/tapestry5/ioc/test/internal/services/MiddleFilter.java | Java | apache-2.0 | 802 |
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Shell commands module
module Shell
@@commands = {}
def self.commands
@@commands
end
@@command_groups = {}
def self.command_groups
@@command_groups
end
def self.load_command(name, group, aliases = [])
return if commands[name]
# Register command in the group
raise ArgumentError, "Unknown group: #{group}" unless command_groups[group]
command_groups[group][:commands] << name
# Load command
begin
require "shell/commands/#{name}"
klass_name = name.to_s.gsub(/(?:^|_)(.)/) { Regexp.last_match(1).upcase } # camelize
commands[name] = eval("Commands::#{klass_name}")
aliases.each do |an_alias|
commands[an_alias] = commands[name]
end
rescue => e
raise "Can't load hbase shell command: #{name}. Error: #{e}\n#{e.backtrace.join("\n")}"
end
end
def self.load_command_group(group, opts)
raise ArgumentError, "No :commands for group #{group}" unless opts[:commands]
command_groups[group] = {
commands: [],
command_names: opts[:commands],
full_name: opts[:full_name] || group,
comment: opts[:comment]
}
all_aliases = opts[:aliases] || {}
opts[:commands].each do |command|
aliases = all_aliases[command] || []
load_command(command, group, aliases)
end
end
#----------------------------------------------------------------------
class Shell
attr_accessor :hbase
attr_accessor :interactive
alias interactive? interactive
@debug = false
attr_accessor :debug
def initialize(hbase, interactive = true)
self.hbase = hbase
self.interactive = interactive
end
# Returns Admin class from admin.rb
def admin
@admin ||= hbase.admin
end
def hbase_taskmonitor
@hbase_taskmonitor ||= hbase.taskmonitor
end
def hbase_table(name)
hbase.table(name, self)
end
def hbase_replication_admin
@hbase_replication_admin ||= hbase.replication_admin
end
def hbase_security_admin
@hbase_security_admin ||= hbase.security_admin
end
def hbase_visibility_labels_admin
@hbase_visibility_labels_admin ||= hbase.visibility_labels_admin
end
def hbase_quotas_admin
@hbase_quotas_admin ||= hbase.quotas_admin
end
def hbase_rsgroup_admin
@rsgroup_admin ||= hbase.rsgroup_admin
end
def export_commands(where)
::Shell.commands.keys.each do |cmd|
# here where is the IRB namespace
# this method just adds the call to the specified command
# which just references back to 'this' shell object
# a decently extensible way to add commands
where.send :instance_eval, <<-EOF
def #{cmd}(*args)
ret = @shell.command('#{cmd}', *args)
puts
return ret
end
EOF
end
end
def command_instance(command)
::Shell.commands[command.to_s].new(self)
end
# call the method 'command' on the specified command
# If interactive is enabled, then we suppress the return value. The command should have
# printed relevant output.
# Return value is only useful in non-interactive mode, for e.g. tests.
def command(command, *args)
ret = internal_command(command, :command, *args)
if interactive
return nil
else
return ret
end
end
# call a specific internal method in the command instance
# command - name of the command to call
# method_name - name of the method on the command to call. Defaults to just 'command'
# args - to be passed to the named method
def internal_command(command, method_name = :command, *args)
command_instance(command).command_safe(debug, method_name, *args)
end
def print_banner
puts 'HBase Shell'
puts 'Use "help" to get list of supported commands.'
puts 'Use "exit" to quit this interactive shell.'
print 'Version '
command('version')
puts
end
def help_multi_command(command)
puts "Command: #{command}"
puts command_instance(command).help
puts
nil
end
def help_command(command)
puts command_instance(command).help
nil
end
def help_group(group_name)
group = ::Shell.command_groups[group_name.to_s]
group[:commands].sort.each { |cmd| help_multi_command(cmd) }
if group[:comment]
puts '-' * 80
puts
puts group[:comment]
puts
end
nil
end
def help(command = nil)
if command
return help_command(command) if ::Shell.commands[command.to_s]
return help_group(command) if ::Shell.command_groups[command.to_s]
puts "ERROR: Invalid command or command group name: #{command}"
puts
end
puts help_header
puts
puts 'COMMAND GROUPS:'
::Shell.command_groups.each do |name, group|
puts ' Group name: ' + name
puts ' Commands: ' + group[:command_names].sort.join(', ')
puts
end
unless command
puts 'SHELL USAGE:'
help_footer
end
nil
end
def help_header
"HBase Shell, version #{org.apache.hadoop.hbase.util.VersionInfo.getVersion}, " \
"r#{org.apache.hadoop.hbase.util.VersionInfo.getRevision}, " \
"#{org.apache.hadoop.hbase.util.VersionInfo.getDate}" + "\n" \
"Type 'help \"COMMAND\"', (e.g. 'help \"get\"' -- the quotes are necessary) for help on a specific command.\n" \
"Commands are grouped. Type 'help \"COMMAND_GROUP\"', (e.g. 'help \"general\"') for help on a command group."
end
def help_footer
puts <<-HERE
Quote all names in HBase Shell such as table and column names. Commas delimit
command parameters. Type <RETURN> after entering a command to run it.
Dictionaries of configuration used in the creation and alteration of tables are
Ruby Hashes. They look like this:
{'key1' => 'value1', 'key2' => 'value2', ...}
and are opened and closed with curley-braces. Key/values are delimited by the
'=>' character combination. Usually keys are predefined constants such as
NAME, VERSIONS, COMPRESSION, etc. Constants do not need to be quoted. Type
'Object.constants' to see a (messy) list of all constants in the environment.
If you are using binary keys or values and need to enter them in the shell, use
double-quote'd hexadecimal representation. For example:
hbase> get 't1', "key\\x03\\x3f\\xcd"
hbase> get 't1', "key\\003\\023\\011"
hbase> put 't1', "test\\xef\\xff", 'f1:', "\\x01\\x33\\x40"
The HBase shell is the (J)Ruby IRB with the above HBase-specific commands added.
For more on the HBase Shell, see http://hbase.apache.org/book.html
HERE
end
end
end
# Load commands base class
require 'shell/commands'
# Load all commands
# Registration table: declares every command group, the commands it contains,
# and any aliases.  Each entry triggers loading of shell/commands/<name>.rb.
Shell.load_command_group(
  'general',
  full_name: 'GENERAL HBASE SHELL COMMANDS',
  commands: %w[
    status
    version
    table_help
    whoami
    processlist
  ]
)
Shell.load_command_group(
  'ddl',
  full_name: 'TABLES MANAGEMENT COMMANDS',
  commands: %w[
    alter
    create
    describe
    disable
    disable_all
    is_disabled
    drop
    drop_all
    enable
    enable_all
    is_enabled
    exists
    list
    show_filters
    alter_status
    alter_async
    get_table
    locate_region
    list_regions
  ],
  aliases: {
    'describe' => ['desc']
  }
)
Shell.load_command_group(
  'namespace',
  full_name: 'NAMESPACE MANAGEMENT COMMANDS',
  commands: %w[
    create_namespace
    drop_namespace
    alter_namespace
    describe_namespace
    list_namespace
    list_namespace_tables
  ]
)
Shell.load_command_group(
  'dml',
  full_name: 'DATA MANIPULATION COMMANDS',
  commands: %w[
    count
    delete
    deleteall
    get
    get_counter
    incr
    put
    scan
    truncate
    truncate_preserve
    append
    get_splits
  ]
)
Shell.load_command_group(
  'tools',
  full_name: 'HBASE SURGERY TOOLS',
  comment: "WARNING: Above commands are for 'experts'-only as misuse can damage an install",
  commands: %w[
    assign
    balancer
    balance_switch
    balancer_enabled
    normalize
    normalizer_switch
    normalizer_enabled
    close_region
    compact
    flush
    major_compact
    move
    split
    merge_region
    unassign
    zk_dump
    wal_roll
    catalogjanitor_run
    catalogjanitor_switch
    catalogjanitor_enabled
    cleaner_chore_run
    cleaner_chore_switch
    cleaner_chore_enabled
    compact_rs
    compaction_state
    trace
    splitormerge_switch
    splitormerge_enabled
    clear_compaction_queues
    list_deadservers
    clear_deadservers
  ],
  # TODO: remove older hlog_roll command
  aliases: {
    'wal_roll' => ['hlog_roll']
  }
)
Shell.load_command_group(
  'replication',
  full_name: 'CLUSTER REPLICATION TOOLS',
  commands: %w[
    add_peer
    remove_peer
    list_peers
    enable_peer
    disable_peer
    set_peer_namespaces
    append_peer_namespaces
    remove_peer_namespaces
    show_peer_tableCFs
    set_peer_tableCFs
    set_peer_bandwidth
    list_replicated_tables
    append_peer_tableCFs
    remove_peer_tableCFs
    enable_table_replication
    disable_table_replication
    get_peer_config
    list_peer_configs
    update_peer_config
  ]
)
Shell.load_command_group(
  'snapshots',
  full_name: 'CLUSTER SNAPSHOT TOOLS',
  commands: %w[
    snapshot
    clone_snapshot
    restore_snapshot
    delete_snapshot
    delete_all_snapshot
    delete_table_snapshots
    list_snapshots
    list_table_snapshots
  ]
)
Shell.load_command_group(
  'configuration',
  full_name: 'ONLINE CONFIGURATION TOOLS',
  commands: %w[
    update_config
    update_all_config
  ]
)
Shell.load_command_group(
  'quotas',
  full_name: 'CLUSTER QUOTAS TOOLS',
  commands: %w[
    set_quota
    list_quotas
    list_quota_table_sizes
    list_quota_snapshots
    list_snapshot_sizes
  ]
)
Shell.load_command_group(
  'security',
  full_name: 'SECURITY TOOLS',
  comment: 'NOTE: Above commands are only applicable if running with the AccessController coprocessor',
  commands: %w[
    list_security_capabilities
    grant
    revoke
    user_permission
  ]
)
Shell.load_command_group(
  'procedures',
  full_name: 'PROCEDURES & LOCKS MANAGEMENT',
  commands: %w[
    abort_procedure
    list_procedures
    list_locks
  ]
)
Shell.load_command_group(
  'visibility labels',
  full_name: 'VISIBILITY LABEL TOOLS',
  comment: 'NOTE: Above commands are only applicable if running with the VisibilityController coprocessor',
  commands: %w[
    add_labels
    list_labels
    set_auths
    get_auths
    clear_auths
    set_visibility
  ]
)
Shell.load_command_group(
  'rsgroup',
  full_name: 'RSGroups',
  comment: "NOTE: The rsgroup Coprocessor Endpoint must be enabled on the Master else commands fail with:
  UnknownProtocolException: No registered Master Coprocessor Endpoint found for RSGroupAdminService",
  commands: %w[
    list_rsgroups
    get_rsgroup
    add_rsgroup
    remove_rsgroup
    balance_rsgroup
    move_servers_rsgroup
    move_tables_rsgroup
    move_servers_tables_rsgroup
    get_server_rsgroup
    get_table_rsgroup
  ]
)
| JingchengDu/hbase | hbase-shell/src/main/ruby/shell.rb | Ruby | apache-2.0 | 11,986 |
package io.cattle.platform.process.agent;
import io.cattle.platform.agent.util.AgentUtils;
import io.cattle.platform.core.constants.StoragePoolConstants;
import io.cattle.platform.core.model.Account;
import io.cattle.platform.core.model.Agent;
import io.cattle.platform.core.model.StoragePool;
import io.cattle.platform.engine.handler.HandlerResult;
import io.cattle.platform.engine.process.ProcessInstance;
import io.cattle.platform.engine.process.ProcessState;
import io.cattle.platform.process.common.handler.AbstractObjectProcessHandler;
import io.github.ibuildthecloud.gdapi.factory.SchemaFactory;
import javax.inject.Inject;
import javax.inject.Named;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Process handler for the "agent.remove" process: walks every resource type
 * listed in {@link AgentUtils#AGENT_RESOURCES} that is owned by the agent,
 * deactivates it and schedules its removal, then does the same for the
 * agent's account.  Shared storage pools are deliberately left alone.
 */
@Named
public class AgentRemove extends AbstractObjectProcessHandler {
    private static final Logger log = LoggerFactory.getLogger(AgentRemove.class);
    @Inject
    @Named("CoreSchemaFactory")
    SchemaFactory schemaFactory;
    @Override
    public String[] getProcessNames() {
        return new String[] {"agent.remove"};
    }
    @Override
    public HandlerResult handle(ProcessState state, ProcessInstance process) {
        Agent agent = (Agent)state.getResource();
        // AGENT_RESOURCES is a dynamic property: resource type names are
        // resolved to schema classes at runtime.
        for (String type : AgentUtils.AGENT_RESOURCES.get()) {
            Class<?> clz = schemaFactory.getSchemaClass(type);
            if (clz == null) {
                // Log and keep going; one bad type shouldn't abort the cleanup.
                log.error("Failed to find class for [{}]", type);
                continue;
            }
            for (Object obj : objectManager.children(agent, clz)) {
                if (obj instanceof StoragePool) {
                    StoragePool sp = (StoragePool)obj;
                    if (StoragePoolConstants.TYPE.equals(sp.getKind())) {
                        // Don't automatically delete shared storage pools
                        continue;
                    }
                }
                // deactivateThenScheduleRemove is inherited from the base
                // handler (not visible here); presumably deactivates the
                // resource then schedules a remove process for it.
                deactivateThenScheduleRemove(obj, state.getData());
            }
        }
        // Finally tear down the account that backs this agent.
        deactivateThenScheduleRemove(objectManager.loadResource(Account.class, agent.getAccountId()), state.getData());
        return null;
    }
}
| wlan0/cattle | code/iaas/logic/src/main/java/io/cattle/platform/process/agent/AgentRemove.java | Java | apache-2.0 | 2,106 |
/**
*
*/
package org.hamster.weixinmp.model.menu;
import java.util.List;
import org.hamster.weixinmp.dao.entity.menu.WxMenuBtnEntity;
/**
* @author honey.zhao@aliyun.com
* @version Aug 4, 2013
*
*/
public class WxMenuCreateJson {
    /** Top-level menu buttons; serialized as the "button" field of the payload. */
    private List<WxMenuBtnEntity> button;
    public List<WxMenuBtnEntity> getButton() {
        return button;
    }
    public void setButton(List<WxMenuBtnEntity> button) {
        this.button = button;
    }
    /** No-arg constructor (kept for bean/JSON (de)serialization tooling). */
    public WxMenuCreateJson() {
        super();
    }
    public WxMenuCreateJson(List<WxMenuBtnEntity> button) {
        super();
        this.button = button;
    }
}
| Wingo7239/WeixinMultiPlatform | src/main/java/org/hamster/weixinmp/model/menu/WxMenuCreateJson.java | Java | apache-2.0 | 617 |
#
# actions.py: routines that actually run the svn client.
#
# Subversion is a tool for revision control.
# See http://subversion.tigris.org for more information.
#
# ====================================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
######################################################################
import os, shutil, re, sys, errno
import difflib, pprint
import xml.parsers.expat
from xml.dom.minidom import parseString
import svntest
from svntest import main, verify, tree, wc
from svntest import Failure
def no_sleep_for_timestamps():
  """Tell svn to skip its sleep-for-timestamps safeguard for this test run."""
  key = 'SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS'
  os.environ[key] = 'yes'
def do_sleep_for_timestamps():
  """Re-enable svn's sleep-for-timestamps safeguard."""
  key = 'SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS'
  os.environ[key] = 'no'
def no_relocate_validation():
  """Tell svn to skip relocate validation for this test run."""
  key = 'SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_RELOCATE_VALIDATION'
  os.environ[key] = 'yes'
def do_relocate_validation():
  """Re-enable svn's relocate validation."""
  key = 'SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_RELOCATE_VALIDATION'
  os.environ[key] = 'no'
def setup_pristine_greek_repository():
  """Create the pristine repository and 'svn import' the greek tree"""
  # these directories don't exist out of the box, so we may have to create them
  if not os.path.exists(main.general_wc_dir):
    os.makedirs(main.general_wc_dir)
  if not os.path.exists(main.general_repo_dir):
    os.makedirs(main.general_repo_dir) # this also creates all the intermediate dirs
  # If there's no pristine repos, create one.
  if not os.path.exists(main.pristine_greek_repos_dir):
    main.create_repos(main.pristine_greek_repos_dir)
    # if this is dav, gives us access rights to import the greek tree.
    if main.is_ra_type_dav():
      authz_file = os.path.join(main.work_dir, "authz")
      main.file_write(authz_file, "[/]\n* = rw\n")
    # dump the greek tree to disk.
    main.greek_state.write_to_disk(main.greek_dump_dir)
    # import the greek tree, using l:foo/p:bar
    ### todo: svn should not be prompting for auth info when using
    ### repositories with no auth/auth requirements
    exit_code, output, errput = main.run_svn(None, 'import', '-m',
                                             'Log message for revision 1.',
                                             main.greek_dump_dir,
                                             main.pristine_greek_repos_url)
    # check for any errors from the import
    if len(errput):
      display_lines("Errors during initial 'svn import':",
                    'STDERR', None, errput)
      sys.exit(1)
    # verify the printed output of 'svn import'.
    lastline = output.pop().strip()
    # The final line reports the committed revision; anything else means
    # the import did not complete.
    match = re.search("(Committed|Imported) revision [0-9]+.", lastline)
    if not match:
      print("ERROR: import did not succeed, while creating greek repos.")
      print("The final line from 'svn import' was:")
      print(lastline)
      sys.exit(1)
    output_tree = wc.State.from_commit(output)
    expected_output_tree = main.greek_state.copy(main.greek_dump_dir)
    expected_output_tree.tweak(verb='Adding',
                               contents=None)
    try:
      expected_output_tree.compare_and_display('output', output_tree)
    except tree.SVNTreeUnequal:
      verify.display_trees("ERROR: output of import command is unexpected.",
                           "OUTPUT TREE",
                           expected_output_tree.old_tree(),
                           output_tree.old_tree())
      sys.exit(1)
    # Finally, disallow any changes to the "pristine" repos.
    error_msg = "Don't modify the pristine repository"
    create_failing_hook(main.pristine_greek_repos_dir, 'start-commit', error_msg)
    create_failing_hook(main.pristine_greek_repos_dir, 'pre-lock', error_msg)
    create_failing_hook(main.pristine_greek_repos_dir, 'pre-revprop-change', error_msg)
######################################################################
def guarantee_empty_repository(path):
  """Guarantee that a local svn repository exists at PATH, containing
  nothing."""
  # Refuse to clobber the shared pristine repository.
  if path == main.pristine_greek_repos_dir:
    print("ERROR: attempt to overwrite the pristine repos!  Aborting.")
    sys.exit(1)
  # create an empty repository at PATH.
  main.safe_rmtree(path)
  main.create_repos(path)
# Used by every test, so that they can run independently of one
# another. Every time this routine is called, it recursively copies
# the `pristine repos' to a new location.
# Note: make sure setup_pristine_greek_repository was called once before
# using this function.
def guarantee_greek_repository(path):
  """Guarantee that a local svn repository exists at PATH, containing
  nothing but the greek-tree at revision 1."""
  # Refuse to clobber the shared pristine repository.
  if path == main.pristine_greek_repos_dir:
    print("ERROR: attempt to overwrite the pristine repos!  Aborting.")
    sys.exit(1)
  # copy the pristine repository to PATH.
  main.safe_rmtree(path)
  if main.copy_repos(main.pristine_greek_repos_dir, path, 1):
    print("ERROR:  copying repository failed.")
    sys.exit(1)
  # make the repos world-writeable, for mod_dav_svn's sake.
  main.chmod_tree(path, 0666, 0666)
def run_and_verify_atomic_ra_revprop_change(message,
                                            expected_stdout,
                                            expected_stderr,
                                            expected_exit,
                                            url, revision, propname,
                                            old_propval, propval,
                                            want_error):
  """Run atomic-ra-revprop-change helper and check its output and exit code.
  Transforms OLD_PROPVAL and PROPVAL into a skel.
  For HTTP, the default HTTP library is used."""
  KEY_OLD_PROPVAL = "old_value_p"
  KEY_NEW_PROPVAL = "value"
  # A skel atom is "<length> <bytes>"; a proplist part is only emitted when
  # its value is not None, so absent values simply disappear from the skel.
  def skel_make_atom(word):
    return "%d %s" % (len(word), word)
  def make_proplist_skel_part(nick, val):
    if val is None:
      return ""
    else:
      return "%s %s" % (skel_make_atom(nick), skel_make_atom(val))
  skel = "( %s %s )" % (make_proplist_skel_part(KEY_OLD_PROPVAL, old_propval),
                        make_proplist_skel_part(KEY_NEW_PROPVAL, propval))
  exit_code, out, err = main.run_atomic_ra_revprop_change(url, revision,
                                                          propname, skel,
                                                          want_error)
  verify.verify_outputs("Unexpected output", out, err,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, out, err
def run_and_verify_svnlook(message, expected_stdout,
                           expected_stderr, *varargs):
  """Like run_and_verify_svnlook2, but the expected exit code is
  assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
  if expected_stderr is None or expected_stderr == []:
    expected_exit = 0
  else:
    expected_exit = 1
  return run_and_verify_svnlook2(message, expected_stdout, expected_stderr,
                                 expected_exit, *varargs)
def run_and_verify_svnlook2(message, expected_stdout, expected_stderr,
                            expected_exit, *varargs):
  """Run svnlook command and check its output and exit code."""
  exit_code, stdout_lines, stderr_lines = main.run_svnlook(*varargs)
  verify.verify_outputs("Unexpected output", stdout_lines, stderr_lines,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, stdout_lines, stderr_lines
def run_and_verify_svnadmin(message, expected_stdout,
                            expected_stderr, *varargs):
  """Like run_and_verify_svnadmin2, but the expected exit code is
  assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
  expected_exit = 1 if (expected_stderr is not None
                        and expected_stderr != []) else 0
  return run_and_verify_svnadmin2(message, expected_stdout, expected_stderr,
                                  expected_exit, *varargs)
def run_and_verify_svnadmin2(message, expected_stdout, expected_stderr,
                             expected_exit, *varargs):
  """Run svnadmin command and check its output and exit code."""
  exit_code, out, err = main.run_svnadmin(*varargs)
  # Compare actual stdout/stderr against the expectations, then the exit code.
  verify.verify_outputs("Unexpected output", out, err,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, out, err
def run_and_verify_svnversion(message, wc_dir, trail_url,
                              expected_stdout, expected_stderr, *varargs):
  """like run_and_verify_svnversion2, but the expected exit code is
  assumed to be 0 if no output is expected on stderr, and 1 otherwise."""
  if expected_stderr is None or expected_stderr == []:
    expected_exit = 0
  else:
    expected_exit = 1
  return run_and_verify_svnversion2(message, wc_dir, trail_url,
                                    expected_stdout, expected_stderr,
                                    expected_exit, *varargs)
def run_and_verify_svnversion2(message, wc_dir, trail_url,
                               expected_stdout, expected_stderr,
                               expected_exit, *varargs):
  """Run svnversion command and check its output and exit code."""
  # Build the positional argument list; the trailing URL is optional.
  cmd_args = [wc_dir]
  if trail_url is not None:
    cmd_args.append(trail_url)
  cmd_args.extend(varargs)
  exit_code, out, err = main.run_svnversion(*cmd_args)
  verify.verify_outputs("Unexpected output", out, err,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, out, err
def run_and_verify_svn(message, expected_stdout, expected_stderr, *varargs):
  """like run_and_verify_svn2, but the expected exit code is assumed to
  be 0 if no output is expected on stderr, and 1 otherwise."""
  expected_exit = 0
  if expected_stderr is not None:
    if isinstance(expected_stderr, verify.ExpectedOutput):
      # An ExpectedOutput that does not match empty output predicts failure.
      if not expected_stderr.matches([]):
        expected_exit = 1
    elif expected_stderr != []:
      # A plain non-empty list of expected stderr lines also predicts failure.
      expected_exit = 1
  return run_and_verify_svn2(message, expected_stdout, expected_stderr,
                             expected_exit, *varargs)
def run_and_verify_svn2(message, expected_stdout, expected_stderr,
                        expected_exit, *varargs):
  """Invoke main.run_svn() with *VARARGS. Return exit code as int; stdout,
  stderr as lists of lines (including line terminators).  For both
  EXPECTED_STDOUT and EXPECTED_STDERR, create an appropriate instance of
  verify.ExpectedOutput (if necessary):
     - If it is an array of strings, create a vanilla ExpectedOutput.
     - If it is a single string, create a RegexOutput that must match every
       line (for stdout) or any line (for stderr) of the expected output.
     - If it is already an instance of ExpectedOutput
       (e.g. UnorderedOutput), leave it alone.
  ...and invoke compare_and_display_lines() on MESSAGE, a label based
  on the name of the stream being compared (e.g. STDOUT), the
  ExpectedOutput instance, and the actual output.
  If EXPECTED_STDOUT is None, do not check stdout.
  EXPECTED_STDERR may not be None.
  If output checks pass, the expected and actual codes are compared.
  If a comparison fails, a Failure will be raised."""
  if expected_stderr is None:
    raise verify.SVNIncorrectDatatype("expected_stderr must not be None")
  # Decide up front whether stderr output is anticipated, so run_svn knows
  # whether a non-empty stderr is an error worth aborting on.
  want_err = None
  if isinstance(expected_stderr, verify.ExpectedOutput):
    if not expected_stderr.matches([]):
      want_err = True
  elif expected_stderr != []:
    want_err = True
  exit_code, out, err = main.run_svn(want_err, *varargs)
  verify.verify_outputs(message, out, err, expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, out, err
def run_and_verify_load(repo_dir, dump_file_content,
                        bypass_prop_validation = False):
  "Runs 'svnadmin load' and reports any errors."
  if not isinstance(dump_file_content, list):
    raise TypeError("dump_file_content argument should have list type")
  expected_stderr = []
  # The dump is fed to 'svnadmin load' on stdin; --force-uuid keeps the
  # loaded repository's UUID identical to the dump's.
  if bypass_prop_validation:
    exit_code, output, errput = main.run_command_stdin(
      main.svnadmin_binary, expected_stderr, 0, 1, dump_file_content,
      'load', '--force-uuid', '--quiet', '--bypass-prop-validation', repo_dir)
  else:
    exit_code, output, errput = main.run_command_stdin(
      main.svnadmin_binary, expected_stderr, 0, 1, dump_file_content,
      'load', '--force-uuid', '--quiet', repo_dir)
  verify.verify_outputs("Unexpected stderr output", None, errput,
                        None, expected_stderr)
def run_and_verify_dump(repo_dir, deltas=False):
  "Runs 'svnadmin dump' and reports any errors, returning the dump content."
  # Assemble the command line, inserting --deltas only when requested.
  dump_args = ['dump']
  if deltas:
    dump_args.append('--deltas')
  dump_args.append(repo_dir)
  exit_code, output, errput = main.run_svnadmin(*dump_args)
  # Both streams are expected to carry output (the dump goes to stdout,
  # progress notes go to stderr).
  verify.verify_outputs("Missing expected output(s)", output, errput,
                        verify.AnyOutput, verify.AnyOutput)
  return output
def run_and_verify_svnrdump(dumpfile_content, expected_stdout,
                            expected_stderr, expected_exit, *varargs):
  """Runs 'svnrdump dump|load' depending on dumpfile_content and
  reports any errors."""
  exit_code, output, err = main.run_svnrdump(dumpfile_content, *varargs)
  # Since main.run_svnrdump() uses binary mode, normalize the stderr
  # line endings on Windows ourselves.
  if sys.platform == 'win32':
    # List comprehension instead of map(): keeps ERR a real list on
    # Python 3, where map() returns a one-shot iterator.
    err = [x.replace('\r\n', '\n') for x in err]
  # Filter out transient 'warning: W200007' lines.  The old code deleted
  # from ERR with indices taken from a copy, which skewed later indices as
  # soon as one line was removed; filtering avoids that bug entirely.
  err = [line for line in err if not re.search("warning: W200007", line)]
  verify.verify_outputs("Unexpected output", output, err,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code("Unexpected return code", exit_code, expected_exit)
  return output
def run_and_verify_svnmucc(message, expected_stdout, expected_stderr,
                           *varargs):
  """Run svnmucc command and check its output"""
  # Non-empty expected stderr implies the command is expected to fail.
  if expected_stderr is None or expected_stderr == []:
    expected_exit = 0
  else:
    expected_exit = 1
  return run_and_verify_svnmucc2(message, expected_stdout, expected_stderr,
                                 expected_exit, *varargs)
def run_and_verify_svnmucc2(message, expected_stdout, expected_stderr,
                            expected_exit, *varargs):
  """Run svnmucc command and check its output and exit code."""
  exit_code, out, err = main.run_svnmucc(*varargs)
  # Compare actual stdout/stderr against the expectations, then the exit code.
  verify.verify_outputs("Unexpected output", out, err,
                        expected_stdout, expected_stderr)
  verify.verify_exit_code(message, exit_code, expected_exit)
  return exit_code, out, err
def load_repo(sbox, dumpfile_path = None, dump_str = None,
              bypass_prop_validation = False):
  """Wipe SBOX's repository and working copy, load DUMP_STR (or the contents
  of DUMPFILE_PATH) into a fresh repository, and check out a working copy.
  Returns the dump text that was loaded."""
  if not dump_str:
    # Read in binary mode and close the handle deterministically; the old
    # open(...).read() leaked the file object until garbage collection.
    dump_file = open(dumpfile_path, "rb")
    try:
      dump_str = dump_file.read()
    finally:
      dump_file.close()
  # Create a virgin repos and working copy
  main.safe_rmtree(sbox.repo_dir, 1)
  main.safe_rmtree(sbox.wc_dir, 1)
  main.create_repos(sbox.repo_dir)
  # Load the mergetracking dumpfile into the repos, and check it out the repo
  run_and_verify_load(sbox.repo_dir, dump_str.splitlines(True),
                      bypass_prop_validation)
  run_and_verify_svn(None, None, [], "co", sbox.repo_url, sbox.wc_dir)
  return dump_str
def expected_noop_update_output(rev):
  """Return an ExpectedOutput object describing what we'd expect to
  see from an update to revision REV that was effectively a no-op (no
  server changes transmitted)."""
  # A no-op update prints either the "Updating ..." header or the final
  # "At revision N." line, nothing else.
  pattern = "Updating '.*':|At revision %d." % rev
  return verify.createExpectedOutput(pattern, "no-op update")
######################################################################
# Subversion Actions
#
# These are all routines that invoke 'svn' in particular ways, and
# then verify the results by comparing expected trees with actual
# trees.
#
def run_and_verify_checkout2(do_remove,
                             URL, wc_dir_name, output_tree, disk_tree,
                             singleton_handler_a = None,
                             a_baton = None,
                             singleton_handler_b = None,
                             b_baton = None,
                             *args):
  """Checkout the URL into a new directory WC_DIR_NAME. *ARGS are any
  extra optional args to the checkout subcommand.

  The subcommand output will be verified against OUTPUT_TREE,
  and the working copy itself will be verified against DISK_TREE.
  For the latter comparison, SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
  function's doc string for more details.  Return if successful, raise
  on failure.

  WC_DIR_NAME is deleted if DO_REMOVE is True.
  """
  # Expected values may come in as wc.State; convert to old-style trees
  # for tree.compare_trees().
  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()
  if isinstance(disk_tree, wc.State):
    disk_tree = disk_tree.old_tree()

  # Remove dir if it's already there, unless this is a forced checkout.
  # In that case assume we want to test a forced checkout's toleration
  # of obstructing paths.
  if do_remove:
    main.safe_rmtree(wc_dir_name)

  # Checkout and make a tree of the output, using l:foo/p:bar
  ### todo: svn should not be prompting for auth info when using
  ### repositories with no auth/auth requirements
  exit_code, output, errput = main.run_svn(None, 'co',
                                           URL, wc_dir_name, *args)
  actual = tree.build_tree_from_checkout(output)

  # Verify actual output against expected output.
  try:
    tree.compare_trees("output", actual, output_tree)
  except tree.SVNTreeUnequal:
    print("ACTUAL OUTPUT TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise

  # Create a tree by scanning the working copy
  actual = tree.build_tree_from_wc(wc_dir_name)

  # Verify expected disk against actual disk.
  try:
    tree.compare_trees("disk", actual, disk_tree,
                       singleton_handler_a, a_baton,
                       singleton_handler_b, b_baton)
  except tree.SVNTreeUnequal:
    print("ACTUAL DISK TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise
def run_and_verify_checkout(URL, wc_dir_name, output_tree, disk_tree,
                            singleton_handler_a = None,
                            a_baton = None,
                            singleton_handler_b = None,
                            b_baton = None,
                            *args):
  """Same as run_and_verify_checkout2(), but without the DO_REMOVE arg.
  WC_DIR_NAME is deleted if present unless the '--force' option is passed
  in *ARGS."""

  # A forced checkout is expected to tolerate obstructing paths, so only
  # clear the target directory when '--force' was not requested.
  remove_first = '--force' not in args
  return run_and_verify_checkout2(remove_first,
                                  URL, wc_dir_name, output_tree, disk_tree,
                                  singleton_handler_a, a_baton,
                                  singleton_handler_b, b_baton,
                                  *args)
def run_and_verify_export(URL, export_dir_name, output_tree, disk_tree,
                          *args):
  """Export the URL into a new directory WC_DIR_NAME.

  The subcommand output will be verified against OUTPUT_TREE,
  and the exported copy itself will be verified against DISK_TREE.
  Return if successful, raise on failure.
  """
  # Unlike the checkout helpers, this one requires wc.State inputs.
  assert isinstance(output_tree, wc.State)
  assert isinstance(disk_tree, wc.State)
  disk_tree = disk_tree.old_tree()
  output_tree = output_tree.old_tree()

  # Export and make a tree of the output, using l:foo/p:bar
  ### todo: svn should not be prompting for auth info when using
  ### repositories with no auth/auth requirements
  exit_code, output, errput = main.run_svn(None, 'export',
                                           URL, export_dir_name, *args)
  actual = tree.build_tree_from_checkout(output)

  # Verify actual output against expected output.
  try:
    tree.compare_trees("output", actual, output_tree)
  except tree.SVNTreeUnequal:
    print("ACTUAL OUTPUT TREE:")
    tree.dump_tree_script(actual, export_dir_name + os.sep)
    raise

  # Create a tree by scanning the working copy.  Don't ignore
  # the .svn directories so that we generate an error if they
  # happen to show up.
  actual = tree.build_tree_from_wc(export_dir_name, ignore_svn=False)

  # Verify expected disk against actual disk.
  try:
    tree.compare_trees("disk", actual, disk_tree)
  except tree.SVNTreeUnequal:
    print("ACTUAL DISK TREE:")
    tree.dump_tree_script(actual, export_dir_name + os.sep)
    raise
# run_and_verify_log_xml
class LogEntry:
  """One parsed <logentry> element from 'svn log --xml' output.

  Holds the revision number, a dict of changed paths, and a dict of
  revision properties (svn:author, svn:log, svn:date, ...)."""

  def __init__(self, revision, changed_paths=None, revprops=None):
    self.revision = revision
    # Idiom fix: compare against None with 'is', not '=='.  Behavior is
    # unchanged; explicitly-passed containers (even empty ones) are kept.
    if changed_paths is None:
      self.changed_paths = {}
    else:
      self.changed_paths = changed_paths
    if revprops is None:
      self.revprops = {}
    else:
      self.revprops = revprops

  def assert_changed_paths(self, changed_paths):
    """Assert that changed_paths is the same as this entry's changed_paths
    Raises svntest.Failure if not.
    """
    raise Failure('NOT IMPLEMENTED')

  def assert_revprops(self, revprops):
    """Assert that the dict revprops is the same as this entry's revprops.
    Raises svntest.Failure if not.
    """
    if self.revprops != revprops:
      # Show a line-by-line ndiff of the two pretty-printed dicts so the
      # mismatch is easy to spot in the test log.
      raise Failure('\n' + '\n'.join(difflib.ndiff(
            pprint.pformat(revprops).splitlines(),
            pprint.pformat(self.revprops).splitlines())))
class LogParser:
  # Incremental expat-based parser for 'svn log --xml' output.  Element
  # handlers are dispatched dynamically by tag name: <logentry> start/end
  # events call logentry_start/logentry_end, etc.  Uninteresting tags are
  # mapped to self.ignore via ignore_tags()/ignore_elements().
  def parse(self, data):
    """Return a list of LogEntrys parsed from the sequence of strings data.

    This is the only method of interest to callers.
    """
    try:
      for i in data:
        self.parser.Parse(i)
      self.parser.Parse('', True)
    except xml.parsers.expat.ExpatError, e:
      # Include the full raw output in the failure so the bad XML is visible.
      raise verify.SVNUnexpectedStdout('%s\n%s\n' % (e, ''.join(data),))
    return self.entries

  def __init__(self):
    # for expat
    self.parser = xml.parsers.expat.ParserCreate()
    self.parser.StartElementHandler = self.handle_start_element
    self.parser.EndElementHandler = self.handle_end_element
    self.parser.CharacterDataHandler = self.handle_character_data
    # Ignore some things.
    self.ignore_elements('log', 'paths', 'path', 'revprops')
    self.ignore_tags('logentry_end', 'author_start', 'date_start', 'msg_start')
    # internal state
    self.cdata = []      # accumulated character data for the current element
    self.property = None # name of the <property> currently being parsed
    # the result
    self.entries = []

  def ignore(self, *args, **kwargs):
    # Shared no-op handler; discards any accumulated character data.
    del self.cdata[:]
  def ignore_tags(self, *args):
    for tag in args:
      setattr(self, tag, self.ignore)
  def ignore_elements(self, *args):
    for element in args:
      self.ignore_tags(element + '_start', element + '_end')

  # expat handlers
  def handle_start_element(self, name, attrs):
    getattr(self, name + '_start')(attrs)
  def handle_end_element(self, name):
    getattr(self, name + '_end')()
  def handle_character_data(self, data):
    self.cdata.append(data)

  # element handler utilities
  def use_cdata(self):
    # Join, strip, and reset the accumulated character data.
    result = ''.join(self.cdata).strip()
    del self.cdata[:]
    return result
  def svn_prop(self, name):
    self.entries[-1].revprops['svn:' + name] = self.use_cdata()

  # element handlers
  def logentry_start(self, attrs):
    self.entries.append(LogEntry(int(attrs['revision'])))
  def author_end(self):
    self.svn_prop('author')
  def msg_end(self):
    self.svn_prop('log')
  def date_end(self):
    # svn:date could be anything, so just note its presence.
    self.cdata[:] = ['']
    self.svn_prop('date')
  def property_start(self, attrs):
    self.property = attrs['name']
  def property_end(self):
    self.entries[-1].revprops[self.property] = self.use_cdata()
def run_and_verify_log_xml(message=None, expected_paths=None,
                           expected_revprops=None, expected_stdout=None,
                           expected_stderr=None, args=[]):
  """Call run_and_verify_svn with log --xml and args (optional) as command
  arguments, and pass along message, expected_stdout, and expected_stderr.

  If message is None, pass the svn log command as message.

  expected_paths checking is not yet implemented.

  expected_revprops is an optional list of dicts, compared to each
  revision's revprops.  The list must be in the same order the log entries
  come in.  Any svn:date revprops in the dicts must be '' in order to
  match, as the actual dates could be anything.

  expected_paths and expected_revprops are ignored if expected_stdout or
  expected_stderr is specified.
  """
  # Idiom fix: compare against None with 'is'/'is not', per PEP 8.
  # (NOTE: args=[] is a mutable default, but it is never mutated here --
  # a copy is taken below.)
  if message is None:
    message = ' '.join(args)

  # We'll parse the output unless the caller specifies expected_stderr or
  # expected_stdout for run_and_verify_svn.
  parse = True
  if expected_stderr is None:
    expected_stderr = []
  else:
    parse = False
  if expected_stdout is not None:
    parse = False

  log_args = list(args)
  if expected_paths is not None:
    log_args.append('-v')

  (exit_code, stdout, stderr) = run_and_verify_svn(
    message, expected_stdout, expected_stderr,
    'log', '--xml', *log_args)
  if not parse:
    return

  # Compare each parsed entry against the caller's expectations, in order.
  entries = LogParser().parse(stdout)
  for index in range(len(entries)):
    entry = entries[index]
    if expected_revprops is not None:
      entry.assert_revprops(expected_revprops[index])
    if expected_paths is not None:
      entry.assert_changed_paths(expected_paths[index])
def verify_update(actual_output,
                  actual_mergeinfo_output,
                  actual_elision_output,
                  wc_dir_name,
                  output_tree,
                  mergeinfo_output_tree,
                  elision_output_tree,
                  disk_tree,
                  status_tree,
                  singleton_handler_a=None,
                  a_baton=None,
                  singleton_handler_b=None,
                  b_baton=None,
                  check_props=False):
  """Verify update of WC_DIR_NAME.

  The subcommand output (found in ACTUAL_OUTPUT, ACTUAL_MERGEINFO_OUTPUT,
  and ACTUAL_ELISION_OUTPUT) will be verified against OUTPUT_TREE,
  MERGEINFO_OUTPUT_TREE, and ELISION_OUTPUT_TREE respectively (if any of
  these is provided, they may be None in which case a comparison is not
  done).  The working copy itself will be verified against DISK_TREE (if
  provided), and the working copy's 'svn status' output will be verified
  against STATUS_TREE (if provided).  (This is a good way to check that
  revision numbers were bumped.)

  Return if successful, raise on failure.

  For the comparison with DISK_TREE, pass SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B to tree.compare_trees -- see that function's doc
  string for more details.  If CHECK_PROPS is set, then disk
  comparison will examine props."""
  # Normalize all wc.State arguments to old-style trees for comparison.
  if isinstance(actual_output, wc.State):
    actual_output = actual_output.old_tree()
  if isinstance(actual_mergeinfo_output, wc.State):
    actual_mergeinfo_output = actual_mergeinfo_output.old_tree()
  if isinstance(actual_elision_output, wc.State):
    actual_elision_output = actual_elision_output.old_tree()
  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()
  if isinstance(mergeinfo_output_tree, wc.State):
    mergeinfo_output_tree = mergeinfo_output_tree.old_tree()
  if isinstance(elision_output_tree, wc.State):
    elision_output_tree = elision_output_tree.old_tree()
  if isinstance(disk_tree, wc.State):
    disk_tree = disk_tree.old_tree()
  if isinstance(status_tree, wc.State):
    status_tree = status_tree.old_tree()

  # Verify actual output against expected output.
  if output_tree:
    try:
      tree.compare_trees("output", actual_output, output_tree)
    except tree.SVNTreeUnequal:
      print("ACTUAL OUTPUT TREE:")
      tree.dump_tree_script(actual_output, wc_dir_name + os.sep)
      raise

  # Verify actual mergeinfo recording output against expected output.
  if mergeinfo_output_tree:
    try:
      tree.compare_trees("mergeinfo_output", actual_mergeinfo_output,
                         mergeinfo_output_tree)
    except tree.SVNTreeUnequal:
      print("ACTUAL MERGEINFO OUTPUT TREE:")
      tree.dump_tree_script(actual_mergeinfo_output,
                            wc_dir_name + os.sep)
      raise

  # Verify actual mergeinfo elision output against expected output.
  if elision_output_tree:
    try:
      tree.compare_trees("elision_output", actual_elision_output,
                         elision_output_tree)
    except tree.SVNTreeUnequal:
      print("ACTUAL ELISION OUTPUT TREE:")
      tree.dump_tree_script(actual_elision_output,
                            wc_dir_name + os.sep)
      raise

  # Create a tree by scanning the working copy, and verify it
  if disk_tree:
    actual_disk = tree.build_tree_from_wc(wc_dir_name, check_props)
    try:
      tree.compare_trees("disk", actual_disk, disk_tree,
                         singleton_handler_a, a_baton,
                         singleton_handler_b, b_baton)
    except tree.SVNTreeUnequal:
      print("EXPECTED DISK TREE:")
      tree.dump_tree_script(disk_tree)
      print("ACTUAL DISK TREE:")
      tree.dump_tree_script(actual_disk)
      raise

  # Verify via 'status' command too, if possible.
  if status_tree:
    run_and_verify_status(wc_dir_name, status_tree)
def verify_disk(wc_dir_name, disk_tree, check_props=False):
  """Verify WC_DIR_NAME against DISK_TREE.  If CHECK_PROPS is set,
  the comparison will examine props.  Returns if successful, raises on
  failure."""
  # Delegate to verify_update() with only the disk-tree check enabled.
  verify_update(None, None, None, wc_dir_name, None, None, None, disk_tree,
                None, check_props=check_props)
def run_and_verify_update(wc_dir_name,
                          output_tree, disk_tree, status_tree,
                          error_re_string = None,
                          singleton_handler_a = None,
                          a_baton = None,
                          singleton_handler_b = None,
                          b_baton = None,
                          check_props = False,
                          *args):
  """Update WC_DIR_NAME.  *ARGS are any extra optional args to the
  update subcommand.  NOTE: If *ARGS is specified at all, explicit
  target paths must be passed in *ARGS as well (or a default `.' will
  be chosen by the 'svn' binary).  This allows the caller to update
  many items in a single working copy dir, but still verify the entire
  working copy dir.

  If ERROR_RE_STRING, the update must exit with error, and the error
  message must match regular expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, then:

  If OUTPUT_TREE is not None, the subcommand output will be verified
  against OUTPUT_TREE.  If DISK_TREE is not None, the working copy
  itself will be verified against DISK_TREE.  If STATUS_TREE is not
  None, the 'svn status' output will be verified against STATUS_TREE.
  (This is a good way to check that revision numbers were bumped.)

  For the DISK_TREE verification, SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
  function's doc string for more details.

  If CHECK_PROPS is set, then disk comparison will examine props.

  Return if successful, raise on failure."""

  # Update and make a tree of the output.
  # With explicit *ARGS the caller controls the target paths; otherwise
  # update the whole working copy dir.
  if len(args):
    exit_code, output, errput = main.run_svn(error_re_string, 'up', *args)
  else:
    exit_code, output, errput = main.run_svn(error_re_string,
                                             'up', wc_dir_name,
                                             *args)

  if error_re_string:
    # Success here means *some* stderr line matched the expected error.
    rm = re.compile(error_re_string)
    for line in errput:
      match = rm.search(line)
      if match:
        return
    raise main.SVNUnmatchedError

  actual = wc.State.from_checkout(output)
  verify_update(actual, None, None, wc_dir_name,
                output_tree, None, None, disk_tree, status_tree,
                singleton_handler_a, a_baton,
                singleton_handler_b, b_baton,
                check_props)
def run_and_parse_info(*args):
  """Run 'svn info ARGS' and parse its output into a list of dicts,
  one dict per reported node."""
  # the returned array
  all_infos = []

  # per-target variables
  iter_info = {}
  prev_key = None
  lock_comment_lines = 0   # remaining lines of a multi-line lock comment
  lock_comments = []

  exit_code, output, errput = main.run_svn(None, 'info', *args)

  for line in output:
    line = line[:-1] # trim '\n'

    if lock_comment_lines > 0:
      # mop up any lock comment lines
      lock_comments.append(line)
      lock_comment_lines = lock_comment_lines - 1
      if lock_comment_lines == 0:
        # Store the complete comment (as a list of lines) under the
        # key that announced it, e.g. "Lock Comment".
        iter_info[prev_key] = lock_comments
    elif len(line) == 0:
      # separator line between items
      all_infos.append(iter_info)
      iter_info = {}
      prev_key = None
      lock_comment_lines = 0
      lock_comments = []
    elif line[0].isspace():
      # continuation line (for tree conflicts)
      iter_info[prev_key] += line[1:]
    else:
      # normal line
      key, value = line.split(':', 1)

      if re.search(' \(\d+ lines?\)$', key):
        # numbered continuation lines, e.g. "Lock Comment (2 lines):"
        match = re.match('^(.*) \((\d+) lines?\)$', key)
        key = match.group(1)
        lock_comment_lines = int(match.group(2))
      elif len(value) > 1:
        # normal normal line
        iter_info[key] = value[1:]
      else:
        ### originally added for "Tree conflict:\n" lines;
        ### tree-conflicts output format has changed since then
        # continuation lines are implicit (prefixed by whitespace)
        iter_info[key] = ''

      prev_key = key

  return all_infos
def run_and_verify_info(expected_infos, *args):
  """Run 'svn info' with the arguments in *ARGS and verify the results
  against expected_infos.  The latter should be a list of dicts, one dict
  per reported node, in the order in which the 'Path' fields of the output
  will appear after sorting them as Python strings.  (The dicts in
  EXPECTED_INFOS, however, need not have a 'Path' key.)

  In the dicts, each key is the before-the-colon part of the 'svn info' output,
  and each value is either None (meaning that the key should *not* appear in
  the 'svn info' output) or a regex matching the output value.  Output lines
  not matching a key in the dict are ignored.

  Return if successful, raise on failure."""
  actual_infos = run_and_parse_info(*args)
  actual_infos.sort(key=lambda info: info['Path'])

  try:
    # zip() won't complain, so check this manually
    if len(actual_infos) != len(expected_infos):
      raise verify.SVNUnexpectedStdout(
          "Expected %d infos, found %d infos"
           % (len(expected_infos), len(actual_infos)))

    for actual, expected in zip(actual_infos, expected_infos):
      # compare dicts
      for key, value in expected.items():
        assert ':' not in key # caller passed impossible expectations?
        if value is None and key in actual:
          raise main.SVNLineUnequal("Found unexpected key '%s' with value '%s'"
                                    % (key, actual[key]))
        if value is not None and key not in actual:
          raise main.SVNLineUnequal("Expected key '%s' (with value '%s') "
                                    "not found" % (key, value))
        if value is not None and not re.match(value, actual[key]):
          raise verify.SVNUnexpectedStdout("Values of key '%s' don't match:\n"
                                           "  Expected: '%s' (regex)\n"
                                           "  Found:    '%s' (string)\n"
                                           % (key, value, actual[key]))

  except:
    # Dump both sides for diagnosis, then re-raise the original error.
    sys.stderr.write("Bad 'svn info' output:\n"
                     "  Received: %s\n"
                     "  Expected: %s\n"
                     % (actual_infos, expected_infos))
    raise
def run_and_verify_merge(dir, rev1, rev2, url1, url2,
                         output_tree,
                         mergeinfo_output_tree,
                         elision_output_tree,
                         disk_tree, status_tree, skip_tree,
                         error_re_string = None,
                         singleton_handler_a = None,
                         a_baton = None,
                         singleton_handler_b = None,
                         b_baton = None,
                         check_props = False,
                         dry_run = True,
                         *args):
  """Run 'svn merge URL1@REV1 URL2@REV2 DIR' if URL2 is not None
  (for a three-way merge between URLs and WC).

  If URL2 is None, run 'svn merge -rREV1:REV2 URL1 DIR'.  If both REV1
  and REV2 are None, leave off the '-r' argument.

  If ERROR_RE_STRING, the merge must exit with error, and the error
  message must match regular expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, then:

  The subcommand output will be verified against OUTPUT_TREE.  Output
  related to mergeinfo notifications will be verified against
  MERGEINFO_OUTPUT_TREE if that is not None.  Output related to mergeinfo
  elision will be verified against ELISION_OUTPUT_TREE if that is not None.
  The working copy itself will be verified against DISK_TREE.  If optional
  STATUS_TREE is given, then 'svn status' output will be compared.  The
  'skipped' merge output will be compared to SKIP_TREE.

  For the DISK_TREE verification, SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
  function's doc string for more details.

  If CHECK_PROPS is set, then disk comparison will examine props.

  If DRY_RUN is set then a --dry-run merge will be carried out first and
  the output compared with that of the full merge.

  Return if successful, raise on failure.

  *ARGS are any extra optional args to the merge subcommand.
  NOTE: If *ARGS is specified at all, an explicit target path must be passed
  in *ARGS as well.  This allows the caller to merge into single items inside
  the working copy, but still verify the entire working copy dir. """
  # Build the merge command line.
  merge_command = [ "merge" ]
  if url2:
    merge_command.extend((url1 + "@" + str(rev1), url2 + "@" + str(rev2)))
  else:
    if not (rev1 is None and rev2 is None):
      merge_command.append("-r" + str(rev1) + ":" + str(rev2))
    merge_command.append(url1)
  if len(args) == 0:
    merge_command.append(dir)
  merge_command = tuple(merge_command)

  if dry_run:
    # Snapshot the disk before and after a --dry-run merge; it must not
    # change anything on disk.
    pre_disk = tree.build_tree_from_wc(dir)
    dry_run_command = merge_command + ('--dry-run',)
    dry_run_command = dry_run_command + args
    exit_code, out_dry, err_dry = main.run_svn(error_re_string,
                                               *dry_run_command)
    post_disk = tree.build_tree_from_wc(dir)
    try:
      tree.compare_trees("disk", post_disk, pre_disk)
    except tree.SVNTreeError:
      print("=============================================================")
      print("Dry-run merge altered working copy")
      print("=============================================================")
      raise

  # Update and make a tree of the output.
  merge_command = merge_command + args
  exit_code, out, err = main.run_svn(error_re_string, *merge_command)

  if error_re_string:
    if not error_re_string.startswith(".*"):
      error_re_string = ".*(" + error_re_string + ")"
    expected_err = verify.RegexOutput(error_re_string, match_all=False)
    verify.verify_outputs(None, None, err, None, expected_err)
    return
  elif err:
    raise verify.SVNUnexpectedStderr(err)

  # Split the output into that related to application of the actual diff
  # and that related to the recording of mergeinfo describing the merge.
  merge_diff_out = []
  mergeinfo_notification_out = []
  mergeinfo_elision_out = []
  mergeinfo_notifications = False
  elision_notifications = False
  for line in out:
    # '--- Recording'/'--- Eliding' headers switch the bucket that the
    # following lines are collected into; other headers reset to the
    # plain diff bucket.
    if line.startswith('--- Recording'):
      mergeinfo_notifications = True
      elision_notifications = False
    elif line.startswith('--- Eliding'):
      mergeinfo_notifications = False
      elision_notifications = True
    elif line.startswith('--- Merging') or \
         line.startswith('--- Reverse-merging') or \
         line.startswith('Summary of conflicts') or \
         line.startswith('Skipped missing target'):
      mergeinfo_notifications = False
      elision_notifications = False

    if mergeinfo_notifications:
      mergeinfo_notification_out.append(line)
    elif elision_notifications:
      mergeinfo_elision_out.append(line)
    else:
      merge_diff_out.append(line)

  if dry_run and merge_diff_out != out_dry:
    # Due to the way ra_serf works, it's possible that the dry-run and
    # real merge operations did the same thing, but the output came in
    # a different order.  Let's see if maybe that's the case by comparing
    # the outputs as unordered sets rather than as lists.
    #
    # This now happens for other RA layers with modern APR because the
    # hash order now varies.
    #
    # The different orders of the real and dry-run merges may cause
    # the "Merging rX through rY into" lines to be duplicated a
    # different number of times in the two outputs.  The list-set
    # conversion removes duplicates so these differences are ignored.
    # It also removes "U some/path" duplicate lines.  Perhaps we
    # should avoid that?
    out_copy = set(merge_diff_out[:])
    out_dry_copy = set(out_dry[:])

    if out_copy != out_dry_copy:
      print("=============================================================")
      print("Merge outputs differ")
      print("The dry-run merge output:")
      for x in out_dry:
        sys.stdout.write(x)
      print("The full merge output:")
      for x in out:
        sys.stdout.write(x)
      print("=============================================================")
      raise main.SVNUnmatchedError

  # Singleton handlers for the skip-tree comparison below.
  def missing_skip(a, b):
    print("=============================================================")
    print("Merge failed to skip: " + a.path)
    print("=============================================================")
    raise Failure
  def extra_skip(a, b):
    print("=============================================================")
    print("Merge unexpectedly skipped: " + a.path)
    print("=============================================================")
    raise Failure

  myskiptree = tree.build_tree_from_skipped(out)
  if isinstance(skip_tree, wc.State):
    skip_tree = skip_tree.old_tree()
  try:
    tree.compare_trees("skip", myskiptree, skip_tree,
                       extra_skip, None, missing_skip, None)
  except tree.SVNTreeUnequal:
    print("ACTUAL SKIP TREE:")
    tree.dump_tree_script(myskiptree, dir + os.sep)
    raise

  actual_diff = svntest.wc.State.from_checkout(merge_diff_out, False)
  actual_mergeinfo = svntest.wc.State.from_checkout(mergeinfo_notification_out,
                                                    False)
  actual_elision = svntest.wc.State.from_checkout(mergeinfo_elision_out,
                                                  False)
  verify_update(actual_diff, actual_mergeinfo, actual_elision, dir,
                output_tree, mergeinfo_output_tree, elision_output_tree,
                disk_tree, status_tree,
                singleton_handler_a, a_baton,
                singleton_handler_b, b_baton,
                check_props)
def run_and_verify_patch(dir, patch_path,
                         output_tree, disk_tree, status_tree, skip_tree,
                         error_re_string=None,
                         check_props=False,
                         dry_run=True,
                         *args):
  """Run 'svn patch patch_path DIR'.

  If ERROR_RE_STRING, 'svn patch' must exit with error, and the error
  message must match regular expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, then:

  The subcommand output will be verified against OUTPUT_TREE, and the
  working copy itself will be verified against DISK_TREE.  If optional
  STATUS_TREE is given, then 'svn status' output will be compared.
  The 'skipped' merge output will be compared to SKIP_TREE.

  If CHECK_PROPS is set, then disk comparison will examine props.

  If DRY_RUN is set then a --dry-run patch will be carried out first and
  the output compared with that of the full patch application.

  Returns if successful, raises on failure."""
  patch_command = [ "patch" ]
  patch_command.append(patch_path)
  patch_command.append(dir)
  patch_command = tuple(patch_command)

  if dry_run:
    # A --dry-run patch must leave the working copy untouched; compare
    # disk snapshots taken before and after.
    pre_disk = tree.build_tree_from_wc(dir)
    dry_run_command = patch_command + ('--dry-run',)
    dry_run_command = dry_run_command + args
    exit_code, out_dry, err_dry = main.run_svn(error_re_string,
                                               *dry_run_command)
    post_disk = tree.build_tree_from_wc(dir)
    try:
      tree.compare_trees("disk", post_disk, pre_disk)
    except tree.SVNTreeError:
      print("=============================================================")
      print("'svn patch --dry-run' altered working copy")
      print("=============================================================")
      raise

  # Update and make a tree of the output.
  patch_command = patch_command + args
  # Pass True (errors expected) so run_svn doesn't abort on stderr output;
  # the error matching is done by hand below.
  exit_code, out, err = main.run_svn(True, *patch_command)

  if error_re_string:
    rm = re.compile(error_re_string)
    match = None
    for line in err:
      match = rm.search(line)
      if match:
        break
    if not match:
      raise main.SVNUnmatchedError
  elif err:
    print("UNEXPECTED STDERR:")
    for x in err:
      sys.stdout.write(x)
    raise verify.SVNUnexpectedStderr

  if dry_run and out != out_dry:
    # APR hash order means the output order can vary, assume everything is OK
    # if only the order changes.
    out_dry_expected = svntest.verify.UnorderedOutput(out)
    verify.compare_and_display_lines('dry-run patch output not as expected',
                                     '', out_dry_expected, out_dry)

  # Singleton handlers for the skip-tree comparison below.
  def missing_skip(a, b):
    print("=============================================================")
    print("'svn patch' failed to skip: " + a.path)
    print("=============================================================")
    raise Failure
  def extra_skip(a, b):
    print("=============================================================")
    print("'svn patch' unexpectedly skipped: " + a.path)
    print("=============================================================")
    raise Failure

  myskiptree = tree.build_tree_from_skipped(out)
  if isinstance(skip_tree, wc.State):
    skip_tree = skip_tree.old_tree()
  tree.compare_trees("skip", myskiptree, skip_tree,
                     extra_skip, None, missing_skip, None)

  mytree = tree.build_tree_from_checkout(out, 0)

  # when the expected output is a list, we want a line-by-line
  # comparison to happen instead of a tree comparison
  if (isinstance(output_tree, list)
      or isinstance(output_tree, verify.UnorderedOutput)):
    verify.verify_outputs(None, out, err, output_tree, error_re_string)
    output_tree = None

  verify_update(mytree, None, None, dir,
                output_tree, None, None, disk_tree, status_tree,
                check_props=check_props)
def run_and_verify_mergeinfo(error_re_string = None,
                             expected_output = [],
                             *args):
  """Run 'svn mergeinfo ARGS', and compare the result against
  EXPECTED_OUTPUT, a list of string representations of revisions
  expected in the output.  Raise an exception if an unexpected
  output is encountered."""
  mergeinfo_command = ["mergeinfo"]
  mergeinfo_command.extend(args)
  exit_code, out, err = main.run_svn(error_re_string, *mergeinfo_command)

  if error_re_string:
    if not error_re_string.startswith(".*"):
      error_re_string = ".*(" + error_re_string + ")"
    expected_err = verify.RegexOutput(error_re_string, match_all=False)
    verify.verify_outputs(None, None, err, None, expected_err)
    return

  # Each output line looks like "r<REV>"; strip the leading character and
  # trailing whitespace, drop empty lines, and sort for set-style compare.
  # (NOTE: this sorts the caller's EXPECTED_OUTPUT list in place.)
  out = sorted([_f for _f in [x.rstrip()[1:] for x in out] if _f])
  expected_output.sort()

  extra_out = []
  if out != expected_output:
    exp_hash = dict.fromkeys(expected_output)
    for rev in out:
      if rev in exp_hash:
        del(exp_hash[rev])
      else:
        extra_out.append(rev)
    extra_exp = list(exp_hash.keys())
    raise Exception("Unexpected 'svn mergeinfo' output:\n"
                    "  expected but not found: %s\n"
                    "  found but not expected: %s"
                    % (', '.join([str(x) for x in extra_exp]),
                       ', '.join([str(x) for x in extra_out])))
def run_and_verify_switch(wc_dir_name,
                          wc_target,
                          switch_url,
                          output_tree, disk_tree, status_tree,
                          error_re_string = None,
                          singleton_handler_a = None,
                          a_baton = None,
                          singleton_handler_b = None,
                          b_baton = None,
                          check_props = False,
                          *args):
  """Switch WC_TARGET (in working copy dir WC_DIR_NAME) to SWITCH_URL.

  If ERROR_RE_STRING, the switch must exit with error, and the error
  message must match regular expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, then:

  The subcommand output will be verified against OUTPUT_TREE, and the
  working copy itself will be verified against DISK_TREE.  If optional
  STATUS_TREE is given, then 'svn status' output will be
  compared.  (This is a good way to check that revision numbers were
  bumped.)

  For the DISK_TREE verification, SINGLETON_HANDLER_A and
  SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that
  function's doc string for more details.

  If CHECK_PROPS is set, then disk comparison will examine props.

  Return if successful, raise on failure."""

  # Update and make a tree of the output.
  exit_code, output, errput = main.run_svn(error_re_string, 'switch',
                                           switch_url, wc_target, *args)

  if error_re_string:
    if not error_re_string.startswith(".*"):
      error_re_string = ".*(" + error_re_string + ")"
    expected_err = verify.RegexOutput(error_re_string, match_all=False)
    verify.verify_outputs(None, None, errput, None, expected_err)
    return
  elif errput:
    # Bug fix: this previously raised NameError because it referenced the
    # undefined name 'err' instead of the local variable 'errput'.
    raise verify.SVNUnexpectedStderr(errput)

  actual = wc.State.from_checkout(output)

  verify_update(actual, None, None, wc_dir_name,
                output_tree, None, None, disk_tree, status_tree,
                singleton_handler_a, a_baton,
                singleton_handler_b, b_baton,
                check_props)
def process_output_for_commit(output):
  """Helper for run_and_verify_commit(), also used in the factory.

  Mutates OUTPUT in place: strips the trailing "(Committed|Imported)
  revision N." line (raising main.SVNCommitFailure if it is absent),
  drops a trailing "Transmitting file data ..." line, and re-appends any
  external-removal notification lines that trailed the commit line.
  Returns the processed OUTPUT list."""
  final_line = ""
  external_lines = []

  def is_external_removal(text):
    return (text.startswith('Removing external')
            or text.startswith('Removed external'))

  if output:
    final_line = output.pop().strip()

    # Peel off trailing external-removal notifications until we reach
    # the line that should announce the committed revision.
    while output and is_external_removal(final_line):
      external_lines.append(final_line)
      final_line = output.pop().strip()

    if re.search("(Committed|Imported) revision [0-9]+.", final_line) is None:
      print("ERROR: commit did not succeed.")
      print("The final line from 'svn ci' was:")
      print(final_line)
      raise main.SVNCommitFailure

  # The new 'final' line in the output is either a regular line that
  # mentions {Adding, Deleting, Sending, ...}, or it could be a line
  # that says "Transmitting file data ...".  If the latter case, we
  # want to remove the line from the output; it should be ignored when
  # building a tree.
  if output:
    candidate = output.pop()
    if re.search("Transmitting file data.+", candidate) is None:
      # whoops, it was important output, put it back.
      output.append(candidate)

  if external_lines:
    output.extend(external_lines)

  return output
def run_and_verify_commit(wc_dir_name, output_tree, status_tree,
                          error_re_string = None,
                          *args):
  """Commit and verify results within working copy WC_DIR_NAME,
  sending ARGS to the commit subcommand.

  The subcommand output will be verified against OUTPUT_TREE.  If
  optional STATUS_TREE is given, then 'svn status' output will
  be compared.  (This is a good way to check that revision numbers
  were bumped.)

  If ERROR_RE_STRING is None, the commit must not exit with error.  If
  ERROR_RE_STRING is a string, the commit must exit with error, and
  the error message must match regular expression ERROR_RE_STRING.

  Return if successful, raise on failure."""
  if isinstance(output_tree, wc.State):
    output_tree = output_tree.old_tree()
  if isinstance(status_tree, wc.State):
    status_tree = status_tree.old_tree()

  # Commit.
  # Supply a default log message when the caller provided none.
  if '-m' not in args and '-F' not in args:
    args = list(args) + ['-m', 'log msg']
  exit_code, output, errput = main.run_svn(error_re_string, 'ci',
                                           *args)

  if error_re_string:
    if not error_re_string.startswith(".*"):
      error_re_string = ".*(" + error_re_string + ")"
    expected_err = verify.RegexOutput(error_re_string, match_all=False)
    verify.verify_outputs(None, None, errput, None, expected_err)
    return

  # Else not expecting error:

  # Convert the output into a tree.
  output = process_output_for_commit(output)
  actual = tree.build_tree_from_commit(output)

  # Verify actual output against expected output.
  try:
    tree.compare_trees("output", actual, output_tree)
  except tree.SVNTreeError:
    verify.display_trees("Output of commit is unexpected",
                         "OUTPUT TREE", output_tree, actual)
    print("ACTUAL OUTPUT TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise

  # Verify via 'status' command too, if possible.
  if status_tree:
    run_and_verify_status(wc_dir_name, status_tree)
# This function always passes '-q' to the status command, which
# suppresses the printing of any unversioned or nonexistent items.
def run_and_verify_status(wc_dir_name, output_tree,
                          singleton_handler_a = None,
                          a_baton = None,
                          singleton_handler_b = None,
                          b_baton = None):
  """Run 'status' on WC_DIR_NAME and compare it with the
  expected OUTPUT_TREE.  SINGLETON_HANDLER_A and SINGLETON_HANDLER_B will
  be passed to tree.compare_trees - see that function's doc string for
  more details.
  Returns on success, raises on failure."""
  if isinstance(output_tree, wc.State):
    # Keep the State object around: it enables the extra entries-based
    # comparison at the bottom of this function.
    output_state = output_tree
    output_tree = output_tree.old_tree()
  else:
    output_state = None
  exit_code, output, errput = main.run_svn(None, 'status', '-v', '-u', '-q',
                                           wc_dir_name)
  actual = tree.build_tree_from_status(output)
  # Verify actual output against expected output.
  try:
    tree.compare_trees("status", actual, output_tree,
                       singleton_handler_a, a_baton,
                       singleton_handler_b, b_baton)
  except tree.SVNTreeError:
    verify.display_trees(None, 'STATUS OUTPUT TREE', output_tree, actual)
    print("ACTUAL STATUS TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise
  # if we have an output State, and we can/are-allowed to create an
  # entries-based State, then compare the two.
  if output_state:
    entries_state = wc.State.from_entries(wc_dir_name)
    if entries_state:
      tweaked = output_state.copy()
      tweaked.tweak_for_entries_compare()
      try:
        tweaked.compare_and_display('entries', entries_state)
      except tree.SVNTreeUnequal:
        ### do something more
        # NOTE(review): no extra diagnostics are printed here beyond what
        # compare_and_display emits -- see the TODO above.
        raise
# Variant of run_and_verify_status that omits '-q', so unversioned and
# nonexistent items show up in the output and can be verified.
def run_and_verify_unquiet_status(wc_dir_name, status_tree):
  """Run 'status' on WC_DIR_NAME and compare it with the
  expected STATUS_TREE.
  Returns on success, raises on failure."""
  expected = status_tree
  if isinstance(expected, wc.State):
    expected = expected.old_tree()
  exit_code, out_lines, err_lines = main.run_svn(None, 'status', '-v',
                                                 '-u', wc_dir_name)
  actual = tree.build_tree_from_status(out_lines)
  # Compare; on mismatch dump a script of the actual tree for debugging.
  try:
    tree.compare_trees("UNQUIET STATUS", actual, expected)
  except tree.SVNTreeError:
    print("ACTUAL UNQUIET STATUS TREE:")
    tree.dump_tree_script(actual, wc_dir_name + os.sep)
    raise
def run_and_verify_status_xml(expected_entries = None,
                              *args):
  """ Run 'status --xml' with arguments *ARGS.  If successful the output
  is parsed into an XML document and will be verified by comparing against
  EXPECTED_ENTRIES, a dict mapping path to a dict of expected attributes
  ('wcitem', 'wcprops', optionally 'wcrev', 'crev', 'author', 'rprops',
  'ritem').  Raises Failure on mismatch or on any stderr output.
  """
  # BUG FIX: the old default was a mutable list ([]), which can never
  # compare equal to the dict of actual entries built below -- a call with
  # no expectations and no status entries always failed.  Use a None
  # sentinel normalized to an empty dict instead.
  if expected_entries is None:
    expected_entries = {}
  exit_code, output, errput = run_and_verify_svn(None, None, [],
                                                 'status', '--xml', *args)
  if len(errput) > 0:
    raise Failure
  doc = parseString(''.join(output))
  entries = doc.getElementsByTagName('entry')
  def getText(nodelist):
    # Concatenate all text children of a node (e.g. the author name).
    rc = []
    for node in nodelist:
      if node.nodeType == node.TEXT_NODE:
        rc.append(node.data)
    return ''.join(rc)
  actual_entries = {}
  for entry in entries:
    wcstatus = entry.getElementsByTagName('wc-status')[0]
    commit = entry.getElementsByTagName('commit')
    author = entry.getElementsByTagName('author')
    rstatus = entry.getElementsByTagName('repos-status')
    actual_entry = {'wcprops' : wcstatus.getAttribute('props'),
                    'wcitem' : wcstatus.getAttribute('item'),
                    }
    # Optional pieces are only recorded when present in the XML.
    if wcstatus.hasAttribute('revision'):
      actual_entry['wcrev'] = wcstatus.getAttribute('revision')
    if (commit):
      actual_entry['crev'] = commit[0].getAttribute('revision')
    if (author):
      actual_entry['author'] = getText(author[0].childNodes)
    if (rstatus):
      actual_entry['rprops'] = rstatus[0].getAttribute('props')
      actual_entry['ritem'] = rstatus[0].getAttribute('item')
    actual_entries[entry.getAttribute('path')] = actual_entry
  if expected_entries != actual_entries:
    # Show a readable diff of expected vs. actual.
    raise Failure('\n' + '\n'.join(difflib.ndiff(
          pprint.pformat(expected_entries).splitlines(),
          pprint.pformat(actual_entries).splitlines())))
def run_and_verify_diff_summarize_xml(error_re_string = [],
                                      expected_prefix = None,
                                      expected_paths = [],
                                      expected_items = [],
                                      expected_props = [],
                                      expected_kinds = [],
                                      *args):
  """Run 'diff --summarize --xml' with the arguments *ARGS, which should
  contain all arguments beyond for your 'diff --summarize --xml' omitting
  said arguments.  EXPECTED_PREFIX will store a "common" path prefix
  expected to be at the beginning of each summarized path.  If
  EXPECTED_PREFIX is None, then EXPECTED_PATHS will need to be exactly
  as 'svn diff --summarize --xml' will output.  If ERROR_RE_STRING, the
  command must exit with error, and the error message must match regular
  expression ERROR_RE_STRING.

  Else if ERROR_RE_STRING is None, the subcommand output will be parsed
  into an XML document and will then be verified by comparing the parsed
  output to the contents in the EXPECTED_PATHS, EXPECTED_ITEMS,
  EXPECTED_PROPS and EXPECTED_KINDS (all parallel lists, indexed by the
  position of the path in EXPECTED_PATHS).  Returns on success, raises
  on failure.

  Note: the list defaults above are never mutated; they act as read-only
  empty defaults."""
  exit_code, output, errput = run_and_verify_svn(None, None, error_re_string,
                                                 'diff', '--summarize',
                                                 '--xml', *args)
  # Return if errors are present since they were expected
  if len(errput) > 0:
    return
  doc = parseString(''.join(output))
  paths = doc.getElementsByTagName("path")
  items = expected_items
  kinds = expected_kinds
  for path in paths:
    modified_path = path.childNodes[0].data
    if (expected_prefix is not None
        and modified_path.find(expected_prefix) == 0):
      # BUG FIX: strip only the *leading* prefix plus its separator.  The
      # previous str.replace() removed every occurrence of the prefix,
      # corrupting paths that happened to contain it again further down.
      modified_path = modified_path[len(expected_prefix) + 1:].strip()
      # Workaround single-object diff
      if len(modified_path) == 0:
        modified_path = path.childNodes[0].data.split(os.sep)[-1]
    # From here on, we use '/' as path separator.
    if os.sep != "/":
      modified_path = modified_path.replace(os.sep, "/")
    if modified_path not in expected_paths:
      print("ERROR: %s not expected in the changed paths." % modified_path)
      raise Failure
    # Look up the parallel expectations for this path.
    index = expected_paths.index(modified_path)
    expected_item = items[index]
    expected_kind = kinds[index]
    expected_prop = expected_props[index]
    actual_item = path.getAttribute('item')
    actual_kind = path.getAttribute('kind')
    actual_prop = path.getAttribute('props')
    if expected_item != actual_item:
      print("ERROR: expected: %s actual: %s" % (expected_item, actual_item))
      raise Failure
    if expected_kind != actual_kind:
      print("ERROR: expected: %s actual: %s" % (expected_kind, actual_kind))
      raise Failure
    if expected_prop != actual_prop:
      print("ERROR: expected: %s actual: %s" % (expected_prop, actual_prop))
      raise Failure
def run_and_verify_diff_summarize(output_tree, *args):
  """Run 'diff --summarize' with the arguments *ARGS and verify the
  result against OUTPUT_TREE.  Returns on success, raises on failure.
  """
  expected = output_tree
  if isinstance(expected, wc.State):
    expected = expected.old_tree()
  exit_code, out_lines, err_lines = main.run_svn(None, 'diff', '--summarize',
                                                 *args)
  actual = tree.build_tree_from_diff_summarize(out_lines)
  # Compare; on mismatch display both trees and a script of the actual one.
  try:
    tree.compare_trees("output", actual, expected)
  except tree.SVNTreeError:
    verify.display_trees(None, 'DIFF OUTPUT TREE', expected, actual)
    print("ACTUAL DIFF OUTPUT TREE:")
    tree.dump_tree_script(actual)
    raise
def run_and_validate_lock(path, username):
  """`svn lock' the given path and validate the contents of the lock.
     Use the given username. This is important because locks are
     user specific."""
  comment = "Locking path:%s." % path
  # lock the path
  run_and_verify_svn(None, ".*locked by user", [], 'lock',
                     '--username', username,
                     '-m', comment, path)
  # Run info and check that we get the lock fields.
  exit_code, output, err = run_and_verify_svn(None, None, [],
                                              'info','-R',
                                              path)
  ### TODO: Leverage RegexOuput([...], match_all=True) here.
  # prepare the regexs to compare against
  # (re.DOTALL lets ".*?" span the newlines of the joined info output)
  token_re = re.compile(".*?Lock Token: opaquelocktoken:.*?", re.DOTALL)
  author_re = re.compile(".*?Lock Owner: %s\n.*?" % username, re.DOTALL)
  created_re = re.compile(".*?Lock Created:.*?", re.DOTALL)
  comment_re = re.compile(".*?%s\n.*?" % re.escape(comment), re.DOTALL)
  # join all output lines into one
  output = "".join(output)
  # Fail even if one regex does not match
  if ( not (token_re.match(output) and
            author_re.match(output) and
            created_re.match(output) and
            comment_re.match(output))):
    raise Failure
def _run_and_verify_resolve(cmd, expected_paths, *args):
  """Run "svn CMD" (where CMD is 'resolve' or 'resolved') with arguments
  ARGS, and verify that it resolves the paths in EXPECTED_PATHS and no
  others.  If no ARGS are specified, use the elements of EXPECTED_PATHS
  as the arguments."""
  # TODO: verify that the status of PATHS changes accordingly.
  if not args:
    args = expected_paths
  resolved_lines = ["Resolved conflicted state of '%s'\n" % path
                    for path in expected_paths]
  run_and_verify_svn(None, verify.UnorderedOutput(resolved_lines), [],
                     cmd, *args)
def run_and_verify_resolve(expected_paths, *args):
  """Run "svn resolve" with arguments ARGS, and verify that it resolves the
  paths in EXPECTED_PATHS and no others. If no ARGS are specified, use the
  elements of EXPECTED_PATHS as the arguments."""
  # Thin wrapper; shared implementation lives in _run_and_verify_resolve.
  _run_and_verify_resolve('resolve', expected_paths, *args)
def run_and_verify_resolved(expected_paths, *args):
  """Run "svn resolved" with arguments ARGS, and verify that it resolves the
  paths in EXPECTED_PATHS and no others. If no ARGS are specified, use the
  elements of EXPECTED_PATHS as the arguments."""
  # Thin wrapper; shared implementation lives in _run_and_verify_resolve.
  _run_and_verify_resolve('resolved', expected_paths, *args)
def run_and_verify_revert(expected_paths, *args):
  """Run "svn revert" with arguments ARGS, and verify that it reverts
  the paths in EXPECTED_PATHS and no others.  If no ARGS are
  specified, use the elements of EXPECTED_PATHS as the arguments."""
  if not args:
    args = expected_paths
  reverted_lines = ["Reverted '%s'\n" % path for path in expected_paths]
  run_and_verify_svn(None, verify.UnorderedOutput(reverted_lines), [],
                     "revert", *args)
######################################################################
# Other general utilities
# This allows a test to *quickly* bootstrap itself.
def make_repo_and_wc(sbox, create_wc = True, read_only = False):
  """Create a fresh 'Greek Tree' repository and check out a WC from it.
  If READ_ONLY is False, a dedicated repository will be created, at the path
  SBOX.repo_dir.  If READ_ONLY is True, the pristine repository will be used.
  In either case, SBOX.repo_url is assumed to point to the repository that
  will be used.
  If create_wc is True, a dedicated working copy will be checked out from
  the repository, at the path SBOX.wc_dir.
  Returns on success, raises on failure."""
  # Create (or copy afresh) a new repos with a greek tree in it.
  if not read_only:
    guarantee_greek_repository(sbox.repo_dir)
  if create_wc:
    # Generate the expected output tree.
    expected_output = main.greek_state.copy()
    expected_output.wc_dir = sbox.wc_dir
    # 'A ' is the status the checkout prints for each added item.
    expected_output.tweak(status='A ', contents=None)
    # Generate an expected wc tree.
    expected_wc = main.greek_state
    # Do a checkout, and verify the resulting output and disk contents.
    run_and_verify_checkout(sbox.repo_url,
                            sbox.wc_dir,
                            expected_output,
                            expected_wc)
  else:
    # just make sure the parent folder of our working copy is created
    try:
      os.mkdir(main.general_wc_dir)
    except OSError, err:
      # Already existing is fine; anything else is a real error.
      if err.errno != errno.EEXIST:
        raise
# Duplicate a working copy or other dir.
def duplicate_dir(wc_name, wc_copy_name):
  """Copy the working copy WC_NAME to WC_COPY_NAME.  Overwrite any
  existing tree at that location."""
  # Remove the destination first; shutil.copytree requires it not to exist.
  main.safe_rmtree(wc_copy_name)
  shutil.copytree(wc_name, wc_copy_name)
def get_virginal_state(wc_dir, rev):
  "Return a virginal greek tree state for a WC and repos at revision REV."
  rev = str(rev) ### maybe switch rev to an integer?
  # copy the greek tree, shift it to the new wc_dir, insert a root elem,
  # then tweak all values
  state = main.greek_state.copy()
  state.wc_dir = wc_dir
  # '' is the working-copy root itself.
  state.desc[''] = wc.StateItem()
  state.tweak(contents=None, status='  ', wc_rev=rev)
  return state
# Cheap administrative directory locking
def lock_admin_dir(wc_dir, recursive=False):
  "Lock a SVN administrative directory"
  # NOTE(review): the values returned by open_wc_db are unused here;
  # presumably opening the wc.db is a needed side effect before running
  # the lock tester -- confirm, or drop the call.
  db, root_path, relpath = wc.open_wc_db(wc_dir)
  svntest.main.run_wc_lock_tester(recursive, wc_dir)
def set_incomplete(wc_dir, revision):
  "Make wc_dir incomplete at revision"
  # Delegates to the external wc-incomplete tester tool.
  svntest.main.run_wc_incomplete_tester(wc_dir, revision)
def get_wc_uuid(wc_dir):
  "Return the UUID of the working copy at WC_DIR."
  # Index [0] is the info entry for WC_DIR itself.
  return run_and_parse_info(wc_dir)[0]['Repository UUID']
def get_wc_base_rev(wc_dir):
  "Return the BASE revision of the working copy at WC_DIR."
  # Index [0] is the info entry for WC_DIR itself.
  return run_and_parse_info(wc_dir)[0]['Revision']
def hook_failure_message(hook_name):
  """Return the error message that the client prints for failure of the
  specified hook HOOK_NAME.  The wording changed with Subversion 1.5."""
  if svntest.main.options.server_minor_version < 5:
    return "'%s' hook failed with error output:\n" % hook_name
  # 1.5+ wording depends on which action the hook blocks.
  hook_actions = {
    "start-commit"       : "Commit",
    "pre-commit"         : "Commit",
    "pre-revprop-change" : "Revprop change",
    "pre-lock"           : "Lock",
    "pre-unlock"         : "Unlock",
  }
  action = hook_actions.get(hook_name)
  if action is None:
    message = "%s hook failed (exit code 1)" % (hook_name,)
  else:
    message = "%s blocked by %s hook (exit code 1)" % (action, hook_name)
  return message + " with output:\n"
def create_failing_hook(repo_dir, hook_name, text):
  """Install a HOOK_NAME hook in the repository at REPO_DIR that writes
  TEXT to stderr and exits with an error."""
  hook_path = os.path.join(repo_dir, 'hooks', hook_name)
  # repr() embeds TEXT safely even when it contains "%", quotes, etc.
  script_body = ('import sys\n'
                 'sys.stderr.write(' + repr(text) + ')\n'
                 'sys.exit(1)\n')
  main.create_python_hook_script(hook_path, script_body)
def enable_revprop_changes(repo_dir):
  """Enable revprop changes in the repository at REPO_DIR by installing a
  pre-revprop-change hook script that always succeeds (and, if appropriate,
  making it executable)."""
  main.create_python_hook_script(
    main.get_pre_revprop_change_hook_path(repo_dir),
    'import sys; sys.exit(0)')
def disable_revprop_changes(repo_dir):
  """Disable revprop changes in the repository at REPO_DIR by installing a
  pre-revprop-change hook script that prints "pre-revprop-change" followed
  by its arguments, and returns an error."""
  script = ('import sys\n'
            'sys.stderr.write("pre-revprop-change %s" % " ".join(sys.argv[1:6]))\n'
            'sys.exit(1)\n')
  hook_path = main.get_pre_revprop_change_hook_path(repo_dir)
  main.create_python_hook_script(hook_path, script)
def create_failing_post_commit_hook(repo_dir):
  """Install a post-commit hook script in the repository at REPO_DIR that
  always reports an error on stderr."""
  script = ('import sys\n'
            'sys.stderr.write("Post-commit hook failed")\n'
            'sys.exit(1)')
  main.create_python_hook_script(main.get_post_commit_hook_path(repo_dir),
                                 script)
# set_prop can be used for properties with NULL characters which are not
# handled correctly when passed to subprocess.Popen() and values like "*"
# which are not handled correctly on Windows.
def set_prop(name, value, path, expected_re_string=None):
  """Set a property with specified value"""
  # Values that start with '-', contain NULs, or any value on Windows, are
  # passed via a temp file ('propset -F') rather than on the command line.
  if value and (value[0] == '-' or '\x00' in value or sys.platform == 'win32'):
    from tempfile import mkstemp
    (fd, value_file_path) = mkstemp()
    value_file = open(value_file_path, 'wb')
    value_file.write(value)
    value_file.flush()
    value_file.close()
    exit_code, out, err = main.run_svn(expected_re_string, 'propset',
                                       '-F', value_file_path, name, path)
    # Close the mkstemp descriptor (separate from value_file's handle)
    # before removing the temp file.
    os.close(fd)
    os.remove(value_file_path)
  else:
    exit_code, out, err = main.run_svn(expected_re_string, 'propset',
                                       name, value, path)
  if expected_re_string:
    if not expected_re_string.startswith(".*"):
      expected_re_string = ".*(" + expected_re_string + ")"
    expected_err = verify.RegexOutput(expected_re_string, match_all=False)
    verify.verify_outputs(None, None, err, None, expected_err)
def check_prop(name, path, exp_out, revprop=None):
  """Verify that property NAME on PATH has a value of EXP_OUT.
  If REVPROP is not None, then it is a revision number and
  a revision property is sought."""
  if revprop is not None:
    revprop_options = ['--revprop', '-r', revprop]
  else:
    revprop_options = []
  # Not using run_svn because binary_mode must be set
  exit_code, out, err = main.run_command(main.svn_binary, None, 1, 'pg',
                                         '--strict', name, path,
                                         '--config-dir',
                                         main.default_config_dir,
                                         '--username', main.wc_author,
                                         '--password', main.wc_passwd,
                                         *revprop_options)
  # Exact comparison (no normalization): '--strict' output is raw bytes.
  if out != exp_out:
    print("svn pg --strict %s output does not match expected." % name)
    print("Expected standard output:  %s\n" % exp_out)
    print("Actual standard output:  %s\n" % out)
    raise Failure
def fill_file_with_lines(wc_path, line_nbr, line_descrip=None,
                         append=True):
  """Change the file at WC_PATH (adding some lines), and return its
  new contents.  LINE_NBR indicates the line number at which the new
  contents should assume that it's being appended.  LINE_DESCRIP is
  something like 'This is line' (the default) or 'Conflicting line'."""
  if line_descrip is None:
    line_descrip = "This is line"
  # Build three new lines, numbered starting at LINE_NBR.
  base_name = os.path.basename(wc_path)
  new_lines = []
  for n in range(line_nbr, line_nbr + 3):
    new_lines.append("%s %s in '%s'.\n" % (line_descrip, repr(n), base_name))
  contents = "".join(new_lines)
  # Write the new contents to the file.
  if append:
    main.file_append(wc_path, contents)
  else:
    main.file_write(wc_path, contents)
  return contents
def inject_conflict_into_wc(sbox, state_path, file_path,
                            expected_disk, expected_status, merged_rev):
  """Create a conflict at FILE_PATH by replacing its contents,
  committing the change, backdating it to its previous revision,
  changing its contents again, then updating it to merge in the
  previous change."""
  wc_dir = sbox.wc_dir
  # Make a change to the file.
  contents = fill_file_with_lines(file_path, 1, "This is line", append=False)
  # Commit the changed file, first taking note of the current revision.
  prev_rev = expected_status.desc[state_path].wc_rev
  expected_output = wc.State(wc_dir, {
    state_path : wc.StateItem(verb='Sending'),
    })
  if expected_status:
    expected_status.tweak(state_path, wc_rev=merged_rev)
  run_and_verify_commit(wc_dir, expected_output, expected_status,
                        None, file_path)
  # Backdate the file.
  exit_code, output, errput = main.run_svn(None, "up", "-r", str(prev_rev),
                                           file_path)
  if expected_status:
    expected_status.tweak(state_path, wc_rev=prev_rev)
  # Make a conflicting change to the file, and backdate the file.
  conflicting_contents = fill_file_with_lines(file_path, 1, "Conflicting line",
                                              append=False)
  # Merge the previous change into the file to produce a conflict.
  if expected_disk:
    # Clear the expected contents first; the real conflict text is
    # injected by inject_conflict_into_expected_state below.
    expected_disk.tweak(state_path, contents="")
  expected_output = wc.State(wc_dir, {
    state_path : wc.StateItem(status='C '),
    })
  inject_conflict_into_expected_state(state_path,
                                      expected_disk, expected_status,
                                      conflicting_contents, contents,
                                      merged_rev)
  # The update to MERGED_REV collides with the local conflicting edit.
  exit_code, output, errput = main.run_svn(None, "up", "-r", str(merged_rev),
                                           file_path)
  if expected_status:
    expected_status.tweak(state_path, wc_rev=merged_rev)
def inject_conflict_into_expected_state(state_path,
                                        expected_disk, expected_status,
                                        wc_text, merged_text, merged_rev):
  """Update the EXPECTED_DISK and EXPECTED_STATUS trees for the
  conflict at STATE_PATH (ignored if None).  WC_TEXT, MERGED_TEXT, and
  MERGED_REV are used to determine the contents of the conflict (the
  text parameters should be newline-terminated)."""
  if expected_disk:
    marker = make_conflict_marker_text(wc_text, merged_text, merged_rev)
    existing = expected_disk.desc[state_path].contents or ""
    expected_disk.tweak(state_path, contents=existing + marker)
  if expected_status:
    expected_status.tweak(state_path, status='C ')
def make_conflict_marker_text(wc_text, merged_text, merged_rev):
  """Return the conflict marker text described by WC_TEXT (the current
  text in the working copy, MERGED_TEXT (the conflicting text merged
  in), and MERGED_REV (the revision from whence the conflicting text
  came)."""
  parts = ["<<<<<<< .working\n", wc_text,
           "=======\n", merged_text,
           ">>>>>>> .merge-right.r", str(merged_rev), "\n"]
  return "".join(parts)
def build_greek_tree_conflicts(sbox):
  """Create a working copy that has tree-conflict markings.
  After this function has been called, sbox.wc_dir is a working
  copy that has specific tree-conflict markings.

  In particular, this does two conflicting sets of edits and performs an
  update so that tree conflicts appear.

  Note that this function calls sbox.build() because it needs a clean sbox.
  So, there is no need to call sbox.build() before this.

  The conflicts are the result of an 'update' on the following changes:

                Incoming    Local

    A/D/G/pi    text-mod    del
    A/D/G/rho   del         text-mod
    A/D/G/tau   del         del

  This function is useful for testing that tree-conflicts are handled
  properly once they have appeared, e.g. that commits are blocked, that the
  info output is correct, etc.

  See also the tree-conflicts tests using deep_trees in various other
  .py files, and tree_conflict_tests.py.
  """
  sbox.build()
  wc_dir = sbox.wc_dir
  j = os.path.join
  G = j(wc_dir, 'A', 'D', 'G')
  pi = j(G, 'pi')
  rho = j(G, 'rho')
  tau = j(G, 'tau')
  # Make incoming changes and "store them away" with a commit.
  main.file_append(pi, "Incoming edit.\n")
  main.run_svn(None, 'del', rho)
  main.run_svn(None, 'del', tau)
  expected_output = wc.State(wc_dir, {
    'A/D/G/pi'          : Item(verb='Sending'),
    'A/D/G/rho'         : Item(verb='Deleting'),
    'A/D/G/tau'         : Item(verb='Deleting'),
    })
  expected_status = get_virginal_state(wc_dir, 1)
  expected_status.tweak('A/D/G/pi', wc_rev='2')
  expected_status.remove('A/D/G/rho', 'A/D/G/tau')
  run_and_verify_commit(wc_dir, expected_output, expected_status, None,
                        '-m', 'Incoming changes.', wc_dir )
  # Update back to the pristine state ("time-warp").
  expected_output = wc.State(wc_dir, {
    'A/D/G/pi'          : Item(status='U '),
    'A/D/G/rho'         : Item(status='A '),
    'A/D/G/tau'         : Item(status='A '),
    })
  expected_disk = main.greek_state
  expected_status = get_virginal_state(wc_dir, 1)
  run_and_verify_update(wc_dir, expected_output, expected_disk,
                        expected_status, None, None, None, None, None, False,
                        '-r', '1', wc_dir)
  # Make local changes
  main.run_svn(None, 'del', pi)
  main.file_append(rho, "Local edit.\n")
  main.run_svn(None, 'del', tau)
  # Update, receiving the incoming changes on top of the local changes,
  # causing tree conflicts.  Don't check for any particular result: that is
  # the job of other tests.
  run_and_verify_svn(None, verify.AnyOutput, [], 'update', wc_dir)
def make_deep_trees(base):
  """Helper function for deep trees conflicts.  Create a set of trees,
  each in its own "container" dir.  Any conflicts can be tested separately
  in each container.
  """
  join = os.path.join
  # Create one container dir per depth pattern.
  F = join(base, 'F')
  D = join(base, 'D')
  DF = join(base, 'DF')
  DD = join(base, 'DD')
  DDF = join(base, 'DDF')
  DDD = join(base, 'DDD')
  os.makedirs(F)
  os.makedirs(join(D, 'D1'))
  os.makedirs(join(DF, 'D1'))
  os.makedirs(join(DD, 'D1', 'D2'))
  os.makedirs(join(DDF, 'D1', 'D2'))
  os.makedirs(join(DDD, 'D1', 'D2', 'D3'))
  # Create the leaf files in the containers that have one.
  for path, name in ((join(F, 'alpha'), 'alpha'),
                     (join(DF, 'D1', 'beta'), 'beta'),
                     (join(DDF, 'D1', 'D2', 'gamma'), 'gamma')):
    main.file_append(path, "This is the file '%s'.\n" % name)
def add_deep_trees(sbox, base_dir_name):
  """Prepare a "deep_trees" within a given directory.

  The directory <sbox.wc_dir>/<base_dir_name> is created and a deep_tree
  is created within.  The items are only added, a commit has to be
  called separately, if needed.

  <base_dir_name> will thus be a container for the set of containers
  mentioned in make_deep_trees().
  """
  base = os.path.join(sbox.wc_dir, base_dir_name)
  make_deep_trees(base)
  main.run_svn(None, 'add', base)
# Shorthand used by all the deep-trees state definitions below.
Item = wc.StateItem

# initial deep trees state
deep_trees_virginal_state = wc.State('', {
  'F'               : Item(),
  'F/alpha'         : Item("This is the file 'alpha'.\n"),
  'D'               : Item(),
  'D/D1'            : Item(),
  'DF'              : Item(),
  'DF/D1'           : Item(),
  'DF/D1/beta'      : Item("This is the file 'beta'.\n"),
  'DD'              : Item(),
  'DD/D1'           : Item(),
  'DD/D1/D2'        : Item(),
  'DDF'             : Item(),
  'DDF/D1'          : Item(),
  'DDF/D1/D2'       : Item(),
  'DDF/D1/D2/gamma' : Item("This is the file 'gamma'.\n"),
  'DDD'             : Item(),
  'DDD/D1'          : Item(),
  'DDD/D1/D2'       : Item(),
  'DDD/D1/D2/D3'    : Item(),
  })
# Many actions on deep trees and their resulting states...

def deep_trees_leaf_edit(base):
  """Helper function for deep trees test cases.  Append text to files,
  create new files in empty directories, and change leaf node properties."""
  j = os.path.join
  alpha = j(base, 'F', 'alpha')
  beta = j(base, 'DF', 'D1', 'beta')
  gamma = j(base, 'DDF', 'D1', 'D2', 'gamma')
  # Edit the existing leaf files and set a property on them.
  main.file_append(alpha, "More text for file alpha.\n")
  main.file_append(beta, "More text for file beta.\n")
  main.file_append(gamma, "More text for file gamma.\n")
  run_and_verify_svn(None, verify.AnyOutput, [],
                     'propset', 'prop1', '1', alpha, beta, gamma)
  # Set a property on the leaf directories.
  d1 = j(base, 'D', 'D1')
  d2 = j(base, 'DD', 'D1', 'D2')
  d3 = j(base, 'DDD', 'D1', 'D2', 'D3')
  run_and_verify_svn(None, verify.AnyOutput, [],
                     'propset', 'prop1', '1', d1, d2, d3)
  # Add a brand-new file inside each previously-empty leaf directory.
  delta = j(d1, 'delta')
  epsilon = j(d2, 'epsilon')
  zeta = j(d3, 'zeta')
  main.file_append(delta, "This is the file 'delta'.\n")
  main.file_append(epsilon, "This is the file 'epsilon'.\n")
  main.file_append(zeta, "This is the file 'zeta'.\n")
  run_and_verify_svn(None, verify.AnyOutput, [],
                     'add', delta, epsilon, zeta)
# deep trees state after a call to deep_trees_leaf_edit
deep_trees_after_leaf_edit = wc.State('', {
  'F'                 : Item(),
  'F/alpha'           : Item("This is the file 'alpha'.\nMore text for file alpha.\n"),
  'D'                 : Item(),
  'D/D1'              : Item(),
  'D/D1/delta'        : Item("This is the file 'delta'.\n"),
  'DF'                : Item(),
  'DF/D1'             : Item(),
  'DF/D1/beta'        : Item("This is the file 'beta'.\nMore text for file beta.\n"),
  'DD'                : Item(),
  'DD/D1'             : Item(),
  'DD/D1/D2'          : Item(),
  'DD/D1/D2/epsilon'  : Item("This is the file 'epsilon'.\n"),
  'DDF'               : Item(),
  'DDF/D1'            : Item(),
  'DDF/D1/D2'         : Item(),
  'DDF/D1/D2/gamma'   : Item("This is the file 'gamma'.\nMore text for file gamma.\n"),
  'DDD'               : Item(),
  'DDD/D1'            : Item(),
  'DDD/D1/D2'         : Item(),
  'DDD/D1/D2/D3'      : Item(),
  'DDD/D1/D2/D3/zeta' : Item("This is the file 'zeta'.\n"),
  })
def deep_trees_leaf_del(base):
  """Helper function for deep trees test cases.  Delete files and empty
  dirs."""
  j = os.path.join
  # Leaf targets, one per container, in the canonical order.
  targets = [j(base, 'F', 'alpha'),
             j(base, 'D', 'D1'),
             j(base, 'DF', 'D1', 'beta'),
             j(base, 'DD', 'D1', 'D2'),
             j(base, 'DDF', 'D1', 'D2', 'gamma'),
             j(base, 'DDD', 'D1', 'D2', 'D3')]
  main.run_svn(None, 'rm', *targets)
# deep trees state after a call to deep_trees_leaf_del
deep_trees_after_leaf_del = wc.State('', {
  'F'               : Item(),
  'D'               : Item(),
  'DF'              : Item(),
  'DF/D1'           : Item(),
  'DD'              : Item(),
  'DD/D1'           : Item(),
  'DDF'             : Item(),
  'DDF/D1'          : Item(),
  'DDF/D1/D2'       : Item(),
  'DDD'             : Item(),
  'DDD/D1'          : Item(),
  'DDD/D1/D2'       : Item(),
  })
# deep trees state after a call to deep_trees_leaf_del with no commit
def deep_trees_after_leaf_del_no_ci(wc_dir):
  # Which expected state applies depends on the working-copy format.
  if not svntest.main.wc_is_singledb(wc_dir):
    return deep_trees_empty_dirs
  return deep_trees_after_leaf_del
def deep_trees_tree_del(base):
  """Helper function for deep trees test cases.  Delete top-level dirs."""
  j = os.path.join
  # One top-level target per container, in the canonical order.
  targets = [j(base, 'F', 'alpha'),
             j(base, 'D', 'D1'),
             j(base, 'DF', 'D1'),
             j(base, 'DD', 'D1'),
             j(base, 'DDF', 'D1'),
             j(base, 'DDD', 'D1')]
  main.run_svn(None, 'rm', *targets)
def deep_trees_rmtree(base):
  """Helper function for deep trees test cases.  Delete top-level dirs
  with rmtree instead of svn del."""
  j = os.path.join
  # F's target is a plain file; all the other targets are directories.
  os.unlink(j(base, 'F', 'alpha'))
  for container in ('D', 'DF', 'DD', 'DDF', 'DDD'):
    main.safe_rmtree(j(base, container, 'D1'))
# deep trees state after a call to deep_trees_tree_del
deep_trees_after_tree_del = wc.State('', {
  'F'               : Item(),
  'D'               : Item(),
  'DF'              : Item(),
  'DD'              : Item(),
  'DDF'             : Item(),
  'DDD'             : Item(),
  })

# deep trees state without any files
deep_trees_empty_dirs = wc.State('', {
  'F'               : Item(),
  'D'               : Item(),
  'D/D1'            : Item(),
  'DF'              : Item(),
  'DF/D1'           : Item(),
  'DD'              : Item(),
  'DD/D1'           : Item(),
  'DD/D1/D2'        : Item(),
  'DDF'             : Item(),
  'DDF/D1'          : Item(),
  'DDF/D1/D2'       : Item(),
  'DDD'             : Item(),
  'DDD/D1'          : Item(),
  'DDD/D1/D2'       : Item(),
  'DDD/D1/D2/D3'    : Item(),
  })
# deep trees state after a call to deep_trees_tree_del with no commit
def deep_trees_after_tree_del_no_ci(wc_dir):
  # Which expected state applies depends on the working-copy format.
  if not svntest.main.wc_is_singledb(wc_dir):
    return deep_trees_empty_dirs
  return deep_trees_after_tree_del
def deep_trees_tree_del_repos(base):
  """Helper function for deep trees test cases.  Delete top-level dirs,
  directly in the repository (URL-based 'svn rm' with an immediate
  commit)."""
  j = '/'.join
  F   = j([base, 'F', 'alpha'])
  D   = j([base, 'D', 'D1'])
  DF  = j([base, 'DF', 'D1'])
  DD  = j([base, 'DD', 'D1'])
  DDF = j([base, 'DDF', 'D1'])
  DDD = j([base, 'DDD', 'D1'])
  # BUG FIX: this ran 'mkdir', which contradicts both the function's name
  # and its docstring ("Delete top-level dirs") and would fail on the
  # already-existing paths.  It must 'rm' the targets.
  main.run_svn(None, 'rm', '-m', '', F, D, DF, DD, DDF, DDD)
# Expected merge/update/switch output.
deep_trees_conflict_output = wc.State('', {
'F/alpha' : Item(status=' ', treeconflict='C'),
'D/D1' : Item(status=' ', treeconflict='C'),
'DF/D1' : Item(status=' ', treeconflict='C'),
'DD/D1' : Item(status=' ', treeconflict='C'),
'DDF/D1' : Item(status=' ', treeconflict='C'),
'DDD/D1' : Item(status=' ', treeconflict='C'),
})
deep_trees_conflict_output_skipped = wc.State('', {
'D/D1' : Item(verb='Skipped'),
'F/alpha' : Item(verb='Skipped'),
'DD/D1' : Item(verb='Skipped'),
'DF/D1' : Item(verb='Skipped'),
'DDD/D1' : Item(verb='Skipped'),
'DDF/D1' : Item(verb='Skipped'),
})
# Expected status output after merge/update/switch.
deep_trees_status_local_tree_del = wc.State('', {
'' : Item(status=' ', wc_rev=3),
'D' : Item(status=' ', wc_rev=3),
'D/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
'DD' : Item(status=' ', wc_rev=3),
'DD/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
'DD/D1/D2' : Item(status='D ', wc_rev=2),
'DDD' : Item(status=' ', wc_rev=3),
'DDD/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
'DDD/D1/D2' : Item(status='D ', wc_rev=2),
'DDD/D1/D2/D3' : Item(status='D ', wc_rev=2),
'DDF' : Item(status=' ', wc_rev=3),
'DDF/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
'DDF/D1/D2' : Item(status='D ', wc_rev=2),
'DDF/D1/D2/gamma' : Item(status='D ', wc_rev=2),
'DF' : Item(status=' ', wc_rev=3),
'DF/D1' : Item(status='D ', wc_rev=2, treeconflict='C'),
'DF/D1/beta' : Item(status='D ', wc_rev=2),
'F' : Item(status=' ', wc_rev=3),
'F/alpha' : Item(status='D ', wc_rev=2, treeconflict='C'),
})
deep_trees_status_local_leaf_edit = wc.State('', {
'' : Item(status=' ', wc_rev=3),
'D' : Item(status=' ', wc_rev=3),
'D/D1' : Item(status=' M', wc_rev=2, treeconflict='C'),
'D/D1/delta' : Item(status='A ', wc_rev=0),
'DD' : Item(status=' ', wc_rev=3),
'DD/D1' : Item(status=' ', wc_rev=2, treeconflict='C'),
'DD/D1/D2' : Item(status=' M', wc_rev=2),
'DD/D1/D2/epsilon' : Item(status='A ', wc_rev=0),
'DDD' : Item(status=' ', wc_rev=3),
'DDD/D1' : Item(status=' ', wc_rev=2, treeconflict='C'),
'DDD/D1/D2' : Item(status=' ', wc_rev=2),
'DDD/D1/D2/D3' : Item(status=' M', wc_rev=2),
'DDD/D1/D2/D3/zeta' : Item(status='A ', wc_rev=0),
'DDF' : Item(status=' ', wc_rev=3),
'DDF/D1' : Item(status=' ', wc_rev=2, treeconflict='C'),
'DDF/D1/D2' : Item(status=' ', wc_rev=2),
'DDF/D1/D2/gamma' : Item(status='MM', wc_rev=2),
'DF' : Item(status=' ', wc_rev=3),
'DF/D1' : Item(status=' ', wc_rev=2, treeconflict='C'),
'DF/D1/beta' : Item(status='MM', wc_rev=2),
'F' : Item(status=' ', wc_rev=3),
'F/alpha' : Item(status='MM', wc_rev=2, treeconflict='C'),
})
class DeepTreesTestCase:
  """Describes one tree-conflicts test case.
  See deep_trees_run_tests_scheme_for_update(), ..._switch(), ..._merge().

  The name field is the subdirectory name in which the test should be run.

  The local_action and incoming_action are the functions to run
  to construct the local changes and incoming changes, respectively.
  See deep_trees_leaf_edit, deep_trees_tree_del, etc.

  The expected_* and error_re_string arguments are described in functions
  run_and_verify_[update|switch|merge]
  except expected_info, which is a dict that has path keys with values
  that are dicts as passed to run_and_verify_info():
    expected_info = {
      'F/alpha' : {
        'Revision' : '3',
        'Tree conflict' :
          '^local delete, incoming edit upon update'
          + ' Source  left: .file.*/F/alpha@2'
          + ' Source right: .file.*/F/alpha@3$',
      },
      'DF/D1' : {
        'Tree conflict' :
          '^local delete, incoming edit upon update'
          + ' Source  left: .dir.*/DF/D1@2'
          + ' Source right: .dir.*/DF/D1@3$',
      },
      ...
    }

  Note: expected_skip is only used in merge, i.e. using
  deep_trees_run_tests_scheme_for_merge.
  """

  def __init__(self, name, local_action, incoming_action,
               expected_output = None, expected_disk = None,
               expected_status = None, expected_skip = None,
               error_re_string = None,
               commit_block_string = ".*remains in conflict.*",
               expected_info = None):
    # Plain data holder: the run_tests_scheme_* drivers read these fields.
    self.name = name
    self.local_action = local_action
    self.incoming_action = incoming_action
    self.expected_output = expected_output
    self.expected_disk = expected_disk
    self.expected_status = expected_status
    self.expected_skip = expected_skip
    self.error_re_string = error_re_string
    # Regexp matched against stderr of the post-conflict commit attempt.
    self.commit_block_string = commit_block_string
    self.expected_info = expected_info
def deep_trees_run_tests_scheme_for_update(sbox, greater_scheme):
  """
  Runs a given list of tests for conflicts occurring at an update operation.

  This function wants to save time and perform a number of different
  test cases using just a single repository and performing just one commit
  for all test cases instead of one for each test case.

  1) Each test case is initialized in a separate subdir. Each subdir
     again contains one set of "deep_trees", being separate container
     dirs for different depths of trees (F, D, DF, DD, DDF, DDD).

  2) A commit is performed across all test cases and depths.
     (our initial state, -r2)

  3) In each test case subdir (e.g. "local_tree_del_incoming_leaf_edit"),
     its *incoming* action is performed (e.g. "deep_trees_leaf_edit"), in
     each of the different depth trees (F, D, DF, ... DDD).

  4) A commit is performed across all test cases and depths:
     our "incoming" state is "stored away in the repository for now",
     -r3.

  5) All test case dirs and contained deep_trees are time-warped
     (updated) back to -r2, the initial state containing deep_trees.

  6) In each test case subdir (e.g. "local_tree_del_incoming_leaf_edit"),
     its *local* action is performed (e.g. "deep_trees_leaf_del"), in
     each of the different depth trees (F, D, DF, ... DDD).

  7) An update to -r3 is performed across all test cases and depths.
     This causes tree-conflicts between the "local" state in the working
     copy and the "incoming" state from the repository, -r3.

  8) A commit is performed in each separate container, to verify
     that each tree-conflict indeed blocks a commit.

  The sbox parameter is just the sbox passed to a test function. No need
  to call sbox.build(), since it is called (once) within this function.

  The "table" greater_scheme models all of the different test cases
  that should be run using a single repository.

  greater_scheme is a list of DeepTreesTestCase items, which define complete
  test setups, so that they can be performed as described above.
  """

  j = os.path.join

  if not sbox.is_built():
    sbox.build()
  wc_dir = sbox.wc_dir

  # 1) create directories
  for test_case in greater_scheme:
    try:
      add_deep_trees(sbox, test_case.name)
    except:
      print("ERROR IN: Tests scheme for update: "
          + "while setting up deep trees in '%s'" % test_case.name)
      raise

  # 2) commit initial state
  main.run_svn(None, 'commit', '-m', 'initial state', wc_dir)

  # 3) apply incoming changes
  for test_case in greater_scheme:
    try:
      test_case.incoming_action(j(sbox.wc_dir, test_case.name))
    except:
      print("ERROR IN: Tests scheme for update: "
          + "while performing incoming action in '%s'" % test_case.name)
      raise

  # 4) commit incoming changes
  main.run_svn(None, 'commit', '-m', 'incoming changes', wc_dir)

  # 5) time-warp back to -r2
  main.run_svn(None, 'update', '-r2', wc_dir)

  # 6) apply local changes
  for test_case in greater_scheme:
    try:
      test_case.local_action(j(wc_dir, test_case.name))
    except:
      print("ERROR IN: Tests scheme for update: "
          + "while performing local action in '%s'" % test_case.name)
      raise

  # 7) update to -r3, conflicting with incoming changes.
  #    A lot of different things are expected.
  #    Do separate update operations for each test case.
  for test_case in greater_scheme:
    try:
      base = j(wc_dir, test_case.name)

      x_out = test_case.expected_output
      if x_out is not None:
        x_out = x_out.copy()
        x_out.wc_dir = base

      x_disk = test_case.expected_disk

      x_status = test_case.expected_status
      if x_status is not None:
        # Work on a copy so the caller's expected_status object is not
        # mutated (previously the copy's result was discarded and the
        # shared object had its wc_dir overwritten).
        x_status = x_status.copy()
        x_status.wc_dir = base

      run_and_verify_update(base, x_out, x_disk, None,
                            error_re_string = test_case.error_re_string)
      if x_status:
        run_and_verify_unquiet_status(base, x_status)

      x_info = test_case.expected_info or {}
      for path in x_info:
        run_and_verify_info([x_info[path]], j(base, path))
    except:
      print("ERROR IN: Tests scheme for update: "
          + "while verifying in '%s'" % test_case.name)
      raise

  # 8) Verify that commit fails.
  for test_case in greater_scheme:
    try:
      base = j(wc_dir, test_case.name)

      x_status = test_case.expected_status
      if x_status is not None:
        # Same as above: never mutate the caller's expected_status.
        x_status = x_status.copy()
        x_status.wc_dir = base

      run_and_verify_commit(base, None, x_status,
                            test_case.commit_block_string,
                            base)
    except:
      print("ERROR IN: Tests scheme for update: "
          + "while checking commit-blocking in '%s'" % test_case.name)
      raise
def deep_trees_skipping_on_update(sbox, test_case, skip_paths,
                                  chdir_skip_paths):
  """
  Create tree conflicts, then update again, expecting the existing tree
  conflicts to be skipped.

  SKIP_PATHS is a list of paths, relative to the "base dir", for which
  "update" on the "base dir" should report as skipped.

  CHDIR_SKIP_PATHS is a list of (target-path, skipped-path) pairs for which
  an update of "target-path" (relative to the "base dir") should result in
  "skipped-path" (relative to "target-path") being reported as skipped.
  """

  """FURTHER_ACTION is a function that will make a further modification to
  each target, this being the modification that we expect to be skipped. The
  function takes the "base dir" (the WC path to the test case directory) as
  its only argument."""
  further_action = deep_trees_tree_del_repos

  j = os.path.join
  wc_dir = sbox.wc_dir
  base = j(wc_dir, test_case.name)

  # Initialize: generate conflicts. (We do not check anything here.)
  # Reuse the update scheme with all expectations stripped so only the
  # conflict set-up happens.
  setup_case = DeepTreesTestCase(test_case.name,
                                 test_case.local_action,
                                 test_case.incoming_action,
                                 None,
                                 None,
                                 None)

  deep_trees_run_tests_scheme_for_update(sbox, [setup_case])

  # Make a further change to each target in the repository so there is a new
  # revision to update to. (This is r4.)
  further_action(sbox.repo_url + '/' + test_case.name)

  # Update whole working copy, expecting the nodes still in conflict to be
  # skipped.

  x_out = test_case.expected_output
  if x_out != None:
    x_out = x_out.copy()
    x_out.wc_dir = base

  x_disk = test_case.expected_disk

  x_status = test_case.expected_status
  if x_status != None:
    # Copy before mutating so the caller's expected_status stays pristine.
    x_status = x_status.copy()
    x_status.wc_dir = base
    # Account for nodes that were updated by further_action
    x_status.tweak('', 'D', 'F', 'DD', 'DF', 'DDD', 'DDF', wc_rev=4)

  run_and_verify_update(base, x_out, x_disk, None,
                        error_re_string = test_case.error_re_string)

  run_and_verify_unquiet_status(base, x_status)

  # Try to update each in-conflict subtree. Expect a 'Skipped' output for
  # each, and the WC status to be unchanged.
  for path in skip_paths:
    run_and_verify_update(j(base, path),
                          wc.State(base, {path : Item(verb='Skipped')}),
                          None, None)

  run_and_verify_unquiet_status(base, x_status)

  # Try to update each in-conflict subtree. Expect a 'Skipped' output for
  # each, and the WC status to be unchanged.
  # This time, cd to the subdir before updating it.
  was_cwd = os.getcwd()
  for path, skipped in chdir_skip_paths:
    # 'skipped' may name one path or a list of paths expected to be skipped.
    if isinstance(skipped, list):
      expected_skip = {}
      for p in skipped:
        expected_skip[p] = Item(verb='Skipped')
    else:
      expected_skip = {skipped : Item(verb='Skipped')}
    p = j(base, path)
    run_and_verify_update(p,
                          wc.State(p, expected_skip),
                          None, None)
  os.chdir(was_cwd)

  run_and_verify_unquiet_status(base, x_status)

  # Verify that commit still fails.
  for path, skipped in chdir_skip_paths:

    run_and_verify_commit(j(base, path), None, None,
                          test_case.commit_block_string,
                          base)

    run_and_verify_unquiet_status(base, x_status)
def deep_trees_run_tests_scheme_for_switch(sbox, greater_scheme):
  """
  Runs a given list of tests for conflicts occurring at a switch operation.

  This function wants to save time and perform a number of different
  test cases using just a single repository and performing just one commit
  for all test cases instead of one for each test case.

  1) Each test case is initialized in a separate subdir. Each subdir
     again contains two subdirs: one "local" and one "incoming" for
     the switch operation. These contain a set of deep_trees each.

  2) A commit is performed across all test cases and depths.
     (our initial state, -r2)

  3) In each test case subdir's incoming subdir, the
     incoming actions are performed.

  4) A commit is performed across all test cases and depths. (-r3)

  5) In each test case subdir's local subdir, the local actions are
     performed. They remain uncommitted in the working copy.

  6) In each test case subdir's local dir, a switch is performed to its
     corresponding incoming dir.
     This causes conflicts between the "local" state in the working
     copy and the "incoming" state from the incoming subdir (still -r3).

  7) A commit is performed in each separate container, to verify
     that each tree-conflict indeed blocks a commit.

  The sbox parameter is just the sbox passed to a test function. No need
  to call sbox.build(), since it is called (once) within this function.

  The "table" greater_scheme models all of the different test cases
  that should be run using a single repository.

  greater_scheme is a list of DeepTreesTestCase items, which define complete
  test setups, so that they can be performed as described above.
  """

  j = os.path.join

  if not sbox.is_built():
    sbox.build()
  wc_dir = sbox.wc_dir

  # 1) Create directories.
  for test_case in greater_scheme:
    try:
      base = j(sbox.wc_dir, test_case.name)
      os.makedirs(base)
      make_deep_trees(j(base, "local"))
      make_deep_trees(j(base, "incoming"))
      main.run_svn(None, 'add', base)
    except:
      print("ERROR IN: Tests scheme for switch: "
          + "while setting up deep trees in '%s'" % test_case.name)
      raise

  # 2) Commit initial state (-r2).
  main.run_svn(None, 'commit', '-m', 'initial state', wc_dir)

  # 3) Apply incoming changes
  for test_case in greater_scheme:
    try:
      test_case.incoming_action(j(sbox.wc_dir, test_case.name, "incoming"))
    except:
      print("ERROR IN: Tests scheme for switch: "
          + "while performing incoming action in '%s'" % test_case.name)
      raise

  # 4) Commit all changes (-r3).
  main.run_svn(None, 'commit', '-m', 'incoming changes', wc_dir)

  # 5) Apply local changes in their according subdirs.
  for test_case in greater_scheme:
    try:
      test_case.local_action(j(sbox.wc_dir, test_case.name, "local"))
    except:
      print("ERROR IN: Tests scheme for switch: "
          + "while performing local action in '%s'" % test_case.name)
      raise

  # 6) switch the local dir to the incoming url, conflicting with incoming
  #    changes. A lot of different things are expected.
  #    Do separate switch operations for each test case.
  for test_case in greater_scheme:
    try:
      local = j(wc_dir, test_case.name, "local")
      incoming = sbox.repo_url + "/" + test_case.name + "/incoming"

      x_out = test_case.expected_output
      if x_out is not None:
        x_out = x_out.copy()
        x_out.wc_dir = local

      x_disk = test_case.expected_disk

      x_status = test_case.expected_status
      if x_status is not None:
        # Work on a copy so the caller's expected_status object is not
        # mutated (previously the copy's result was discarded and the
        # shared object had its wc_dir overwritten).
        x_status = x_status.copy()
        x_status.wc_dir = local

      run_and_verify_switch(local, local, incoming, x_out, x_disk, None,
                            test_case.error_re_string, None, None, None,
                            None, False, '--ignore-ancestry')
      run_and_verify_unquiet_status(local, x_status)

      x_info = test_case.expected_info or {}
      for path in x_info:
        run_and_verify_info([x_info[path]], j(local, path))
    except:
      print("ERROR IN: Tests scheme for switch: "
          + "while verifying in '%s'" % test_case.name)
      raise

  # 7) Verify that commit fails.
  for test_case in greater_scheme:
    try:
      local = j(wc_dir, test_case.name, 'local')

      x_status = test_case.expected_status
      if x_status is not None:
        # Same as above: never mutate the caller's expected_status.
        x_status = x_status.copy()
        x_status.wc_dir = local

      run_and_verify_commit(local, None, x_status,
                            test_case.commit_block_string,
                            local)
    except:
      print("ERROR IN: Tests scheme for switch: "
          + "while checking commit-blocking in '%s'" % test_case.name)
      raise
def deep_trees_run_tests_scheme_for_merge(sbox, greater_scheme,
                                          do_commit_local_changes,
                                          do_commit_conflicts=True,
                                          ignore_ancestry=False):
  """
  Runs a given list of tests for conflicts occurring at a merge operation.

  This function wants to save time and perform a number of different
  test cases using just a single repository and performing just one commit
  for all test cases instead of one for each test case.

  1) Each test case is initialized in a separate subdir. Each subdir
     initially contains another subdir, called "incoming", which
     contains a set of deep_trees.

  2) A commit is performed across all test cases and depths.
     (a pre-initial state)

  3) In each test case subdir, the "incoming" subdir is copied to "local",
     via the `svn copy' command. Each test case's subdir now has two sub-
     dirs: "local" and "incoming", initial states for the merge operation.

  4) An update is performed across all test cases and depths, so that the
     copies made in 3) are pulled into the wc.

  5) In each test case's "incoming" subdir, the incoming action is
     performed.

  6) A commit is performed across all test cases and depths, to commit
     the incoming changes.
     If do_commit_local_changes is True, this becomes step 7 (swap steps).

  7) In each test case's "local" subdir, the local_action is performed.
     If do_commit_local_changes is True, this becomes step 6 (swap steps).
     Then, in effect, the local changes are committed as well.

  8) In each test case subdir, the "incoming" subdir is merged into the
     "local" subdir.  If ignore_ancestry is True, then the merge is done
     with the --ignore-ancestry option, so mergeinfo is neither considered
     nor recorded.  This causes conflicts between the "local" state in the
     working copy and the "incoming" state from the incoming subdir.

  9) If do_commit_conflicts is True, then a commit is performed in each
     separate container, to verify that each tree-conflict indeed blocks
     a commit.

  The sbox parameter is just the sbox passed to a test function. No need
  to call sbox.build(), since it is called (once) within this function.

  The "table" greater_scheme models all of the different test cases
  that should be run using a single repository.

  greater_scheme is a list of DeepTreesTestCase items, which define complete
  test setups, so that they can be performed as described above.
  """

  j = os.path.join

  if not sbox.is_built():
    sbox.build()
  wc_dir = sbox.wc_dir

  # 1) Create directories.
  for test_case in greater_scheme:
    try:
      base = j(sbox.wc_dir, test_case.name)
      os.makedirs(base)
      make_deep_trees(j(base, "incoming"))
      main.run_svn(None, 'add', base)
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while setting up deep trees in '%s'" % test_case.name)
      raise

  # 2) Commit pre-initial state (-r2).
  main.run_svn(None, 'commit', '-m', 'pre-initial state', wc_dir)

  # 3) Copy "incoming" to "local".
  for test_case in greater_scheme:
    try:
      base_url = sbox.repo_url + "/" + test_case.name
      incoming_url = base_url + "/incoming"
      local_url = base_url + "/local"
      main.run_svn(None, 'cp', incoming_url, local_url, '-m',
                   'copy incoming to local')
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while copying deep trees in '%s'" % test_case.name)
      raise

  # 4) Update to load all of the "/local" subdirs into the working copies.
  try:
    main.run_svn(None, 'up', sbox.wc_dir)
  except:
    print("ERROR IN: Tests scheme for merge: "
        + "while updating local subdirs")
    raise

  # 5) Perform incoming actions
  for test_case in greater_scheme:
    try:
      test_case.incoming_action(j(sbox.wc_dir, test_case.name, "incoming"))
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while performing incoming action in '%s'" % test_case.name)
      raise

  # 6) Commit the incoming actions now, unless the local changes are to be
  #    committed too (in which case a single commit after step 7 covers
  #    both).
  if not do_commit_local_changes:
    try:
      main.run_svn(None, 'ci', '-m', 'Committing incoming actions',
                   sbox.wc_dir)
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while committing incoming actions")
      raise

  # 7) or 6) Perform all local actions.
  for test_case in greater_scheme:
    try:
      test_case.local_action(j(sbox.wc_dir, test_case.name, "local"))
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while performing local action in '%s'" % test_case.name)
      raise

  # 7) Commit incoming and local actions together, when requested.
  if do_commit_local_changes:
    try:
      main.run_svn(None, 'ci', '-m', 'Committing incoming and local actions',
                   sbox.wc_dir)
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while committing incoming and local actions")
      raise

  # 8) Merge all "incoming" subdirs to their respective "local" subdirs.
  #    This creates conflicts between the local changes in the "local" wc
  #    subdirs and the incoming states committed in the "incoming" subdirs.
  for test_case in greater_scheme:
    try:
      local = j(sbox.wc_dir, test_case.name, "local")
      incoming = sbox.repo_url + "/" + test_case.name + "/incoming"

      x_out = test_case.expected_output
      if x_out is not None:
        x_out = x_out.copy()
        x_out.wc_dir = local

      x_disk = test_case.expected_disk

      x_status = test_case.expected_status
      if x_status is not None:
        # Work on a copy so the caller's expected_status object is not
        # mutated (previously the copy's result was discarded and the
        # shared object had its wc_dir overwritten).
        x_status = x_status.copy()
        x_status.wc_dir = local

      x_skip = test_case.expected_skip
      if x_skip is not None:
        # Same reasoning as for x_status above.
        x_skip = x_skip.copy()
        x_skip.wc_dir = local

      varargs = (local,)
      if ignore_ancestry:
        varargs = varargs + ('--ignore-ancestry',)

      run_and_verify_merge(local, None, None, incoming, None,
                           x_out, None, None, x_disk, None, x_skip,
                           test_case.error_re_string,
                           None, None, None, None,
                           False, False, *varargs)
      run_and_verify_unquiet_status(local, x_status)
    except:
      print("ERROR IN: Tests scheme for merge: "
          + "while verifying in '%s'" % test_case.name)
      raise

  # 9) Verify that commit fails.
  if do_commit_conflicts:
    for test_case in greater_scheme:
      try:
        local = j(wc_dir, test_case.name, 'local')

        x_status = test_case.expected_status
        if x_status is not None:
          # Same as above: never mutate the caller's expected_status.
          x_status = x_status.copy()
          x_status.wc_dir = local

        run_and_verify_commit(local, None, x_status,
                              test_case.commit_block_string,
                              local)
      except:
        print("ERROR IN: Tests scheme for merge: "
            + "while checking commit-blocking in '%s'" % test_case.name)
        raise
| wbond/subversion | subversion/tests/cmdline/svntest/actions.py | Python | apache-2.0 | 113,603 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""TFRecord sources and sinks."""
from __future__ import absolute_import
import logging
import struct
from apache_beam import coders
from apache_beam.io import filebasedsource
from apache_beam.io import fileio
from apache_beam.io.iobase import Read
from apache_beam.io.iobase import Write
from apache_beam.transforms import PTransform
import crcmod
__all__ = ['ReadFromTFRecord', 'WriteToTFRecord']
def _default_crc32c_fn(value):
  """Calculates crc32c by either snappy or crcmod based on installation."""
  if _default_crc32c_fn.fn is None:
    # Resolve the implementation lazily on first use and cache it on the
    # function object itself.
    try:
      import snappy  # pylint: disable=import-error
      _default_crc32c_fn.fn = snappy._crc32c  # pylint: disable=protected-access
    except ImportError:
      logging.warning('Couldn\'t find python-snappy so the implementation of '
                      '_TFRecordUtil._masked_crc32c is not as fast as it could '
                      'be.')
      _default_crc32c_fn.fn = crcmod.predefined.mkPredefinedCrcFun('crc-32c')
  return _default_crc32c_fn.fn(value)


# Cache slot for the resolved crc32c implementation (filled on first call).
_default_crc32c_fn.fn = None
class _TFRecordUtil(object):
  """Provides basic TFRecord encoding/decoding with consistency checks.

  On-disk layout of a single record (all integers little-endian):

    uint64 length
    uint32 masked_crc32c(length)
    byte   data[length]
    uint32 masked_crc32c(data)

  For detailed TFRecord format description see:
    https://www.tensorflow.org/versions/master/api_docs/python/python_io.html#tfrecords-format-details

  Note that masks and length are represented in LittleEndian order.
  """

  @classmethod
  def _masked_crc32c(cls, value, crc32c_fn=_default_crc32c_fn):
    """Compute a masked crc32c checksum for a value.

    Args:
      value: A string for which we compute the crc.
      crc32c_fn: A function that can compute a crc32c.
        This is a performance hook that also helps with testing. Callers are
        not expected to make use of it directly.
    Returns:
      Masked crc32c checksum.
    """
    crc = crc32c_fn(value)
    # Rotate right by 15 bits and add a constant, truncated to 32 bits --
    # the masking scheme defined by the TFRecord format.
    return (((crc >> 15) | (crc << 17)) + 0xa282ead8) & 0xffffffff

  @staticmethod
  def encoded_num_bytes(record):
    """Return the number of bytes consumed by a record in its encoded form."""
    # 16 = 8 (Length) + 4 (crc of length) + 4 (crc of data)
    return len(record) + 16

  @classmethod
  def write_record(cls, file_handle, value):
    """Encode a value as a TFRecord.

    Args:
      file_handle: The file to write to.
      value: A string content of the record.
    """
    encoded_length = struct.pack('<Q', len(value))
    # Emit the full frame in a single write: length, crc(length), data,
    # crc(data).
    file_handle.write('{}{}{}{}'.format(
        encoded_length,
        struct.pack('<I', cls._masked_crc32c(encoded_length)),  # crc of length
        value,
        struct.pack('<I', cls._masked_crc32c(value))))

  @classmethod
  def read_record(cls, file_handle):
    """Read a record from a TFRecords file.

    Args:
      file_handle: The file to read from.
    Returns:
      None if EOF is reached; the paylod of the record otherwise.
    Raises:
      ValueError: If file appears to not be a valid TFRecords file.
    """
    # First read the 12-byte header: 8-byte length + 4-byte crc of length.
    buf_length_expected = 12
    buf = file_handle.read(buf_length_expected)
    if not buf:
      return None  # EOF Reached.

    # Validate all length related payloads.
    if len(buf) != buf_length_expected:
      raise ValueError('Not a valid TFRecord. Fewer than %d bytes: %s' %
                       (buf_length_expected, buf.encode('hex')))
    length, length_mask_expected = struct.unpack('<QI', buf)
    length_mask_actual = cls._masked_crc32c(buf[:8])
    if length_mask_actual != length_mask_expected:
      raise ValueError('Not a valid TFRecord. Mismatch of length mask: %s' %
                       buf.encode('hex'))

    # Validate all data related payloads: the payload itself plus its
    # trailing 4-byte crc.
    buf_length_expected = length + 4
    buf = file_handle.read(buf_length_expected)
    if len(buf) != buf_length_expected:
      raise ValueError('Not a valid TFRecord. Fewer than %d bytes: %s' %
                       (buf_length_expected, buf.encode('hex')))
    data, data_mask_expected = struct.unpack('<%dsI' % length, buf)
    data_mask_actual = cls._masked_crc32c(data)
    if data_mask_actual != data_mask_expected:
      raise ValueError('Not a valid TFRecord. Mismatch of data mask: %s' %
                       buf.encode('hex'))

    # All validation checks passed.
    return data
class _TFRecordSource(filebasedsource.FileBasedSource):
  """A File source for reading files of TFRecords.

  For detailed TFRecords format description see:
    https://www.tensorflow.org/versions/master/api_docs/python/python_io.html#tfrecords-format-details
  """

  def __init__(self,
               file_pattern,
               coder,
               compression_type,
               validate):
    """Initialize a TFRecordSource. See ReadFromTFRecord for details."""
    # TFRecord files cannot be split at arbitrary offsets, hence
    # splittable=False.
    super(_TFRecordSource, self).__init__(
        file_pattern=file_pattern,
        compression_type=compression_type,
        splittable=False,
        validate=validate)
    self._coder = coder

  def read_records(self, file_name, offset_range_tracker):
    start = offset_range_tracker.start_position()
    if start:
      raise ValueError('Start position not 0:%s' % start)

    pos = start
    with self.open_file(file_name) as fh:
      while True:
        # Claim each record's starting offset before decoding it.
        if not offset_range_tracker.try_claim(pos):
          raise RuntimeError('Unable to claim position: %s' % pos)
        raw_record = _TFRecordUtil.read_record(fh)
        if raw_record is None:
          return  # Reached EOF
        pos += _TFRecordUtil.encoded_num_bytes(raw_record)
        yield self._coder.decode(raw_record)
class ReadFromTFRecord(PTransform):
  """Transform for reading TFRecord sources."""

  def __init__(self,
               file_pattern,
               coder=coders.BytesCoder(),
               compression_type=fileio.CompressionTypes.AUTO,
               validate=True,
               **kwargs):
    """Initialize a ReadFromTFRecord transform.

    Args:
      file_pattern: A file glob pattern to read TFRecords from.
      coder: Coder used to decode each record.
      compression_type: Used to handle compressed input files. Default value
        is CompressionTypes.AUTO, in which case the file_path's extension
        will be used to detect the compression.
      validate: Boolean flag to verify that the files exist during the
        pipeline creation time.
      **kwargs: optional args dictionary. These are passed through to parent
        constructor.

    Returns:
      A ReadFromTFRecord transform object.
    """
    super(ReadFromTFRecord, self).__init__(**kwargs)
    # Construction of the source is deferred to expand(); only remember the
    # arguments here.
    self._source_args = (file_pattern, coder, compression_type, validate)

  def expand(self, pvalue):
    source = _TFRecordSource(*self._source_args)
    return pvalue.pipeline | Read(source)
class _TFRecordSink(fileio.FileSink):
  """Sink for writing TFRecords files.

  For detailed TFRecord format description see:
    https://www.tensorflow.org/versions/master/api_docs/python/python_io.html#tfrecords-format-details
  """

  def __init__(self, file_path_prefix, coder, file_name_suffix, num_shards,
               shard_name_template, compression_type):
    """Initialize a TFRecordSink. See WriteToTFRecord for details."""
    # All framing is binary, hence the octet-stream mime type; everything
    # else is passed straight through to FileSink.
    super(_TFRecordSink, self).__init__(
        file_path_prefix=file_path_prefix,
        coder=coder,
        file_name_suffix=file_name_suffix,
        num_shards=num_shards,
        shard_name_template=shard_name_template,
        mime_type='application/octet-stream',
        compression_type=compression_type)

  def write_encoded_record(self, file_handle, value):
    # The coder has already produced the payload bytes; wrap them in the
    # TFRecord length/crc framing.
    _TFRecordUtil.write_record(file_handle, value)
class WriteToTFRecord(PTransform):
  """Transform for writing to TFRecord sinks."""

  def __init__(self,
               file_path_prefix,
               coder=coders.BytesCoder(),
               file_name_suffix='',
               num_shards=0,
               shard_name_template=fileio.DEFAULT_SHARD_NAME_TEMPLATE,
               compression_type=fileio.CompressionTypes.AUTO,
               **kwargs):
    """Initialize WriteToTFRecord transform.

    Args:
      file_path_prefix: The file path to write to. The files written will
        begin with this prefix, followed by a shard identifier (see
        num_shards), and end in a common extension, if given by
        file_name_suffix.
      coder: Coder used to encode each record.
      file_name_suffix: Suffix for the files written.
      num_shards: The number of files (shards) used for output. If not set,
        the default value will be used.
      shard_name_template: A template string containing placeholders for
        the shard number and shard count. Currently only '' and
        '-SSSSS-of-NNNNN' are patterns allowed.
        When constructing a filename for a particular shard number, the
        upper-case letters 'S' and 'N' are replaced with the 0-padded shard
        number and shard count respectively. This argument can be '' in
        which case it behaves as if num_shards was set to 1 and only one
        file will be generated. The default pattern is '-SSSSS-of-NNNNN'.
      compression_type: Used to handle compressed output files. Typical
        value is CompressionTypes.AUTO, in which case the file_path's
        extension will be used to detect the compression.
      **kwargs: Optional args dictionary. These are passed through to parent
        constructor.

    Returns:
      A WriteToTFRecord transform object.
    """
    super(WriteToTFRecord, self).__init__(**kwargs)
    # Construction of the sink is deferred to expand(); only remember the
    # arguments here.
    self._sink_args = (file_path_prefix, coder, file_name_suffix, num_shards,
                       shard_name_template, compression_type)

  def expand(self, pcoll):
    sink = _TFRecordSink(*self._sink_args)
    return pcoll | Write(sink)
| chamikaramj/incubator-beam | sdks/python/apache_beam/io/tfrecordio.py | Python | apache-2.0 | 10,445 |
package daemon
import (
"fmt"
"sync"
"time"
"github.com/docker/docker/pkg/units"
)
// State tracks the runtime status of a container: whether it is running,
// paused or restarting, together with its pid, exit code and start/finish
// timestamps. The embedded Mutex guards every field; prefer the accessor
// methods, which take the lock, over direct field access.
type State struct {
	sync.Mutex
	Running bool
	Paused bool
	Restarting bool
	Pid int
	ExitCode int
	StartedAt time.Time
	FinishedAt time.Time
	// waitChan is closed (and then replaced) on each run/stop transition
	// to wake goroutines blocked in WaitRunning / WaitStop.
	waitChan chan struct{}
}
// NewState returns a stopped State whose wait channel is armed for the
// first run/stop transition.
func NewState() *State {
	s := &State{}
	s.waitChan = make(chan struct{})
	return s
}
// String returns a human-readable description of the state.
func (s *State) String() string {
	switch {
	case s.Running && s.Paused:
		return fmt.Sprintf("Up %s (Paused)", units.HumanDuration(time.Now().UTC().Sub(s.StartedAt)))
	case s.Running && s.Restarting:
		return fmt.Sprintf("Restarting (%d) %s ago", s.ExitCode, units.HumanDuration(time.Now().UTC().Sub(s.FinishedAt)))
	case s.Running:
		return fmt.Sprintf("Up %s", units.HumanDuration(time.Now().UTC().Sub(s.StartedAt)))
	case s.FinishedAt.IsZero():
		// Never started, never finished: nothing meaningful to report.
		return ""
	default:
		return fmt.Sprintf("Exited (%d) %s ago", s.ExitCode, units.HumanDuration(time.Now().UTC().Sub(s.FinishedAt)))
	}
}
// wait blocks until waitChan is closed or timeout elapses, returning an
// error on timeout. A negative timeout blocks forever.
func wait(waitChan <-chan struct{}, timeout time.Duration) error {
	if timeout < 0 {
		<-waitChan
		return nil
	}
	select {
	case <-waitChan:
		return nil
	case <-time.After(timeout):
		return fmt.Errorf("Timed out: %v", timeout)
	}
}
// WaitRunning waits until state is running. If state is already running it
// returns immediately. If you want to wait forever you must supply a
// negative timeout.
// Returns the pid that was passed to SetRunning.
func (s *State) WaitRunning(timeout time.Duration) (int, error) {
	s.Lock()
	if s.Running {
		pid := s.Pid
		s.Unlock()
		return pid, nil
	}
	// Snapshot the channel while holding the lock: setRunning closes and
	// then replaces waitChan, so we must wait on the instance armed now.
	waitChan := s.waitChan
	s.Unlock()
	if err := wait(waitChan, timeout); err != nil {
		return -1, err
	}
	return s.GetPid(), nil
}
// WaitStop waits until state is stopped. If state is already stopped it
// returns immediately. If you want to wait forever you must supply a
// negative timeout.
// Returns the exit code that was passed to SetStopped.
func (s *State) WaitStop(timeout time.Duration) (int, error) {
	s.Lock()
	if !s.Running {
		exitCode := s.ExitCode
		s.Unlock()
		return exitCode, nil
	}
	// Snapshot the channel while holding the lock: setStopped closes and
	// then replaces waitChan, so we must wait on the instance armed now.
	waitChan := s.waitChan
	s.Unlock()
	if err := wait(waitChan, timeout); err != nil {
		return -1, err
	}
	return s.GetExitCode(), nil
}
// IsRunning reports whether the container is currently running.
func (s *State) IsRunning() bool {
	s.Lock()
	defer s.Unlock()
	return s.Running
}

// GetPid returns the pid recorded by the last SetRunning (0 once stopped).
func (s *State) GetPid() int {
	s.Lock()
	defer s.Unlock()
	return s.Pid
}

// GetExitCode returns the exit code recorded by the last stop transition.
func (s *State) GetExitCode() int {
	s.Lock()
	defer s.Unlock()
	return s.ExitCode
}
// SetRunning transitions the state to running with the given pid.
func (s *State) SetRunning(pid int) {
	s.Lock()
	defer s.Unlock()
	s.setRunning(pid)
}

// setRunning records the running state; the caller must hold the lock.
func (s *State) setRunning(pid int) {
	s.Paused = false
	s.Restarting = false
	s.Running = true
	s.Pid = pid
	s.ExitCode = 0
	s.StartedAt = time.Now().UTC()
	// Wake everyone blocked in WaitRunning, then arm a fresh channel for
	// the next transition (the eventual stop).
	close(s.waitChan)
	s.waitChan = make(chan struct{})
}
// SetStopped transitions the state to stopped with the given exit code.
func (s *State) SetStopped(exitCode int) {
	s.Lock()
	defer s.Unlock()
	s.setStopped(exitCode)
}

// setStopped records the stopped state; the caller must hold the lock.
func (s *State) setStopped(exitCode int) {
	s.Restarting = false
	s.Running = false
	s.Pid = 0
	s.ExitCode = exitCode
	s.FinishedAt = time.Now().UTC()
	// Wake everyone blocked in WaitStop, then arm a fresh channel for the
	// next transition.
	close(s.waitChan)
	s.waitChan = make(chan struct{})
}
// SetRestarting is used when docker handles the auto-restart of a container
// that is in the middle of a stop and about to be started again.
func (s *State) SetRestarting(exitCode int) {
	s.Lock()
	defer s.Unlock()
	// The container is still considered running while restarting so that
	// all the checks in docker around rm/stop/etc keep treating it as live.
	s.Running = true
	s.Restarting = true
	s.Pid = 0
	s.ExitCode = exitCode
	s.FinishedAt = time.Now().UTC()
	// Wake waiters blocked on the stop, then arm a fresh channel.
	close(s.waitChan)
	s.waitChan = make(chan struct{})
}
// IsRestarting reports whether the container is in the restarting phase.
func (s *State) IsRestarting() bool {
	s.Lock()
	defer s.Unlock()
	return s.Restarting
}

// SetPaused marks the container as paused.
func (s *State) SetPaused() {
	s.Lock()
	defer s.Unlock()
	s.Paused = true
}

// SetUnpaused clears the paused flag.
func (s *State) SetUnpaused() {
	s.Lock()
	defer s.Unlock()
	s.Paused = false
}

// IsPaused reports whether the container is paused.
func (s *State) IsPaused() bool {
	s.Lock()
	defer s.Unlock()
	return s.Paused
}
| mwhudson/docker | daemon/state.go | GO | apache-2.0 | 3,937 |
/*
* Copyright 2012-2016 bambooCORE, greenstep of copyright Chen Xin Nien
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* -----------------------------------------------------------------------
*
* author: Chen Xin Nien
* contact: chen.xin.nien@gmail.com
*
*/
package com.netsteadfast.greenstep.qcharts.action.utils;
import org.apache.commons.lang3.StringUtils;
import com.netsteadfast.greenstep.base.Constants;
import com.netsteadfast.greenstep.base.exception.ControllerException;
import com.netsteadfast.greenstep.base.model.IActionFieldsCheckUtils;
public class SelectItemFieldCheckUtils implements IActionFieldsCheckUtils {

	/**
	 * Rejects blank values and the placeholder id used by an unselected
	 * HTML select option; anything else counts as a valid selection.
	 */
	@Override
	public boolean check(String value) throws ControllerException {
		return !StringUtils.isBlank(value)
				&& !Constants.HTML_SELECT_NO_SELECT_ID.equals(value);
	}

}
| quangnguyen9x/bamboobsc_quangnv | qcharts-web/src/com/netsteadfast/greenstep/qcharts/action/utils/SelectItemFieldCheckUtils.java | Java | apache-2.0 | 1,369 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package org.mskcc.shenkers.control.track;
/**
 * Supported file formats for a genome-browser track.
 *
 * @author sol
 */
public enum FileType {
    BAM,   // binary sequence alignments (BAM)
    WIG,   // wiggle-style coverage/signal data
    GTF,   // gene transfer format annotations
    BED,   // BED interval records
    FASTA  // raw nucleotide/protein sequences
}
| shenkers/CrossBrowse | src/main/java/org/mskcc/shenkers/control/track/FileType.java | Java | apache-2.0 | 326 |
<?php
// Bootstrap include: loads the shared connection setup
// (presumably defines the DB handle used by the admin pages — TODO confirm).
require_once("../config/connect.php");
?>
| Elimean/DMXT | admin/manager_admin_classManager_Updata.php | PHP | apache-2.0 | 50 |
import React from 'react';
import { Box, WorldMap } from 'grommet';
export const SelectPlace = () => {
const [places, setPlaces] = React.useState();
const onSelectPlace = (place) => {
console.log('Selected', place);
setPlaces([{ color: 'graph-1', location: place }]);
};
return (
<Box align="center" pad="large">
<WorldMap onSelectPlace={onSelectPlace} places={places} />
</Box>
);
};
SelectPlace.storyName = 'Select place';
SelectPlace.parameters = {
chromatic: { disable: true },
};
export default {
title: 'Visualizations/WorldMap/Select place',
};
| HewlettPackard/grommet | src/js/components/WorldMap/stories/SelectPlace.js | JavaScript | apache-2.0 | 595 |
/**
* Licensed to Jasig under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Jasig licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.openregistry.core.domain;
import java.io.Serializable;
/**
 * An organizational unit (e.g. a department or division) known to the
 * registry, positioned in a hierarchy via its parent unit and campus.
 *
 * @author Scott Battaglia
 * @version $Revision$ $Date$
 * @since 1.0.0
 */
public interface OrganizationalUnit extends Serializable {

    /** @return the internal identifier of this unit. */
    Long getId();

    /** @return the {@link Type} classifying this organizational unit. */
    Type getOrganizationalUnitType();

    /** @return the locally assigned code for this unit. */
    String getLocalCode();

    /** @return the name of this unit. */
    String getName();

    /** @return the campus this unit is associated with. */
    Campus getCampus();

    /** @return the parent unit in the hierarchy (null for a root unit — TODO confirm). */
    OrganizationalUnit getParentOrganizationalUnit();

    /** @return the RBHS value for this unit (domain semantics not visible here). */
    String getRBHS();

    /** @param RBHS the new RBHS value. */
    void setRBHS(String RBHS);

    /** @return the PHI value for this unit (domain semantics not visible here). */
    String getPHI();

    /** @param PHI the new PHI value. */
    void setPHI(String PHI);
}
| sheliu/openregistry | openregistry-api/src/main/java/org/openregistry/core/domain/OrganizationalUnit.java | Java | apache-2.0 | 1,279 |
package com.github.czyzby.lml.parser.impl.attribute.list;
import com.badlogic.gdx.scenes.scene2d.ui.List;
import com.github.czyzby.lml.parser.LmlParser;
import com.github.czyzby.lml.parser.tag.LmlAttribute;
import com.github.czyzby.lml.parser.tag.LmlTag;
/** Handles the "required" attribute of list tags. Delegates to
 * {@link com.badlogic.gdx.scenes.scene2d.utils.Selection#setRequired(boolean)}.
 *
 * @author MJ */
public class RequiredLmlAttribute implements LmlAttribute<List<?>> {
    @Override
    @SuppressWarnings("unchecked")
    public Class<List<?>> getHandledType() {
        // List.class cannot be returned directly as Class<List<?>> due to generics
        // rules, so it is laundered through Object; the cast is always safe at runtime.
        return (Class<List<?>>) (Object) List.class;
    }

    @Override
    public void process(final LmlParser parser, final LmlTag tag, final List<?> actor, final String rawAttributeData) {
        final boolean required = parser.parseBoolean(rawAttributeData, actor);
        actor.getSelection().setRequired(required);
    }
}
| tommyettinger/SquidSetup | src/main/java/com/github/czyzby/lml/parser/impl/attribute/list/RequiredLmlAttribute.java | Java | apache-2.0 | 1,036 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cocoon.portal.acting;
import java.util.Map;
import org.apache.avalon.framework.configuration.ConfigurationException;
import org.apache.avalon.framework.parameters.Parameters;
import org.apache.cocoon.environment.ObjectModelHelper;
import org.apache.cocoon.environment.Redirector;
import org.apache.cocoon.environment.SourceResolver;
import org.apache.cocoon.portal.event.Event;
import org.apache.cocoon.portal.event.EventManager;
import org.apache.cocoon.portal.event.coplet.CopletJXPathEvent;
import org.apache.cocoon.portal.sitemap.Constants;
/**
 * Using this action, you can set values in a coplet.
 *
 * @version $Id$
 */
public class CopletSetDataAction
    extends AbstractPortalAction {

    /**
     * For every sitemap parameter, sends a {@link CopletJXPathEvent} that writes
     * the parameter value at the parameter-name path on the target coplet
     * instance. Parameters with null/blank values are skipped.
     *
     * @see org.apache.cocoon.acting.Action#act(org.apache.cocoon.environment.Redirector, org.apache.cocoon.environment.SourceResolver, java.util.Map, java.lang.String, org.apache.avalon.framework.parameters.Parameters)
     */
    public Map act(Redirector redirector, SourceResolver resolver, Map objectModel, String source, Parameters parameters)
    throws Exception {
        // determine coplet id: prefer the parent context, fall back to the
        // object model itself
        String copletId = null;
        Map context = (Map)objectModel.get(ObjectModelHelper.PARENT_CONTEXT);
        if (context != null) {
            copletId = (String)context.get(Constants.COPLET_ID_KEY);
        } else {
            copletId = (String)objectModel.get(Constants.COPLET_ID_KEY);
        }
        if (copletId == null) {
            throw new ConfigurationException("copletId must be passed in the object model either directly (e.g. by using ObjectModelAction) or within the parent context.");
        }

        // now traverse parameters:
        // parameter name is path
        // parameter value is value
        // if the value is null or empty, the value is not set!
        final String[] names = parameters.getNames();
        if ( names != null ) {
            final EventManager publisher = this.portalService.getEventManager();
            for(int i=0; i<names.length; i++) {
                final String path = names[i];
                final String value = parameters.getParameter(path, null );
                if ( value != null && value.trim().length() > 0 ) {
                    // one event per non-empty parameter; the event carries the
                    // coplet instance, the JXPath expression and the new value
                    final Event event = new CopletJXPathEvent(this.portalService.getProfileManager().getCopletInstance(copletId),
                                                              path,
                                                              value);
                    publisher.send(event);
                }
            }
        }
        return EMPTY_MAP;
    }
}
| apache/cocoon | blocks/cocoon-portal/cocoon-portal-sitemap/src/main/java/org/apache/cocoon/portal/acting/CopletSetDataAction.java | Java | apache-2.0 | 3,337 |
<?php
// Gateway callback endpoint: updates the delivery status of a sent message.
// For "Failed"/"Rejected" statuses the failure reason reported by the gateway
// is stored as well. Echoes '1' on success, '0' on failure (AJAX contract).
include('dao.php');

global $dbh;
$dbh->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION);

$id = $_POST['id'];
$status = $_POST['status'];

// Build the statement once; only failure-type statuses carry a reason column.
if ($status == "Failed" || $status == "Rejected") {
    $failureReason = $_POST['failureReason'];
    $sql = $dbh->prepare("UPDATE tbl_totalsentmessages SET sent_status = :status, failure_reason=:reason WHERE sent_messageid = :id");
    $sql->bindParam(":reason", $failureReason);
} else {
    $sql = $dbh->prepare("UPDATE tbl_totalsentmessages SET sent_status = :status WHERE sent_messageid = :id");
}
$sql->bindParam(":status", $status);
$sql->bindParam(":id", $id);

$data = '';
try {
    $data = $sql->execute();
} catch(PDOException $e) {
    // NOTE(review): echoing the raw driver message leaks schema details;
    // kept as-is for backward compatibility with the existing caller.
    echo $e->getMessage();
}

if ($data) {
    echo '1';
}
else {
    echo '0';
}
?> | chebryan/Qsoft-Admin | core/callback_url.php | PHP | apache-2.0 | 1,383 |
/*!
* ${copyright}
*/
// Provides the Design Time Metadata for the sap.m.Slider control
sap.ui.define([],
	function () {
	"use strict";

	// Design-time descriptor consumed by the UI5 adaptation tooling.
	return {
		// i18n keys for the control's singular/plural display name
		name: {
			singular: "SLIDER_NAME",
			plural: "SLIDER_NAME_PLURAL"
		},
		// Palette placement and icon in the design-time tool
		palette: {
			group: "INPUT",
			icons: {
				svg: "sap/m/designtime/Slider.icon.svg"
			}
		},
		// Supported design-time actions, implemented as change types
		actions: {
			remove: {
				changeType: "hideControl"
			},
			reveal: {
				changeType: "unhideControl"
			}
		},
		aggregations: {
			// maps the scale aggregation onto the tickmarks DOM element
			scale: {
				domRef: ":sap-domref > .sapMSliderTickmarks"
			},
			// custom tooltips are not adaptable at design time
			customTooltips: {
				ignore: true
			}
		},
		// XML fragment used when the tool creates a new Slider
		templates: {
			create: "sap/m/designtime/Slider.create.fragment.xml"
		}
	};
}, /* bExport= */ true);
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using Microsoft.Scripting;
using Microsoft.Scripting.Runtime;
using Microsoft.Scripting.Utils;
using IronPython.Runtime;
using IronPython.Runtime.Operations;
using IronPython.Runtime.Types;
using System.Numerics;
[assembly: PythonModule("math", typeof(IronPython.Modules.PythonMath))]
namespace IronPython.Modules {
    /// <summary>
    /// Implementation of Python's "math" module. Mirrors CPython semantics,
    /// including its domain-error handling (ValueError/OverflowError raised via
    /// the Check helpers).
    /// </summary>
    public static partial class PythonMath {
        public const string __doc__ = "Provides common mathematical functions.";

        public const double pi = Math.PI;
        public const double e = Math.E;

        private const double degreesToRadians = Math.PI / 180.0;
        // IEEE-754 double exponent bias minus one (0x3FF - 1). Decomposing with
        // this bias keeps the frexp mantissa in [0.5, 1).
        private const int Bias = 0x3FE;

        /// <summary>Converts an angle from radians to degrees.</summary>
        public static double degrees(double radians) {
            return Check(radians, radians / degreesToRadians);
        }

        /// <summary>Converts an angle from degrees to radians.</summary>
        public static double radians(double degrees) {
            return Check(degrees, degrees * degreesToRadians);
        }

        /// <summary>C-style floating-point remainder of v/w (result keeps v's sign).</summary>
        public static double fmod(double v, double w) {
            return Check(v, w, v % w);
        }

        private static double sum(List<double> partials) {
            // sum the partials the same way as CPython does
            var n = partials.Count;
            var hi = 0.0;
            if (n == 0) return hi;
            var lo = 0.0;
            // sum exact
            while (n > 0) {
                var x = hi;
                var y = partials[--n];
                hi = x + y;
                lo = y - (hi - x);
                if (lo != 0.0)
                    break;
            }
            if (n == 0) return hi;
            // half-even rounding
            if (lo < 0.0 && partials[n - 1] < 0.0 || lo > 0.0 && partials[n - 1] > 0.0) {
                var y = lo * 2.0;
                var x = hi + y;
                var yr = x - hi;
                if (y == yr)
                    hi = x;
            }
            return hi;
        }

        /// <summary>Accurate floating-point summation of an iterable of numbers.</summary>
        public static double fsum(IEnumerable e) {
            // msum from https://code.activestate.com/recipes/393090/
            var partials = new List<double>();
            foreach (var v in e.Cast<object>().Select(o => Converter.ConvertToDouble(o))) {
                var x = v;
                var i = 0;
                for (var j = 0; j < partials.Count; j++) {
                    var y = partials[j];
                    if (Math.Abs(x) < Math.Abs(y)) {
                        var t = x;
                        x = y;
                        y = t;
                    }
                    var hi = x + y;
                    var lo = y - (hi - x);
                    if (lo != 0) {
                        partials[i++] = lo;
                    }
                    x = hi;
                }
                partials.RemoveRange(i, partials.Count - i);
                partials.Add(x);
            }
            return sum(partials);
        }

        /// <summary>Decomposes v into (mantissa, exponent) with mantissa in [0.5, 1).</summary>
        public static PythonTuple frexp(double v) {
            if (Double.IsInfinity(v) || Double.IsNaN(v)) {
                return PythonTuple.MakeTuple(v, 0.0);
            }
            int exponent = 0;
            double mantissa = 0;

            if (v == 0) {
                mantissa = 0;
                exponent = 0;
            } else {
                byte[] vb = BitConverter.GetBytes(v);
                if (BitConverter.IsLittleEndian) {
                    DecomposeLe(vb, out mantissa, out exponent);
                } else {
                    throw new NotImplementedException();
                }
            }

            return PythonTuple.MakeTuple(mantissa, exponent);
        }

        /// <summary>Splits v into (fractional part, integral part), both keeping v's sign.</summary>
        public static PythonTuple modf(double v) {
            if (double.IsInfinity(v)) {
                return PythonTuple.MakeTuple(0.0, v);
            }
            double w = v % 1.0;
            v -= w;
            return PythonTuple.MakeTuple(w, v);
        }

        /// <summary>Returns v * 2**w.</summary>
        public static double ldexp(double v, BigInteger w) {
            if (v == 0.0 || double.IsInfinity(v)) {
                return v;
            }
            return Check(v, v * Math.Pow(2.0, (double)w));
        }

        /// <summary>Euclidean norm sqrt(v*v + w*w), infinity-dominant like CPython.</summary>
        public static double hypot(double v, double w) {
            if (double.IsInfinity(v) || double.IsInfinity(w)) {
                return double.PositiveInfinity;
            }
            return Check(v, w, MathUtils.Hypot(v, w));
        }

        /// <summary>v raised to exp with CPython's special-case handling for 1, 0, NaN and infinities.</summary>
        public static double pow(double v, double exp) {
            if (v == 1.0 || exp == 0.0) {
                return 1.0;
            } else if (double.IsNaN(v) || double.IsNaN(exp)) {
                return double.NaN;
            } else if (v == 0.0) {
                if (exp > 0.0) {
                    return 0.0;
                }
                throw PythonOps.ValueError("math domain error");
            } else if (double.IsPositiveInfinity(exp)) {
                if (v > 1.0 || v < -1.0) {
                    return double.PositiveInfinity;
                } else if (v == -1.0) {
                    return 1.0;
                } else {
                    return 0.0;
                }
            } else if (double.IsNegativeInfinity(exp)) {
                if (v > 1.0 || v < -1.0) {
                    return 0.0;
                } else if (v == -1.0) {
                    return 1.0;
                } else {
                    return double.PositiveInfinity;
                }
            }
            return Check(v, exp, Math.Pow(v, exp));
        }

        /// <summary>Natural logarithm; raises ValueError for non-positive input.</summary>
        public static double log(double v0) {
            if (v0 <= 0.0) {
                throw PythonOps.ValueError("math domain error");
            }
            return Check(v0, Math.Log(v0));
        }

        /// <summary>Logarithm of v0 in base v1.</summary>
        public static double log(double v0, double v1) {
            if (v0 <= 0.0 || v1 == 0.0) {
                throw PythonOps.ValueError("math domain error");
            } else if (v1 == 1.0) {
                throw PythonOps.ZeroDivisionError("float division");
            } else if (v1 == Double.PositiveInfinity) {
                return 0.0;
            }
            return Check(Math.Log(v0, v1));
        }

        /// <summary>Natural logarithm of an arbitrary-precision integer.</summary>
        public static double log(BigInteger value) {
            if (value.Sign <= 0) {
                throw PythonOps.ValueError("math domain error");
            }
            return value.Log();
        }

        public static double log(object value) {
            // CPython tries float first, then double, so we need
            // an explicit overload which properly matches the order here
            double val;
            if (Converter.TryConvertToDouble(value, out val)) {
                return log(val);
            } else {
                return log(Converter.ConvertToBigInteger(value));
            }
        }

        /// <summary>Logarithm of a BigInteger in an arbitrary base.</summary>
        public static double log(BigInteger value, double newBase) {
            if (newBase <= 0.0 || value <= 0) {
                throw PythonOps.ValueError("math domain error");
            } else if (newBase == 1.0) {
                throw PythonOps.ZeroDivisionError("float division");
            } else if (newBase == Double.PositiveInfinity) {
                return 0.0;
            }
            return Check(value.Log(newBase));
        }

        public static double log(object value, double newBase) {
            // CPython tries float first, then double, so we need
            // an explicit overload which properly matches the order here
            double val;
            if (Converter.TryConvertToDouble(value, out val)) {
                return log(val, newBase);
            } else {
                return log(Converter.ConvertToBigInteger(value), newBase);
            }
        }

        /// <summary>Base-10 logarithm; raises ValueError for non-positive input.</summary>
        public static double log10(double v0) {
            if (v0 <= 0.0) {
                throw PythonOps.ValueError("math domain error");
            }
            return Check(v0, Math.Log10(v0));
        }

        /// <summary>Base-10 logarithm of an arbitrary-precision integer.</summary>
        public static double log10(BigInteger value) {
            if (value.Sign <= 0) {
                throw PythonOps.ValueError("math domain error");
            }
            return value.Log10();
        }

        public static double log10(object value) {
            // CPython tries float first, then double, so we need
            // an explicit overload which properly matches the order here
            double val;
            if (Converter.TryConvertToDouble(value, out val)) {
                return log10(val);
            } else {
                return log10(Converter.ConvertToBigInteger(value));
            }
        }

        /// <summary>log(1 + v0), accurate for v0 close to zero.</summary>
        public static double log1p(double v0) {
            // Calculate log(1.0 + v0) using William Kahan's algorithm for numerical precision
            if (double.IsPositiveInfinity(v0)) {
                return double.PositiveInfinity;
            }

            double v1 = v0 + 1.0;

            // Linear approximation for very small v0
            if (v1 == 1.0) {
                return v0;
            }

            // Apply correction factor
            return log(v1) * v0 / (v1 - 1.0);
        }

        public static double log1p(BigInteger value) {
            return log(value + BigInteger.One);
        }

        public static double log1p(object value) {
            // CPython tries float first, then double, so we need
            // an explicit overload which properly matches the order here
            double val;
            if (Converter.TryConvertToDouble(value, out val)) {
                return log1p(val);
            } else {
                return log1p(Converter.ConvertToBigInteger(value));
            }
        }

        /// <summary>exp(v0) - 1, computed via a tanh identity for accuracy near zero.</summary>
        public static double expm1(double v0) {
            return Check(v0, Math.Tanh(v0 / 2.0) * (Math.Exp(v0) + 1.0));
        }

        /// <summary>Inverse hyperbolic sine.</summary>
        public static double asinh(double v0) {
            if (v0 == 0.0 || double.IsInfinity(v0)) {
                return v0;
            }

            // rewrote ln(v0 + sqrt(v0**2 + 1)) for precision
            if (Math.Abs(v0) > 1.0) {
                return Math.Sign(v0)*(Math.Log(Math.Abs(v0)) + Math.Log(1.0 + MathUtils.Hypot(1.0, 1.0 / v0)));
            } else {
                return Math.Log(v0 + MathUtils.Hypot(1.0, v0));
            }
        }

        public static double asinh(object value) {
            // CPython tries float first, then double, so we need
            // an explicit overload which properly matches the order here
            double val;
            if (Converter.TryConvertToDouble(value, out val)) {
                return asinh(val);
            } else {
                return asinh(Converter.ConvertToBigInteger(value));
            }
        }

        /// <summary>Inverse hyperbolic cosine; domain is [1, +inf).</summary>
        public static double acosh(double v0) {
            if (v0 < 1.0) {
                throw PythonOps.ValueError("math domain error");
            } else if (double.IsPositiveInfinity(v0)) {
                return double.PositiveInfinity;
            }

            // rewrote ln(v0 + sqrt(v0**2 - 1)) for precision
            double c = Math.Sqrt(v0 + 1.0);
            return Math.Log(c) + Math.Log(v0 / c + Math.Sqrt(v0 - 1.0));
        }

        public static double acosh(object value) {
            // CPython tries float first, then double, so we need
            // an explicit overload which properly matches the order here
            double val;
            if (Converter.TryConvertToDouble(value, out val)) {
                return acosh(val);
            } else {
                return acosh(Converter.ConvertToBigInteger(value));
            }
        }

        /// <summary>Inverse hyperbolic tangent; domain is (-1, 1).</summary>
        public static double atanh(double v0) {
            if (v0 >= 1.0 || v0 <= -1.0) {
                throw PythonOps.ValueError("math domain error");
            } else if (v0 == 0.0) {
                // preserve +/-0.0
                return v0;
            }

            return Math.Log((1.0 + v0) / (1.0 - v0)) * 0.5;
        }

        public static double atanh(BigInteger value) {
            if (value == 0) {
                return 0;
            } else {
                throw PythonOps.ValueError("math domain error");
            }
        }

        public static double atanh(object value) {
            // CPython tries float first, then double, so we need
            // an explicit overload which properly matches the order here
            double val;
            if (Converter.TryConvertToDouble(value, out val)) {
                return atanh(val);
            } else {
                return atanh(Converter.ConvertToBigInteger(value));
            }
        }

        /// <summary>Two-argument arctangent with CPython's NaN/infinity special cases.</summary>
        public static double atan2(double v0, double v1) {
            if (double.IsNaN(v0) || double.IsNaN(v1)) {
                return double.NaN;
            } else if (double.IsInfinity(v0)) {
                if (double.IsPositiveInfinity(v1)) {
                    return pi * 0.25 * Math.Sign(v0);
                } else if (double.IsNegativeInfinity(v1)) {
                    return pi * 0.75 * Math.Sign(v0);
                } else {
                    return pi * 0.5 * Math.Sign(v0);
                }
            } else if (double.IsInfinity(v1)) {
                return v1 > 0.0 ? 0.0 : pi * DoubleOps.Sign(v0);
            }
            return Math.Atan2(v0, v1);
        }

        /// <summary>
        /// Error function on real values
        /// </summary>
        public static double erf(double v0) {
            return MathUtils.Erf(v0);
        }

        /// <summary>
        /// Complementary error function on real values: erfc(x) = 1 - erf(x)
        /// </summary>
        public static double erfc(double v0) {
            return MathUtils.ErfComplement(v0);
        }

        /// <summary>Factorial of an integral double; returns int when it fits, else BigInteger.</summary>
        public static object factorial(double v0) {
            if (v0 % 1.0 != 0.0) {
                throw PythonOps.ValueError("factorial() only accepts integral values");
            }
            if (v0 < 0.0) {
                throw PythonOps.ValueError("factorial() not defined for negative values");
            }

            BigInteger val = 1;
            for (BigInteger mul = (BigInteger)v0; mul > BigInteger.One; mul -= BigInteger.One) {
                val *= mul;
            }

            if (val > SysModule.maxint) {
                return val;
            }
            return (int)val;
        }

        /// <summary>Factorial of a BigInteger; returns int when it fits, else BigInteger.</summary>
        public static object factorial(BigInteger value) {
            if (value < 0) {
                throw PythonOps.ValueError("factorial() not defined for negative values");
            }

            BigInteger val = 1;
            for (BigInteger mul = value; mul > BigInteger.One; mul -= BigInteger.One) {
                val *= mul;
            }

            if (val > SysModule.maxint) {
                return val;
            }
            return (int)val;
        }

        public static object factorial(object value) {
            // CPython tries float first, then double, so we need
            // an explicit overload which properly matches the order here
            double val;
            if (Converter.TryConvertToDouble(value, out val)) {
                return factorial(val);
            } else {
                return factorial(Converter.ConvertToBigInteger(value));
            }
        }

        /// <summary>
        /// Gamma function on real values
        /// </summary>
        public static double gamma(double v0) {
            return Check(v0, MathUtils.Gamma(v0));
        }

        /// <summary>
        /// Natural log of absolute value of Gamma function
        /// </summary>
        public static double lgamma(double v0) {
            return Check(v0, MathUtils.LogGamma(v0));
        }

        /// <summary>Truncates by delegating to the object's __trunc__ method.</summary>
        public static object trunc(CodeContext/*!*/ context, object value) {
            object func;
            if (PythonOps.TryGetBoundAttr(value, "__trunc__", out func)) {
                return PythonOps.CallWithContext(context, func);
            } else {
                throw PythonOps.AttributeError("__trunc__");
            }
        }

        /// <summary>True when v0 is positive or negative infinity.</summary>
        public static bool isinf(double v0) {
            return double.IsInfinity(v0);
        }

        public static bool isinf(BigInteger value) {
            // integers are always finite
            return false;
        }

        public static bool isinf(object value) {
            // CPython tries float first, then double, so we need
            // an explicit overload which properly matches the order here
            double val;
            if (Converter.TryConvertToDouble(value, out val)) {
                return isinf(val);
            }
            return false;
        }

        /// <summary>True when v0 is NaN.</summary>
        public static bool isnan(double v0) {
            return double.IsNaN(v0);
        }

        public static bool isnan(BigInteger value) {
            // integers are never NaN
            return false;
        }

        public static bool isnan(object value) {
            // CPython tries float first, then double, so we need
            // an explicit overload which properly matches the order here
            double val;
            if (Converter.TryConvertToDouble(value, out val)) {
                return isnan(val);
            }
            return false;
        }

        /// <summary>Returns x with the sign of y.</summary>
        public static double copysign(double x, double y) {
            return DoubleOps.CopySign(x, y);
        }

        public static double copysign(object x, object y) {
            double val, sign;
            if (!Converter.TryConvertToDouble(x, out val) ||
                !Converter.TryConvertToDouble(y, out sign)) {
                throw PythonOps.TypeError("TypeError: a float is required");
            }
            return DoubleOps.CopySign(val, sign);
        }

        #region Private Implementation Details

        // Overwrites the biased exponent field of a little-endian double image.
        private static void SetExponentLe(byte[] v, int exp) {
            exp += Bias;
            ushort oldExp = LdExponentLe(v);
            ushort newExp = (ushort)(oldExp & 0x800f | (exp << 4));
            StExponentLe(v, newExp);
        }

        // Unbiased exponent of a little-endian double image.
        private static int IntExponentLe(byte[] v) {
            ushort exp = LdExponentLe(v);
            return ((int)((exp & 0x7FF0) >> 4) - Bias);
        }

        // Raw sign+exponent word (top 16 bits) of a little-endian double image.
        private static ushort LdExponentLe(byte[] v) {
            return (ushort)(v[6] | ((ushort)v[7] << 8));
        }

        // 52-bit mantissa of a little-endian double image.
        private static long LdMantissaLe(byte[] v) {
            // Low 32 bits of the mantissa.
            int i1 = (v[0] | (v[1] << 8) | (v[2] << 16) | (v[3] << 24));
            // High 20 bits (the top nibble of v[6] belongs to the exponent).
            int i2 = (v[4] | (v[5] << 8) | ((v[6] & 0xF) << 16));

            // Combine in 64-bit arithmetic. The previous code evaluated
            // "i1 | (i2 << 32)" on int operands; C# masks int shift counts to
            // 5 bits, so (i2 << 32) == i2 and the high bits collapsed into the
            // low word. That was harmless for the existing zero/non-zero use in
            // IsDenormalizedLe, but wrong as an actual mantissa value.
            return ((long)i2 << 32) | (uint)i1;
        }

        // Stores a new sign+exponent word into a little-endian double image.
        private static void StExponentLe(byte[] v, ushort e) {
            v[6] = (byte)e;
            v[7] = (byte)(e >> 8);
        }

        // True when the image encodes a subnormal (zero exponent, non-zero mantissa).
        private static bool IsDenormalizedLe(byte[] v) {
            ushort exp = LdExponentLe(v);
            long man = LdMantissaLe(v);
            return ((exp & 0x7FF0) == 0 && (man != 0));
        }

        // frexp core: splits a little-endian double image into mantissa/exponent,
        // first rescaling subnormals so the exponent field is meaningful.
        private static void DecomposeLe(byte[] v, out double m, out int e) {
            if (IsDenormalizedLe(v)) {
                m = BitConverter.ToDouble(v, 0);
                m *= Math.Pow(2.0, 1022);
                v = BitConverter.GetBytes(m);
                e = IntExponentLe(v) - 1022;
            } else {
                e = IntExponentLe(v);
            }
            SetExponentLe(v, 0);
            m = BitConverter.ToDouble(v, 0);
        }

        // Check helpers funnel results through CPython-compatible overflow/domain
        // error handling.
        private static double Check(double v) {
            return PythonOps.CheckMath(v);
        }

        private static double Check(double input, double output) {
            return PythonOps.CheckMath(input, output);
        }

        private static double Check(double in0, double in1, double output) {
            return PythonOps.CheckMath(in0, in1, output);
        }

        #endregion
    }
}
| slozier/ironpython2 | Src/IronPython.Modules/math.cs | C# | apache-2.0 | 19,692 |
package org.jrivets.transaction;
/**
 * An action that can be executed within a {@link SimpleTransaction}
 * context and rolled back if a later action fails.
 *
 * @author Dmitry Spasibenko
 *
 */
public interface Action {

    /**
     * Executes the action itself. In case of failure it should throw an
     * exception describing the cause.
     *
     * @throws Throwable
     *             describes the failure cause
     */
    void doAction() throws Throwable;

    /**
     * Rolls back the action executed by <tt>doAction()</tt>. It will be invoked
     * ONLY if the action method <tt>doAction()</tt> for this object has been
     * executed successfully (did not throw an exception). The cause of calling
     * this method can be one of the following: an action after this one
     * failed, or the transaction was cancelled explicitly (the
     * <tt>SimpleTransaction.cancel()</tt> method was called).
     */
    void rollbackAction();
}
| obattalov/jrivets-common | src/main/java/org/jrivets/transaction/Action.java | Java | apache-2.0 | 891 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.security.examples.pages;
import org.apache.wicket.IPageMap;
import org.apache.wicket.PageParameters;
import org.apache.wicket.markup.html.WebPage;
import org.apache.wicket.model.IModel;
/**
 * Base page for all pages that do not require a login.
 *
 * @author marrink
 *
 */
public class MyUnSecurePage extends WebPage
{
	private static final long serialVersionUID = 1L;

	/**
	 * Constructs a page without a model.
	 */
	public MyUnSecurePage()
	{
	}

	/**
	 * @param model
	 *            the model backing this page
	 */
	public MyUnSecurePage(IModel< ? > model)
	{
		super(model);
	}

	/**
	 * @param pageMap
	 *            the pagemap to place this page in
	 */
	public MyUnSecurePage(IPageMap pageMap)
	{
		super(pageMap);
	}

	/**
	 * @param parameters
	 *            page parameters (e.g. from a bookmarkable link)
	 */
	public MyUnSecurePage(PageParameters parameters)
	{
		super(parameters);
	}

	/**
	 * @param pageMap
	 *            the pagemap to place this page in
	 * @param model
	 *            the model backing this page
	 */
	public MyUnSecurePage(IPageMap pageMap, IModel< ? > model)
	{
		super(pageMap, model);
	}
}
| duesenklipper/wicket-security-1.4 | examples/all_in_one/src/main/java/org/apache/wicket/security/examples/pages/MyUnSecurePage.java | Java | apache-2.0 | 1,765 |
/*
* Copyright 2019, EnMasse authors.
* License: Apache License 2.0 (see the file LICENSE or http://apache.org/licenses/LICENSE-2.0.html).
*/
package io.enmasse.admin.model;
import io.fabric8.kubernetes.api.model.HasMetadata;
import java.util.List;
import java.util.Map;
/**
 * A plan describing limits and presentation metadata for an address space
 * of a given type.
 */
public interface AddressSpacePlan extends HasMetadata {
    /** @return per-resource limit values, keyed by resource name. */
    Map<String, Double> getResourceLimits();
    /** @return names of the address plans allowed under this plan. */
    List<String> getAddressPlans();
    /** @return a short human-readable description of the plan. */
    String getShortDescription();
    /** @return the display name shown in UIs. */
    String getDisplayName();
    /** @return ordering hint for display (lower first — TODO confirm). */
    int getDisplayOrder();
    /** @return the address space type this plan applies to. */
    String getAddressSpaceType();
    /** @return name of the referenced infrastructure config. */
    String getInfraConfigRef();
}
| EnMasseProject/enmasse | api-model/src/main/java/io/enmasse/admin/model/AddressSpacePlan.java | Java | apache-2.0 | 572 |
################################################################################
# Copyright (c) 2015-2019 Skymind, Inc.
#
# This program and the accompanying materials are made available under the
# terms of the Apache License, Version 2.0 which is available at
# https://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# SPDX-License-Identifier: Apache-2.0
################################################################################
from .progressbar import ProgressBar
import requests
import math
import os
import hashlib
def _local_copy_is_valid(file_name, file_size):
    """Return True if ``file_name`` already holds a valid copy of the download.

    A copy is valid when it exists, its size matches ``file_size`` and, if a
    ``<file_name>.sha1`` companion file is present, its SHA-1 digest matches
    the expected hex digest. Corrupt copies are deleted so the caller can
    re-download.
    """
    if not os.path.isfile(file_name):
        return False
    if os.path.getsize(file_name) != file_size:
        print("File corrupt. Downloading again.")
        os.remove(file_name)
        return False
    sha1_file = file_name + '.sha1'
    if not os.path.isfile(sha1_file):
        return True
    print('sha1 found')
    with open(sha1_file) as f:
        expected_sha1 = f.read().strip()
    sha1 = hashlib.sha1()
    # Hash in binary mode; text mode would fail (or corrupt the digest) on
    # arbitrary binary payloads under Python 3.
    with open(file_name, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            sha1.update(chunk)
    # Compare hex digests. The previous code compared the expected string to
    # the hashlib object itself, which is never equal, so every verified file
    # was treated as corrupt and re-downloaded.
    if expected_sha1 == sha1.hexdigest():
        return True
    print("File corrupt. Downloading again.")
    os.remove(file_name)
    return False


def download(url, file_name):
    """Download ``url`` to ``file_name`` with a progress bar.

    Skips the download when an existing local copy passes the size (and
    optional SHA-1) check. Returns True in both cases.
    """
    r = requests.get(url, stream=True)
    file_size = int(r.headers['Content-length'])

    if _local_copy_is_valid(file_name, file_size):
        print("File already exists - " + file_name)
        return True

    # Human-readable size for the banner (integer division keeps py2/py3
    # output consistent); guard log() against a zero-length payload.
    factor = int(math.floor(math.log(file_size) / math.log(1024))) if file_size > 0 else 0
    units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
    display_file_size = str(file_size // 1024 ** factor) + units[factor]
    print("Source: " + url)
    print("Destination " + file_name)
    print("Size: " + display_file_size)

    block_sz = 8192
    pbar = ProgressBar(file_size)
    with open(file_name, 'wb') as f:
        for chunk in r.iter_content(chunk_size=block_sz):
            if not chunk:
                continue
            f.write(chunk)
            pbar.update(len(chunk))
    return True
| RobAltena/deeplearning4j | pydl4j/pydl4j/downloader.py | Python | apache-2.0 | 3,179 |
package org.carlspring.strongbox.xml.parsers;
import org.carlspring.strongbox.url.ClasspathURLStreamHandler;
import org.carlspring.strongbox.url.ClasspathURLStreamHandlerFactory;
import org.carlspring.strongbox.xml.CustomTagService;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;
import java.io.*;
import java.net.URL;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.concurrent.locks.ReentrantLock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author mtodorov
*/
public class GenericParser<T>
{
public final static boolean IS_OUTPUT_FORMATTED = true;
private static final Logger logger = LoggerFactory.getLogger(GenericParser.class);
private ReentrantLock lock = new ReentrantLock();
private Set<Class> classes = new LinkedHashSet<>();
private JAXBContext context;
static
{
final ClasspathURLStreamHandler handler = new ClasspathURLStreamHandler(ClassLoader.getSystemClassLoader());
ClasspathURLStreamHandlerFactory factory = new ClasspathURLStreamHandlerFactory("classpath", handler);
try
{
URL.setURLStreamHandlerFactory(factory);
}
catch (Error e)
{
// You can safely disregard this, as a second attempt to register a an already
// registered URLStreamHandlerFactory will throw an error. Since there's no
// apparent way to check if it's registered, just catch and ignore the error.
}
}
public GenericParser()
{
this.classes.addAll(CustomTagService.getInstance().getImplementations());
}
public GenericParser(boolean useServiceLoader)
{
if (useServiceLoader)
{
this.classes.addAll(CustomTagService.getInstance().getImplementations());
}
}
public GenericParser(boolean useServiceLoader, Class... classes)
{
Collections.addAll(this.classes, classes);
if (useServiceLoader)
{
this.classes.addAll(CustomTagService.getInstance().getImplementations());
}
}
public GenericParser(Class... classes)
{
Collections.addAll(this.classes, classes);
this.classes.addAll(CustomTagService.getInstance().getImplementations());
}
public T parse(File file)
throws JAXBException, IOException
{
T object = null;
try (FileInputStream is = new FileInputStream(file))
{
object = parse(is);
}
return object;
}
public T parse(URL url)
throws IOException, JAXBException
{
try (InputStream is = url.openStream())
{
return parse(is);
}
}
public T parse(InputStream is)
throws JAXBException
{
T object = null;
try
{
lock.lock();
Unmarshaller unmarshaller = getContext().createUnmarshaller();
//noinspection unchecked
object = (T) unmarshaller.unmarshal(is);
}
finally
{
lock.unlock();
}
return object;
}
public void store(T object,
String path)
throws JAXBException, IOException
{
store(object, new File(path).getAbsoluteFile());
}
public void store(T object,
File file)
throws JAXBException, IOException
{
try (FileOutputStream os = new FileOutputStream(file))
{
store(object, os);
}
}
public void store(T object,
OutputStream os)
throws JAXBException
{
try
{
lock.lock();
JAXBContext context = getContext();
Marshaller marshaller = context.createMarshaller();
marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, IS_OUTPUT_FORMATTED);
marshaller.marshal(object, os);
}
finally
{
lock.unlock();
}
}
/**
* Serialize #object to String using JAXB marshaller.
*
* @param object the object to be serialized
* @return String representation of object
*/
public String serialize(T object)
throws JAXBException
{
StringWriter writer = new StringWriter();
try
{
lock.lock();
JAXBContext context = getContext();
Marshaller marshaller = context.createMarshaller();
marshaller.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, IS_OUTPUT_FORMATTED);
marshaller.marshal(object, writer);
return writer.getBuffer().toString();
}
finally
{
lock.unlock();
}
}
@SuppressWarnings("unchecked")
public T deserialize(String input)
throws JAXBException
{
try
{
lock.lock();
JAXBContext context = getContext();
Unmarshaller m = context.createUnmarshaller();
return (T) m.unmarshal(new StringReader(input));
}
finally
{
lock.unlock();
}
}
    /**
     * Replaces the parser's JAXB context with one built for the given class,
     * discarding any previously cached context.
     *
     * @param classType the root class the new context should cover
     * @throws JAXBException if the context cannot be created
     */
    public void setContext(Class<?> classType)
            throws JAXBException
    {
        context = JAXBContext.newInstance(classType);
    }
public JAXBContext getContext()
throws JAXBException
{
if (context == null)
{
try
{
context = JAXBContext.newInstance(classes.toArray(new Class[classes.size()]));
}
catch (Exception e)
{
logger.error(e.getMessage(), e);
return null;
}
}
return context;
}
}
| AlexOreshkevich/strongbox | strongbox-configuration/src/main/java/org/carlspring/strongbox/xml/parsers/GenericParser.java | Java | apache-2.0 | 5,887 |
var zkUtil = require('../util');
var zkConstants = require('../constants');
var ZK = require('zookeeper').ZooKeeper;
/**
 * @constructor
 * Encapsulates the distributed-lock algorithm so it is not exposed on the
 * public client API.
 * @param {ZkClient} client client performing the locking.
 * @param {String} node name of the lock.
 * @param {Function} callback invoked when the lock is acquired or on error;
 *     expects (error).
 */
function LockAlgorithm(client, node, callback) {
  this.client = client;
  this.node = node;
  this.callback = callback;
}
/**
 * Given a sorted list of child node names, finds the entry immediately
 * preceding myPath.
 * @param {Array} children sorted list of child node names.
 * @param {String} myPath child name to locate.
 * @return {String} the preceding child name, or null when myPath is first in
 *     the list (or absent).
 */
LockAlgorithm.prototype.pathBeforeMe = function(children, myPath) {
  var index;
  for (index = 1; index < children.length; index++) {
    if (children[index] === myPath) {
      return children[index - 1];
    }
  }
  return null;
};
/**
 * Checks for the presence of path; if it doesn't exist, it gets created
 * (with a placeholder 'lock node' value).
 * @param {String} path node to ensure existence of.
 * @param {Function} callback expects (error, pathName).
 */
LockAlgorithm.prototype.ensureNode = function(path, callback) {
  var self = this;
  this.client.createPaths(path, 'lock node', 0, function(err, pathCreated) {
    if (err) {
      callback(err);
      return;
    }
    self.client.options.log.tracef('successful parent node creation: ${path}', {'path': pathCreated});
    // assert path === pathCreated
    callback(null, pathCreated);
  });
};
/**
 * Creates a sequential, ephemeral child node under path. Its value records
 * the transaction id and creation timestamp as a JSON pair.
 * @param {String} path ephemeral child node (specified by path).
 * @param {String} txnId The transaction ID.
 * @param {Function} callback expects (error, pathName).
 */
LockAlgorithm.prototype.createChild = function(path, txnId, callback) {
  var that = this;
  var flags = ZK.ZOO_SEQUENCE | ZK.ZOO_EPHEMERAL;
  var lockValue = JSON.stringify([txnId, Date.now()]);
  that.client.create(path, lockValue, flags, function(err, pathCreated) {
    if (err) {
      that.client.options.log.error('node creation error', {err: err, pathCreated: pathCreated});
      callback(err);
      return;
    }
    // assert pathCreated === path.
    callback(null, pathCreated);
  });
};
/**
 * Fetches the children of path, sorted ascending by their numeric sequence
 * suffix. Errors if there are no children: this method is always called after
 * the lock child has been created, so at least one child must exist.
 * @param {String} path the parent of the children.
 * @param {Function} callback expects (error, sorted list of children). the
 *     children are not full paths, but names only.
 */
LockAlgorithm.prototype.getSortedChildren = function(path, callback) {
  // Each child name is formatted like 'lock-00000000': strip the prefix and
  // compare the numeric suffix. (String.prototype.substr is deprecated;
  // slice is the equivalent for a non-negative start index.)
  function bySequenceNumber(a, b) {
    return parseInt(a.slice(zkConstants.LOCK_PREFIX.length), 10) -
        parseInt(b.slice(zkConstants.LOCK_PREFIX.length), 10);
  }
  // false because we don't want to watch.
  this.client._getChildren(path, false, '', function(err, children) {
    if (err) {
      callback(err);
      return;
    }
    if (children.length < 1) {
      // there should *always* be children since this method always gets called
      // after the lock node is created.
      callback(new Error('Could not create lock node for ' + path), null);
      return;
    }
    children.sort(bySequenceNumber);
    callback(null, children);
  });
};
/**
 * Watches watchPath for deletion. parentPath is roughly equal to the name of
 * the lock; lockPath is the child node name for the lock to be acquired
 * (e.g. '/this_lock/-lock000000121'); watchPath is the child node that we are
 * waiting on to go away (when that happens it is our turn and we hold the
 * lock). It is perfectly reasonable for this method to complete without
 * invoking the callback (when we must wait for watchPath to be deleted).
 * @param {String} parentPath basically the name of the lock (the parent node).
 * @param {String} lockPath child lock that is basically a place in line.
 * @param {String} watchPath the child node we are waiting on to disappear.
 * @param {Function} callback expects (error). only purpose is to catch and
 *     report problems.
 */
LockAlgorithm.prototype.watch = function(parentPath, lockPath, watchPath, callback) {
  var self = this;
  self.client.options.log.trace1('watching: ' + watchPath);
  // true => register a watch on watchPath while checking existence.
  self.client._exists(watchPath, true, function(err, exists) {
    self.client.options.log.trace('exists', {err: err, exists: exists});
    if (err) {
      callback(err);
      return;
    }
    if (!exists) {
      // The predecessor is already gone: re-run the algorithm, which should
      // now grant us the lock (or watch a new predecessor).
      self.lockAlgorithm(parentPath, lockPath);
      return;
    }
    // wait for it to be deleted, then execute the callback.
    if (self.client.waitCallbacks[watchPath]) {
      callback(new Error('Already waiting on ' + watchPath));
      return;
    }
    // set a callback that gets invoked when watchPath is deleted.
    self.client.waitCallbacks[watchPath] = function() {
      self.client.options.log.trace('Invoked wait callback');
      self.lockAlgorithm(parentPath, lockPath);
    };
  });
};
/**
 * Implements the lock algorithm: fetch the sorted children of the lock node;
 * if our child is first in line we own the lock, otherwise watch the child
 * immediately preceding ours and retry when it disappears.
 * @param {String} parentPath a decorated form of the lock name.
 * @param {String} lockPath a child of parentPath.
 */
LockAlgorithm.prototype.lockAlgorithm = function(parentPath, lockPath) {
  var self = this, absolutePath;
  self.getSortedChildren(parentPath, function(err, children) {
    if (err) {
      self.callback(err);
    } else {
      //log.trace1('PARENT:%s, LOCK:%s, CHILDREN: %j', parentPath, lockPath, children);
      // Our sequence number is <= the lowest child: we are first in line.
      if (zkUtil.lte(zkUtil.last(lockPath), children[0])) {
        // we've got the lock!!!!
        self.client.options.log.tracef('lock acquired on ${parentPath} by ${lockPath}',
          {parentPath: parentPath, lockPath: lockPath});
        self.client.locks[self.node] = lockPath;
        self.callback(null);
      } else {
        // watch the child path immediately preceding lockPath. When it is deleted or no longer exists,
        // this process owns the lock.
        absolutePath = parentPath + '/' + self.pathBeforeMe(children, zkUtil.last(lockPath));
        self.watch(parentPath, lockPath, absolutePath, function(err) {
          if (err) {
            self.callback(err);
          } // else, a watch was set.
        });
      }
    }
  });
};
/** The LockAlgorithm constructor is this module's only export. */
exports.LockAlgorithm = LockAlgorithm;
| racker/service-registry | node_modules/zookeeper-client/lib/algorithms/lock.js | JavaScript | apache-2.0 | 6,297 |
import {Request} from '../lib/request';
import {Response} from '../lib/response';
import {AWSError} from '../lib/error';
import {Service} from '../lib/service';
import {ServiceConfigurationOptions} from '../lib/service';
import {ConfigBase as Config} from '../lib/config';
interface Blob {}
declare class IoT1ClickProjects extends Service {
/**
* Constructs a service object. This object has one method for each API operation.
*/
constructor(options?: IoT1ClickProjects.Types.ClientConfiguration)
config: Config & IoT1ClickProjects.Types.ClientConfiguration;
/**
* Associates a physical device with a placement.
*/
associateDeviceWithPlacement(params: IoT1ClickProjects.Types.AssociateDeviceWithPlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.AssociateDeviceWithPlacementResponse) => void): Request<IoT1ClickProjects.Types.AssociateDeviceWithPlacementResponse, AWSError>;
/**
* Associates a physical device with a placement.
*/
associateDeviceWithPlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.AssociateDeviceWithPlacementResponse) => void): Request<IoT1ClickProjects.Types.AssociateDeviceWithPlacementResponse, AWSError>;
/**
* Creates an empty placement.
*/
createPlacement(params: IoT1ClickProjects.Types.CreatePlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.CreatePlacementResponse) => void): Request<IoT1ClickProjects.Types.CreatePlacementResponse, AWSError>;
/**
* Creates an empty placement.
*/
createPlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.CreatePlacementResponse) => void): Request<IoT1ClickProjects.Types.CreatePlacementResponse, AWSError>;
/**
* Creates an empty project with a placement template. A project contains zero or more placements that adhere to the placement template defined in the project.
*/
createProject(params: IoT1ClickProjects.Types.CreateProjectRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.CreateProjectResponse) => void): Request<IoT1ClickProjects.Types.CreateProjectResponse, AWSError>;
/**
* Creates an empty project with a placement template. A project contains zero or more placements that adhere to the placement template defined in the project.
*/
createProject(callback?: (err: AWSError, data: IoT1ClickProjects.Types.CreateProjectResponse) => void): Request<IoT1ClickProjects.Types.CreateProjectResponse, AWSError>;
/**
* Deletes a placement. To delete a placement, it must not have any devices associated with it. When you delete a placement, all associated data becomes irretrievable.
*/
deletePlacement(params: IoT1ClickProjects.Types.DeletePlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.DeletePlacementResponse) => void): Request<IoT1ClickProjects.Types.DeletePlacementResponse, AWSError>;
/**
* Deletes a placement. To delete a placement, it must not have any devices associated with it. When you delete a placement, all associated data becomes irretrievable.
*/
deletePlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.DeletePlacementResponse) => void): Request<IoT1ClickProjects.Types.DeletePlacementResponse, AWSError>;
/**
* Deletes a project. To delete a project, it must not have any placements associated with it. When you delete a project, all associated data becomes irretrievable.
*/
deleteProject(params: IoT1ClickProjects.Types.DeleteProjectRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.DeleteProjectResponse) => void): Request<IoT1ClickProjects.Types.DeleteProjectResponse, AWSError>;
/**
* Deletes a project. To delete a project, it must not have any placements associated with it. When you delete a project, all associated data becomes irretrievable.
*/
deleteProject(callback?: (err: AWSError, data: IoT1ClickProjects.Types.DeleteProjectResponse) => void): Request<IoT1ClickProjects.Types.DeleteProjectResponse, AWSError>;
/**
* Describes a placement in a project.
*/
describePlacement(params: IoT1ClickProjects.Types.DescribePlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.DescribePlacementResponse) => void): Request<IoT1ClickProjects.Types.DescribePlacementResponse, AWSError>;
/**
* Describes a placement in a project.
*/
describePlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.DescribePlacementResponse) => void): Request<IoT1ClickProjects.Types.DescribePlacementResponse, AWSError>;
/**
* Returns an object describing a project.
*/
describeProject(params: IoT1ClickProjects.Types.DescribeProjectRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.DescribeProjectResponse) => void): Request<IoT1ClickProjects.Types.DescribeProjectResponse, AWSError>;
/**
* Returns an object describing a project.
*/
describeProject(callback?: (err: AWSError, data: IoT1ClickProjects.Types.DescribeProjectResponse) => void): Request<IoT1ClickProjects.Types.DescribeProjectResponse, AWSError>;
/**
* Removes a physical device from a placement.
*/
disassociateDeviceFromPlacement(params: IoT1ClickProjects.Types.DisassociateDeviceFromPlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.DisassociateDeviceFromPlacementResponse) => void): Request<IoT1ClickProjects.Types.DisassociateDeviceFromPlacementResponse, AWSError>;
/**
* Removes a physical device from a placement.
*/
disassociateDeviceFromPlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.DisassociateDeviceFromPlacementResponse) => void): Request<IoT1ClickProjects.Types.DisassociateDeviceFromPlacementResponse, AWSError>;
/**
* Returns an object enumerating the devices in a placement.
*/
getDevicesInPlacement(params: IoT1ClickProjects.Types.GetDevicesInPlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.GetDevicesInPlacementResponse) => void): Request<IoT1ClickProjects.Types.GetDevicesInPlacementResponse, AWSError>;
/**
* Returns an object enumerating the devices in a placement.
*/
getDevicesInPlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.GetDevicesInPlacementResponse) => void): Request<IoT1ClickProjects.Types.GetDevicesInPlacementResponse, AWSError>;
/**
* Lists the placement(s) of a project.
*/
listPlacements(params: IoT1ClickProjects.Types.ListPlacementsRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.ListPlacementsResponse) => void): Request<IoT1ClickProjects.Types.ListPlacementsResponse, AWSError>;
/**
* Lists the placement(s) of a project.
*/
listPlacements(callback?: (err: AWSError, data: IoT1ClickProjects.Types.ListPlacementsResponse) => void): Request<IoT1ClickProjects.Types.ListPlacementsResponse, AWSError>;
/**
* Lists the AWS IoT 1-Click project(s) associated with your AWS account and region.
*/
listProjects(params: IoT1ClickProjects.Types.ListProjectsRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.ListProjectsResponse) => void): Request<IoT1ClickProjects.Types.ListProjectsResponse, AWSError>;
/**
* Lists the AWS IoT 1-Click project(s) associated with your AWS account and region.
*/
listProjects(callback?: (err: AWSError, data: IoT1ClickProjects.Types.ListProjectsResponse) => void): Request<IoT1ClickProjects.Types.ListProjectsResponse, AWSError>;
/**
* Updates a placement with the given attributes. To clear an attribute, pass an empty value (i.e., "").
*/
updatePlacement(params: IoT1ClickProjects.Types.UpdatePlacementRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.UpdatePlacementResponse) => void): Request<IoT1ClickProjects.Types.UpdatePlacementResponse, AWSError>;
/**
* Updates a placement with the given attributes. To clear an attribute, pass an empty value (i.e., "").
*/
updatePlacement(callback?: (err: AWSError, data: IoT1ClickProjects.Types.UpdatePlacementResponse) => void): Request<IoT1ClickProjects.Types.UpdatePlacementResponse, AWSError>;
/**
* Updates a project associated with your AWS account and region. With the exception of device template names, you can pass just the values that need to be updated because the update request will change only the values that are provided. To clear a value, pass the empty string (i.e., "").
*/
updateProject(params: IoT1ClickProjects.Types.UpdateProjectRequest, callback?: (err: AWSError, data: IoT1ClickProjects.Types.UpdateProjectResponse) => void): Request<IoT1ClickProjects.Types.UpdateProjectResponse, AWSError>;
/**
* Updates a project associated with your AWS account and region. With the exception of device template names, you can pass just the values that need to be updated because the update request will change only the values that are provided. To clear a value, pass the empty string (i.e., "").
*/
updateProject(callback?: (err: AWSError, data: IoT1ClickProjects.Types.UpdateProjectResponse) => void): Request<IoT1ClickProjects.Types.UpdateProjectResponse, AWSError>;
}
declare namespace IoT1ClickProjects {
export interface AssociateDeviceWithPlacementRequest {
/**
* The name of the project containing the placement in which to associate the device.
*/
projectName: ProjectName;
/**
* The name of the placement in which to associate the device.
*/
placementName: PlacementName;
/**
* The ID of the physical device to be associated with the given placement in the project. Note that a mandatory 4 character prefix is required for all deviceId values.
*/
deviceId: DeviceId;
/**
* The device template name to associate with the device ID.
*/
deviceTemplateName: DeviceTemplateName;
}
export interface AssociateDeviceWithPlacementResponse {
}
export type AttributeDefaultValue = string;
export type AttributeName = string;
export type AttributeValue = string;
export interface CreatePlacementRequest {
/**
* The name of the placement to be created.
*/
placementName: PlacementName;
/**
* The name of the project in which to create the placement.
*/
projectName: ProjectName;
/**
* Optional user-defined key/value pairs providing contextual data (such as location or function) for the placement.
*/
attributes?: PlacementAttributeMap;
}
export interface CreatePlacementResponse {
}
export interface CreateProjectRequest {
/**
* The name of the project to create.
*/
projectName: ProjectName;
/**
* An optional description for the project.
*/
description?: Description;
/**
* The schema defining the placement to be created. A placement template defines placement default attributes and device templates. You cannot add or remove device templates after the project has been created. However, you can update callbackOverrides for the device templates using the UpdateProject API.
*/
placementTemplate?: PlacementTemplate;
}
export interface CreateProjectResponse {
}
export type DefaultPlacementAttributeMap = {[key: string]: AttributeDefaultValue};
export interface DeletePlacementRequest {
/**
* The name of the empty placement to delete.
*/
placementName: PlacementName;
/**
* The project containing the empty placement to delete.
*/
projectName: ProjectName;
}
export interface DeletePlacementResponse {
}
export interface DeleteProjectRequest {
/**
* The name of the empty project to delete.
*/
projectName: ProjectName;
}
export interface DeleteProjectResponse {
}
export interface DescribePlacementRequest {
/**
* The name of the placement within a project.
*/
placementName: PlacementName;
/**
* The project containing the placement to be described.
*/
projectName: ProjectName;
}
export interface DescribePlacementResponse {
/**
* An object describing the placement.
*/
placement: PlacementDescription;
}
export interface DescribeProjectRequest {
/**
* The name of the project to be described.
*/
projectName: ProjectName;
}
export interface DescribeProjectResponse {
/**
* An object describing the project.
*/
project: ProjectDescription;
}
export type Description = string;
export type DeviceCallbackKey = string;
export type DeviceCallbackOverrideMap = {[key: string]: DeviceCallbackValue};
export type DeviceCallbackValue = string;
export type DeviceId = string;
export type DeviceMap = {[key: string]: DeviceId};
export interface DeviceTemplate {
/**
* The device type, which currently must be "button".
*/
deviceType?: DeviceType;
/**
* An optional Lambda function to invoke instead of the default Lambda function provided by the placement template.
*/
callbackOverrides?: DeviceCallbackOverrideMap;
}
export type DeviceTemplateMap = {[key: string]: DeviceTemplate};
export type DeviceTemplateName = string;
export type DeviceType = string;
  export interface DisassociateDeviceFromPlacementRequest {
    /**
     * The name of the project that contains the placement.
     */
    projectName: ProjectName;
    /**
     * The name of the placement that the device should be removed from.
     */
    placementName: PlacementName;
    /**
     * The device template name of the device to be disassociated from the placement. (Note: the request identifies the device by its template name within the placement, not by device ID.)
     */
    deviceTemplateName: DeviceTemplateName;
  }
export interface DisassociateDeviceFromPlacementResponse {
}
export interface GetDevicesInPlacementRequest {
/**
* The name of the project containing the placement.
*/
projectName: ProjectName;
/**
* The name of the placement to get the devices from.
*/
placementName: PlacementName;
}
export interface GetDevicesInPlacementResponse {
/**
* An object containing the devices (zero or more) within the placement.
*/
devices: DeviceMap;
}
export interface ListPlacementsRequest {
/**
* The project containing the placements to be listed.
*/
projectName: ProjectName;
/**
* The token to retrieve the next set of results.
*/
nextToken?: NextToken;
/**
* The maximum number of results to return per request. If not set, a default value of 100 is used.
*/
maxResults?: MaxResults;
}
export interface ListPlacementsResponse {
/**
* An object listing the requested placements.
*/
placements: PlacementSummaryList;
/**
* The token used to retrieve the next set of results - will be effectively empty if there are no further results.
*/
nextToken?: NextToken;
}
export interface ListProjectsRequest {
/**
* The token to retrieve the next set of results.
*/
nextToken?: NextToken;
/**
* The maximum number of results to return per request. If not set, a default value of 100 is used.
*/
maxResults?: MaxResults;
}
export interface ListProjectsResponse {
/**
* An object containing the list of projects.
*/
projects: ProjectSummaryList;
/**
* The token used to retrieve the next set of results - will be effectively empty if there are no further results.
*/
nextToken?: NextToken;
}
export type MaxResults = number;
export type NextToken = string;
export type PlacementAttributeMap = {[key: string]: AttributeValue};
export interface PlacementDescription {
/**
* The name of the project containing the placement.
*/
projectName: ProjectName;
/**
* The name of the placement.
*/
placementName: PlacementName;
/**
* The user-defined attributes associated with the placement.
*/
attributes: PlacementAttributeMap;
/**
* The date when the placement was initially created, in UNIX epoch time format.
*/
createdDate: Time;
/**
* The date when the placement was last updated, in UNIX epoch time format. If the placement was not updated, then createdDate and updatedDate are the same.
*/
updatedDate: Time;
}
export type PlacementName = string;
export interface PlacementSummary {
/**
* The name of the project containing the placement.
*/
projectName: ProjectName;
/**
* The name of the placement being summarized.
*/
placementName: PlacementName;
/**
* The date when the placement was originally created, in UNIX epoch time format.
*/
createdDate: Time;
/**
* The date when the placement was last updated, in UNIX epoch time format. If the placement was not updated, then createdDate and updatedDate are the same.
*/
updatedDate: Time;
}
export type PlacementSummaryList = PlacementSummary[];
export interface PlacementTemplate {
/**
* The default attributes (key/value pairs) to be applied to all placements using this template.
*/
defaultAttributes?: DefaultPlacementAttributeMap;
/**
* An object specifying the DeviceTemplate for all placements using this (PlacementTemplate) template.
*/
deviceTemplates?: DeviceTemplateMap;
}
export interface ProjectDescription {
/**
* The name of the project for which to obtain information from.
*/
projectName: ProjectName;
/**
* The description of the project.
*/
description?: Description;
/**
* The date when the project was originally created, in UNIX epoch time format.
*/
createdDate: Time;
/**
* The date when the project was last updated, in UNIX epoch time format. If the project was not updated, then createdDate and updatedDate are the same.
*/
updatedDate: Time;
/**
* An object describing the project's placement specifications.
*/
placementTemplate?: PlacementTemplate;
}
export type ProjectName = string;
export interface ProjectSummary {
/**
* The name of the project being summarized.
*/
projectName: ProjectName;
/**
* The date when the project was originally created, in UNIX epoch time format.
*/
createdDate: Time;
/**
* The date when the project was last updated, in UNIX epoch time format. If the project was not updated, then createdDate and updatedDate are the same.
*/
updatedDate: Time;
}
export type ProjectSummaryList = ProjectSummary[];
export type Time = Date;
export interface UpdatePlacementRequest {
/**
* The name of the placement to update.
*/
placementName: PlacementName;
/**
* The name of the project containing the placement to be updated.
*/
projectName: ProjectName;
/**
* The user-defined object of attributes used to update the placement. The maximum number of key/value pairs is 50.
*/
attributes?: PlacementAttributeMap;
}
export interface UpdatePlacementResponse {
}
export interface UpdateProjectRequest {
/**
* The name of the project to be updated.
*/
projectName: ProjectName;
/**
* An optional user-defined description for the project.
*/
description?: Description;
/**
* An object defining the project update. Once a project has been created, you cannot add device template names to the project. However, for a given placementTemplate, you can update the associated callbackOverrides for the device definition using this API.
*/
placementTemplate?: PlacementTemplate;
}
export interface UpdateProjectResponse {
}
/**
* A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
*/
export type apiVersion = "2018-05-14"|"latest"|string;
export interface ClientApiVersions {
/**
* A string in YYYY-MM-DD format that represents the latest possible API version that can be used in this service. Specify 'latest' to use the latest possible version.
*/
apiVersion?: apiVersion;
}
export type ClientConfiguration = ServiceConfigurationOptions & ClientApiVersions;
/**
* Contains interfaces for use with the IoT1ClickProjects client.
*/
export import Types = IoT1ClickProjects;
}
export = IoT1ClickProjects;
| chrisradek/aws-sdk-js | clients/iot1clickprojects.d.ts | TypeScript | apache-2.0 | 20,275 |
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using soothsayer.Infrastructure;
using soothsayer.Infrastructure.IO;
using soothsayer.Migrations;
using soothsayer.Scanners;
using soothsayer.Scripts;
namespace soothsayer
{
public class OracleMigrator : IMigrator
{
private readonly IConnectionFactory _connectionFactory;
private readonly IVersionRespositoryFactory _versionRespositoryFactory;
private readonly IAppliedScriptsRepositoryFactory _appliedScriptsRepositoryFactory;
private readonly IDatabaseMetadataProviderFactory _databaseMetadataProviderFactory;
private readonly IScriptScannerFactory _scriptScannerFactory;
private readonly IScriptRunnerFactory _scriptRunnerFactory;
public OracleMigrator(
IConnectionFactory connectionFactory,
IVersionRespositoryFactory versionRespositoryFactory,
IAppliedScriptsRepositoryFactory appliedScriptsRepositoryFactory,
IDatabaseMetadataProviderFactory databaseMetadataProviderFactory,
IScriptScannerFactory scriptScannerFactory,
IScriptRunnerFactory scriptRunnerFactory)
{
_connectionFactory = connectionFactory;
_versionRespositoryFactory = versionRespositoryFactory;
_databaseMetadataProviderFactory = databaseMetadataProviderFactory;
_scriptScannerFactory = scriptScannerFactory;
_scriptRunnerFactory = scriptRunnerFactory;
_appliedScriptsRepositoryFactory = appliedScriptsRepositoryFactory;
}
        /// <summary>
        /// Runs a full migration against the target Oracle schema: reports the
        /// current version, scans the script folders (init/up/down/term),
        /// executes the migration in the requested direction, then reports the
        /// resulting version (or that the schema was dropped).
        /// </summary>
        /// <param name="databaseConnectionInfo">Connection details for the target database.</param>
        /// <param name="migrationInfo">Direction, target schema/version and script locations.</param>
        public void Migrate(DatabaseConnectionInfo databaseConnectionInfo, MigrationInfo migrationInfo)
        {
            using (var connection = _connectionFactory.Create(databaseConnectionInfo))
            {
                Output.Text("Connected to oracle database on connection string '{0}'.".FormatWith(databaseConnectionInfo.ConnectionString));
                Output.EmptyLine();
                Output.Text("Checking for the current database version.");
                var oracleMetadataProvider = _databaseMetadataProviderFactory.Create(connection);
                var oracleVersioning = _versionRespositoryFactory.Create(connection);
                var oracleAppliedScriptsRepository = _appliedScriptsRepositoryFactory.Create(connection);
                // A missing schema means there is no version to read yet.
                var currentVersion = oracleMetadataProvider.SchemaExists(migrationInfo.TargetSchema) ? oracleVersioning.GetCurrentVersion(migrationInfo.TargetSchema) : null;
                Output.Info("The current database version is: {0}".FormatWith(currentVersion.IsNotNull() ? currentVersion.Version.ToString(CultureInfo.InvariantCulture) : "<empty>"));
                Output.EmptyLine();
                Output.Info("Scanning input folder '{0}' for scripts...".FormatWith(migrationInfo.ScriptFolder));
                var initScripts = ScanForScripts(migrationInfo, ScriptFolders.Init, _scriptScannerFactory.Create(ScriptFolders.Init)).ToArray();
                var upScripts = ScanForScripts(migrationInfo, ScriptFolders.Up, _scriptScannerFactory.Create(ScriptFolders.Up)).ToArray();
                var downScripts = ScanForScripts(migrationInfo, ScriptFolders.Down, _scriptScannerFactory.Create(ScriptFolders.Down)).ToArray();
                var termScripts = ScanForScripts(migrationInfo, ScriptFolders.Term, _scriptScannerFactory.Create(ScriptFolders.Term)).ToArray();
                Output.EmptyLine();
                if (migrationInfo.TargetVersion.HasValue)
                {
                    Output.Info("Target database version was provided, will target migrating the database to version {0}".FormatWith(migrationInfo.TargetVersion.Value))
;
                }
                VerifyDownScripts(upScripts, downScripts);
                var storedMigrationSteps = new List<IStep>();
                if (migrationInfo.UseStored)
                {
                    // Prefer the scripts recorded in the database over the local
                    // 'down' scripts when the user asked for --usestored.
                    Output.Info("--usestored was specified, fetching set of applied scripts stored in the target database...".FormatWith());
                    storedMigrationSteps = oracleAppliedScriptsRepository.GetAppliedScripts(migrationInfo.TargetSchema).ToList();
                    Output.Text(" {0} stored applied scripts found.".FormatWith(storedMigrationSteps.Count));
                    Output.EmptyLine();
                }
                var scriptRunner = _scriptRunnerFactory.Create(databaseConnectionInfo);
                RunMigration(migrationInfo, currentVersion, initScripts, upScripts, downScripts, termScripts, storedMigrationSteps, scriptRunner, oracleMetadataProvider, oracleVersioning, oracleAppliedScriptsRepository);
                // A full downgrade can drop the schema entirely, so re-check
                // before asking for the new version.
                if (oracleMetadataProvider.SchemaExists(migrationInfo.TargetSchema))
                {
                    var newVersion = oracleVersioning.GetCurrentVersion(migrationInfo.TargetSchema);
                    Output.Info("Database version is now: {0}".FormatWith(newVersion.IsNotNull() ? newVersion.Version.ToString(CultureInfo.InvariantCulture) : "<empty>"));
                }
                else
                {
                    Output.Info("Target schema '{0}' no longer exists.".FormatWith(migrationInfo.TargetSchema));
                }
            }
        }
private static IEnumerable<Script> ScanForScripts(MigrationInfo migrationInfo, string migrationFolder, IScriptScanner scanner)
{
var environments = (migrationInfo.TargetEnvironment ?? Enumerable.Empty<string>()).ToArray();
var scripts = (scanner.Scan(migrationInfo.ScriptFolder.Whack(migrationFolder), environments) ?? Enumerable.Empty<Script>()).ToArray();
Output.Text("Found {0} '{1}' scripts.".FormatWith(scripts.Length, migrationFolder));
foreach (var script in scripts)
{
Output.Verbose(script.Name, 1);
}
return scripts;
}
private static void VerifyDownScripts(IEnumerable<Script> upScripts, IEnumerable<Script> downScripts)
{
var withoutRollback = upScripts.Where(u => downScripts.All(d => d.Version != u.Version)).ToArray();
if (withoutRollback.Any())
{
Output.Warn("The following 'up' scripts do not have a corresponding 'down' script, any rollback may not work as expected:");
foreach (var script in withoutRollback)
{
Output.Warn(script.Name, 1);
}
Output.EmptyLine();
}
}
        /// <summary>
        /// Executes the migration steps in the requested direction. Going down,
        /// it prefers the applied scripts stored in the database (when supplied)
        /// over local 'down' scripts, and only runs termination scripts on a
        /// full downgrade (no target version). Going up, it runs init scripts,
        /// ensures the bookkeeping tables exist, then runs the 'up' scripts.
        /// </summary>
        private static void RunMigration(MigrationInfo migrationInfo, DatabaseVersion currentVersion, IEnumerable<Script> initScripts, IEnumerable<Script> upScripts, IEnumerable<Script> downScripts, IEnumerable<Script> termScripts,
            IList<IStep> storedSteps, IScriptRunner scriptRunner, IDatabaseMetadataProvider databaseMetadataProvider, IVersionRespository versionRespository, IAppliedScriptsRepository appliedScriptsRepository)
        {
            // Pair each forward script with its rollback counterpart (which may
            // be null when no matching 'down'/'term' script exists).
            var upDownSteps = upScripts.Select(u => new DatabaseStep(u, downScripts.FirstOrDefault(d => d.Version == u.Version))).ToList();
            var initTermSteps = initScripts.Select(i => new DatabaseStep(i, termScripts.FirstOrDefault(t => t.Version == i.Version))).ToList();
            if (migrationInfo.Direction == MigrationDirection.Down)
            {
                var downMigration = new DownMigration(databaseMetadataProvider, versionRespository, appliedScriptsRepository, migrationInfo.Forced);
                if (storedSteps.Any())
                {
                    Output.Warn("NOTE: Using stored applied scripts to perform downgrade instead of local 'down' scripts.");
                    downMigration.Migrate(storedSteps, currentVersion, migrationInfo.TargetVersion, scriptRunner, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
                }
                else
                {
                    downMigration.Migrate(upDownSteps, currentVersion, migrationInfo.TargetVersion, scriptRunner, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
                }
                if (!migrationInfo.TargetVersion.HasValue)
                {
                    // No target version means a full teardown: run term scripts.
                    var termMigration = new TermMigration(databaseMetadataProvider, migrationInfo.Forced);
                    termMigration.Migrate(initTermSteps, currentVersion, migrationInfo.TargetVersion, scriptRunner, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
                }
                else
                {
                    Output.Info("A target version was provided, termination scripts will not be executed.");
                }
            }
            else
            {
                var initMigration = new InitMigration(databaseMetadataProvider, migrationInfo.Forced);
                initMigration.Migrate(initTermSteps, currentVersion, migrationInfo.TargetVersion, scriptRunner, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
                // Bookkeeping tables must exist before 'up' steps record into them.
                EnsureVersioningTableIsInitialised(versionRespository, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
                EnsureAppliedScriptsTableIsInitialised(appliedScriptsRepository, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
                var upMigration = new UpMigration(versionRespository, appliedScriptsRepository, migrationInfo.Forced);
                upMigration.Migrate(upDownSteps, currentVersion, migrationInfo.TargetVersion, scriptRunner, migrationInfo.TargetSchema, migrationInfo.TargetTablespace);
            }
        }
private static void EnsureAppliedScriptsTableIsInitialised(IAppliedScriptsRepository appliedScriptsRepository, string targetSchema, string targetTablespace)
{
bool alreadyInitialised = appliedScriptsRepository.AppliedScriptsTableExists(targetSchema);
if (!alreadyInitialised)
{
appliedScriptsRepository.InitialiseAppliedScriptsTable(targetSchema, targetTablespace);
}
}
private static void EnsureVersioningTableIsInitialised(IVersionRespository versionRespository, string targetSchema, string targetTablespace)
{
bool alreadyInitialised = versionRespository.VersionTableExists(targetSchema);
if (!alreadyInitialised)
{
versionRespository.InitialiseVersioningTable(targetSchema, targetTablespace);
}
}
}
}
| paybyphone/soothsayer | soothsayer/OracleMigrator.cs | C# | apache-2.0 | 10,677 |
package org.apache.uima.casviewer.core.internal;
import java.util.List;
/**
 * A node that holds a list of {@link AnnotationObject} instances.
 */
public class AnnotationObjectsNode {
  // Backing list of annotations held by this node; may be null until assigned.
  protected List<AnnotationObject> annotationList;
  /**
   * Creates an empty node; the annotation list is initially null.
   */
  public AnnotationObjectsNode () {
  }
  /**
   * Creates a node wrapping the given annotation list.
   *
   * @param list the annotations this node should hold
   */
  public AnnotationObjectsNode(List<AnnotationObject> list) {
    annotationList = list;
  }
  /**
   * @return the annotationList
   */
  public List<AnnotationObject> getAnnotationList() {
    return annotationList;
  }
  /**
   * @param list the annotation list to set
   */
  public void setAnnotationList(List<AnnotationObject> list) {
    this.annotationList = list;
  }
}
| apache/uima-sandbox | CasViewerEclipsePlugin/uimaj-ep-casviewer-core/src/main/java/org/apache/uima/casviewer/core/internal/AnnotationObjectsNode.java | Java | apache-2.0 | 717 |
/*
* JBoss, Home of Professional Open Source
* Copyright 2012, Red Hat, Inc., and individual contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.weld.injection.producer;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.util.Set;
import javax.enterprise.context.spi.CreationalContext;
import javax.enterprise.event.Observes;
import javax.enterprise.event.ObservesAsync;
import javax.enterprise.inject.CreationException;
import javax.enterprise.inject.Disposes;
import javax.enterprise.inject.spi.AnnotatedMember;
import javax.enterprise.inject.spi.InjectionPoint;
import javax.enterprise.inject.spi.Producer;
import org.jboss.weld.annotated.enhanced.EnhancedAnnotatedMethod;
import org.jboss.weld.bean.DisposalMethod;
import org.jboss.weld.bean.SessionBean;
import org.jboss.weld.exceptions.DefinitionException;
import org.jboss.weld.injection.InjectionPointFactory;
import org.jboss.weld.injection.MethodInjectionPoint;
import org.jboss.weld.injection.MethodInjectionPoint.MethodInjectionPointType;
import org.jboss.weld.logging.BeanLogger;
import org.jboss.weld.security.GetMethodAction;
import org.jboss.weld.util.reflection.Formats;
import org.jboss.weld.util.reflection.Reflections;
/**
 * {@link Producer} implementation for producer methods.
 * <p>
 * Wraps the producer method in a {@link MethodInjectionPoint} and validates at
 * construction time that the method is a legal producer: it must not declare
 * observer ({@code @Observes}/{@code @ObservesAsync}) or disposer
 * ({@code @Disposes}) parameters, and for session beans it must be a business
 * method reachable through one of the bean's types.
 *
 * @author Jozef Hartinger
 *
 */
public abstract class ProducerMethodProducer<X, T> extends AbstractMemberProducer<X, T> {
    private static final String PRODUCER_ANNOTATION = "@Produces";
    // The underlying method
    private final MethodInjectionPoint<T, ? super X> method;
    public ProducerMethodProducer(EnhancedAnnotatedMethod<T, ? super X> enhancedAnnotatedMethod, DisposalMethod<?, ?> disposalMethod) {
        super(enhancedAnnotatedMethod, disposalMethod);
        // Note that for producer method injection points the declaring bean is the producer method itself
        this.method = InjectionPointFactory.instance().createMethodInjectionPoint(MethodInjectionPointType.PRODUCER, enhancedAnnotatedMethod, getBean(), enhancedAnnotatedMethod.getDeclaringType().getJavaClass(), null, getBeanManager());
        checkProducerMethod(enhancedAnnotatedMethod);
        checkDelegateInjectionPoints();
    }
    /**
     * Validates the producer method.
     *
     * @param method the producer method to validate
     * @throws DefinitionException (via BeanLogger) if the method declares observer or
     *         disposer parameters, or is a non-static method of a session bean that is
     *         not declared on any of the bean's types
     */
    protected void checkProducerMethod(EnhancedAnnotatedMethod<T, ? super X> method) {
        if (method.getEnhancedParameters(Observes.class).size() > 0) {
            throw BeanLogger.LOG.inconsistentAnnotationsOnMethod(PRODUCER_ANNOTATION, "@Observes", this.method,
                    Formats.formatAsStackTraceElement(method.getJavaMember()));
        } else if (method.getEnhancedParameters(ObservesAsync.class).size() > 0) {
            throw BeanLogger.LOG.inconsistentAnnotationsOnMethod(PRODUCER_ANNOTATION, "@ObservesAsync", this.method,
                    Formats.formatAsStackTraceElement(method.getJavaMember()));
        } else if (method.getEnhancedParameters(Disposes.class).size() > 0) {
            throw BeanLogger.LOG.inconsistentAnnotationsOnMethod(PRODUCER_ANNOTATION, "@Disposes", this.method,
                    Formats.formatAsStackTraceElement(method.getJavaMember()));
        } else if (getDeclaringBean() instanceof SessionBean<?> && !Modifier.isStatic(method.slim().getJavaMember().getModifiers())) {
            // A non-static producer on a session bean must be a business method, i.e.
            // resolvable on at least one of the bean's types.
            boolean methodDeclaredOnTypes = false;
            for (Type type : getDeclaringBean().getTypes()) {
                Class<?> clazz = Reflections.getRawType(type);
                try {
                    AccessController.doPrivileged(new GetMethodAction(clazz, method.getName(), method.getParameterTypesAsArray()));
                    methodDeclaredOnTypes = true;
                    break;
                } catch (PrivilegedActionException ignored) {
                }
            }
            if (!methodDeclaredOnTypes) {
                throw BeanLogger.LOG.methodNotBusinessMethod("Producer", this, getDeclaringBean(), Formats.formatAsStackTraceElement(method.getJavaMember()));
            }
        }
    }
    @Override
    public Set<InjectionPoint> getInjectionPoints() {
        return method.getInjectionPoints();
    }
    // Invokes the producer method on the given receiver to create the instance.
    @Override
    protected T produce(Object receiver, CreationalContext<T> ctx) {
        return method.invoke(receiver, null, getBeanManager(), ctx, CreationException.class);
    }
    @Override
    public AnnotatedMember<? super X> getAnnotated() {
        return method.getAnnotated();
    }
    @Override
    protected DefinitionException producerWithInvalidTypeVariable(AnnotatedMember<?> member) {
        return BeanLogger.LOG.producerMethodReturnTypeInvalidTypeVariable(member, Formats.formatAsStackTraceElement(member.getJavaMember()));
    }
    @Override
    protected DefinitionException producerWithInvalidWildcard(AnnotatedMember<?> member) {
        return BeanLogger.LOG.producerMethodCannotHaveAWildcardReturnType(member, Formats.formatAsStackTraceElement(member.getJavaMember()));
    }
    @Override
    protected DefinitionException producerWithParameterizedTypeWithTypeVariableBeanTypeMustBeDependent(AnnotatedMember<?> member) {
        return BeanLogger.LOG.producerMethodWithTypeVariableReturnTypeMustBeDependent(member, Formats.formatAsStackTraceElement(member.getJavaMember()));
    }
}
| antoinesd/weld-core | impl/src/main/java/org/jboss/weld/injection/producer/ProducerMethodProducer.java | Java | apache-2.0 | 5,998 |
package controllers;
import play.shaded.ahc.org.asynchttpclient.AsyncHttpClient;
import play.shaded.ahc.org.asynchttpclient.BoundRequestBuilder;
import play.shaded.ahc.org.asynchttpclient.ListenableFuture;
import play.shaded.ahc.org.asynchttpclient.ws.WebSocket;
import play.shaded.ahc.org.asynchttpclient.ws.WebSocketListener;
import play.shaded.ahc.org.asynchttpclient.ws.WebSocketTextListener;
import play.shaded.ahc.org.asynchttpclient.ws.WebSocketUpgradeHandler;
import org.slf4j.Logger;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.function.Consumer;
/**
 * A quick wrapper around AHC WebSocket
 *
 * https://github.com/AsyncHttpClient/async-http-client/blob/2.0/client/src/main/java/org/asynchttpclient/ws/WebSocket.java
 */
public class WebSocketClient {
    private AsyncHttpClient client;
    public WebSocketClient(AsyncHttpClient c) {
        this.client = c;
    }
    /**
     * Opens a websocket connection to the given url and returns a future that
     * completes with the {@link WebSocket} once the upgrade handshake finishes.
     *
     * @param url the ws:// or wss:// endpoint to connect to
     * @param listener receives open/close/error/message callbacks
     */
    public CompletableFuture<WebSocket> call(String url, WebSocketTextListener listener) throws ExecutionException, InterruptedException {
        final BoundRequestBuilder requestBuilder = client.prepareGet(url);
        final WebSocketUpgradeHandler handler = new WebSocketUpgradeHandler.Builder().addWebSocketListener(listener).build();
        final ListenableFuture<WebSocket> future = requestBuilder.execute(handler);
        return future.toCompletableFuture();
    }
    /**
     * Text listener that forwards each received message to a callback and records
     * the last error reported, retrievable via {@link #getThrowable()}.
     */
    static class LoggingListener implements WebSocketTextListener {
        private final Consumer<String> onMessageCallback;
        public LoggingListener(Consumer<String> onMessageCallback) {
            this.onMessageCallback = onMessageCallback;
        }
        private Logger logger = org.slf4j.LoggerFactory.getLogger(LoggingListener.class);
        // Last throwable reported by onError, if any.
        private Throwable throwableFound = null;
        public Throwable getThrowable() {
            return throwableFound;
        }
        public void onOpen(WebSocket websocket) {
            // no-op
        }
        public void onClose(WebSocket websocket) {
            // no-op
        }
        public void onError(Throwable t) {
            // remember the error so callers can assert on it after the fact
            throwableFound = t;
        }
        @Override
        public void onMessage(String s) {
            onMessageCallback.accept(s);
        }
    }
}
| play2-maven-plugin/play2-maven-test-projects | play26/java/websocket-example-using-webjars-assets/test/controllers/WebSocketClient.java | Java | apache-2.0 | 2,469 |
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
# COM interop utility module
import sys
import nt
from iptest.assert_util import *
from iptest.file_util import *
from iptest.process_util import *
if is_cli:
import clr
from System import Type
from System import Activator
from System import Exception as System_dot_Exception
remove_ironpython_dlls(testpath.public_testdir)
load_iron_python_dll()
import IronPython
load_iron_python_test()
import IronPythonTest
#--For asserts in IP/DLR assemblies----------------------------------------
from System.Diagnostics import Debug, DefaultTraceListener
class MyTraceListener(DefaultTraceListener):
def Fail(self, msg, detailMsg=''):
print "ASSERT FAILED:", msg
if detailMsg!='':
print " ", detailMsg
sys.exit(1)
if is_snap:
Debug.Listeners.Clear()
Debug.Listeners.Add(MyTraceListener())
is_pywin32 = False
if sys.platform=="win32":
try:
import win32com.client
is_pywin32 = True
if sys.prefix not in nt.environ["Path"]:
nt.environ["Path"] += ";" + sys.prefix
except:
pass
#------------------------------------------------------------------------------
#--GLOBALS
windir = get_environ_variable("windir")
agentsvr_path = path_combine(windir, r"msagent\agentsvr.exe")
scriptpw_path = path_combine(windir, r"system32\scriptpw.dll")
STRING_VALUES = [ "", "a", "ab", "abc", "aa",
"a" * 100000,
"1", "1.0", "1L", "object", "str", "object()",
" ", "_", "abc ", " abc", " abc ", "ab c", "ab c",
"\ta", "a\t", "\n", "\t", "\na", "a\n"]
STRING_VALUES = [unicode(x) for x in STRING_VALUES] + STRING_VALUES
def aFunc(): pass  # dummy function used below as a non-numeric test value
class KNew(object): pass  # new-style class used below as a non-numeric test value
class KOld: pass  # old-style class used below as a non-numeric test value
NON_NUMBER_VALUES = [ object,
KNew, KOld,
Exception,
object(), KNew(), KOld(),
aFunc, str, eval, type,
[], [3.14], ["abc"],
(), (3,), (u"xyz",),
xrange(5),
{}, {'a':1},
__builtins__,
]
FPN_VALUES = [ -1.23, -1.0, -0.123, -0.0, 0.123, 1.0, 1.23,
0.0000001, 3.14159265, 1E10, 1.0E10 ]
UINT_VALUES = [ 0, 1, 2, 7, 10, 32]
INT_VALUES = [ -x for x in UINT_VALUES ] + UINT_VALUES
LONG_VALUES = [long(x) for x in INT_VALUES]
COMPLEX_VALUES = [ 3j]
#--Subclasses of Python/.NET types------------------------------------------
#--Used to verify that user-defined subclasses marshal to COM the same way
#--as their base types do.  The System.* variants only exist under IronPython.
class Py_Str(str): pass
if is_cli:
    class Py_System_String(System.String): pass
class Py_Float(float): pass
class Py_Double(float): pass
if is_cli:
    class Py_System_Double(System.Double): pass
class Py_UShort(int): pass
class Py_ULong(long): pass
class Py_ULongLong(long): pass
class Py_Short(int): pass
class Py_Long(int): pass
if is_cli:
    class Py_System_Int32(System.Int32): pass
class Py_LongLong(long): pass
#-------Helpers----------------
def shallow_copy(in_list):
    '''
    Returns a new list containing the same elements as in_list.

    We do not necessarily have access to the copy module, so build the
    copy directly with the list() constructor (equivalent to the previous
    comprehension, but clearer and faster).
    '''
    return list(in_list)
def pos_num_helper(clr_type):
    '''
    Returns a list of boundary-area values for a .NET numeric type: values
    at and just above MinValue, the midpoint, and values at and just below
    MaxValue.  All returned values lie within the type's legal range.
    '''
    smallest = clr_type.MinValue
    largest = clr_type.MaxValue
    near_min = [smallest + offset for offset in (0, 1, 2, 10)]
    near_max = [largest - offset for offset in (10, 2, 1, 0)]
    return near_min + [largest / 2] + near_max
def overflow_num_helper(clr_type):
    '''
    Returns values just outside the legal range of a .NET numeric type:
    several values below MinValue followed by several above MaxValue.
    Passing any of them to a COM parameter of that type should overflow.
    '''
    below_min = [clr_type.MinValue - offset for offset in (1, 2, 3, 10)]
    above_max = [clr_type.MaxValue + offset for offset in (10, 3, 2, 1)]
    return below_min + above_max
def valueErrorTrigger(in_type):
    '''
    Returns a list of Python values which are expected to trigger a
    ValueError when passed to a COM method parameter of the given COM type
    name (e.g. "BYTE", "BSTR").  Raises KeyError for an unknown type name.
    The win32 (pywin32) branches adjust for known CPython/IronPython
    marshaling incompatibilities.
    '''
    ret_val = {}
    ############################################################
    #Is there anything in Python not being able to evaluate to a bool?
    ret_val["VARIANT_BOOL"] = [ ]
    ############################################################
    ret_val["BYTE"] = shallow_copy(NON_NUMBER_VALUES)
    ret_val["BYTE"] += COMPLEX_VALUES
    if sys.platform=="win32":
        ret_val["BYTE"] += FPN_VALUES  #Merlin 323751
        ret_val["BYTE"] = [x for x in ret_val["BYTE"] if type(x) not in [unicode, str]] #INCOMPAT BUG - should be ValueError
        ret_val["BYTE"] = [x for x in ret_val["BYTE"] if not isinstance(x, KOld)] #INCOMPAT BUG - should be AttributeError
    ############################################################
    ret_val["BSTR"] = shallow_copy(NON_NUMBER_VALUES)
    ret_val["BSTR"] += COMPLEX_VALUES
    if sys.platform=="win32":
        ret_val["BSTR"] = [] #INCOMPAT BUG
    #strip out string values
    ret_val["BSTR"] = [x for x in ret_val["BSTR"] if type(x) is not str and type(x) is not KNew and type(x) is not KOld and type(x) is not object]
    ############################################################
    ret_val["CHAR"] = shallow_copy(NON_NUMBER_VALUES)
    ret_val["CHAR"] += COMPLEX_VALUES
    if sys.platform=="win32":
        ret_val["CHAR"] += FPN_VALUES #Merlin 323751
    ############################################################
    ret_val["FLOAT"] = shallow_copy(NON_NUMBER_VALUES)
    ret_val["FLOAT"] += COMPLEX_VALUES
    if sys.platform=="win32":
        ret_val["FLOAT"] += UINT_VALUES + INT_VALUES #COMPAT BUG
    ############################################################
    # DOUBLE shares FLOAT's triggers.
    ret_val["DOUBLE"] = shallow_copy(ret_val["FLOAT"])
    ############################################################
    ret_val["USHORT"] = shallow_copy(NON_NUMBER_VALUES)
    ret_val["USHORT"] += COMPLEX_VALUES
    if sys.platform=="win32":
        ret_val["USHORT"] += FPN_VALUES #Merlin 323751
    ############################################################
    # Wider unsigned types inherit the narrower types' triggers.
    ret_val["ULONG"] = shallow_copy(ret_val["USHORT"])
    ############################################################
    ret_val["ULONGLONG"] = shallow_copy(ret_val["ULONG"])
    ############################################################
    ret_val["SHORT"] = shallow_copy(NON_NUMBER_VALUES)
    ret_val["SHORT"] += COMPLEX_VALUES
    if sys.platform=="win32":
        ret_val["SHORT"] += FPN_VALUES #Merlin 323751
    ############################################################
    # Wider signed types inherit the narrower types' triggers.
    ret_val["LONG"] = shallow_copy(ret_val["SHORT"])
    ############################################################
    ret_val["LONGLONG"] = shallow_copy(ret_val["LONG"])
    ############################################################
    return ret_val[in_type]
def typeErrorTrigger(in_type):
    '''
    Returns a list of Python values expected to trigger a TypeError when
    passed to a COM method parameter of the given COM type name.  No such
    values are currently tracked, so every supported type name maps to an
    empty list.  Raises KeyError for an unknown type name.
    '''
    supported_types = ("VARIANT_BOOL", "BYTE", "BSTR", "CHAR",
                       "FLOAT", "DOUBLE",
                       "USHORT", "ULONG", "ULONGLONG",
                       "SHORT", "LONG", "LONGLONG")
    triggers = dict([(type_name, []) for type_name in supported_types])
    return triggers[in_type]
def overflowErrorTrigger(in_type):
    '''
    Returns a list of numeric values just outside the legal range of the
    given COM type name; passing them to a COM method parameter of that
    type should trigger an OverflowError.  Raises KeyError for an unknown
    type name.  Several entries are disabled pending Dev10 bug 475426.
    '''
    ret_val = {}
    ############################################################
    ret_val["VARIANT_BOOL"] = []
    ############################################################
    ret_val["BYTE"] = []
    ret_val["BYTE"] += overflow_num_helper(System.Byte)
    ############################################################
    #Doesn't seem possible to create a value (w/o 1st overflowing
    #in Python) to pass to the COM method which will overflow.
    ret_val["BSTR"] = [] #["0123456789" * 1234567890]
    ############################################################
    ret_val["CHAR"] = []
    ret_val["CHAR"] += overflow_num_helper(System.SByte)
    ############################################################
    ret_val["FLOAT"] = []
    ret_val["FLOAT"] += overflow_num_helper(System.Double)
    #Shouldn't be possible to overflow a double.
    ret_val["DOUBLE"] = []
    ############################################################
    ret_val["USHORT"] = []
    ret_val["USHORT"] += overflow_num_helper(System.UInt16)
    ret_val["ULONG"] = []
    ret_val["ULONG"] += overflow_num_helper(System.UInt32)
    ret_val["ULONGLONG"] = []
    # Dev10 475426
    #ret_val["ULONGLONG"] += overflow_num_helper(System.UInt64)
    ret_val["SHORT"] = []
    ret_val["SHORT"] += overflow_num_helper(System.Int16)
    ret_val["LONG"] = []
    # Dev10 475426
    #ret_val["LONG"] += overflow_num_helper(System.Int32)
    ret_val["LONGLONG"] = []
    # Dev10 475426
    #ret_val["LONGLONG"] += overflow_num_helper(System.Int64)
    ############################################################
    return ret_val[in_type]
def pythonToCOM(in_type):
    '''
    Given a COM type (in string format), this helper function returns a list of
    lists where each sublist contains 1-N elements.  Each of these elements in
    turn are of different types (compatible with in_type), but equivalent to
    one another.  Raises KeyError for an unknown type name.
    '''
    ret_val = {}
    ############################################################
    temp_funcs = [int, bool, System.Boolean] # long, Dev10 475426
    temp_values = [ 0, 1, True, False]
    ret_val["VARIANT_BOOL"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ############################################################
    temp_funcs = [System.Byte]
    temp_values = pos_num_helper(System.Byte)
    ret_val["BYTE"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ############################################################
    temp_funcs = [ str, unicode, # Py_Str, Py_System_String,
                   System.String ]
    temp_values = shallow_copy(STRING_VALUES)
    ret_val["BSTR"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ############################################################
    temp_funcs = [System.SByte]
    temp_values = pos_num_helper(System.SByte)
    ret_val["CHAR"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ############################################################
    temp_funcs = [ float, # Py_Float,
                   System.Single]
    ret_val["FLOAT"] = [ [y(x) for y in temp_funcs] for x in FPN_VALUES]
    ############################################################
    temp_funcs = [ float, System.Double] # Py_Double, Py_System_Double,
    temp_values = [-1.0e+308, 1.0e308] + FPN_VALUES
    ret_val["DOUBLE"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    # Every valid FLOAT representation is also a valid DOUBLE.
    ret_val["DOUBLE"] += ret_val["FLOAT"]
    ############################################################
    temp_funcs = [int, System.UInt16] # Py_UShort,
    temp_values = pos_num_helper(System.UInt16)
    ret_val["USHORT"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ############################################################
    temp_funcs = [int, System.UInt32] # Py_ULong,
    temp_values = pos_num_helper(System.UInt32) + pos_num_helper(System.UInt16)
    ret_val["ULONG"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    # Narrower unsigned values are also valid for the wider type.
    ret_val["ULONG"] += ret_val["USHORT"]
    ############################################################
    temp_funcs = [int, long, System.UInt64] # Py_ULongLong,
    temp_values = pos_num_helper(System.UInt64) + pos_num_helper(System.UInt32) + pos_num_helper(System.UInt16)
    ret_val["ULONGLONG"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ret_val["ULONGLONG"] += ret_val["ULONG"]
    ############################################################
    temp_funcs = [int, System.Int16] # Py_Short,
    temp_values = pos_num_helper(System.Int16)
    ret_val["SHORT"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ############################################################
    temp_funcs = [int, System.Int32] # Py_Long, Dev10 475426
    temp_values = pos_num_helper(System.Int32) + pos_num_helper(System.Int16)
    ret_val["LONG"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    # Narrower signed values are also valid for the wider type.
    ret_val["LONG"] += ret_val["SHORT"]
    ############################################################
    temp_funcs = [int, long, System.Int64] # Py_LongLong, Dev10 475426
    temp_values = pos_num_helper(System.Int64) + pos_num_helper(System.Int32) + pos_num_helper(System.Int16)
    ret_val["LONGLONG"] = [ [y(x) for y in temp_funcs] for x in temp_values]
    ret_val["LONGLONG"] += ret_val["LONG"]
    ############################################################
    return ret_val[in_type]
#------------------------------------------------------------------------------
#--Override a couple of definitions from assert_util
from iptest import assert_util
DEBUG = 1
def assert_helper(in_dict):
    '''
    Normalizes the runonly/skip/bugid keyword arguments in in_dict
    (removing them as a side effect so they are not forwarded to the real
    assert functions) and returns True if the assert should actually run.
    '''
    #add the keys if they're not there
    if not in_dict.has_key("runonly"): in_dict["runonly"] = True
    if not in_dict.has_key("skip"): in_dict["skip"] = False
    #determine whether this test will be run or not
    run = in_dict["runonly"] and not in_dict["skip"]
    #strip out the keys
    for x in ["runonly", "skip"]: in_dict.pop(x)
    if not run:
        if in_dict.has_key("bugid"):
            print "...skipped an assert due to bug", str(in_dict["bugid"])
        elif DEBUG:
            print "...skipped an assert on", sys.platform
    if in_dict.has_key("bugid"): in_dict.pop("bugid")
    return run
#--Thin wrappers around the assert_util equivalents which honor the
#--runonly/skip/bugid keyword arguments handled by assert_helper above.
#--The AssertError* wrappers additionally echo their arguments on failure
#--before re-raising, to make COM test failures easier to diagnose.
def Assert(*args, **kwargs):
    if assert_helper(kwargs): assert_util.Assert(*args, **kwargs)
def AreEqual(*args, **kwargs):
    if assert_helper(kwargs): assert_util.AreEqual(*args, **kwargs)
def AssertError(*args, **kwargs):
    try:
        if assert_helper(kwargs): assert_util.AssertError(*args, **kwargs)
    except Exception, e:
        print "AssertError(" + str(args) + ", " + str(kwargs) + ") failed!"
        raise e
def AssertErrorWithMessage(*args, **kwargs):
    try:
        if assert_helper(kwargs): assert_util.AssertErrorWithMessage(*args, **kwargs)
    except Exception, e:
        print "AssertErrorWithMessage(" + str(args) + ", " + str(kwargs) + ") failed!"
        raise e
def AssertErrorWithPartialMessage(*args, **kwargs):
    try:
        if assert_helper(kwargs): assert_util.AssertErrorWithPartialMessage(*args, **kwargs)
    except Exception, e:
        print "AssertErrorWithPartialMessage(" + str(args) + ", " + str(kwargs) + ") failed!"
        raise e
def AlmostEqual(*args, **kwargs):
    if assert_helper(kwargs): assert_util.AlmostEqual(*args, **kwargs)
#------------------------------------------------------------------------------
#--HELPERS
def TryLoadExcelInteropAssembly():
    '''
    Best-effort load of the Office 12 (then Office 11) Excel interop
    assembly under IronPython.  Failures are deliberately swallowed;
    callers should check for Excel separately via IsExcelInstalled().
    '''
    try:
        clr.AddReferenceByName('Microsoft.Office.Interop.Excel, Version=12.0.0.0, Culture=neutral, PublicKeyToken=71e9bce111e9429c')
    except:
        try:
            clr.AddReferenceByName('Microsoft.Office.Interop.Excel, Version=11.0.0.0, Culture=neutral, PublicKeyToken=71e9bce111e9429c')
        except:
            pass
#------------------------------------------------------------------------------
def TryLoadWordInteropAssembly():
    '''
    Best-effort load of the Office 12 (then Office 11) Word interop
    assembly under IronPython.  Failures are deliberately swallowed;
    callers should check for Word separately via IsWordInstalled().
    '''
    try:
        clr.AddReferenceByName('Microsoft.Office.Interop.Word, Version=12.0.0.0, Culture=neutral, PublicKeyToken=71e9bce111e9429c')
    except:
        try:
            clr.AddReferenceByName('Microsoft.Office.Interop.Word, Version=11.0.0.0, Culture=neutral, PublicKeyToken=71e9bce111e9429c')
        except:
            pass
#------------------------------------------------------------------------------
def _office_app_install_path(app_name, exe_name):
    '''
    Returns the full path of an installed Office application's executable,
    or None if the application is not registered.  Office 11 or 12 are
    both OK; Office 12 is preferred.

    app_name - registry sub key of the Office application (e.g. "Excel")
    exe_name - the application's executable file name (e.g. "excel.exe")
    '''
    from Microsoft.Win32 import Registry
    app_key = Registry.LocalMachine.OpenSubKey("Software\\Microsoft\\Office\\12.0\\" + app_name + "\\InstallRoot")
    if app_key==None:
        app_key = Registry.LocalMachine.OpenSubKey("Software\\Microsoft\\Office\\11.0\\" + app_name + "\\InstallRoot")
    #sanity check
    if app_key==None:
        return None
    return app_key.GetValue("Path") + exe_name

def IsExcelInstalled():
    '''
    Returns True if Excel 11 or 12 is registered and its executable really
    exists on disk.
    '''
    from System.IO import File
    excel_path = _office_app_install_path("Excel", "excel.exe")
    return excel_path!=None and File.Exists(excel_path)

#------------------------------------------------------------------------------
def IsWordInstalled():
    '''
    Returns True if Word 11 or 12 is registered and its executable really
    exists on disk.
    '''
    from System.IO import File
    word_path = _office_app_install_path("Word", "winword.exe")
    return word_path!=None and File.Exists(word_path)
#------------------------------------------------------------------------------
def _create_com_instance(prog_id):
    '''
    Creates a COM object from its ProgID via System.Activator (CLR only).
    '''
    import System
    com_type = System.Type.GetTypeFromProgID(prog_id)
    return System.Activator.CreateInstance(com_type)

def CreateExcelApplication():
    '''
    Creates and returns a new Excel.Application COM object.
    '''
    return _create_com_instance("Excel.Application")

#------------------------------------------------------------------------------
def CreateWordApplication():
    '''
    Creates and returns a new Word.Application COM object.
    '''
    return _create_com_instance("Word.Application")
#------------------------------------------------------------------------------
def CreateAgentServer():
    '''
    Creates an MS Agent server COM object via its registered type library
    (IronPython/CLR only; the GUID identifies the AgentServerObjects typelib).
    '''
    import clr
    from System import Guid
    typelib = clr.LoadTypeLibrary(Guid("A7B93C73-7B81-11D0-AC5F-00C04FD97575"))
    return typelib.AgentServerObjects.AgentServer()
#------------------------------------------------------------------------------
def CreateDlrComServer():
    '''
    Creates the DlrComLibrary.DlrComServer COM object, via the CLR when
    running under IronPython or via pywin32 otherwise.
    '''
    com_type_name = "DlrComLibrary.DlrComServer"
    if is_cli:
        com_obj = getRCWFromProgID(com_type_name)
    else:
        com_obj = win32com.client.Dispatch(com_type_name)
    return com_obj
#------------------------------------------------------------------------------
def getTypeFromProgID(prog_id):
    '''
    Returns the System.Type object for prog_id (IronPython/CLR only).
    '''
    return Type.GetTypeFromProgID(prog_id)
#------------------------------------------------------------------------------
def getRCWFromProgID(prog_id):
    '''
    Returns an instance (runtime callable wrapper) of prog_id, created via
    the CLR under IronPython or via pywin32 under CPython.
    '''
    if is_cli:
        return Activator.CreateInstance(getTypeFromProgID(prog_id))
    else:
        return win32com.client.Dispatch(prog_id)
#------------------------------------------------------------------------------
def genPeverifyInteropAsm(file):
    '''
    Generates (via tlbimp) the interop assemblies in %TEMP% that peverify
    needs for the test module associated with 'file'.  No-op unless this
    is a peverify test run.  Always restores the current working directory
    before returning.
    '''
    #if this isn't a test run that will invoke peverify there's no point in
    #continuing
    if not is_peverify_run:
        return
    else:
        mod_name = file.rsplit("\\", 1)[1].split(".py")[0]
        print "Generating interop assemblies for the", mod_name, "test module which are needed in %TEMP% by peverify..."
        from System.IO import Path
        tempDir = Path.GetTempPath()
        cwd = nt.getcwd()
        #maps COM interop test module names to a list of DLLs
        module_dll_dict = {
            "excel" : [],
            "msagent" : [agentsvr_path],
            "scriptpw" : [scriptpw_path],
            "word" : [],
        }
        dlrcomlib_list = [ "dlrcomserver", "paramsinretval", "method", "obj", "prop", ]
        if is_cli32:
            temp_name = testpath.rowan_root + "\\Test\\DlrComLibrary\\Debug\\DlrComLibrary.dll"
        else:
            temp_name = testpath.rowan_root + "\\Test\\DlrComLibrary\\x64\\Release\\DlrComLibrary.dll"
        for mod_name in dlrcomlib_list: module_dll_dict[mod_name] = [ temp_name ]
        if not file_exists_in_path("tlbimp.exe"):
            print "ERROR: tlbimp.exe is not in the path!"
            sys.exit(1)
        try:
            if not module_dll_dict.has_key(mod_name):
                print "WARNING: cannot determine which interop assemblies to install!"
                print "         This may affect peverify runs adversely."
                print
                return
            else:
                #tlbimp emits into the current directory, so run it from %TEMP%
                nt.chdir(tempDir)
                for com_dll in module_dll_dict[mod_name]:
                    if not file_exists(com_dll):
                        print "\tERROR: %s does not exist!" % (com_dll)
                        continue
                    print "\trunning tlbimp on", com_dll
                    run_tlbimp(com_dll)
        finally:
            nt.chdir(cwd)
#------------------------------------------------------------------------------
#--Fake parts of System for compat tests
if sys.platform=="win32":
    class System:
        '''
        Minimal stand-ins for the .NET numeric types so the CPython/pywin32
        compatibility runs can share test code with IronPython.  Only the
        MinValue/MaxValue class attributes are emulated.
        '''
        class Byte(int):
            MinValue = 0
            MaxValue = 255
        class SByte(int):
            MinValue = -128
            MaxValue = 127
        class Int16(int):
            MinValue = -32768
            MaxValue = 32767
        class UInt16(int):
            MinValue = 0
            MaxValue = 65535
        class Int32(int):
            MinValue = -2147483648
            MaxValue = 2147483647
        class UInt32(long):
            MinValue = 0
            MaxValue = 4294967295
        class Int64(long):
            MinValue = -9223372036854775808L
            MaxValue = 9223372036854775807L
        class UInt64(long):
            MinValue = 0L
            MaxValue = 18446744073709551615
        class Single(float):
            MinValue = -3.40282e+038
            MaxValue = 3.40282e+038
        class Double(float):
            MinValue = -1.79769313486e+308
            MaxValue = 1.79769313486e+308
        class String(str):
            pass
        class Boolean(int):
            pass
#------------------------------------------------------------------------------
def run_com_test(name, file):
    '''
    Runs the COM interop test module 'name', then generates any interop
    assemblies 'file' needs for peverify runs (see genPeverifyInteropAsm).
    '''
    run_test(name)
    genPeverifyInteropAsm(file)
| slozier/ironpython2 | Src/IronPython/Lib/iptest/cominterop_util.py | Python | apache-2.0 | 24,101 |
package com.badlogic.gdx.backends.jglfw;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Graphics.DisplayMode;
import com.badlogic.gdx.backends.jglfw.JglfwGraphics.JglfwDisplayMode;
import com.badlogic.gdx.graphics.Color;
import com.badlogic.gdx.utils.Array;
import java.awt.GraphicsDevice;
import java.awt.GraphicsEnvironment;
/** @author Nathan Sweet */
public class JglfwApplicationConfiguration {
	/** Title of application window. **/
	public String title = "";
	/** Initial width of the application window. **/
	public int width = 640;
	/** Initial height of the application window. **/
	public int height = 480;
	/** Initial x coordinate of the application window, -1 for center. **/
	public int x = -1;
	/** Initial y coordinate of the application window, -1 for center. **/
	public int y = -1;
	/** True to start in fullscreen. **/
	public boolean fullscreen;
	/** Monitor index to use for fullscreen. **/
	public int fullscreenMonitorIndex = -1;
	/** Number of bits per color channel. **/
	public int r = 8, g = 8, b = 8, a = 8;
	/** Number of bits for the depth buffer. **/
	public int depth = 16;
	/** Number of bits for the stencil buffer. **/
	public int stencil = 0;
	/** Number of samples for MSAA **/
	public int samples = 0;
	/** True to enable vsync. **/
	public boolean vSync = true;
	/** True if the window is resizable. **/
	public boolean resizable = true;
	/** True to attempt to use OpenGL ES 2.0. Note {@link Gdx#gl20} may be null even when this is true. **/
	public boolean useGL20;
	/** True to call System.exit() when the main loop is complete. **/
	public boolean forceExit = true;
	/** True to have a title and border around the window. **/
	public boolean undecorated;
	/** Causes the main loop to run on the EDT instead of a new thread, for easier interoperability with AWT/Swing. Broken on Linux. **/
	public boolean runOnEDT;
	/** The color to clear the window immediately after creation. **/
	public Color initialBackgroundColor = Color.BLACK;
	/** True to hide the window when it is created. The window must be shown with {@link JglfwGraphics#show()}. **/
	public boolean hidden;
	/** Target framerate when the window is in the foreground. The CPU sleeps as needed. Use 0 to never sleep. **/
	public int foregroundFPS;
	/** Target framerate when the window is in the background. The CPU sleeps as needed. Use 0 to never sleep, -1 to not render. **/
	public int backgroundFPS;
	/** Target framerate when the window is hidden or minimized. The CPU sleeps as needed. Use 0 to never sleep, -1 to not render. **/
	public int hiddenFPS = -1;

	/** Returns the display modes supported by the default monitor, deduplicated by width/height/bit depth and filtered to modes
	 * matching the desktop's current bit depth. */
	static public DisplayMode[] getDisplayModes () {
		GraphicsDevice device = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice();
		java.awt.DisplayMode desktopMode = device.getDisplayMode();
		java.awt.DisplayMode[] displayModes = device.getDisplayModes();
		Array<DisplayMode> modes = new Array<DisplayMode>(); // typed (was raw `new Array()`) to avoid an unchecked warning
		outer:
		for (java.awt.DisplayMode mode : displayModes) {
			for (DisplayMode other : modes)
				if (other.width == mode.getWidth() && other.height == mode.getHeight() && other.bitsPerPixel == mode.getBitDepth())
					continue outer; // Duplicate.
			if (mode.getBitDepth() != desktopMode.getBitDepth()) continue;
			modes.add(new JglfwDisplayMode(mode.getWidth(), mode.getHeight(), mode.getRefreshRate(), mode.getBitDepth()));
		}
		return modes.toArray(DisplayMode.class);
	}

	/** Returns the desktop's current display mode for the default monitor. */
	static public DisplayMode getDesktopDisplayMode () {
		java.awt.DisplayMode mode = GraphicsEnvironment.getLocalGraphicsEnvironment().getDefaultScreenDevice().getDisplayMode();
		return new JglfwDisplayMode(mode.getWidth(), mode.getHeight(), mode.getRefreshRate(), mode.getBitDepth());
	}
}
| domix/libgdx | backends/gdx-backend-jglfw/src/com/badlogic/gdx/backends/jglfw/JglfwApplicationConfiguration.java | Java | apache-2.0 | 3,674 |
package com.xnx3.j2ee.entity;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import static javax.persistence.GenerationType.IDENTITY;
import javax.persistence.Id;
import javax.persistence.Table;
/**
* FriendLog entity. @author MyEclipse Persistence Tools
*/
@Entity
@Table(name = "friend_log")
public class FriendLog implements java.io.Serializable {
// Fields
private Integer id;
private Integer self;
private Integer other;
private Integer time;
private Short state;
private String ip;
// Constructors
/** default constructor */
public FriendLog() {
}
/** full constructor */
public FriendLog(Integer self, Integer other, Integer time, Short state,
String ip) {
this.self = self;
this.other = other;
this.time = time;
this.state = state;
this.ip = ip;
}
// Property accessors
@Id
@GeneratedValue(strategy = IDENTITY)
@Column(name = "id", unique = true, nullable = false)
public Integer getId() {
return this.id;
}
public void setId(Integer id) {
this.id = id;
}
@Column(name = "self", nullable = false)
public Integer getSelf() {
return this.self;
}
public void setSelf(Integer self) {
this.self = self;
}
@Column(name = "other", nullable = false)
public Integer getOther() {
return this.other;
}
public void setOther(Integer other) {
this.other = other;
}
@Column(name = "time", nullable = false)
public Integer getTime() {
return this.time;
}
public void setTime(Integer time) {
this.time = time;
}
@Column(name = "state", nullable = false)
public Short getState() {
return this.state;
}
public void setState(Short state) {
this.state = state;
}
@Column(name = "ip", nullable = false, length = 15)
public String getIp() {
return this.ip;
}
public void setIp(String ip) {
this.ip = ip;
}
} | xnx3/iw_demo | src/com/xnx3/j2ee/entity/FriendLog.java | Java | apache-2.0 | 1,858 |
/*
* Copyright 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.apps.iosched.ui;
import com.google.analytics.tracking.android.EasyTracker;
import gdg.devfest.app.R;
import com.google.android.apps.iosched.provider.ScheduleContract;
import com.google.android.apps.iosched.util.ImageFetcher;
import com.google.android.apps.iosched.util.UIUtils;
import com.actionbarsherlock.app.SherlockFragment;
import android.app.Activity;
import android.content.Intent;
import android.database.Cursor;
import android.net.Uri;
import android.os.Bundle;
import android.support.v4.app.LoaderManager;
import android.support.v4.content.CursorLoader;
import android.support.v4.content.Loader;
import android.text.TextUtils;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.TextView;
import static com.google.android.apps.iosched.util.LogUtils.LOGD;
import static com.google.android.apps.iosched.util.LogUtils.makeLogTag;
/**
* A fragment that shows detail information for a developer sandbox company, including
* company name, description, logo, etc.
*/
public class VendorDetailFragment extends SherlockFragment implements
        LoaderManager.LoaderCallbacks<Cursor> {
    private static final String TAG = makeLogTag(VendorDetailFragment.class);
    // Content-provider URI of the vendor to display; null when the fragment
    // was launched without data, in which case the UI stays empty.
    private Uri mVendorUri;
    // Views populated from the vendor cursor in buildUiFromCursor().
    private TextView mName;
    private ImageView mLogo;
    private TextView mUrl;
    private TextView mDesc;
    // Async image loader for the vendor logo; its cache is closed in onStop().
    private ImageFetcher mImageFetcher;
    // Callback contract the host activity must implement (enforced in onAttach).
    public interface Callbacks {
        public void onTrackIdAvailable(String trackId);
    }
    // No-op implementation used while the fragment is detached, so callback
    // invocations never need a null check.
    private static Callbacks sDummyCallbacks = new Callbacks() {
        @Override
        public void onTrackIdAvailable(String trackId) {}
    };
    private Callbacks mCallbacks = sDummyCallbacks;
    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Fragment arguments are packed as an Intent by the hosting activity.
        final Intent intent = BaseActivity.fragmentArgumentsToIntent(getArguments());
        mVendorUri = intent.getData();
        if (mVendorUri == null) {
            return;
        }
        mImageFetcher = UIUtils.getImageFetcher(getActivity());
        mImageFetcher.setImageFadeIn(false);
        setHasOptionsMenu(true);
    }
    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        if (mVendorUri == null) {
            return;
        }
        // Start background query to load vendor details
        getLoaderManager().initLoader(VendorsQuery._TOKEN, null, this);
    }
    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        if (!(activity instanceof Callbacks)) {
            throw new ClassCastException("Activity must implement fragment's callbacks.");
        }
        mCallbacks = (Callbacks) activity;
    }
    @Override
    public void onDetach() {
        super.onDetach();
        // Swap back to the no-op callbacks so late events are safely ignored.
        mCallbacks = sDummyCallbacks;
    }
    @Override
    public void onStop() {
        super.onStop();
        if (mImageFetcher != null) {
            mImageFetcher.closeCache();
        }
    }
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        ViewGroup rootView = (ViewGroup) inflater.inflate(R.layout.fragment_vendor_detail, null);
        mName = (TextView) rootView.findViewById(R.id.vendor_name);
        mLogo = (ImageView) rootView.findViewById(R.id.vendor_logo);
        mUrl = (TextView) rootView.findViewById(R.id.vendor_url);
        mDesc = (TextView) rootView.findViewById(R.id.vendor_desc);
        return rootView;
    }
    // Binds the first row of the vendor cursor to the views, kicks off the
    // logo download, reports an analytics page view, and hands the track id
    // to the host activity. Silently no-ops when the fragment is detached or
    // the cursor is empty.
    public void buildUiFromCursor(Cursor cursor) {
        if (getActivity() == null) {
            return;
        }
        if (!cursor.moveToFirst()) {
            return;
        }
        String nameString = cursor.getString(VendorsQuery.NAME);
        mName.setText(nameString);
        // Start background fetch to load vendor logo
        final String logoUrl = cursor.getString(VendorsQuery.LOGO_URL);
        if (!TextUtils.isEmpty(logoUrl)) {
            mImageFetcher.loadThumbnailImage(logoUrl, mLogo, R.drawable.sandbox_logo_empty);
        }
        mUrl.setText(cursor.getString(VendorsQuery.URL));
        mDesc.setText(cursor.getString(VendorsQuery.DESC));
        EasyTracker.getTracker().trackView("Sandbox Vendor: " + nameString);
        LOGD("Tracker", "Sandbox Vendor: " + nameString);
        mCallbacks.onTrackIdAvailable(cursor.getString(VendorsQuery.TRACK_ID));
    }
    @Override
    public Loader<Cursor> onCreateLoader(int id, Bundle data) {
        return new CursorLoader(getActivity(), mVendorUri, VendorsQuery.PROJECTION, null, null,
                null);
    }
    @Override
    public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) {
        buildUiFromCursor(cursor);
    }
    @Override
    public void onLoaderReset(Loader<Cursor> loader) {
    }
    /**
     * {@link com.google.android.apps.iosched.provider.ScheduleContract.Vendors}
     * query parameters.
     */
    private interface VendorsQuery {
        int _TOKEN = 0x4;
        String[] PROJECTION = {
                ScheduleContract.Vendors.VENDOR_NAME,
                ScheduleContract.Vendors.VENDOR_DESC,
                ScheduleContract.Vendors.VENDOR_URL,
                ScheduleContract.Vendors.VENDOR_LOGO_URL,
                ScheduleContract.Vendors.TRACK_ID,
        };
        // Column indices matching PROJECTION order above.
        int NAME = 0;
        int DESC = 1;
        int URL = 2;
        int LOGO_URL = 3;
        int TRACK_ID = 4;
    }
}
| printminion/gdgsched | android/src/com/google/android/apps/iosched/ui/VendorDetailFragment.java | Java | apache-2.0 | 6,196 |
/**
* Generated with Acceleo
*/
package org.wso2.developerstudio.eclipse.gmf.esb.parts.forms;
// Start of user code for imports
import org.eclipse.emf.common.util.Enumerator;
import org.eclipse.emf.ecore.util.EcoreAdapterFactory;
import org.eclipse.emf.edit.ui.provider.AdapterFactoryLabelProvider;
import org.eclipse.emf.eef.runtime.EEFRuntimePlugin;
import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent;
import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart;
import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent;
import org.eclipse.emf.eef.runtime.part.impl.SectionPropertiesEditingPart;
import org.eclipse.emf.eef.runtime.ui.parts.PartComposer;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence;
import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionStep;
import org.eclipse.emf.eef.runtime.ui.utils.EditingUtils;
import org.eclipse.emf.eef.runtime.ui.widgets.EMFComboViewer;
import org.eclipse.emf.eef.runtime.ui.widgets.FormUtils;
import org.eclipse.jface.viewers.ArrayContentProvider;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.FocusAdapter;
import org.eclipse.swt.events.FocusEvent;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Event;
import org.eclipse.swt.widgets.Listener;
import org.eclipse.swt.widgets.Text;
import org.eclipse.ui.forms.widgets.Form;
import org.eclipse.ui.forms.widgets.FormToolkit;
import org.eclipse.ui.forms.widgets.ScrolledForm;
import org.eclipse.ui.forms.widgets.Section;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository;
import org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart;
import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages;
// End of user code
/**
*
*
*/
public class TaskPropertyPropertiesEditionPartForm extends SectionPropertiesEditingPart implements IFormPropertiesEditionPart, TaskPropertyPropertiesEditionPart {
	// NOTE(review): this class is Acceleo/EEF generated; edit only inside the
	// "Start of user code" regions or mark methods "@generated NOT", otherwise
	// changes are lost on regeneration.
	// Text widget editing the TaskProperty's name feature.
	protected Text propertyName;
	// Text widget editing the TaskProperty's value feature.
	protected Text propertyValue;
	// Combo viewer editing the TaskProperty's type feature (an enum literal).
	protected EMFComboViewer propertyType;
	/**
	 * For {@link ISection} use only.
	 */
	public TaskPropertyPropertiesEditionPartForm() { super(); }
	/**
	 * Default constructor
	 * @param editionComponent the {@link IPropertiesEditionComponent} that manage this part
	 *
	 */
	public TaskPropertyPropertiesEditionPartForm(IPropertiesEditionComponent editionComponent) {
		super(editionComponent);
	}
	/**
	 * Builds the scrolled form hosting this part and populates it via
	 * {@link #createControls(FormToolkit, Composite)}.
	 *
	 * {@inheritDoc}
	 *
	 * @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
	 *  createFigure(org.eclipse.swt.widgets.Composite, org.eclipse.ui.forms.widgets.FormToolkit)
	 *
	 */
	public Composite createFigure(final Composite parent, final FormToolkit widgetFactory) {
		ScrolledForm scrolledForm = widgetFactory.createScrolledForm(parent);
		Form form = scrolledForm.getForm();
		view = form.getBody();
		GridLayout layout = new GridLayout();
		layout.numColumns = 3;
		view.setLayout(layout);
		createControls(widgetFactory, view);
		return scrolledForm;
	}
	/**
	 * Declares the composition sequence (Properties group with name, value and
	 * type editors) and composes the matching widgets into the given view.
	 *
	 * {@inheritDoc}
	 *
	 * @see org.eclipse.emf.eef.runtime.api.parts.IFormPropertiesEditionPart#
	 *  createControls(org.eclipse.ui.forms.widgets.FormToolkit, org.eclipse.swt.widgets.Composite)
	 *
	 */
	public void createControls(final FormToolkit widgetFactory, Composite view) {
		CompositionSequence taskPropertyStep = new BindingCompositionSequence(propertiesEditionComponent);
		CompositionStep propertiesStep = taskPropertyStep.addStep(EsbViewsRepository.TaskProperty.Properties.class);
		propertiesStep.addStep(EsbViewsRepository.TaskProperty.Properties.propertyName);
		propertiesStep.addStep(EsbViewsRepository.TaskProperty.Properties.propertyValue);
		propertiesStep.addStep(EsbViewsRepository.TaskProperty.Properties.propertyType);
		composer = new PartComposer(taskPropertyStep) {
			@Override
			public Composite addToPart(Composite parent, Object key) {
				if (key == EsbViewsRepository.TaskProperty.Properties.class) {
					return createPropertiesGroup(widgetFactory, parent);
				}
				if (key == EsbViewsRepository.TaskProperty.Properties.propertyName) {
					return createPropertyNameText(widgetFactory, parent);
				}
				if (key == EsbViewsRepository.TaskProperty.Properties.propertyValue) {
					return createPropertyValueText(widgetFactory, parent);
				}
				if (key == EsbViewsRepository.TaskProperty.Properties.propertyType) {
					return createPropertyTypeEMFComboViewer(widgetFactory, parent);
				}
				return parent;
			}
		};
		composer.compose(view);
	}
	/**
	 * Creates the collapsible "Properties" section holding the three editors.
	 */
	protected Composite createPropertiesGroup(FormToolkit widgetFactory, final Composite parent) {
		Section propertiesSection = widgetFactory.createSection(parent, Section.TITLE_BAR | Section.TWISTIE | Section.EXPANDED);
		propertiesSection.setText(EsbMessages.TaskPropertyPropertiesEditionPart_PropertiesGroupLabel);
		GridData propertiesSectionData = new GridData(GridData.FILL_HORIZONTAL);
		propertiesSectionData.horizontalSpan = 3;
		propertiesSection.setLayoutData(propertiesSectionData);
		Composite propertiesGroup = widgetFactory.createComposite(propertiesSection);
		GridLayout propertiesGroupLayout = new GridLayout();
		propertiesGroupLayout.numColumns = 3;
		propertiesGroup.setLayout(propertiesGroupLayout);
		propertiesSection.setClient(propertiesGroup);
		return propertiesGroup;
	}
	/**
	 * Creates the text editor for the name feature. Edits are committed to the
	 * edition component on focus loss and on the Enter key.
	 */
	protected Composite createPropertyNameText(FormToolkit widgetFactory, Composite parent) {
		createDescription(parent, EsbViewsRepository.TaskProperty.Properties.propertyName, EsbMessages.TaskPropertyPropertiesEditionPart_PropertyNameLabel);
		propertyName = widgetFactory.createText(parent, ""); //$NON-NLS-1$
		propertyName.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
		widgetFactory.paintBordersFor(parent);
		GridData propertyNameData = new GridData(GridData.FILL_HORIZONTAL);
		propertyName.setLayoutData(propertyNameData);
		propertyName.addFocusListener(new FocusAdapter() {
			/**
			 * Commits the text value and reports the focus change.
			 *
			 * @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
			 *
			 */
			@Override
			@SuppressWarnings("synthetic-access")
			public void focusLost(FocusEvent e) {
				if (propertiesEditionComponent != null) {
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
							TaskPropertyPropertiesEditionPartForm.this,
							EsbViewsRepository.TaskProperty.Properties.propertyName,
							PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, propertyName.getText()));
					propertiesEditionComponent
							.firePropertiesChanged(new PropertiesEditionEvent(
									TaskPropertyPropertiesEditionPartForm.this,
									EsbViewsRepository.TaskProperty.Properties.propertyName,
									PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
									null, propertyName.getText()));
				}
			}
			/**
			 * @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
			 */
			@Override
			public void focusGained(FocusEvent e) {
				if (propertiesEditionComponent != null) {
					propertiesEditionComponent
							.firePropertiesChanged(new PropertiesEditionEvent(
									TaskPropertyPropertiesEditionPartForm.this,
									null,
									PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
									null, null));
				}
			}
		});
		propertyName.addKeyListener(new KeyAdapter() {
			/**
			 * Commits the text value when Enter is pressed.
			 *
			 * @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
			 *
			 */
			@Override
			@SuppressWarnings("synthetic-access")
			public void keyPressed(KeyEvent e) {
				if (e.character == SWT.CR) {
					if (propertiesEditionComponent != null)
						propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertyPropertiesEditionPartForm.this, EsbViewsRepository.TaskProperty.Properties.propertyName, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, propertyName.getText()));
				}
			}
		});
		EditingUtils.setID(propertyName, EsbViewsRepository.TaskProperty.Properties.propertyName);
		EditingUtils.setEEFtype(propertyName, "eef::Text"); //$NON-NLS-1$
		FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.TaskProperty.Properties.propertyName, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
		// Start of user code for createPropertyNameText
		// End of user code
		return parent;
	}
	/**
	 * Creates the text editor for the value feature. Edits are committed to the
	 * edition component on focus loss and on the Enter key.
	 */
	protected Composite createPropertyValueText(FormToolkit widgetFactory, Composite parent) {
		createDescription(parent, EsbViewsRepository.TaskProperty.Properties.propertyValue, EsbMessages.TaskPropertyPropertiesEditionPart_PropertyValueLabel);
		propertyValue = widgetFactory.createText(parent, ""); //$NON-NLS-1$
		propertyValue.setData(FormToolkit.KEY_DRAW_BORDER, FormToolkit.TEXT_BORDER);
		widgetFactory.paintBordersFor(parent);
		GridData propertyValueData = new GridData(GridData.FILL_HORIZONTAL);
		propertyValue.setLayoutData(propertyValueData);
		propertyValue.addFocusListener(new FocusAdapter() {
			/**
			 * Commits the text value and reports the focus change.
			 *
			 * @see org.eclipse.swt.events.FocusAdapter#focusLost(org.eclipse.swt.events.FocusEvent)
			 *
			 */
			@Override
			@SuppressWarnings("synthetic-access")
			public void focusLost(FocusEvent e) {
				if (propertiesEditionComponent != null) {
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(
							TaskPropertyPropertiesEditionPartForm.this,
							EsbViewsRepository.TaskProperty.Properties.propertyValue,
							PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, propertyValue.getText()));
					propertiesEditionComponent
							.firePropertiesChanged(new PropertiesEditionEvent(
									TaskPropertyPropertiesEditionPartForm.this,
									EsbViewsRepository.TaskProperty.Properties.propertyValue,
									PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_LOST,
									null, propertyValue.getText()));
				}
			}
			/**
			 * @see org.eclipse.swt.events.FocusAdapter#focusGained(org.eclipse.swt.events.FocusEvent)
			 */
			@Override
			public void focusGained(FocusEvent e) {
				if (propertiesEditionComponent != null) {
					propertiesEditionComponent
							.firePropertiesChanged(new PropertiesEditionEvent(
									TaskPropertyPropertiesEditionPartForm.this,
									null,
									PropertiesEditionEvent.FOCUS_CHANGED, PropertiesEditionEvent.FOCUS_GAINED,
									null, null));
				}
			}
		});
		propertyValue.addKeyListener(new KeyAdapter() {
			/**
			 * Commits the text value when Enter is pressed.
			 *
			 * @see org.eclipse.swt.events.KeyAdapter#keyPressed(org.eclipse.swt.events.KeyEvent)
			 *
			 */
			@Override
			@SuppressWarnings("synthetic-access")
			public void keyPressed(KeyEvent e) {
				if (e.character == SWT.CR) {
					if (propertiesEditionComponent != null)
						propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertyPropertiesEditionPartForm.this, EsbViewsRepository.TaskProperty.Properties.propertyValue, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, propertyValue.getText()));
				}
			}
		});
		EditingUtils.setID(propertyValue, EsbViewsRepository.TaskProperty.Properties.propertyValue);
		EditingUtils.setEEFtype(propertyValue, "eef::Text"); //$NON-NLS-1$
		FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.TaskProperty.Properties.propertyValue, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
		// Start of user code for createPropertyValueText
		// End of user code
		return parent;
	}
	/**
	 * Creates the combo viewer for the type feature. Hand-modified from the
	 * generated version (mouse-wheel scrolling over the combo is disabled),
	 * hence the "@generated NOT" marker below.
	 *
	 * @generated NOT
	 */
	protected Composite createPropertyTypeEMFComboViewer(FormToolkit widgetFactory, Composite parent) {
		createDescription(parent, EsbViewsRepository.TaskProperty.Properties.propertyType, EsbMessages.TaskPropertyPropertiesEditionPart_PropertyTypeLabel);
		propertyType = new EMFComboViewer(parent);
		propertyType.setContentProvider(new ArrayContentProvider());
		propertyType.setLabelProvider(new AdapterFactoryLabelProvider(EEFRuntimePlugin.getDefault().getAdapterFactory()));
		GridData propertyTypeData = new GridData(GridData.FILL_HORIZONTAL);
		propertyType.getCombo().setLayoutData(propertyTypeData);
		propertyType.getCombo().addListener(SWT.MouseVerticalWheel, new Listener() {
			@Override
			public void handleEvent(Event arg0) {
				// Swallow mouse-wheel events so scrolling the form does not
				// accidentally change the selected type.
				arg0.doit = false;
			}
		});
		propertyType.addSelectionChangedListener(new ISelectionChangedListener() {
			/**
			 * Commits the newly selected enum literal to the edition component.
			 *
			 * {@inheritDoc}
			 *
			 * @see org.eclipse.jface.viewers.ISelectionChangedListener#selectionChanged(org.eclipse.jface.viewers.SelectionChangedEvent)
			 *
			 */
			public void selectionChanged(SelectionChangedEvent event) {
				if (propertiesEditionComponent != null)
					propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(TaskPropertyPropertiesEditionPartForm.this, EsbViewsRepository.TaskProperty.Properties.propertyType, PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.SET, null, getPropertyType()));
			}
		});
		propertyType.setID(EsbViewsRepository.TaskProperty.Properties.propertyType);
		FormUtils.createHelpButton(widgetFactory, parent, propertiesEditionComponent.getHelpContent(EsbViewsRepository.TaskProperty.Properties.propertyType, EsbViewsRepository.FORM_KIND), null); //$NON-NLS-1$
		// Start of user code for createPropertyTypeEMFComboViewer
		// End of user code
		return parent;
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
	 *
	 */
	public void firePropertiesChanged(IPropertiesEditionEvent event) {
		// Start of user code for tab synchronization
		// End of user code
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#getPropertyName()
	 *
	 */
	public String getPropertyName() {
		return propertyName.getText();
	}
	/**
	 * Pushes the model value into the widget and syncs the enabled state with
	 * the editor's read-only flag.
	 *
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#setPropertyName(String newValue)
	 *
	 */
	public void setPropertyName(String newValue) {
		if (newValue != null) {
			propertyName.setText(newValue);
		} else {
			propertyName.setText(""); //$NON-NLS-1$
		}
		boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.TaskProperty.Properties.propertyName);
		if (eefElementEditorReadOnlyState && propertyName.isEnabled()) {
			propertyName.setEnabled(false);
			propertyName.setToolTipText(EsbMessages.TaskProperty_ReadOnly);
		} else if (!eefElementEditorReadOnlyState && !propertyName.isEnabled()) {
			propertyName.setEnabled(true);
		}
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#getPropertyValue()
	 *
	 */
	public String getPropertyValue() {
		return propertyValue.getText();
	}
	/**
	 * Pushes the model value into the widget and syncs the enabled state with
	 * the editor's read-only flag.
	 *
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#setPropertyValue(String newValue)
	 *
	 */
	public void setPropertyValue(String newValue) {
		if (newValue != null) {
			propertyValue.setText(newValue);
		} else {
			propertyValue.setText(""); //$NON-NLS-1$
		}
		boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.TaskProperty.Properties.propertyValue);
		if (eefElementEditorReadOnlyState && propertyValue.isEnabled()) {
			propertyValue.setEnabled(false);
			propertyValue.setToolTipText(EsbMessages.TaskProperty_ReadOnly);
		} else if (!eefElementEditorReadOnlyState && !propertyValue.isEnabled()) {
			propertyValue.setEnabled(true);
		}
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#getPropertyType()
	 *
	 */
	public Enumerator getPropertyType() {
		Enumerator selection = (Enumerator) ((StructuredSelection) propertyType.getSelection()).getFirstElement();
		return selection;
	}
	/**
	 * Populates the combo with the enum's literals and selects the current one.
	 *
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#initPropertyType(Object input, Enumerator current)
	 */
	public void initPropertyType(Object input, Enumerator current) {
		propertyType.setInput(input);
		propertyType.modelUpdating(new StructuredSelection(current));
		boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.TaskProperty.Properties.propertyType);
		if (eefElementEditorReadOnlyState && propertyType.isEnabled()) {
			propertyType.setEnabled(false);
			propertyType.setToolTipText(EsbMessages.TaskProperty_ReadOnly);
		} else if (!eefElementEditorReadOnlyState && !propertyType.isEnabled()) {
			propertyType.setEnabled(true);
		}
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.TaskPropertyPropertiesEditionPart#setPropertyType(Enumerator newValue)
	 *
	 */
	public void setPropertyType(Enumerator newValue) {
		propertyType.modelUpdating(new StructuredSelection(newValue));
		boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.TaskProperty.Properties.propertyType);
		if (eefElementEditorReadOnlyState && propertyType.isEnabled()) {
			propertyType.setEnabled(false);
			propertyType.setToolTipText(EsbMessages.TaskProperty_ReadOnly);
		} else if (!eefElementEditorReadOnlyState && !propertyType.isEnabled()) {
			propertyType.setEnabled(true);
		}
	}
	/**
	 * {@inheritDoc}
	 *
	 * @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle()
	 *
	 */
	public String getTitle() {
		return EsbMessages.TaskProperty_Part_Title;
	}
	// Start of user code additional methods
	// End of user code
}
| prabushi/devstudio-tooling-esb | plugins/org.wso2.developerstudio.eclipse.gmf.esb.edit/src-gen/org/wso2/developerstudio/eclipse/gmf/esb/parts/forms/TaskPropertyPropertiesEditionPartForm.java | Java | apache-2.0 | 18,210 |
package CustomOreGen.Util;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;
public abstract class MapCollection<K,V> implements Collection<V>
{
    /** Map that stores every element of this collection, indexed by {@link #getKey}. */
    protected final Map<K,V> backingMap;

    /**
     * Wraps {@code backingMap} as a collection of its values.
     * Verifies up front that every existing entry is keyed consistently
     * with {@link #getKey}.
     *
     * @throws IllegalArgumentException if any entry's key does not match
     *         the key derived from its value
     */
    public MapCollection(Map<K,V> backingMap)
    {
        this.backingMap = backingMap;
        for (Entry<K,V> mapping : backingMap.entrySet()) {
            V element = mapping.getValue();
            K actual = mapping.getKey();
            K expected = this.getKey(element);
            if (!java.util.Objects.equals(actual, expected)) {
                throw new IllegalArgumentException("Backing set contains inconsistent key/value pair \'" + actual + "\' -> \'" + element + "\', expected \'" + expected + "\' -> \'" + element + "\'");
            }
        }
    }

    /** Derives the map key under which {@code value} is stored. */
    protected abstract K getKey(V value);

    public int size()
    {
        return this.backingMap.size();
    }

    public boolean isEmpty()
    {
        return this.backingMap.isEmpty();
    }

    public boolean contains(Object o)
    {
        try {
            @SuppressWarnings("unchecked")
            V element = (V)o;
            return this.backingMap.containsKey(this.getKey(element));
        } catch(ClassCastException e) {
            // o is not of the element type, so it cannot be contained.
            return false;
        }
    }

    public Iterator<V> iterator()
    {
        return this.backingMap.values().iterator();
    }

    public Object[] toArray()
    {
        return this.backingMap.values().toArray();
    }

    public <T> T[] toArray(T[] a)
    {
        return this.backingMap.values().toArray(a);
    }

    /**
     * Adds {@code v} under its derived key. Returns true when the map
     * changed, i.e. the key was absent or previously bound to a different
     * instance (identity comparison).
     */
    public boolean add(V v)
    {
        K key = this.getKey(v);
        if (v == null)
        {
            boolean wasPresent = this.backingMap.containsKey(key);
            V displaced = this.backingMap.put(key, v);
            return !wasPresent || displaced != v;
        }
        return this.backingMap.put(key, v) != v;
    }

    public boolean remove(Object o)
    {
        try {
            @SuppressWarnings("unchecked")
            K key = this.getKey((V)o);
            return this.backingMap.keySet().remove(key);
        } catch(ClassCastException e) {
            return false;
        }
    }

    public boolean containsAll(Collection<?> c)
    {
        Iterator<?> it = c.iterator();
        while (it.hasNext()) {
            if (!this.contains(it.next())) {
                return false;
            }
        }
        return true;
    }

    public boolean addAll(Collection<? extends V> c)
    {
        boolean modified = false;
        Iterator<? extends V> it = c.iterator();
        while (it.hasNext()) {
            if (this.add(it.next())) {
                modified = true;
            }
        }
        return modified;
    }

    public boolean removeAll(Collection<?> c)
    {
        boolean modified = false;
        Iterator<?> it = c.iterator();
        while (it.hasNext()) {
            if (this.remove(it.next())) {
                modified = true;
            }
        }
        return modified;
    }

    public boolean retainAll(Collection<?> c)
    {
        // Translate each retainable object into its map key, then delegate
        // to the key set so removal semantics stay consistent.
        ArrayList<K> keys = new ArrayList<K>(this.backingMap.size());
        for (Object candidate : c) {
            try {
                @SuppressWarnings("unchecked")
                K key = this.getKey((V)candidate);
                keys.add(key);
            } catch(ClassCastException e) {
                // Not an element type; nothing to retain for it.
            }
        }
        return this.backingMap.keySet().retainAll(keys);
    }

    public void clear()
    {
        this.backingMap.clear();
    }

    public int hashCode()
    {
        return this.backingMap.hashCode();
    }

    public boolean equals(Object obj)
    {
        return obj instanceof MapCollection && this.backingMap.equals(((MapCollection<?, ?>)obj).backingMap);
    }

    public String toString()
    {
        return this.backingMap.values().toString();
    }
}
| reteo/CustomOreGen | src/main/java/CustomOreGen/Util/MapCollection.java | Java | artistic-2.0 | 3,588 |
class Helmfile < Formula
  desc "Deploy Kubernetes Helm Charts"
  homepage "https://github.com/roboll/helmfile"
  url "https://github.com/roboll/helmfile/archive/v0.142.0.tar.gz"
  sha256 "5475a041f0a1eb5777cc45e3fb06458ae76b1d4840aec89f2fed509d833d0cde"
  license "MIT"
  # Pre-built binary checksums; this block is generated by Homebrew CI
  # (`brew bottle`) and should not be edited by hand.
  bottle do
    sha256 cellar: :any_skip_relocation, arm64_monterey: "29cf096405cc834e7888ebdee9c811a3e375e8a43b2e045ec0295e8ff654bad3"
    sha256 cellar: :any_skip_relocation, arm64_big_sur:  "80e9c9d81f57b0331038108026263d6d9b184403659b66a976172e9dde916792"
    sha256 cellar: :any_skip_relocation, monterey:       "73e5bab63a7d9c0af77ccc72f8bca63cc8f72b96923ebfe41430a356cbf2cdeb"
    sha256 cellar: :any_skip_relocation, big_sur:        "ca024a40610d455dce99ef913baee47fa1d82dc821d780b94e56a54b3ecbde7b"
    sha256 cellar: :any_skip_relocation, catalina:       "7fa829db664c78079ba1c8ac19ec75b47e9664dfc55cf79d18970070e81d0fc2"
    sha256 cellar: :any_skip_relocation, x86_64_linux:   "53ff4d7a0816b82fcd87c791e6a9db70699425931bbba96181b545a837fb7fb7"
  end
  depends_on "go" => :build
  depends_on "helm"
  def install
    # Stamp the release version into the binary via -ldflags so that
    # `helmfile -v` reports this formula's version instead of "dev".
    system "go", "build", "-ldflags", "-X github.com/roboll/helmfile/pkg/app/version.Version=v#{version}",
           "-o", bin/"helmfile", "-v", "github.com/roboll/helmfile"
  end
  test do
    # Write a minimal declarative spec, then check that `helmfile repos`
    # resolves the declared repository and that the stamped version matches.
    (testpath/"helmfile.yaml").write <<-EOS
      repositories:
      - name: stable
        url: https://charts.helm.sh/stable
      releases:
      - name: vault # name of this release
        namespace: vault # target namespace
        createNamespace: true # helm 3.2+ automatically create release namespace (default true)
        labels: # Arbitrary key value pairs for filtering releases
          foo: bar
        chart: stable/vault # the chart being installed to create this release, referenced by `repository/chart` syntax
        version: ~1.24.1 # the semver of the chart. range constraint is supported
    EOS
    system Formula["helm"].opt_bin/"helm", "create", "foo"
    output = "Adding repo stable https://charts.helm.sh/stable"
    assert_match output, shell_output("#{bin}/helmfile -f helmfile.yaml repos 2>&1")
    assert_match version.to_s, shell_output("#{bin}/helmfile -v")
  end
end
| sjackman/homebrew-core | Formula/helmfile.rb | Ruby | bsd-2-clause | 2,316 |
import re
# Python 2/3 compatibility shim: Python 3 has no ``unicode`` builtin, so
# alias it to ``str`` there; on Python 2 the real builtin is left as-is.
try:
    unicode
except NameError:
    unicode = str
def compile_url(url):
    """Turn ``url`` into a compiled regex, dropping any leading slashes.

    ``url`` is coerced to text first (via the module-level ``unicode``
    alias) so both byte and text inputs are accepted on Python 2.
    """
    pattern = unicode(url).lstrip(u'/')
    return re.compile(pattern)
def compile_urls(urls):
    """Compile every expression in ``urls`` with :func:`compile_url`."""
    return list(map(compile_url, urls))
| ghickman/incuna-auth | incuna_auth/middleware/utils.py | Python | bsd-2-clause | 272 |
class Helmsman < Formula
  desc "Helm Charts as Code tool"
  homepage "https://github.com/Praqma/helmsman"
  # Built from a pinned git tag + revision rather than a tarball.
  url "https://github.com/Praqma/helmsman.git",
      tag: "v3.7.2",
      revision: "6d7e6ddb2c7747b8789dd72db7714431fe17e779"
  license "MIT"

  # Pre-built bottles per platform; checksums generated by CI.
  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "a307ed84ab8c572b9256fb54aaad6a7300f9384c94163dfeafc7419188aed4d0"
    sha256 cellar: :any_skip_relocation, big_sur:       "2f8fa84afd13560540da1be94ba442d8d140821436e14b038b8f034e561d7ca7"
    sha256 cellar: :any_skip_relocation, catalina:      "c0604c09ea08fd0aefb7ad9f24ec6df256156670fa8d30c180365c9b479cf99f"
    sha256 cellar: :any_skip_relocation, mojave:        "4841f957b4825a3501faa2ccf437d79042ea9019389ad344d4f20f9cecfe3830"
    sha256 cellar: :any_skip_relocation, x86_64_linux:  "dd61ceab712bafb407449a97d4e5e3df51bf50514f27c4bdd228796032748527"
  end

  # Go is build-only; helm and kubectl are runtime requirements.
  depends_on "go" => :build
  depends_on "helm"
  depends_on "kubernetes-cli"

  def install
    # -s -w strips debug info; -X injects the formula version string.
    system "go", "build", *std_go_args, "-ldflags", "-s -w -X main.version=#{version}", "./cmd/helmsman"
    # Ship the upstream example desired-state file for the test block below.
    pkgshare.install "examples/example.yaml"
  end

  test do
    assert_match version.to_s, shell_output("#{bin}/helmsman version")
    # Applying the example without the helm-diff plugin is expected to fail
    # (exit status 1) with a recognizable message.
    output = shell_output("#{bin}/helmsman --apply -f #{pkgshare}/example.yaml 2>&1", 1)
    assert_match "helm diff not found", output
  end
end
| zyedidia/homebrew-core | Formula/helmsman.rb | Ruby | bsd-2-clause | 1,369 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from math import log
import argh
import numpy as np
from chemreac import ReactionDiffusion
from chemreac.integrate import run
from chemreac.util.plotting import plot_solver_linear_error
def efield_cb(x, logx=False):
    """Constant (flat) electric-field callback.

    Parameters
    ----------
    x : array_like
        Grid coordinates; only shape and dtype matter.
    logx : bool
        Unused; present for callback-signature compatibility.

    Returns
    -------
    numpy.ndarray
        Array shaped like ``x``, every entry equal to -1.
    """
    return np.full_like(x, -1)
def y0_flat_cb(x, logx=False, use_log2=False):
    """Initial profile for flat geometry: linear ramp from 17 down to 6.

    ``x`` holds the N+1 bin edges; the profile is evaluated at the N bin
    midpoints.  When ``logx`` is true the grid is log-transformed, so both
    edges and midpoints are exponentiated back (base 2 if ``use_log2``)
    before the ramp is computed.
    """
    midpoints = x[:-1] + 0.5 * np.diff(x)
    if logx:
        if use_log2:
            expb = lambda arg: 2**arg
        else:
            expb = np.exp
        x = expb(x)
        midpoints = expb(midpoints)
    frac = (midpoints - x[0]) / (x[-1] - x[0])
    return 17 - 11 * frac
def y0_cylindrical_cb(x, logx=False, use_log2=False):
    """Initial profile for cylindrical geometry: 17 minus a log term.

    Evaluated at the bin midpoints of edge array ``x``; with ``logx`` the
    grid is exponentiated (base 2 if ``use_log2``) before use.  Diverges at
    the left edge, so the fraction must stay strictly positive.
    """
    midpoints = x[:-1] + 0.5 * np.diff(x)
    if logx:
        if use_log2:
            expb = lambda arg: 2**arg
        else:
            expb = np.exp
        x = expb(x)
        midpoints = expb(midpoints)
    frac = (midpoints - x[0]) / (x[-1] - x[0])
    return 17 - np.log(frac)
def y0_spherical_cb(x, logx=False, use_log2=False):
    """Initial profile for spherical geometry: 3 plus a 1/r-style term.

    Evaluated at the bin midpoints of edge array ``x``; with ``logx`` the
    grid is exponentiated (base 2 if ``use_log2``) before use.  Diverges at
    the left edge, so the fraction must stay strictly positive.
    """
    midpoints = x[:-1] + 0.5 * np.diff(x)
    if logx:
        if use_log2:
            expb = lambda arg: 2**arg
        else:
            expb = np.exp
        x = expb(x)
        midpoints = expb(midpoints)
    frac = (midpoints - x[0]) / (x[-1] - x[0])
    return 3 + 0.1 / frac
def integrate_rd(D=2e-3, t0=3., tend=7., x0=0.0, xend=1.0, mu=None, N=32,
                 nt=25, geom='f', logt=False, logy=False, logx=False,
                 random=False, nstencil=3, lrefl=False, rrefl=False,
                 num_jacobian=False, method='bdf', plot=False,
                 atol=1e-6, rtol=1e-6, efield=False, random_seed=42,
                 verbose=False, use_log2=False):
    """Integrate a pure-diffusion (optionally drift) single-species system
    and compare against the steady-state analytic profile.

    The reference solution is simply the initial profile (steady state),
    plus a uniform drift term ``mobility * t`` when ``efield`` is enabled
    (flat geometry only).  Returns the tuple
    ``(tout, Cout, info, ave_rmsd_over_atol, rd)``.
    """
    if random_seed:
        np.random.seed(random_seed)
    n = 1  # single species
    mu = float(mu or x0)
    tout = np.linspace(t0, tend, nt)
    # geom must be one of: 'f' (flat), 'c' (cylindrical), 's' (spherical)
    assert geom in 'fcs'
    # Setup the grid (log-transformed when logx; base-2 when use_log2)
    logb = (lambda arg: log(arg)/log(2)) if use_log2 else log
    _x0 = logb(x0) if logx else x0
    _xend = logb(xend) if logx else xend
    x = np.linspace(_x0, _xend, N+1)
    if random:
        # Jitter interior grid points to exercise non-uniform grids.
        x += (np.random.random(N+1)-0.5)*(_xend-_x0)/(N+2)
    mob = 0.3  # mobility used for the (optional) drift term
    # Initial conditions: steady-state profile for the chosen geometry
    y0 = {
        'f': y0_flat_cb,
        'c': y0_cylindrical_cb,
        's': y0_spherical_cb
    }[geom](x, logx)
    # Setup the system: no reactions at all, only transport
    stoich_active = []
    stoich_prod = []
    k = []
    # Reflective boundaries are not supported by this example.
    assert not lrefl
    assert not rrefl
    rd = ReactionDiffusion(
        n, stoich_active, stoich_prod, k, N,
        D=[D],
        z_chg=[1],
        mobility=[mob],
        x=x,
        geom=geom,
        logy=logy,
        logt=logt,
        logx=logx,
        nstencil=nstencil,
        lrefl=lrefl,
        rrefl=rrefl,
        use_log2=use_log2
    )
    if efield:
        if geom != 'f':
            raise ValueError("Only analytic sol. for flat drift implemented.")
        rd.efield = efield_cb(rd.xcenters, logx)
    # Analytic reference values: the (steady) initial profile at every tout,
    # shifted by mobility*t when a drift field is applied.
    t = tout.copy().reshape((nt, 1))
    Cref = np.repeat(y0[np.newaxis, :, np.newaxis], nt, axis=0)
    if efield:
        Cref += t.reshape((nt, 1, 1))*mob
    # Run the integration
    integr = run(rd, y0, tout, atol=atol, rtol=rtol,
                 with_jacobian=(not num_jacobian), method=method)
    Cout, info = integr.Cout, integr.info
    if verbose:
        print(info)

    def lin_err(i=slice(None), j=slice(None)):
        # Linear-scale error vs the analytic reference.
        return integr.Cout[i, :, j] - Cref[i, :, j]

    # Root-mean-square deviation per output time, scaled by abs. tolerance.
    rmsd = np.sum(lin_err()**2 / N, axis=1)**0.5
    ave_rmsd_over_atol = np.average(rmsd, axis=0)/info['atol']
    # Plot results (matplotlib imported lazily so plotting stays optional)
    if plot:
        import matplotlib.pyplot as plt

        def _plot(y, c, ttl=None, apply_exp_on_y=False):
            # Plot bin-center values; optionally undo the log-transform.
            plt.plot(rd.xcenters, rd.expb(y) if apply_exp_on_y else y, c=c)
            if N < 100:
                plt.vlines(rd.x, 0, np.ones_like(rd.x)*max(y), linewidth=.1,
                           colors='gray')
            plt.xlabel('x / m')
            plt.ylabel('C / M')
            if ttl:
                plt.title(ttl)

        for i in range(nt):
            c = 1-tout[i]/tend
            c = (1.0-c, .5-c/2, .5-c/2)  # over time: dark red -> light red
            plt.subplot(4, 1, 1)
            _plot(Cout[i, :, 0], c, 'Simulation (N={})'.format(rd.N),
                  apply_exp_on_y=logy)
            plt.subplot(4, 1, 2)
            _plot(Cref[i, :, 0], c, 'Analytic', apply_exp_on_y=logy)
            ax_err = plt.subplot(4, 1, 3)
            plot_solver_linear_error(integr, Cref, ax_err, ti=i,
                                     bi=slice(None),
                                     color=c, fill=(i == 0))
            plt.title('Linear rel error / Log abs. tol. (={})'.format(
                info['atol']))
        plt.subplot(4, 1, 4)
        tspan = [tout[0], tout[-1]]
        plt.plot(tout, rmsd[:, 0] / info['atol'], 'r')
        plt.plot(tspan, [ave_rmsd_over_atol[0]]*2, 'r--')
        plt.xlabel('Time / s')
        plt.ylabel(r'$\sqrt{\langle E^2 \rangle} / atol$')
        plt.tight_layout()
        plt.show()
    return tout, Cout, info, ave_rmsd_over_atol, rd
if __name__ == '__main__':
    # Expose integrate_rd as a CLI via argh (keyword args become flags).
    argh.dispatch_command(integrate_rd, output_file=None)
| bjodah/chemreac | examples/steady_state.py | Python | bsd-2-clause | 4,970 |
package org.jvnet.jaxb2_commons.xml.bind.model.concrete;
import java.util.ArrayList;
import java.util.Collection;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.activation.MimeType;
import javax.xml.namespace.NamespaceContext;
import javax.xml.namespace.QName;
import org.jvnet.jaxb2_commons.lang.Validate;
import org.jvnet.jaxb2_commons.xml.bind.model.MBuiltinLeafInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MClassInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MClassTypeInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MContainer;
import org.jvnet.jaxb2_commons.xml.bind.model.MElement;
import org.jvnet.jaxb2_commons.xml.bind.model.MElementInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MElementTypeRef;
import org.jvnet.jaxb2_commons.xml.bind.model.MEnumLeafInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MModelInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MPackageInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MPropertyInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.MTypeInfo;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMAnyAttributePropertyInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMBuiltinLeafInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMClassInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMElementInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMElementOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMElementTypeRefOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMEnumConstantInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMEnumLeafInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMModelInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMPropertyInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.concrete.origin.CMWildcardTypeInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MBuiltinLeafInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MClassInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MElementInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MElementOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MElementTypeRefOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MEnumConstantInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MEnumLeafInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MModelInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MPropertyInfoOrigin;
import org.jvnet.jaxb2_commons.xml.bind.model.origin.MWildcardTypeInfoOrigin;
import com.sun.xml.bind.v2.model.core.Adapter;
import com.sun.xml.bind.v2.model.core.AttributePropertyInfo;
import com.sun.xml.bind.v2.model.core.BuiltinLeafInfo;
import com.sun.xml.bind.v2.model.core.ClassInfo;
import com.sun.xml.bind.v2.model.core.Element;
import com.sun.xml.bind.v2.model.core.ElementInfo;
import com.sun.xml.bind.v2.model.core.ElementPropertyInfo;
import com.sun.xml.bind.v2.model.core.EnumConstant;
import com.sun.xml.bind.v2.model.core.EnumLeafInfo;
import com.sun.xml.bind.v2.model.core.ID;
import com.sun.xml.bind.v2.model.core.MapPropertyInfo;
import com.sun.xml.bind.v2.model.core.PropertyInfo;
import com.sun.xml.bind.v2.model.core.ReferencePropertyInfo;
import com.sun.xml.bind.v2.model.core.TypeInfo;
import com.sun.xml.bind.v2.model.core.TypeInfoSet;
import com.sun.xml.bind.v2.model.core.TypeRef;
import com.sun.xml.bind.v2.model.core.ValuePropertyInfo;
import com.sun.xml.bind.v2.model.core.WildcardTypeInfo;
/**
 * Template factory that walks a JAXB RI {@link TypeInfoSet} and builds the
 * corresponding jaxb2-basics meta-model ({@link MModelInfo} plus the
 * {@code M*Info} types).  Concrete subclasses bind the RI's generic type
 * parameters and supply naming/packaging strategies via the abstract methods.
 *
 * <p>Instances cache created meta-model nodes in identity-keyed maps so each
 * RI node maps to exactly one meta-model node; classes and enums are created
 * first and populated in a second pass (see {@link #createModel()}), which
 * allows cyclic references between them.  Not thread-safe.</p>
 */
public abstract class CMInfoFactory<T, C extends T, TIS extends TypeInfoSet<T, C, ?, ?>,
// TI: any RI type info
TI extends TypeInfo<T, C>,
// BLI: built-in simple type
BLI extends BuiltinLeafInfo<T, C>,
// E: element declaration
E extends Element<T, C>,
// EI: global element info
EI extends ElementInfo<T, C>,
// ELI: enum type
ELI extends EnumLeafInfo<T, C>,
// EC: enum constant
EC extends EnumConstant<T, C>,
// CI: class (complex type) info
CI extends ClassInfo<T, C>,
// PI: property info (any kind)
PI extends PropertyInfo<T, C>,
// API: attribute property
API extends AttributePropertyInfo<T, C>,
// VPI: value (simple content) property
VPI extends ValuePropertyInfo<T, C>,
// EPI: element property
EPI extends ElementPropertyInfo<T, C>,
// RPI: reference property
RPI extends ReferencePropertyInfo<T, C>,
// WTI: wildcard type
WTI extends WildcardTypeInfo<T, C>,
// TR: element type reference
TR extends TypeRef<T, C>> {

    // Identity-keyed caches: one meta-model node per RI node.
    private final Map<BLI, MBuiltinLeafInfo<T, C>> builtinLeafInfos = new IdentityHashMap<BLI, MBuiltinLeafInfo<T, C>>();

    private final Map<CI, MClassInfo<T, C>> classInfos = new IdentityHashMap<CI, MClassInfo<T, C>>();

    private final Map<ELI, MEnumLeafInfo<T, C>> enumLeafInfos = new IdentityHashMap<ELI, MEnumLeafInfo<T, C>>();

    private final Map<EI, MElementInfo<T, C>> elementInfos = new IdentityHashMap<EI, MElementInfo<T, C>>();

    private final TIS typeInfoSet;

    /**
     * @param typeInfoSet the RI type info set to convert; must not be null.
     */
    public CMInfoFactory(TIS typeInfoSet) {
        Validate.notNull(typeInfoSet);
        this.typeInfoSet = typeInfoSet;
    }

    public TIS getTypeInfoSet() {
        return typeInfoSet;
    }

    /**
     * Builds the complete meta-model.  Order matters: built-ins and enums
     * before classes, elements last, since later nodes reference earlier ones.
     */
    public MModelInfo<T, C> createModel() {
        final CMModel<T, C> model = new CMModel<T, C>(
                createModelInfoOrigin(typeInfoSet));
        createBuiltinLeafInfos(model);
        createEnumLeafInfos(model);
        createClassInfos(model);
        createElementInfos(model);
        return model;
    }

    private void createElementInfos(final CMModel<T, C> model) {
        Iterable<? extends ElementInfo<T, C>> elements = typeInfoSet
                .getAllElements();
        // First pass: ensure every element info is created (cached) ...
        for (ElementInfo<T, C> element : elements) {
            final EI ei = (EI) element;
            getElementInfo(ei);
        }
        // ... second pass: register them with the model.
        for (ElementInfo<T, C> element : elements) {
            model.addElementInfo(getElementInfo((EI) element));
        }
    }

    private void createEnumLeafInfos(final CMModel<T, C> model) {
        Collection<? extends EnumLeafInfo<T, C>> enums = typeInfoSet.enums()
                .values();
        // Create (cache) all enum infos first ...
        for (EnumLeafInfo<T, C> enumLeafInfo : enums) {
            @SuppressWarnings("unchecked")
            final ELI eli = (ELI) enumLeafInfo;
            getTypeInfo(eli);
        }
        // ... then populate constants, so cross-references resolve.
        for (Map.Entry<ELI, MEnumLeafInfo<T, C>> entry : enumLeafInfos
                .entrySet()) {
            populateEnumLeafInfo(entry.getKey(), entry.getValue());
        }
        for (EnumLeafInfo<T, C> enumLeafInfo : enums) {
            model.addEnumLeafInfo(getTypeInfo((ELI) enumLeafInfo));
        }
    }

    private void createBuiltinLeafInfos(final CMModel<T, C> model) {
        Collection<? extends BuiltinLeafInfo<T, C>> builtins = typeInfoSet
                .builtins().values();
        for (BuiltinLeafInfo<T, C> builtinLeafInfo : builtins) {
            @SuppressWarnings("unchecked")
            final BLI bli = (BLI) builtinLeafInfo;
            getTypeInfo(bli);
        }
        for (BuiltinLeafInfo<T, C> builtinLeafInfo : builtins) {
            model.addBuiltinLeafInfo(getTypeInfo((BLI) builtinLeafInfo));
        }
    }

    private void createClassInfos(final CMModel<T, C> model) {
        Collection<? extends ClassInfo<T, C>> beans = typeInfoSet.beans()
                .values();
        // Create (cache) all class shells first ...
        for (ClassInfo<T, C> classInfo : beans) {
            @SuppressWarnings("unchecked")
            final CI ci = (CI) classInfo;
            getTypeInfo(ci);
        }
        // ... then populate properties, so cyclic class references resolve.
        for (Map.Entry<CI, MClassInfo<T, C>> entry : classInfos.entrySet()) {
            populateClassInfo(entry.getKey(), entry.getValue());
        }
        for (ClassInfo<T, C> classInfo : beans) {
            model.addClassInfo(getTypeInfo((CI) classInfo));
        }
    }

    /**
     * Wraps the raw type info according to list/ID semantics: ID and IDREF
     * get marker wrappers, list-valued properties get a CMList wrapper.
     */
    protected MTypeInfo<T, C> getTypeInfo(PropertyInfo<T, C> propertyInfo,
            TI typeInfo, boolean list, Adapter<T, C> adapter, ID id,
            MimeType mimeType) {
        final MTypeInfo<T, C> ti = getTypeInfo(typeInfo);
        if (list) {
            switch (id) {
            case ID:
                final MTypeInfo<T, C> tid = new CMID<T, C>(ti.getTargetType(),
                        ti);
                return new CMList<T, C>(createListType(tid.getTargetType()),
                        tid, null);
            case IDREF:
                return new CMIDREFS<T, C>(createListType(ti.getTargetType()),
                        ti);
            default:
                return new CMList<T, C>(createListType(ti.getTargetType()), ti,
                        null);
            }
        } else {
            switch (id) {
            case ID:
                return new CMID<T, C>(ti.getTargetType(), ti);
            case IDREF:
                return new CMIDREF<T, C>(ti.getTargetType(), ti);
            default:
                return ti;
            }
        }
    }

    /** Dispatches on the concrete RI type-info kind. */
    protected MTypeInfo<T, C> getTypeInfo(TI typeInfo) {
        if (typeInfo instanceof BuiltinLeafInfo) {
            return getTypeInfo((BLI) typeInfo);
        } else if (typeInfo instanceof EnumLeafInfo) {
            return getTypeInfo((ELI) typeInfo);
        } else if (typeInfo instanceof ElementInfo) {
            return getTypeInfo((EI) typeInfo);
        } else if (typeInfo instanceof WildcardTypeInfo) {
            return createWildcardTypeInfo((WTI) typeInfo);
        } else if (typeInfo instanceof ClassInfo) {
            return getTypeInfo((CI) typeInfo);
        } else {
            throw new UnsupportedOperationException(typeInfo.getClass()
                    .getName());
        }
    }

    private MBuiltinLeafInfo<T, C> getTypeInfo(BLI info) {
        // Create-on-demand with identity cache.
        MBuiltinLeafInfo<T, C> builtinLeafInfo = builtinLeafInfos.get(info);
        if (builtinLeafInfo == null) {
            builtinLeafInfo = createBuiltinLeafInfo(info);
            builtinLeafInfos.put(info, builtinLeafInfo);
        }
        return builtinLeafInfo;
    }

    private MTypeInfo<T, C> getTypeInfo(EI info) {
        // An element info's type is its content type, wrapped per the
        // element property's list/ID/mime characteristics.
        @SuppressWarnings("unchecked")
        EPI p = (EPI) info.getProperty();
        @SuppressWarnings("unchecked")
        TI contentType = (TI) info.getContentType();
        return getTypeInfo(p, contentType, p.isValueList(), p.getAdapter(),
                p.id(), p.getExpectedMimeType());
    }

    protected MClassInfo<T, C> getTypeInfo(CI info) {
        // Create-on-demand with identity cache; population happens later.
        MClassInfo<T, C> classInfo = classInfos.get(info);
        if (classInfo == null) {
            classInfo = createClassInfo(info);
            classInfos.put(info, classInfo);
        }
        return classInfo;
    }

    private MEnumLeafInfo<T, C> getTypeInfo(ELI info) {
        // Create-on-demand with identity cache; constants added later.
        MEnumLeafInfo<T, C> enumLeafInfo = enumLeafInfos.get(info);
        if (enumLeafInfo == null) {
            enumLeafInfo = createEnumLeafInfo(info);
            enumLeafInfos.put(info, enumLeafInfo);
        }
        return enumLeafInfo;
    }

    private void populateEnumLeafInfo(ELI info, MEnumLeafInfo<T, C> enumLeafInfo) {
        @SuppressWarnings("rawtypes")
        Iterable<? extends EnumConstant> _constants = info.getConstants();
        @SuppressWarnings("unchecked")
        final Iterable<? extends EnumConstant<T, C>> enumConstants = (Iterable<? extends EnumConstant<T, C>>) _constants;
        for (EnumConstant<?, ?> enumConstant : enumConstants) {
            enumLeafInfo.addEnumConstantInfo(createEnumContantInfo(
                    enumLeafInfo, (EC) enumConstant));
        }
    }

    protected MElementInfo<T, C> getElementInfo(EI info) {
        // Create-on-demand with identity cache.
        MElementInfo<T, C> elementInfo = elementInfos.get(info);
        if (elementInfo == null) {
            elementInfo = createElementInfo(info);
            elementInfos.put(info, elementInfo);
        }
        return elementInfo;
    }

    /** Creates the class shell (no properties yet; see populateClassInfo). */
    protected MClassInfo<T, C> createClassInfo(CI info) {
        return new CMClassInfo<T, C>(createClassInfoOrigin(info),
                info.getClazz(), getPackage(info), getContainer(info),
                getLocalName(info), createBaseTypeInfo(info),
                info.isElement() ? info.getElementName() : null,
                info.getTypeName());
    }

    private void populateClassInfo(CI info, MClassInfo<T, C> classInfo) {
        if (info.hasAttributeWildcard()) {
            classInfo.addProperty(createAnyAttributePropertyInfo(classInfo));
        }
        for (PropertyInfo<T, C> p : (List<? extends PropertyInfo<T, C>>) info
                .getProperties()) {
            classInfo.addProperty(createPropertyInfo(classInfo, (PI) p));
        }
    }

    protected MClassTypeInfo<T, C, ?> createBaseTypeInfo(CI info) {
        return info.getBaseClass() == null ? null : getTypeInfo((CI) info
                .getBaseClass());
    }

    /**
     * Dispatches on the property kind (attribute/value/element/reference)
     * and on cardinality of element/reference alternatives.
     */
    private MPropertyInfo<T, C> createPropertyInfo(
            final MClassInfo<T, C> classInfo, PI p) {
        if (p instanceof AttributePropertyInfo) {
            @SuppressWarnings("unchecked")
            final API api = (API) p;
            return createAttributePropertyInfo(classInfo, api);
        } else if (p instanceof ValuePropertyInfo) {
            @SuppressWarnings("unchecked")
            final VPI vpi = (VPI) p;
            return createValuePropertyInfo(classInfo, vpi);
        } else if (p instanceof ElementPropertyInfo) {
            @SuppressWarnings("unchecked")
            final EPI ep = (EPI) p;
            if (ep.getTypes().size() == 1) {
                return createElementPropertyInfo(classInfo, ep);
            } else {
                return createElementsPropertyInfo(classInfo, ep);
            }
        } else if (p instanceof ReferencePropertyInfo) {
            @SuppressWarnings("unchecked")
            final RPI rp = (RPI) p;
            final Set<? extends Element<T, C>> elements = rp.getElements();
            if (elements.size() == 0
                    && rp.getWildcard() != null
                    && (rp.getWildcard().allowDom || rp.getWildcard().allowTypedObject)) {
                return createAnyElementPropertyInfo(classInfo, rp);
            } else if (elements.size() == 1) {
                return createElementRefPropertyInfo(classInfo, rp);
            } else {
                return createElementRefsPropertyInfo(classInfo, rp);
            }
        } else if (p instanceof MapPropertyInfo) {
            // Map properties are not supported by this meta-model.
            // System.out.println("Map property: " + p.getName());
            // MapPropertyInfo<T, C> mp = (MapPropertyInfo<T, C>) p;
            throw new UnsupportedOperationException();
        } else {
            throw new AssertionError();
        }
    }

    protected MPropertyInfo<T, C> createAttributePropertyInfo(
            final MClassInfo<T, C> classInfo, final API propertyInfo) {
        return new CMAttributePropertyInfo<T, C>(
                createPropertyInfoOrigin((PI) propertyInfo), classInfo,
                propertyInfo.getName(), getTypeInfo(propertyInfo),
                propertyInfo.getXmlName(), propertyInfo.isRequired(),
                getDefaultValue(propertyInfo),
                getDefaultValueNamespaceContext(propertyInfo));
    }

    protected MPropertyInfo<T, C> createValuePropertyInfo(
            final MClassInfo<T, C> classInfo, final VPI propertyInfo) {
        return new CMValuePropertyInfo<T, C>(
                createPropertyInfoOrigin((PI) propertyInfo), classInfo,
                propertyInfo.getName(), getTypeInfo(propertyInfo), null, null);
    }

    /** Single-alternative element property. */
    protected MPropertyInfo<T, C> createElementPropertyInfo(
            final MClassInfo<T, C> classInfo, final EPI ep) {
        final TR typeRef = (TR) ep.getTypes().get(0);
        return new CMElementPropertyInfo<T, C>(
                createPropertyInfoOrigin((PI) ep), classInfo, ep.getName(),
                ep.isCollection() && !ep.isValueList(), ep.isRequired(),
                getTypeInfo(ep, typeRef), typeRef.getTagName(),
                ep.getXmlName(), typeRef.isNillable(),
                getDefaultValue(typeRef),
                getDefaultValueNamespaceContext(typeRef));
    }

    /** Multi-alternative (choice) element property. */
    protected MPropertyInfo<T, C> createElementsPropertyInfo(
            final MClassInfo<T, C> classInfo, final EPI ep) {
        List<? extends TR> types = (List<? extends TR>) ep.getTypes();
        final Collection<MElementTypeRef<T, C>> typedElements = new ArrayList<MElementTypeRef<T, C>>(
                types.size());
        for (TR typeRef : types) {
            typedElements.add(new CMElementTypeRef<T, C>(
                    createElementTypeRefOrigin(ep, typeRef), typeRef
                            .getTagName(), getTypeInfo(ep, typeRef), typeRef
                            .isNillable(), getDefaultValue(typeRef),
                    getDefaultValueNamespaceContext(typeRef)));
        }
        return new CMElementsPropertyInfo<T, C>(
                createPropertyInfoOrigin((PI) ep), classInfo, ep.getName(),
                ep.isCollection() && !ep.isValueList(), ep.isRequired(),
                typedElements, ep.getXmlName());
    }

    /** Pure wildcard (xs:any) property without element alternatives. */
    protected MPropertyInfo<T, C> createAnyElementPropertyInfo(
            final MClassInfo<T, C> classInfo, final RPI rp) {
        return new CMAnyElementPropertyInfo<T, C>(
                createPropertyInfoOrigin((PI) rp), classInfo, rp.getName(),
                rp.isCollection(), rp.isRequired(), rp.isMixed(),
                rp.getWildcard().allowDom, rp.getWildcard().allowTypedObject);
    }

    /** Reference property with exactly one element alternative. */
    protected MPropertyInfo<T, C> createElementRefPropertyInfo(
            final MClassInfo<T, C> classInfo, final RPI rp) {
        final Element<T, C> element = rp.getElements().iterator().next();
        return new CMElementRefPropertyInfo<T, C>(
                createPropertyInfoOrigin((PI) rp), classInfo, rp.getName(),
                rp.isCollection(), rp.isRequired(), getTypeInfo(rp, element),
                element.getElementName(), rp.getXmlName(),
                rp.isMixed(), rp.getWildcard() == null ? false
                        : rp.getWildcard().allowDom,
                rp.getWildcard() == null ? true
                        : rp.getWildcard().allowTypedObject,
                getDefaultValue(element),
                getDefaultValueNamespaceContext(element));
    }

    /** Reference property with several element alternatives. */
    protected MPropertyInfo<T, C> createElementRefsPropertyInfo(
            final MClassInfo<T, C> classInfo, final RPI rp) {
        final List<MElement<T, C>> typedElements = new ArrayList<MElement<T, C>>();
        for (Element<T, C> e : rp.getElements()) {
            final E element = (E) e;
            typedElements.add(new CMElement<T, C>(createElementOrigin(element),
                    element.getElementName(), getTypeInfo(rp, element), true,
                    getDefaultValue(element),
                    getDefaultValueNamespaceContext(element)));
        }
        return new CMElementRefsPropertyInfo<T, C>(
                createPropertyInfoOrigin((PI) rp), classInfo, rp.getName(),
                rp.isCollection(), rp.isRequired(), typedElements,
                rp.getXmlName(), rp.isMixed(), rp.getWildcard() == null ? false
                        : rp.getWildcard().allowDom,
                rp.getWildcard() == null ? true
                        : rp.getWildcard().allowTypedObject);
    }

    /** Synthetic property backing xs:anyAttribute ("otherAttributes"). */
    protected CMAnyAttributePropertyInfo<T, C> createAnyAttributePropertyInfo(
            final MClassInfo<T, C> classInfo) {
        return new CMAnyAttributePropertyInfo<T, C>(
                createAnyAttributePropertyInfoOrigin(), classInfo,
                "otherAttributes");
    }

    protected MTypeInfo<T, C> getTypeInfo(final ValuePropertyInfo<T, C> vp) {
        return getTypeInfo(vp, (TI) vp.ref().iterator().next(),
                vp.isCollection(), vp.getAdapter(), vp.id(),
                vp.getExpectedMimeType());
    }

    protected MTypeInfo<T, C> getTypeInfo(final AttributePropertyInfo<T, C> ap) {
        return getTypeInfo(ap, (TI) ap.ref().iterator().next(),
                ap.isCollection(), ap.getAdapter(), ap.id(),
                ap.getExpectedMimeType());
    }

    protected MTypeInfo<T, C> getTypeInfo(final ElementPropertyInfo<T, C> ep,
            final TR typeRef) {
        return getTypeInfo(ep, (TI) typeRef.getTarget(),
                ep.isValueList(), ep.getAdapter(), ep.id(), ep.getExpectedMimeType());
    }

    protected MTypeInfo<T, C> getTypeInfo(final ReferencePropertyInfo<T, C> rp,
            Element<T, C> element) {
        return getTypeInfo(rp, (TI) element, false, rp.getAdapter(), rp.id(),
                rp.getExpectedMimeType());
    }

    /**
     * Default value of an element declaration; only resolvable when the
     * element has a single-type property, otherwise null.
     */
    private String getDefaultValue(Element<T, C> element) {
        if (element instanceof ElementInfo) {
            final ElementInfo<T, C> elementInfo = (ElementInfo<T, C>) element;
            final ElementPropertyInfo<T, C> property = elementInfo
                    .getProperty();
            if (property != null) {
                final List<? extends TR> types = (List<? extends TR>) property.getTypes();
                if (types.size() == 1) {
                    final TR typeRef = types.get(0);
                    return getDefaultValue(typeRef);
                }
            }
        }
        return null;
    }

    /** Namespace context of the element's default value; see getDefaultValue. */
    private NamespaceContext getDefaultValueNamespaceContext(
            Element<T, C> element) {
        if (element instanceof ElementInfo) {
            final ElementInfo<T, C> elementInfo = (ElementInfo<T, C>) element;
            final ElementPropertyInfo<T, C> property = elementInfo
                    .getProperty();
            if (property != null) {
                final List<? extends TypeRef<T, C>> types = property.getTypes();
                if (types.size() == 1) {
                    final TypeRef<T, C> typeRef = types.get(0);
                    return getDefaultValueNamespaceContext(typeRef);
                }
            }
        }
        return null;
    }

    // Naming/packaging/containment strategies supplied by subclasses.
    protected abstract MPackageInfo getPackage(CI info);

    protected abstract String getLocalName(CI info);

    protected abstract MClassInfo<T, C> getScope(CI info);

    protected abstract MPackageInfo getPackage(ELI info);

    protected abstract String getLocalName(ELI info);

    protected abstract String getLocalName(EI info);

    protected abstract MPackageInfo getPackage(EI info);

    protected abstract MContainer getContainer(CI info);

    protected abstract MContainer getContainer(EI info);

    protected abstract MContainer getContainer(ELI info);

    //
    protected MBuiltinLeafInfo<T, C> createBuiltinLeafInfo(BLI info) {
        return new CMBuiltinLeafInfo<T, C>(createBuiltinLeafInfoOrigin(info),
                info.getType(), info.getTypeName());
    }

    protected MEnumLeafInfo<T, C> createEnumLeafInfo(final ELI info) {
        @SuppressWarnings("unchecked")
        final TI baseType = (TI) info.getBaseType();
        return new CMEnumLeafInfo<T, C>(createEnumLeafInfoOrigin(info),
                info.getClazz(), getPackage(info), getContainer(info),
                getLocalName(info), getTypeInfo(baseType),
                info.getElementName(), info.getTypeName());
    }

    protected CMEnumConstantInfo<T, C> createEnumContantInfo(
            MEnumLeafInfo<T, C> enumLeafInfo, EC enumConstant) {
        return new CMEnumConstantInfo<T, C>(
                createEnumConstantInfoOrigin(enumConstant), enumLeafInfo,
                enumConstant.getLexicalValue());
    }

    protected MElementInfo<T, C> createElementInfo(EI element) {
        @SuppressWarnings("unchecked")
        final CI scopeCI = (CI) element.getScope();
        final MClassInfo<T, C> scope = element.getScope() == null ? null
                : getTypeInfo(scopeCI);
        final QName substitutionHead = element.getSubstitutionHead() == null ? null
                : element.getSubstitutionHead().getElementName();
        final MElementInfo<T, C> elementInfo = new CMElementInfo<T, C>(
                createElementInfoOrigin(element), getPackage(element),
                getContainer(element), getLocalName(element),
                element.getElementName(), scope, getTypeInfo(element),
                substitutionHead, getDefaultValue(element),
                getDefaultValueNamespaceContext(element));
        return elementInfo;
    }

    protected MTypeInfo<T, C> createWildcardTypeInfo(WTI info) {
        return new CMWildcardTypeInfo<T, C>(createWildcardTypeInfoOrigin(info),
                info.getType());
    }

    // Origin factory methods: wrap the RI node so the meta-model can point
    // back to where it came from.  Overridable individually.
    protected MModelInfoOrigin createModelInfoOrigin(TIS info) {
        return new CMModelInfoOrigin<T, C, TIS>(info);
    }

    protected MBuiltinLeafInfoOrigin createBuiltinLeafInfoOrigin(BLI info) {
        return new CMBuiltinLeafInfoOrigin<T, C, BLI>(info);
    }

    protected MClassInfoOrigin createClassInfoOrigin(CI info) {
        return new CMClassInfoOrigin<T, C, CI>(info);
    }

    protected MPropertyInfoOrigin createAnyAttributePropertyInfoOrigin() {
        return new CMAnyAttributePropertyInfoOrigin();
    }

    protected MPropertyInfoOrigin createPropertyInfoOrigin(PI info) {
        return new CMPropertyInfoOrigin<T, C, PI>(info);
    }

    protected MElementOrigin createElementOrigin(E info) {
        return new CMElementOrigin<T, C, E>(info);
    }

    protected MElementTypeRefOrigin createElementTypeRefOrigin(EPI ep,
            TR typeRef) {
        return new CMElementTypeRefOrigin<T, C, EPI, TR>(ep, typeRef);
    }

    protected MElementInfoOrigin createElementInfoOrigin(EI info) {
        return new CMElementInfoOrigin<T, C, EI>(info);
    }

    protected MEnumLeafInfoOrigin createEnumLeafInfoOrigin(ELI info) {
        return new CMEnumLeafInfoOrigin<T, C, ELI>(info);
    }

    protected MEnumConstantInfoOrigin createEnumConstantInfoOrigin(EC info) {
        return new CMEnumConstantInfoOrigin<T, C, EC>(info);
    }

    protected MWildcardTypeInfoOrigin createWildcardTypeInfoOrigin(WTI info) {
        return new CMWildcardTypeInfoOrigin<T, C, WTI>(info);
    }

    /** Creates the platform representation of "list of elementType". */
    protected abstract T createListType(T elementType);

    /**
     * Returns Java class for the reference type or null if it can't be found.
     *
     * @param referencedType
     *            referenced type.
     * @return Java class for the reference type or null.
     */
    protected abstract Class<?> loadClass(T referencedType);

    protected abstract String getDefaultValue(API propertyInfo);

    protected abstract NamespaceContext getDefaultValueNamespaceContext(
            API propertyInfo);

    protected abstract String getDefaultValue(TypeRef<T, C> typeRef);

    protected abstract NamespaceContext getDefaultValueNamespaceContext(
            TypeRef<T, C> typeRef);
}
| Stephan202/jaxb2-basics | runtime/src/main/java/org/jvnet/jaxb2_commons/xml/bind/model/concrete/CMInfoFactory.java | Java | bsd-2-clause | 22,939 |
// Copyright 2009 the Sputnik authors. All rights reserved.
// This code is governed by the BSD license found in the LICENSE file.
/*---
info: |
When "String" is called as part of a new expression, it is a constructor: it initialises the newly created object and
The [[Value]] property of the newly constructed object is set to ToString(value), or to the empty string if value is not supplied
es5id: 15.5.2.1_A1_T16
description: >
Creating string object with "new String()" initialized with .12345
and other numbers
---*/
// Each group below checks three things for `new String(<number>)`:
// the result is an object, its constructor is String, and its value
// matches ToString(number).  The last group exercises the threshold
// where ToString switches to exponential notation (1.2345e-7).
var __str = new String(.12345);

//////////////////////////////////////////////////////////////////////////////
//CHECK#1
if (typeof __str !== "object") {
  $ERROR('#1: __str =new String(.12345); typeof __str === "object". Actual: typeof __str ===' + typeof __str);
}
//
//////////////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////////////
//CHECK#1.5
if (__str.constructor !== String) {
  $ERROR('#1.5: __str =new String(.12345); __str.constructor === String. Actual: __str.constructor ===' + __str.constructor);
}
//
//////////////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////////////
//CHECK#2
if (__str != "0.12345") {
  $ERROR('#2: __str =new String(.12345); __str =="0.12345". Actual: __str ==' + __str);
}
//
//////////////////////////////////////////////////////////////////////////////

__str = new String(.012345);

//////////////////////////////////////////////////////////////////////////////
//CHECK#3
if (typeof __str !== "object") {
  $ERROR('#3: __str =new String(.012345); typeof __str === "object". Actual: typeof __str ===' + typeof __str);
}
//
//////////////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////////////
//CHECK#3.5 (was mislabeled CHECK#2.5; the $ERROR label says #3.5)
if (__str.constructor !== String) {
  $ERROR('#3.5: __str =new String(.012345); __str.constructor === String. Actual: __str.constructor ===' + __str.constructor);
}
//
//////////////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////////////
//CHECK#4
if (__str != "0.012345") {
  $ERROR('#4: __str =new String(.012345); __str =="0.012345". Actual: __str ==' + __str);
}
//
//////////////////////////////////////////////////////////////////////////////

__str = new String(.0012345);

//////////////////////////////////////////////////////////////////////////////
//CHECK#5
if (typeof __str !== "object") {
  $ERROR('#5: __str =new String(.0012345); typeof __str === "object". Actual: typeof __str ===' + typeof __str);
}
//
//////////////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////////////
//CHECK#5.5
if (__str.constructor !== String) {
  $ERROR('#5.5: __str =new String(.0012345); __str.constructor === String. Actual: __str.constructor ===' + __str.constructor);
}
//
//////////////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////////////
//CHECK#6
if (__str != "0.0012345") {
  $ERROR('#6: __str =new String(.0012345); __str =="0.0012345". Actual: __str ==' + __str);
}
//
//////////////////////////////////////////////////////////////////////////////

__str = new String(.00000012345);

//////////////////////////////////////////////////////////////////////////////
//CHECK#7
if (typeof __str !== "object") {
  $ERROR('#7: __str =new String(.00000012345); typeof __str === "object". Actual: typeof __str ===' + typeof __str);
}
//
//////////////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////////////
//CHECK#7.5
if (__str.constructor !== String) {
  $ERROR('#7.5: __str =new String(.00000012345); __str.constructor === String. Actual: __str.constructor ===' + __str.constructor);
}
//
//////////////////////////////////////////////////////////////////////////////

//////////////////////////////////////////////////////////////////////////////
//CHECK#8
if (__str != "1.2345e-7") {
  $ERROR('#8: __str =new String(.00000012345); __str =="1.2345e-7". Actual: __str ==' + __str);
}
//
//////////////////////////////////////////////////////////////////////////////
| sebastienros/jint | Jint.Tests.Test262/test/built-ins/String/S15.5.2.1_A1_T16.js | JavaScript | bsd-2-clause | 4,487 |
# Homebrew cask for Veertu Anka Flow (macOS VM tooling).
cask 'anka-flow' do
  version '1.1.1.79'
  sha256 '9f91222458f5b7b52bee53a62e878faed4a4894ca02fe3d37b52b79b54c523fa'
  # d1efqjhnhbvc57.cloudfront.net was verified as official when first introduced to the cask
  url "https://d1efqjhnhbvc57.cloudfront.net/AnkaFlow-#{version}.pkg",
      referer: 'https://veertu.com/download-anka-run/'
  appcast 'https://ankadoc.bitbucket.io/release-notes/index.html',
          checkpoint: '902e8a6a51287459ac85fb33f03569004dc105637b9c6a86827beacf53f341c9'
  name 'Veertu Anka Flow'
  homepage 'https://veertu.com/'
  depends_on macos: '>= :yosemite'
  pkg "AnkaFlow-#{version}.pkg"
  # Uninstall: stop the Veertu launch daemons, then run the vendor's
  # uninstall script with sudo (the -f flag forces removal).
  uninstall launchctl: [
                         'com.veertu.nlimit',
                         'com.veertu.vlaunch',
                       ],
            script: {
                      executable: '/Library/Application Support/Veertu/Anka/tools/uninstall.sh',
                      args: ['-f'],
                      sudo: true,
                    }
  # `brew uninstall --zap` additionally removes user data, logs and prefs.
  zap trash: [
               '~/.anka',
               '~/Library/Application Support/Veertu/Anka',
               '~/Library/Logs/Anka',
               '~/Library/Preferences/com.veertu.ankaview.plist',
               '/Library/Application Support/Veertu/Anka',
             ],
      rmdir: [
               '~/Library/Application Support/Veertu',
               '/Library/Application Support/Veertu',
             ]
  caveats <<~EOS
    Installing this Cask means you have AGREED to the
    Veertu End User License Agreement at
    https://veertu.com/terms-and-conditions/
  EOS
end
| tedski/homebrew-cask | Casks/anka-flow.rb | Ruby | bsd-2-clause | 1,593 |
/*
* Copyright (C) 2013 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE, INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#include "config.h"
#if ENABLE(MEDIA_STREAM)
#include "MediaTrackConstraints.h"
#include "MediaTrackConstraint.h"
#include "MediaTrackConstraintSet.h"
#include "NotImplemented.h"
using namespace JSC;
namespace WebCore {
// Factory: wraps the parsed constraints implementation in a new
// MediaTrackConstraints object and returns it to the caller.
RefPtr<MediaTrackConstraints> MediaTrackConstraints::create(PassRefPtr<MediaConstraintsImpl> constraints)
{
    return adoptRef(new MediaTrackConstraints(constraints));
}
// Stores (and takes ownership of) the backing constraints implementation.
MediaTrackConstraints::MediaTrackConstraints(PassRefPtr<MediaConstraintsImpl> constraints)
    : m_constraints(constraints)
{
}
// Returns the list of optional constraints. Not implemented yet, so the
// result is always empty; tracked by
// https://bugs.webkit.org/show_bug.cgi?id=121954.
Vector<PassRefPtr<MediaTrackConstraint>> MediaTrackConstraints::optional(bool) const
{
    notImplemented();
    Vector<PassRefPtr<MediaTrackConstraint>> constraints;
    return constraints;
}
// Returns the mandatory constraint set. Not implemented yet, so this
// always answers null.
RefPtr<MediaTrackConstraintSet> MediaTrackConstraints::mandatory(bool) const
{
    // https://bugs.webkit.org/show_bug.cgi?id=121954
    notImplemented();
    return nullptr;
}
} // namespace WebCore
#endif
| aosm/WebCore | Modules/mediastream/MediaTrackConstraints.cpp | C++ | bsd-2-clause | 2,294 |
# Generated by Django 3.1.4 on 2020-12-15 15:58
from django.db import migrations
def copy_labels(apps, schema_editor):
    """Copy each trek's legacy ``labels`` onto the new ``labels2`` relation.

    For every label attached to a trek, an equivalent ``common.Label`` is
    fetched or created (matched by name; ``advice`` and ``filter_rando`` are
    only used as defaults on creation) and linked through ``labels2``.
    Running it twice is harmless: ``get_or_create`` reuses existing labels
    and ``add`` ignores duplicates.
    """
    Trek = apps.get_model('trekking', 'Trek')
    Label = apps.get_model('common', 'Label')
    # prefetch_related avoids one "labels" query per trek (N+1) during iteration.
    for trek in Trek.objects.prefetch_related('labels'):
        for label in trek.labels.all():
            label2, _ = Label.objects.get_or_create(
                name=label.name,
                defaults={'advice': label.advice, 'filter': label.filter_rando},
            )
            trek.labels2.add(label2)
class Migration(migrations.Migration):
    # Data migration: mirrors the legacy trekking labels into the shared
    # ``common.Label`` model via the ``labels2`` relation added in 0023.
    dependencies = [
        ('trekking', '0023_trek_labels2'),
    ]
    operations = [
        # Forward-only: no reverse callable is supplied, so this migration
        # cannot be unapplied automatically.
        migrations.RunPython(copy_labels),
    ]
| makinacorpus/Geotrek | geotrek/trekking/migrations/0024_copy_labels.py | Python | bsd-2-clause | 649 |
# Homebrew formula for cuetools (cueconvert, cueprint, cuebreakpoints).
class Cuetools < Formula
  desc "Utilities for .cue and .toc files"
  homepage "https://github.com/svend/cuetools"
  url "https://github.com/svend/cuetools/archive/1.4.1.tar.gz"
  sha256 "24a2420f100c69a6539a9feeb4130d19532f9f8a0428a8b9b289c6da761eb107"
  head "https://github.com/svend/cuetools.git"
  bottle do
    cellar :any_skip_relocation
    sha256 "1e36c3c8d2d53947b73a9f0a0aed74145e2b1890f83764de02f1d12566d0300f" => :mojave
    sha256 "4393d6db857a9568a34de3a09ff049fbec9a55a95b029eacd24e35d6ce792074" => :high_sierra
    sha256 "9456e5957a78f993f5a8cef76aa583ac6a42a8298fb05bded243dbaf810f9a44" => :sierra
    sha256 "7f0effc75d64fca0f2695b5f7ddb4d8713cc83522d40dcd37842e83c120ac117" => :el_capitan
    sha256 "81d06ef2e3d98061f332a535b810102c1be0505371c1ac1aed711cf2ae8de5a3" => :yosemite
    sha256 "95216c0df3840b2602e61dd3bef7d4c9b65cec0315e5b23ac87329320d9f6be9" => :mavericks
  end
  # Build-time tools needed because we build from a raw GitHub tarball
  # (no pre-generated configure script).
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
  # see https://github.com/svend/cuetools/pull/18
  patch :DATA
  def install
    # Regenerate the build system, then a standard autotools install.
    system "autoreconf", "-i"
    system "./configure", "--prefix=#{prefix}", "--mandir=#{man}"
    system "make", "install"
  end
  test do
    # Smoke test: convert a minimal cue sheet to toc format.
    (testpath/"test.cue").write <<~EOS
      FILE "sampleimage.bin" BINARY
        TRACK 01 MODE1/2352
          INDEX 01 00:00:00
    EOS
    system "cueconvert", testpath/"test.cue", testpath/"test.toc"
    assert_predicate testpath/"test.toc", :exist?
  end
end
__END__
diff --git a/configure.ac b/configure.ac
index f54bb92..84ab467 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1,5 +1,5 @@
AC_INIT([cuetools], [1.4.0], [svend@ciffer.net])
-AM_INIT_AUTOMAKE([-Wall -Werror foreign])
+AM_INIT_AUTOMAKE([-Wall -Werror -Wno-extra-portability foreign])
AC_PROG_CC
AC_PROG_INSTALL
AC_PROG_RANLIB
| jdubois/homebrew-core | Formula/cuetools.rb | Ruby | bsd-2-clause | 1,827 |
#define SOL_CHECK_ARGUMENTS 1
#include <sol.hpp>
#include "assert.hpp"
#include <iostream>
// Demonstrates transferring a Lua function reference created on a
// coroutine thread into the main sol::state so it survives after the
// coroutine (and its locals) are garbage-collected.
int main(int, char*[]) {
	std::cout << "=== coroutine state transfer ===" << std::endl;
	sol::state lua;
	lua.open_libraries();
	sol::function transferred_into;
	// Called from inside the coroutine; `t` is the Lua function `g`.
	lua["f"] = [&lua, &transferred_into](sol::object t, sol::this_state this_L) {
		std::cout << "state of main : " << (void*)lua.lua_state() << std::endl;
		std::cout << "state of function : " << (void*)this_L.lua_state() << std::endl;
		// pass original lua_State* (or sol::state/sol::state_view)
		// transfers ownership from the state of "t",
		// to the "lua" sol::state
		transferred_into = sol::function(lua, t);
	};
	lua.script(R"(
		i = 0
		function test()
			co = coroutine.create(
				function()
					local g = function() i = i + 1 end
					f(g)
					g = nil
					collectgarbage()
				end
			)
			coroutine.resume(co)
			co = nil
			collectgarbage()
		end
	)");
	// give it a try
	lua.safe_script("test()");
	// should call 'g' from main thread, increment i by 1
	transferred_into();
	// check
	int i = lua["i"];
	c_assert(i == 1);
	std::cout << std::endl;
	return 0;
}
| Project-OSRM/osrm-backend | third_party/sol2/examples/coroutine_state.cpp | C++ | bsd-2-clause | 1,131 |
/*
* w-driver-volleyball-lstm-evaluator.cpp
*
* Created on: Jul 13, 2015
* Author: msibrahi
*/
#include <stdio.h>

#include <algorithm>
#include <iomanip>
#include <iostream>
#include <map>
#include <set>
#include <set>
#include <string>
#include <vector>
using std::vector;
using std::set;
using std::multiset;
using std::map;
using std::pair;
using std::string;
using std::endl;
using std::cerr;
#include "boost/algorithm/string.hpp"
#include "google/protobuf/text_format.h"
#include "caffe/blob.hpp"
#include "caffe/common.hpp"
#include "caffe/net.hpp"
#include "caffe/proto/caffe.pb.h"
#include "caffe/util/db.hpp"
#include "caffe/util/io.hpp"
#include "caffe/vision_layers.hpp"
using caffe::Blob;
using caffe::Caffe;
using caffe::Datum;
using caffe::Net;
using caffe::Layer;
using caffe::LayerParameter;
using caffe::DataParameter;
using caffe::NetParameter;
using boost::shared_ptr;
namespace db = caffe::db;
#include "../src/utilities.h"
#include "../src/leveldb-reader.h"
// Prints a per-frame comparison of predicted vs. ground-truth labels,
// the overall accuracy, and two confusion matrices (absolute counts,
// then row-normalised percentages) to stderr.
//
// truthLabels  - ground-truth label per test case
// resultLabels - predicted label per test case (same length/order)
// w            - temporal window size; reported in the summary only
void evaluate(vector<int> truthLabels, vector<int> resultLabels, int w) {
  set<int> total_labels;
  map<int, map<int, int> > confusion_freq_maps; // [truth][predicted] -> count
  map<int, int> label_freq; // occurrences of each ground-truth label
  int correct = 0;
  cerr<<"\n\n";
  for (int i = 0; i < (int) truthLabels.size(); ++i) {
    correct += truthLabels[i] == resultLabels[i];
    cerr << "Test " << i + 1 << ": Result = " << resultLabels[i] << " GroundTruth = " << truthLabels[i] << "\n";
    confusion_freq_maps[truthLabels[i]][resultLabels[i]]++;
    // Track every label seen on either side so the matrices are square.
    total_labels.insert(truthLabels[i]);
    total_labels.insert(resultLabels[i]);
    label_freq[truthLabels[i]]++;
  }
  // Fixed-point output with two decimals for all percentages below.
  cerr.setf(std::ios::fixed);
  cerr.precision(2);
  cerr<<"\n\n";
  cerr << "Total testing frames: " << truthLabels.size() << " with temporal window: " << w << "\n";
  cerr << "Temporal accuracy : " << 100.0 * correct / truthLabels.size() << " %\n";
  cerr << "\n=======================================================================================\n";
  cerr << "\nConfusion Matrix - Truth (col) / Result(row)\n\n";
  cerr << std::setw(5) << "T/R" << ": ";
  for (auto r_label : total_labels)
    cerr << std::setw(5) << r_label;
  cerr << "\n=======================================================================================\n";
  // First matrix: raw counts, one row per ground-truth label.
  for (auto t_label : total_labels) {
    int sum = 0;
    cerr << std::setw(5) << t_label << ": ";
    for (auto r_label : total_labels)
    {
      cerr << std::setw(5) << confusion_freq_maps[t_label][r_label];
      sum += confusion_freq_maps[t_label][r_label];
    }
    double percent = 0;
    if (label_freq[t_label] > 0)
      percent = 100.0 * confusion_freq_maps[t_label][t_label] / label_freq[t_label];
    cerr << " \t=> Total Correct = " << std::setw(5) << confusion_freq_maps[t_label][t_label] << " / " << std::setw(5) << sum << " = " << percent << " %\n";
  }
  cerr<<"\n\n";
  cerr << std::setw(7) << "T/R" << ": ";
  for (auto r_label : total_labels)
    cerr << std::setw(7) << r_label;
  cerr << "\n=======================================================================================\n";
  // Second matrix: the same cells expressed as percentages of each row.
  for (auto t_label : total_labels) {
    cerr << std::setw(7) << t_label << ": ";
    for (auto r_label : total_labels)
    {
      double percent = 0;
      if (label_freq[t_label] > 0)
        percent = 100.0 * confusion_freq_maps[t_label][r_label] / label_freq[t_label];
      cerr << std::setw(7) << percent;
    }
    cerr<<"\n";
  }
  cerr<<"\nTo get labels corresponding to IDs..see dataset loading logs\n";
}
// Returns the index of the largest element of v; ties resolve to the
// first occurrence (same behavior as the previous manual scan).
// Precondition: v must be non-empty (enforced by assert).
int getArgmax(std::vector<float> &v) {
  assert(v.size() > 0);
  // std::max_element returns an iterator to the first maximum element.
  return static_cast<int>(std::distance(v.begin(), std::max_element(v.begin(), v.end())));
}
// Evaluation pipeline: loads a trained Caffe net, forwards
// num_mini_batches batches of temporal-window test data through it, reads
// the blob named blob_name as per-frame class scores, sums those scores
// over each window and takes the argmax as the window's prediction, then
// reports accuracy and confusion matrices via evaluate().
//
// CLI arguments are consumed in order: <frames_window> <CPU|GPU>
// [device_id] <model.caffemodel> <net.prototxt> <blob_name>
// <num_mini_batches>.
template<typename Dtype>
void feature_extraction_pipeline(int &argc, char** &argv) {
  int frames_window = MostCV::consumeIntParam(argc, argv);
  LOG(ERROR)<< "Temporal Window = " << frames_window;
  string computation_mode = MostCV::consumeStringParam(argc, argv);
  if (strcmp(computation_mode.c_str(), "GPU") == 0) {
    // GPU mode consumes an extra argument: the CUDA device id.
    uint device_id = MostCV::consumeIntParam(argc, argv);
    LOG(ERROR)<< "Using GPU";
    LOG(ERROR)<< "Using Device_id = " << device_id;
    Caffe::SetDevice(device_id);
    Caffe::set_mode(Caffe::GPU);
  } else {
    LOG(ERROR)<< "Using CPU";
    Caffe::set_mode(Caffe::CPU);
  }
  string pretrained_binary_proto(MostCV::consumeStringParam(argc, argv));
  string feature_extraction_proto(MostCV::consumeStringParam(argc, argv));
  LOG(ERROR)<<"Model: "<<pretrained_binary_proto<<"\n";
  LOG(ERROR)<<"Proto: "<<feature_extraction_proto<<"\n";
  LOG(ERROR)<<"Creating the test network\n";
  shared_ptr<Net<Dtype> > feature_extraction_net(new Net<Dtype>(feature_extraction_proto, caffe::Phase::TEST));
  LOG(ERROR)<<"Loading the Model\n";
  feature_extraction_net->CopyTrainedLayersFrom(pretrained_binary_proto);
  string blob_name = MostCV::consumeStringParam(argc, argv);
  LOG(ERROR)<<"blob_name: "<<blob_name<<"\n";
  CHECK(feature_extraction_net->has_blob(blob_name)) << "Unknown feature blob name " << blob_name << " in the network " << feature_extraction_proto;
  int num_mini_batches = MostCV::consumeIntParam(argc, argv);
  LOG(ERROR)<<"num_mini_batches: "<<num_mini_batches<<"\n";
  vector<Blob<float>*> input_vec;
  int batch_size = -1;
  int dim_features = -1;
  std::set<int> labels; // every (2w+1) * batch size MUST all have same label
  vector<int> truthLabels;
  vector<int> propAvgMaxResultLabels;
  for (int batch_index = 0; batch_index < num_mini_batches; ++batch_index) { // e.g. 100 iterations. Probably roll on data if needed
    feature_extraction_net->Forward(input_vec); // Take one batch of data (e.g. 50 images), and pass them to end of network
    // Load the Labels
    const shared_ptr<Blob<Dtype> > label_blob = feature_extraction_net->blob_by_name("label");
    batch_size = label_blob->num(); // e.g. 50 batches
    // Each batch is expected to be exactly one temporal window.
    assert(batch_size == frames_window);
    int current_label = -1;
    for (int n = 0; n < batch_size; ++n) {
      const Dtype* label_blob_data = label_blob->cpu_data() + label_blob->offset(n); // move offset to ith blob in batch
      current_label = label_blob_data[0]; // all will be same value
      labels.insert(current_label);
      if (n == 0)
        truthLabels.push_back(current_label);
    }
    // Sanity check: a window mixing different labels indicates bad data.
    if (labels.size() != 1) { // every 1 batch should have same value
      LOG(ERROR)<< "Something wrong. every 1 batch should have same value. New value at element " << batch_index + 1 << "\n";
      assert(false);
    }
    labels.clear();
    const shared_ptr<Blob<Dtype> > feature_blob = feature_extraction_net->blob_by_name(blob_name); // get e.g. fc7 blob for the batch
    dim_features = feature_blob->count() / batch_size;
    assert(dim_features > 1);
    const Dtype* feature_blob_data = nullptr;
    // Sum the per-frame score vectors over the window, then predict the
    // class with the highest summed (equivalently, averaged) score.
    vector<float> test_case_sum(dim_features);
    for (int n = 0; n < batch_size; ++n) {
      feature_blob_data = feature_blob->cpu_data() + feature_blob->offset(n); // move offset to ith blob in batch
      vector<float> test_case;
      for (int j = 0; j < dim_features; ++j) {
        test_case.push_back(feature_blob_data[j]);
        test_case_sum[j] += feature_blob_data[j];
      }
    }
    propAvgMaxResultLabels.push_back( getArgmax(test_case_sum) );
  }
  evaluate(truthLabels, propAvgMaxResultLabels, 1);
}
// Entry point: initialises logging, consumes the program name, validates
// the argument count and delegates to feature_extraction_pipeline<float>.
int main(int argc, char** argv) {
  ::google::InitGoogleLogging(argv[0]);
  MostCV::consumeStringParam(argc, argv); // read program entry data
  if (argc < 6) {
    LOG(ERROR)<< "At least 6 parameters expected\n";
    assert(false);
  }
  LOG(ERROR)<< "Make sure to have LD_LIBRARY_PATH pointing to LSTM implementation in case of LSTM\n\n";
  feature_extraction_pipeline<float>(argc, argv);
  return 0;
}
| mostafa-saad/deep-activity-rec | eclipse-project/ibrahim16-deep-act-rec-part/apps/exePhase4.cpp | C++ | bsd-2-clause | 7,728 |
/*
* Copyright (c) 2008-2011 Juli Mallett. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#include <common/test.h>
#include <event/action.h>
// Test case: cancelling an Action must invoke its cancellation callback.
// The constructor creates the action and cancels it immediately; the
// destructor records a pass iff the callback fired.
struct Cancelled {
	Test test_;
	bool cancelled_; // set by cancel(); checked at destruction time
	Cancelled(TestGroup& g)
	: test_(g, "Cancellation does occur."),
	  cancelled_(false)
	{
		// cancel() is expected to run the callback before returning;
		// the destructor verifies that it did.
		Action *a = cancellation(this, &Cancelled::cancel);
		a->cancel();
	}
	~Cancelled()
	{
		if (cancelled_)
			test_.pass();
	}
	void cancel(void)
	{
		// Guard against the callback firing more than once.
		ASSERT("/cancelled", !cancelled_);
		cancelled_ = true;
	}
};
// Test case: an Action that is left outstanding. The callback must not
// fire on its own; it fires only when the destructor explicitly cancels
// the action during teardown.
struct NotCancelled {
	Test test_;
	bool cancelled_; // set by cancel(); must still be false at teardown
	Action *action_; // outstanding action, cancelled in the destructor
	NotCancelled(TestGroup& g)
	: test_(g, "Cancellation does not occur."),
	  cancelled_(false),
	  action_(NULL)
	{
		action_ = cancellation(this, &NotCancelled::cancel);
	}
	~NotCancelled()
	{
		if (!cancelled_) {
			if (action_ != NULL) {
				action_->cancel();
				action_ = NULL;
				// cancel() must have run during the cancel() call above.
				ASSERT("/not/cancelled", cancelled_);
			}
		}
	}
	void cancel(void)
	{
		ASSERT("/not/cancelled", !cancelled_);
		cancelled_ = true;
		test_.pass();
	}
};
// Exercises Action::cancel in two scenarios: immediate cancellation
// (callback must fire) and deferred cancellation at destruction time.
int
main(void)
{
	TestGroup g("/test/action/cancel1", "Action::cancel #1");
	// Each scope forces the fixture's destructor (and its pass/fail
	// bookkeeping) to run before the next case starts.
	{
		Cancelled _(g);
	}
	{
		NotCancelled _(g);
	}
}
| diegows/wanproxy | event/test/action-cancel1/action-cancel1.cc | C++ | bsd-2-clause | 2,406 |
//
// Created by 王晓辰 on 15/10/2.
//
#include "test_dirname.h"
#include <ftxpath.h>
#include "tester.h"
bool test_dirname_path()
{
std::string path = "/a/b/c/d";
std::string dirname = "/a/b/c";
return dirname == ftx::path::dirname(path);
}
bool test_dirname_onename()
{
std::string name = "name";
return ftx::path::dirname(name).empty();
}
bool test_dirname_filepath()
{
std::string filepath = "a/b/c/d.txt";
std::string dirname = "a/b/c";
return dirname == ftx::path::dirname(filepath);
}
bool test_dirname_folderpath()
{
std::string folderpath = "a/b/c/folder/";
std::string dirname = "a/b/c/folder";
return dirname == ftx::path::dirname(folderpath);
}
bool test_dirname_root()
{
std::string root = "/";
return root == ftx::path::dirname(root);
}
bool test_dirname() {
LOG_TEST_STRING("");
TEST_BOOL_TO_BOOL(test_dirname_path(), "dir dirname failed");
TEST_BOOL_TO_BOOL(test_dirname_onename(), "one name dirname failed");
TEST_BOOL_TO_BOOL(test_dirname_filepath(), "file path dirname failed");
TEST_BOOL_TO_BOOL(test_dirname_folderpath(), "folder path dirname failed");
TEST_BOOL_TO_BOOL(test_dirname_root(), "root dirname failed");
return true;
}
| XiaochenFTX/ftxpath | test/test_dirname.cpp | C++ | bsd-2-clause | 1,239 |
/*
* Copyright (C) 2009 Google Inc. All rights reserved.
* Copyright (C) 2013 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#include "config.h"
#include "RuntimeEnabledFeatures.h"
#include "DatabaseManager.h"
#include "MediaPlayer.h"
#include "SharedWorkerRepository.h"
#include "WebSocket.h"
#include <wtf/NeverDestroyed.h>
namespace WebCore {
RuntimeEnabledFeatures::RuntimeEnabledFeatures()
: m_isLocalStorageEnabled(true)
, m_isSessionStorageEnabled(true)
, m_isWebkitNotificationsEnabled(false)
, m_isApplicationCacheEnabled(true)
, m_isDataTransferItemsEnabled(true)
, m_isGeolocationEnabled(true)
, m_isIndexedDBEnabled(false)
, m_isTouchEnabled(true)
, m_isDeviceMotionEnabled(true)
, m_isDeviceOrientationEnabled(true)
, m_isSpeechInputEnabled(true)
, m_isCSSExclusionsEnabled(true)
, m_isCSSShapesEnabled(true)
, m_isCSSRegionsEnabled(false)
, m_isCSSCompositingEnabled(false)
, m_isLangAttributeAwareFormControlUIEnabled(false)
#if PLATFORM(IOS)
, m_isPluginReplacementEnabled(true)
#else
, m_isPluginReplacementEnabled(false)
#endif
#if ENABLE(SCRIPTED_SPEECH)
, m_isScriptedSpeechEnabled(false)
#endif
#if ENABLE(MEDIA_STREAM)
, m_isMediaStreamEnabled(true)
, m_isPeerConnectionEnabled(true)
#endif
#if ENABLE(LEGACY_CSS_VENDOR_PREFIXES)
, m_isLegacyCSSVendorPrefixesEnabled(false)
#endif
#if ENABLE(JAVASCRIPT_I18N_API)
, m_isJavaScriptI18NAPIEnabled(false)
#endif
#if ENABLE(VIDEO_TRACK)
, m_isVideoTrackEnabled(true)
#endif
#if ENABLE(INPUT_TYPE_DATE)
, m_isInputTypeDateEnabled(true)
#endif
#if ENABLE(INPUT_TYPE_DATETIME_INCOMPLETE)
, m_isInputTypeDateTimeEnabled(false)
#endif
#if ENABLE(INPUT_TYPE_DATETIMELOCAL)
, m_isInputTypeDateTimeLocalEnabled(true)
#endif
#if ENABLE(INPUT_TYPE_MONTH)
, m_isInputTypeMonthEnabled(true)
#endif
#if ENABLE(INPUT_TYPE_TIME)
, m_isInputTypeTimeEnabled(true)
#endif
#if ENABLE(INPUT_TYPE_WEEK)
, m_isInputTypeWeekEnabled(true)
#endif
#if ENABLE(CSP_NEXT)
, m_areExperimentalContentSecurityPolicyFeaturesEnabled(false)
#endif
#if ENABLE(FONT_LOAD_EVENTS)
, m_isFontLoadEventsEnabled(false)
#endif
#if ENABLE(GAMEPAD)
, m_areGamepadsEnabled(false)
#endif
{
}
RuntimeEnabledFeatures& RuntimeEnabledFeatures::sharedFeatures()
{
static NeverDestroyed<RuntimeEnabledFeatures> runtimeEnabledFeatures;
return runtimeEnabledFeatures;
}
#if ENABLE(JAVASCRIPT_I18N_API)
bool RuntimeEnabledFeatures::javaScriptI18NAPIEnabled()
{
return m_isJavaScriptI18NAPIEnabled;
}
#endif
#if ENABLE(VIDEO)
bool RuntimeEnabledFeatures::audioEnabled() const
{
return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::htmlMediaElementEnabled() const
{
return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::htmlAudioElementEnabled() const
{
return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::htmlVideoElementEnabled() const
{
return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::htmlSourceElementEnabled() const
{
return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::mediaControllerEnabled() const
{
return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::mediaErrorEnabled() const
{
return MediaPlayer::isAvailable();
}
bool RuntimeEnabledFeatures::timeRangesEnabled() const
{
return MediaPlayer::isAvailable();
}
#endif
#if ENABLE(SHARED_WORKERS)
bool RuntimeEnabledFeatures::sharedWorkerEnabled() const
{
return SharedWorkerRepository::isAvailable();
}
#endif
#if ENABLE(WEB_SOCKETS)
bool RuntimeEnabledFeatures::webSocketEnabled() const
{
return WebSocket::isAvailable();
}
#endif
} // namespace WebCore
| aosm/WebCore | bindings/generic/RuntimeEnabledFeatures.cpp | C++ | bsd-2-clause | 5,200 |
package org.joshy.gfx.test.itunes;
/**
* Created by IntelliJ IDEA.
* User: josh
* Date: Jan 28, 2010
* Time: 9:20:01 PM
* To change this template use File | Settings | File Templates.
*/
class Song {
int trackNumber;
int totalTracks;
String name;
String album;
String artist;
int duration;
public Song(int trackNumber, int totalTracks, String name, String album, String artist, int duration) {
this.trackNumber = trackNumber;
this.totalTracks = totalTracks;
this.name = name;
this.album = album;
this.artist = artist;
this.duration = duration;
}
}
| tonykwok/leonardosketch.amino | src/org/joshy/gfx/test/itunes/Song.java | Java | bsd-2-clause | 622 |
# Homebrew formula for the DNAnexus `dx` CLI, installed into an isolated
# Python virtualenv with all of its PyPI dependencies vendored below.
class Dxpy < Formula
  include Language::Python::Virtualenv

  desc "DNAnexus toolkit utilities and platform API bindings for Python"
  homepage "https://github.com/dnanexus/dx-toolkit"
  url "https://files.pythonhosted.org/packages/7e/d8/9529a045270fe2cee67c01fde759864b9177ecdd486d016c3a38863f3895/dxpy-0.320.0.tar.gz"
  sha256 "aef4c16d73cf9e7513d1f8e503f7e0d3ed7f2135fe6f8596a97196a8df109977"
  license "Apache-2.0"
  bottle do
    sha256 cellar: :any,                 arm64_monterey: "f833c2a2b486b3a54ba1b4f5c205aa73dc815b498053bfe1197a7c497a6e4646"
    sha256 cellar: :any,                 arm64_big_sur:  "984872c26ab277ba25764029bea2df0bb697452e589c537cc6c2fbec77fb463e"
    sha256 cellar: :any,                 monterey:       "90709e2bff817faabb7ab6e3a726bde68dadbc33bc4ef9e830154f8d5b852b4b"
    sha256 cellar: :any,                 big_sur:        "3a063ce13281a975cfc03a14b58ddd33b8ceb5dcc851c6f5f8e01e7d4c4d5fdf"
    sha256 cellar: :any,                 catalina:       "56f0159e7a3193f3baa3c6710a70ee2d141d74fb46b9b9aa6128149240d6a603"
    sha256 cellar: :any_skip_relocation, x86_64_linux:   "c6c14d2acbe34e5763b3b6352af63d3252655b479ae5cefe556dbbe51720e6c4"
  end
  depends_on "rust" => :build # for cryptography
  depends_on "python@3.10"
  depends_on "six"
  on_macos do
    depends_on "readline"
  end
  on_linux do
    depends_on "pkg-config" => :build
    depends_on "libffi"
  end
  # Python dependencies pinned below are installed into the virtualenv.
  resource "argcomplete" do
    url "https://files.pythonhosted.org/packages/05/f8/67851ae4fe5396ba6868c5d84219b81ea6a5d53991a6853616095c30adc0/argcomplete-2.0.0.tar.gz"
    sha256 "6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20"
  end
  resource "certifi" do
    url "https://files.pythonhosted.org/packages/6c/ae/d26450834f0acc9e3d1f74508da6df1551ceab6c2ce0766a593362d6d57f/certifi-2021.10.8.tar.gz"
    sha256 "78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"
  end
  resource "cffi" do
    url "https://files.pythonhosted.org/packages/00/9e/92de7e1217ccc3d5f352ba21e52398372525765b2e0c4530e6eb2ba9282a/cffi-1.15.0.tar.gz"
    sha256 "920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"
  end
  resource "charset-normalizer" do
    url "https://files.pythonhosted.org/packages/e8/e8/b6cfd28fb430b2ec9923ad0147025bf8bbdf304b1eb3039b69f1ce44ed6e/charset-normalizer-2.0.11.tar.gz"
    sha256 "98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"
  end
  resource "cryptography" do
    url "https://files.pythonhosted.org/packages/f9/4b/1cf8e281f7ae4046a59e5e39dd7471d46db9f61bb564fddbff9084c4334f/cryptography-36.0.1.tar.gz"
    sha256 "53e5c1dc3d7a953de055d77bef2ff607ceef7a2aac0353b5d630ab67f7423638"
  end
  resource "idna" do
    url "https://files.pythonhosted.org/packages/62/08/e3fc7c8161090f742f504f40b1bccbfc544d4a4e09eb774bf40aafce5436/idna-3.3.tar.gz"
    sha256 "9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"
  end
  resource "psutil" do
    url "https://files.pythonhosted.org/packages/47/b6/ea8a7728f096a597f0032564e8013b705aa992a0990becd773dcc4d7b4a7/psutil-5.9.0.tar.gz"
    sha256 "869842dbd66bb80c3217158e629d6fceaecc3a3166d3d1faee515b05dd26ca25"
  end
  resource "pycparser" do
    url "https://files.pythonhosted.org/packages/5e/0b/95d387f5f4433cb0f53ff7ad859bd2c6051051cebbb564f139a999ab46de/pycparser-2.21.tar.gz"
    sha256 "e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"
  end
  resource "python-dateutil" do
    url "https://files.pythonhosted.org/packages/4c/c4/13b4776ea2d76c115c1d1b84579f3764ee6d57204f6be27119f13a61d0a9/python-dateutil-2.8.2.tar.gz"
    sha256 "0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"
  end
  resource "requests" do
    url "https://files.pythonhosted.org/packages/e7/01/3569e0b535fb2e4a6c384bdbed00c55b9d78b5084e0fb7f4d0bf523d7670/requests-2.26.0.tar.gz"
    sha256 "b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"
  end
  resource "urllib3" do
    url "https://files.pythonhosted.org/packages/b0/b1/7bbf5181f8e3258efae31702f5eab87d8a74a72a0aa78bc8c08c1466e243/urllib3-1.26.8.tar.gz"
    sha256 "0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"
  end
  resource "websocket-client" do
    url "https://files.pythonhosted.org/packages/8b/0f/52de51b9b450ed52694208ab952d5af6ebbcbce7f166a48784095d930d8c/websocket_client-0.57.0.tar.gz"
    sha256 "d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010"
  end
  def install
    virtualenv_install_with_resources
  end
  test do
    # A fresh install has no login/session state: `dx env` should report
    # the default API endpoint with no user or workspace set.
    dxenv = <<~EOS
      API server protocol	https
      API server host		api.dnanexus.com
      API server port		443
      Current workspace	None
      Current folder	None
      Current user	None
    EOS
    assert_match dxenv, shell_output("#{bin}/dx env")
  end
end
| filcab/homebrew-core | Formula/dxpy.rb | Ruby | bsd-2-clause | 4,787 |
/* global define */
define([
    'jquery',
    'underscore',
    './dist',
    './axis'
], function($, _, dist, axis) {
    // Distribution chart extended with an inline form for choosing the
    // x-axis, y-axis and series fields, plus a fullsize toggle button.
    var EditableFieldChart = dist.FieldChart.extend({
        template: 'charts/editable-chart',
        // Fade durations (ms) for the toolbar and form animations.
        toolbarAnimationTime: 200,
        formAnimationTime: 300,
        // Parent events plus the fullsize toggle.
        events: _.extend({
            'click .fullsize': 'toggleExpanded'
        }, dist.FieldChart.prototype.events),
        // Named UI element selectors merged with the parent's.
        ui: _.extend({
            toolbar: '.btn-toolbar',
            fullsizeToggle: '.fullsize',
            form: '.editable',
            xAxis: '[name=x-Axis]',
            yAxis: '[name=y-Axis]',
            series: '[name=series]'
        }, dist.FieldChart.prototype.ui),
        // After render: when editing is disabled, remove the form and
        // toolbar entirely; otherwise bind a FieldAxis selector to each
        // dropdown and restore persisted model state (axes, expanded).
        onRender: function() {
            if (this.options.editable === false) {
                this.ui.form.detach();
                this.ui.toolbar.detach();
            }
            else {
                this.xAxis = new axis.FieldAxis({
                    el: this.ui.xAxis,
                    collection: this.collection
                });
                this.yAxis = new axis.FieldAxis({
                    el: this.ui.yAxis,
                    collection: this.collection
                });
                // Series grouping only applies to enumerable fields.
                this.series = new axis.FieldAxis({
                    el: this.ui.series,
                    enumerableOnly: true,
                    collection: this.collection
                });
                if (this.model) {
                    // An existing x-axis selection means a saved chart;
                    // keep the form hidden until the user asks to edit.
                    if (this.model.get('xAxis')) {
                        this.ui.form.hide();
                    }
                    if (this.model.get('expanded')) {
                        this.expand();
                    }
                    else {
                        this.contract();
                    }
                }
            }
        },
customizeOptions: function(options) {
this.ui.status.detach();
this.ui.heading.text(options.title.text);
options.title.text = '';
// Check if any data is present.
if (!options.series[0]) {
this.ui.chart.html('<p class=no-data>Unfortunately, there is no ' +
'data to graph here.</p>');
return;
}
this.ui.form.hide();
var statusText = [];
if (options.clustered) {
statusText.push('Clustered');
}
if (statusText[0]) {
this.ui.status.text(statusText.join(', ')).show();
this.ui.heading.append(this.$status);
}
if (this.interactive(options)) {
this.enableChartEvents();
}
$.extend(true, options, this.chartOptions);
options.chart.renderTo = this.ui.chart[0];
return options;
},
// Ensure rapid successions of this method do not occur.
changeChart: function(event) {
if (event) {
event.preventDefault();
}
var _this = this;
this.collection.when(function() {
var xAxis, yAxis, series, seriesIdx;
// TODO fix this nonsense
if (event === null || typeof event === 'undefined') {
xAxis = _this.model.get('xAxis');
if (xAxis) {
_this.xAxis.$el.val(xAxis.toString());
}
yAxis = _this.model.get('yAxis');
if (yAxis) {
_this.yAxis.$el.val(yAxis.toString());
}
series = _this.model.get('series');
if (series) {
this.series.$el.val(series.toString());
}
}
xAxis = _this.xAxis.getSelected();
yAxis = _this.yAxis.getSelected();
series = _this.series.getSelected();
if (!xAxis) return;
var url = _this.model.links.distribution;
var fields = [xAxis];
var data = 'dimension=' + xAxis.id;
if (yAxis) {
fields.push(yAxis);
data = data + '&dimension=' + yAxis.id;
}
if (series) {
if (yAxis) {
seriesIdx = 2;
}
else {
seriesIdx = 1;
}
data = data + '&dimension=' + series.id;
}
if (event && _this.model) {
_this.model.set({
xAxis: xAxis.id,
yAxis: (yAxis) ? yAxis.id : null,
series: (series) ? series.id : null
});
}
_this.update(url, data, fields, seriesIdx);
});
},
// Disable selected fields since using the same field for multiple
// axes doesn't make sense.
disableSelected: function(event) {
var $target = $(event.target);
// Changed to an empty value, unhide other dropdowns.
if (this.xAxis.el === event.target) {
this.yAxis.$('option').prop('disabled', false);
this.series.$('option').prop('disabled', false);
}
else if (this.yAxis.el === event.target) {
this.xAxis.$('option').prop('disabled', false);
this.series.$('option').prop('disabled', false);
}
else {
this.xAxis.$('option').prop('disabled', false);
this.yAxis.$('option').prop('disabled', false);
}
var value = $target.val();
if (value !== '') {
if (this.xAxis.el === event.target) {
this.yAxis.$('option[value=' + value + ']')
.prop('disabled', true).val('');
this.series.$('option[value=' + value + ']')
.prop('disabled', true).val('');
}
else if (this.yAxis.el === event.target) {
this.xAxis.$('option[value=' + value + ']')
.prop('disable', true).val('');
this.series.$('option[value=' + value + ']')
.prop('disable', true).val('');
}
else {
this.xAxis.$('option[value=' + value + ']')
.prop('disable', true).val('');
this.yAxis.$('option[value=' + value + ']')
.prop('disable', true).val('');
}
}
},
toggleExpanded: function() {
var expanded = this.model.get('expanded');
if (expanded) {
this.contract();
}
else {
this.expand();
}
this.model.save({
expanded: !expanded
});
},
resize: function() {
var chartWidth = this.ui.chart.width();
if (this.chart) {
this.chart.setSize(chartWidth, null, false);
}
},
expand: function() {
this.$fullsizeToggle.children('i')
.removeClass('icon-resize-small')
.addClass('icon-resize-full');
this.$el.addClass('expanded');
this.resize();
},
contract: function() {
this.$fullsizeToggle.children('i')
.removeClass('icon-resize-full')
.addClass('icon-resize-small');
this.$el.removeClass('expanded');
this.resize();
},
hideToolbar: function() {
this.ui.toolbar.fadeOut(this.toolbarAnimationTime);
},
showToolbar: function() {
this.ui.toolbar.fadeIn(this.toolbarAnimationTime);
},
toggleEdit: function() {
if (this.ui.form.is(':visible')) {
this.ui.form.fadeOut(this.formAnimationTime);
}
else {
this.ui.form.fadeIn(this.formAnimationTime);
}
}
});
return {
EditableFieldChart: EditableFieldChart
};
});
| chop-dbhi/cilantro | src/js/cilantro/ui/charts/editable.js | JavaScript | bsd-2-clause | 8,396 |
# frozen_string_literal: true

#
# Loads every Byebug command implementation so that each command class
# registers itself with the command processor. Load order matches the
# alphabetical listing of the files under commands/.
#
%w[
  break catch condition continue debug delete disable display down edit
  enable finish frame help history info interrupt irb kill list method
  next pry quit restart save set show skip source step thread tracevar
  undisplay untracevar up var where
].each { |command| require_relative "commands/#{command}" }
| deivid-rodriguez/byebug | lib/byebug/commands.rb | Ruby | bsd-2-clause | 1,337 |
#!/usr/bin/env python3
"""Merge two CAN databases by matching frames on signal start bits.

Demonstrates canmatrix's join helper: the frames of ``db_B.dbc`` and
``db_A.dbc`` are combined into one matrix, which is then exported in
two different formats.
"""
import canmatrix.formats
from canmatrix.join import join_frame_by_signal_start_bit

input_files = ["../test/db_B.dbc", "../test/db_A.dbc"]

merged = join_frame_by_signal_start_bit(input_files)

# Export the new (merged) matrix, for example as .dbc and .xlsx:
canmatrix.formats.dumpp(merged, "target.dbc")
canmatrix.formats.dumpp(merged, "target.xlsx")
| ebroecker/canmatrix | examples/exampleJoin.py | Python | bsd-2-clause | 357 |
// Index of supported WMS-C schema versions, keyed by version identifier.
// Currently only version 1.1.1 is available.
module.exports =
{
    "WMSC": {
        "WMSC_1_1_1" : require('./1.1.1/WMSC_1_1_1')
    }
};
| juanrapoport/ogc-schemas | scripts/tests/WMSC/WMSC.js | JavaScript | bsd-2-clause | 84 |
package cz.metacentrum.perun.core.bl;
import cz.metacentrum.perun.core.api.Attribute;
import cz.metacentrum.perun.core.api.Member;
import cz.metacentrum.perun.core.api.PerunSession;
import cz.metacentrum.perun.core.api.User;
import cz.metacentrum.perun.core.api.exceptions.InternalErrorException;
import cz.metacentrum.perun.core.api.exceptions.AttributeNotExistsException;
import cz.metacentrum.perun.core.api.exceptions.WrongAttributeAssignmentException;
import java.util.Calendar;
import java.util.List;
import java.util.Map;
/**
 * Searcher class for looking up Perun objects by a map of attribute names and
 * searching values.
 *
 * @author Michal Stava <stavamichal@gmail.com>
 */
public interface SearcherBl {

	/**
	 * Takes a map of attribute names to searching values and returns all users whose
	 * attributes match every entry. When the map contains more than one attribute,
	 * all of them must match (logical AND).
	 *
	 * Matching rules depend on the attribute type:
	 * <ul>
	 * <li>String: the value must match exactly (partial matching is not supported now,
	 *     it will be supported later via the symbol '*')</li>
	 * <li>Integer: the value is an integer written as a String and must match exactly</li>
	 * <li>List&lt;String&gt;: the value is a String and at least one element must match
	 *     totally or partially</li>
	 * <li>Map&lt;String,String&gt;: the value is a String in the format "key=value" and both
	 *     parts must match exactly; if only "key" is given, only the key must match exactly.
	 *     IMPORTANT: the char '=' is not allowed in map keys, because the first '=' is the
	 *     delimiter of the map item's key and value!</li>
	 * </ul>
	 *
	 * @param sess perun session
	 * @param attributesWithSearchingValues map of attribute names to searching values
	 *        (matching behavior described above)
	 * @return list of users who have attributes with the specified values;
	 *         an empty list when no such user exists
	 *
	 * @throws AttributeNotExistsException when some of the given attributes does not exist
	 * @throws InternalErrorException when an internal error occurs
	 * @throws WrongAttributeAssignmentException when some of the given attributes cannot
	 *         be used in this context
	 */
	List<User> getUsers(PerunSession sess, Map<String, String> attributesWithSearchingValues) throws InternalErrorException, AttributeNotExistsException, WrongAttributeAssignmentException;

	/**
	 * Takes a map of core attribute names to searching values and returns all users
	 * that have the specific match for all of these core attributes.
	 *
	 * @param sess perun session
	 * @param coreAttributesWithSearchingValues map of core attribute names to searching values
	 * @return list of users matching all given core attribute values
	 * @throws InternalErrorException when an internal error occurs
	 * @throws AttributeNotExistsException when some of the given attributes does not exist
	 * @throws WrongAttributeAssignmentException when some of the given attributes cannot
	 *         be used in this context
	 */
	List<User> getUsersForCoreAttributes(PerunSession sess, Map<String, String> coreAttributesWithSearchingValues) throws InternalErrorException, AttributeNotExistsException, WrongAttributeAssignmentException;

	/**
	 * Return members with an expiration date set which will expire on today +/- X days.
	 * You can specify an operator for the comparison (by default "="), returning an
	 * exact match. So you can get all expired members (including today) using "<=" and
	 * a zero-day shift, or using "<" and a +1 day shift.
	 *
	 * Method ignores the current member state, it just compares the expiration date!
	 *
	 * @param sess PerunSession
	 * @param operator One of "=", "<", ">", "<=", ">=". If null, "=" is anticipated.
	 * @param days X days before/after today
	 * @return Members with expiration relative to method params.
	 * @throws InternalErrorException when an internal error occurs
	 */
	List<Member> getMembersByExpiration(PerunSession sess, String operator, int days) throws InternalErrorException;

	/**
	 * Return members with an expiration date set which will expire on the specified date.
	 * You can specify an operator for the comparison (by default "="), returning an
	 * exact match. So you can get all expired members (including today) using "<=" and
	 * today's date, or using "<" and tomorrow's date.
	 *
	 * Method ignores the current member state, it just compares the expiration date!
	 *
	 * @param sess PerunSession
	 * @param operator One of "=", "<", ">", "<=", ">=". If null, "=" is anticipated.
	 * @param date Date to compare expiration with (if null, current date is used).
	 * @return Members with expiration relative to method params.
	 * @throws InternalErrorException when an internal error occurs
	 */
	List<Member> getMembersByExpiration(PerunSession sess, String operator, Calendar date) throws InternalErrorException;
}
| jirmauritz/perun | perun-core/src/main/java/cz/metacentrum/perun/core/bl/SearcherBl.java | Java | bsd-2-clause | 4,338 |