repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
jentfoo/aws-sdk-java | aws-java-sdk-dynamodb/src/main/java/com/amazonaws/services/dynamodbv2/model/DescribeGlobalTableRequest.java | 3672 | /*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.dynamodbv2.model;
import java.io.Serializable;
import java.util.Objects;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/dynamodb-2012-08-10/DescribeGlobalTable" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeGlobalTableRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * <p>
     * The name of the global table.
     * </p>
     */
    private String globalTableName;

    /**
     * <p>
     * The name of the global table.
     * </p>
     *
     * @param globalTableName
     *        The name of the global table.
     */
    public void setGlobalTableName(String globalTableName) {
        this.globalTableName = globalTableName;
    }

    /**
     * <p>
     * The name of the global table.
     * </p>
     *
     * @return The name of the global table.
     */
    public String getGlobalTableName() {
        return this.globalTableName;
    }

    /**
     * <p>
     * The name of the global table.
     * </p>
     *
     * @param globalTableName
     *        The name of the global table.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DescribeGlobalTableRequest withGlobalTableName(String globalTableName) {
        setGlobalTableName(globalTableName);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getGlobalTableName() != null)
            sb.append("GlobalTableName: ").append(getGlobalTableName());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, so no separate null check is required.
        if (!(obj instanceof DescribeGlobalTableRequest)) {
            return false;
        }
        DescribeGlobalTableRequest other = (DescribeGlobalTableRequest) obj;
        // Objects.equals covers the null/null and null/non-null cases that the
        // generated XOR-based comparison handled, in a single readable call.
        return Objects.equals(getGlobalTableName(), other.getGlobalTableName());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        // Objects.hashCode returns 0 for null, matching the previous behavior.
        return prime * 1 + Objects.hashCode(getGlobalTableName());
    }

    @Override
    public DescribeGlobalTableRequest clone() {
        return (DescribeGlobalTableRequest) super.clone();
    }

}
| apache-2.0 |
citizenmatt/gallio | src/Gallio/Gallio.UI/ControlPanel/Preferences/NamespaceDoc.cs | 1072 | // Copyright 2005-2010 Gallio Project - http://www.gallio.org/
// Portions Copyright 2000-2004 Jonathan de Halleux
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Text;
namespace Gallio.UI.ControlPanel.Preferences
{
    /// <summary>
    /// The Gallio.UI.ControlPanel namespace contains types for using and extending the
    /// Preference Tab of the Gallio Control Panel UI.
    /// </summary>
    /// <remarks>
    /// Empty marker class: documentation generators (e.g. Sandcastle) recognize a
    /// class named NamespaceDoc and attach its summary to the enclosing namespace.
    /// The <see cref="CompilerGeneratedAttribute"/> keeps it out of the generated
    /// API listing itself.
    /// </remarks>
    [CompilerGenerated]
    class NamespaceDoc
    {
    }
}
| apache-2.0 |
rameshdharan/cloud-bigtable-client | bigtable-hbase-parent/bigtable-hbase/src/main/java/com/google/cloud/bigtable/hbase/adapters/Adapters.java | 3851 | /*
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.bigtable.hbase.adapters;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Increment;
import com.google.cloud.bigtable.config.BigtableOptions;
import com.google.cloud.bigtable.hbase.adapters.filters.BigtableWhileMatchResultScannerAdapter;
import com.google.cloud.bigtable.hbase.adapters.filters.FilterAdapter;
import com.google.cloud.bigtable.hbase.adapters.read.BigtableResultScannerAdapter;
import com.google.cloud.bigtable.hbase.adapters.read.GetAdapter;
import com.google.cloud.bigtable.hbase.adapters.read.RowAdapter;
import com.google.cloud.bigtable.hbase.adapters.read.ScanAdapter;
/**
* Manages all Adapters
*
* @author sduskis
* @version $Id: $Id
*/
public final class Adapters {
  // NOTE: field order below is significant. Static initializers run in textual
  // order, so ROW_ADAPTER must precede the two scanner adapters that wrap it,
  // and FILTER_ADAPTER must precede SCAN_ADAPTER (which must precede GET_ADAPTER).

  /** Constant <code>ROW_ADAPTER</code> */
  public static final RowAdapter ROW_ADAPTER = new RowAdapter();
  /** Constant <code>APPEND_ADAPTER</code> */
  public static final AppendAdapter APPEND_ADAPTER = new AppendAdapter();
  /** Constant <code>INCREMENT_ADAPTER</code> */
  public static final IncrementAdapter INCREMENT_ADAPTER = new IncrementAdapter();
  /** Constant <code>DELETE_ADAPTER</code> */
  public static final DeleteAdapter DELETE_ADAPTER = new DeleteAdapter();
  /** Constant <code>FILTER_ADAPTER</code> */
  public static final FilterAdapter FILTER_ADAPTER = FilterAdapter.buildAdapter();
  /** Constant <code>SCAN_ADAPTER</code> */
  public static final ScanAdapter SCAN_ADAPTER = new ScanAdapter(FILTER_ADAPTER);
  /** Constant <code>BIGTABLE_RESULT_SCAN_ADAPTER</code> */
  public static final BigtableResultScannerAdapter BIGTABLE_RESULT_SCAN_ADAPTER =
      new BigtableResultScannerAdapter(ROW_ADAPTER);
  /** Constant <code>BIGTABLE_WHILE_MATCH_RESULT_RESULT_SCAN_ADAPTER</code> */
  public static final BigtableWhileMatchResultScannerAdapter
      BIGTABLE_WHILE_MATCH_RESULT_RESULT_SCAN_ADAPTER =
      new BigtableWhileMatchResultScannerAdapter(ROW_ADAPTER);
  /** Constant <code>GET_ADAPTER</code> */
  public static final GetAdapter GET_ADAPTER = new GetAdapter(SCAN_ADAPTER);

  /**
   * <p>createMutationsAdapter.</p>
   *
   * Builds a MutationAdapter that supports deletes and puts; increments and
   * appends are rejected at runtime via UnsupportedOperationAdapter.
   *
   * @param putAdapter a {@link com.google.cloud.bigtable.hbase.adapters.PutAdapter} object.
   * @return a {@link com.google.cloud.bigtable.hbase.adapters.MutationAdapter} object.
   */
  public static MutationAdapter createMutationsAdapter(PutAdapter putAdapter) {
    return new MutationAdapter(
      DELETE_ADAPTER,
      putAdapter,
      new UnsupportedOperationAdapter<Increment>("increment"),
      new UnsupportedOperationAdapter<Append>("append"));
  }

  /**
   * <p>createPutAdapter.</p>
   *
   * @param config a {@link org.apache.hadoop.conf.Configuration} object.
   * @param options a {@link com.google.cloud.bigtable.config.BigtableOptions} object.
   * @return a {@link com.google.cloud.bigtable.hbase.adapters.PutAdapter} object.
   */
  public static PutAdapter createPutAdapter(Configuration config, BigtableOptions options) {
    // Client timestamps are set only when retries without a timestamp are disallowed.
    boolean setClientTimestamp = !options.getRetryOptions().allowRetriesWithoutTimestamp();
    // NOTE(review): -1 presumably means "no maximum keyvalue size" — confirm
    // against PutAdapter's handling of that default.
    return new PutAdapter(config.getInt("hbase.client.keyvalue.maxsize", -1), setClientTimestamp);
  }

  // Utility class; not instantiable.
  private Adapters() {
  }
}
| apache-2.0 |
skunkiferous/Util | jactor2-coreSt/src/main/java/org/agilewiki/jactor2/core/impl/stReactors/PoolThreadReactorStImpl.java | 1290 | package org.agilewiki.jactor2.core.impl.stReactors;
import org.agilewiki.jactor2.core.impl.stPlant.PlantStImpl;
import org.agilewiki.jactor2.core.reactors.IsolationReactor;
import org.agilewiki.jactor2.core.reactors.NonBlockingReactor;
import org.agilewiki.jactor2.core.reactors.impl.PoolThreadReactorImpl;
abstract public class PoolThreadReactorStImpl extends ReactorStImpl implements
        PoolThreadReactorImpl {

    /** Callback run whenever the inbox drains; {@code null} means "do nothing". */
    private Runnable onIdle;

    /**
     * Create an PoolThreadReactorStImpl.
     *
     * @param _parentReactor The parent reactor.
     */
    public PoolThreadReactorStImpl(final IsolationReactor _parentReactor) {
        super(_parentReactor);
    }

    @Override
    protected void notBusy() throws Exception {
        // Guard clauses: nothing to do without a callback or while work remains.
        if (onIdle == null) {
            return;
        }
        if (!inbox.isIdle()) {
            return;
        }
        onIdle.run();
    }

    @Override
    protected void afterAdd() {
        // Hand this reactor to the singleton plant for execution.
        PlantStImpl.getSingleton().submit(this);
    }

    /**
     * The object to be run when the inbox is emptied and before the threadReference is cleared.
     */
    @Override
    public Runnable getOnIdle() {
        return this.onIdle;
    }

    @Override
    public void setOnIdle(final Runnable onIdle) {
        this.onIdle = onIdle;
    }
}
| apache-2.0 |
chmyga/component-runtime | component-runtime-beam/src/test/java/org/talend/test/FileOutput.java | 1651 | /**
* Copyright (C) 2006-2020 Talend Inc. - www.talend.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.talend.test;
import java.io.File;
import java.io.IOException;
import java.io.Serializable;
import java.io.Writer;
import javax.annotation.PreDestroy;
import javax.json.JsonObject;
import org.talend.sdk.component.api.processor.ElementListener;
import org.talend.sdk.component.api.processor.Processor;
import lombok.AllArgsConstructor;
@AllArgsConstructor
@Processor(family = "chain", name = "file")
public class FileOutput implements Serializable {

    // Target output file; its absolute path keys the shared writer lookup below.
    private final File file;

    // Supplies the Writer for a given path.
    // NOTE(review): the synchronized blocks below are only effective if
    // FileService returns the SAME Writer instance for a given path on every
    // call — confirm against FileService.
    private final FileService service;

    /**
     * Appends the record's "data" string as one line to the output file.
     *
     * @param data incoming record; must contain a string entry named "data"
     * @throws IOException if the writer cannot be obtained or written to
     */
    @ElementListener
    public void length(final JsonObject data) throws IOException {
        final Writer writer = service.writerFor(file.getAbsolutePath());
        // Serialize writes on the (shared) writer so lines are not interleaved.
        synchronized (writer) {
            writer.write(data.getString("data") + System.lineSeparator());
        }
    }

    /**
     * Closes the file's writer when the processor is torn down.
     *
     * @throws IOException if closing the writer fails
     */
    @PreDestroy
    public void close() throws IOException {
        final Writer writer = service.writerFor(file.getAbsolutePath());
        synchronized (writer) {
            writer.close();
        }
    }
}
| apache-2.0 |
googleads/googleads-java-lib | examples/admanager_axis/src/main/java/admanager/axis/v202105/siteservice/SubmitSiteForApproval.java | 7356 | // Copyright 2020 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package admanager.axis.v202105.siteservice;
import static com.google.api.ads.common.lib.utils.Builder.DEFAULT_CONFIGURATION_FILENAME;
import com.beust.jcommander.Parameter;
import com.google.api.ads.admanager.axis.factory.AdManagerServices;
import com.google.api.ads.admanager.axis.utils.v202105.StatementBuilder;
import com.google.api.ads.admanager.axis.v202105.ApiError;
import com.google.api.ads.admanager.axis.v202105.ApiException;
import com.google.api.ads.admanager.axis.v202105.Site;
import com.google.api.ads.admanager.axis.v202105.SitePage;
import com.google.api.ads.admanager.axis.v202105.SiteServiceInterface;
import com.google.api.ads.admanager.axis.v202105.UpdateResult;
import com.google.api.ads.admanager.lib.client.AdManagerSession;
import com.google.api.ads.admanager.lib.utils.examples.ArgumentNames;
import com.google.api.ads.common.lib.auth.OfflineCredentials;
import com.google.api.ads.common.lib.auth.OfflineCredentials.Api;
import com.google.api.ads.common.lib.conf.ConfigurationLoadException;
import com.google.api.ads.common.lib.exception.OAuthException;
import com.google.api.ads.common.lib.exception.ValidationException;
import com.google.api.ads.common.lib.utils.examples.CodeSampleParams;
import com.google.api.client.auth.oauth2.Credential;
import java.rmi.RemoteException;
/**
* This example submits a Site for approval.
*
* <p>Credentials and properties in {@code fromFile()} are pulled from the "ads.properties" file.
* See README for more info.
*/
public class SubmitSiteForApproval {

  /** Command-line parameters for this example (parsed by JCommander). */
  private static class SubmitSitesForApprovalParams extends CodeSampleParams {
    @Parameter(
        names = ArgumentNames.SITE_ID,
        required = true,
        description = "The ID of the site to submit for approval.")
    private Long siteId;
  }

  /**
   * Runs the example.
   *
   * @param adManagerServices the services factory.
   * @param session the session.
   * @param siteId the ID of the site to submit for approval.
   * @throws ApiException if the API request failed with one or more service errors.
   * @throws RemoteException if the API request failed due to other errors.
   */
  public static void runExample(
      AdManagerServices adManagerServices, AdManagerSession session, long siteId)
      throws RemoteException {
    // Get the SiteService.
    SiteServiceInterface siteService = adManagerServices.get(session, SiteServiceInterface.class);
    // Create a statement to select a site.
    StatementBuilder statementBuilder =
        new StatementBuilder()
            .where("WHERE id = :id")
            .orderBy("id ASC")
            .limit(StatementBuilder.SUGGESTED_PAGE_LIMIT)
            .withBindVariableValue("id", siteId);
    // Default for total result set size.
    int totalResultSetSize = 0;
    // Page through matching sites, logging each one that will be submitted.
    do {
      // Get sites by statement.
      SitePage page = siteService.getSitesByStatement(statementBuilder.toStatement());
      if (page.getResults() != null) {
        totalResultSetSize = page.getTotalResultSetSize();
        int i = page.getStartIndex();
        for (Site site : page.getResults()) {
          System.out.printf(
              "%d) Site with ID %d will be submitted for approval.%n", i++, site.getId());
        }
      }
      statementBuilder.increaseOffsetBy(StatementBuilder.SUGGESTED_PAGE_LIMIT);
    } while (statementBuilder.getOffset() < totalResultSetSize);
    System.out.printf("Number of sites to be submitted: %d%n", totalResultSetSize);
    if (totalResultSetSize > 0) {
      // Remove limit and offset from statement so the action applies to all matches.
      statementBuilder.removeLimitAndOffset();
      // Create action. (Fully qualified: this example class shares the action's name.)
      com.google.api.ads.admanager.axis.v202105.SubmitSiteForApproval action =
          new com.google.api.ads.admanager.axis.v202105.SubmitSiteForApproval();
      // Perform action.
      UpdateResult result = siteService.performSiteAction(action, statementBuilder.toStatement());
      if (result != null && result.getNumChanges() > 0) {
        System.out.printf("Number of sites submitted: %d%n", result.getNumChanges());
      } else {
        System.out.println("No sites were submitted.");
      }
    }
  }

  /**
   * Entry point: builds OAuth2 credentials and a session from ads.properties,
   * parses command-line args, and delegates to {@link #runExample}.
   */
  public static void main(String[] args) {
    AdManagerSession session;
    try {
      // Generate a refreshable OAuth2 credential.
      Credential oAuth2Credential =
          new OfflineCredentials.Builder()
              .forApi(Api.AD_MANAGER)
              .fromFile()
              .build()
              .generateCredential();
      // Construct a AdManagerSession.
      session =
          new AdManagerSession.Builder().fromFile().withOAuth2Credential(oAuth2Credential).build();
    } catch (ConfigurationLoadException cle) {
      System.err.printf(
          "Failed to load configuration from the %s file. Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, cle);
      return;
    } catch (ValidationException ve) {
      System.err.printf(
          "Invalid configuration in the %s file. Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, ve);
      return;
    } catch (OAuthException oe) {
      System.err.printf(
          "Failed to create OAuth credentials. Check OAuth settings in the %s file. "
              + "Exception: %s%n",
          DEFAULT_CONFIGURATION_FILENAME, oe);
      return;
    }
    AdManagerServices adManagerServices = new AdManagerServices();
    SubmitSitesForApprovalParams params = new SubmitSitesForApprovalParams();
    if (!params.parseArguments(args)) {
      // Either pass the required parameters for this example on the command line, or insert them
      // into the code here. See the parameter class definition above for descriptions.
      params.siteId = Long.parseLong("INSERT_SITE_ID_HERE");
    }
    try {
      runExample(adManagerServices, session, params.siteId);
    } catch (ApiException apiException) {
      // ApiException is the base class for most exceptions thrown by an API request. Instances
      // of this exception have a message and a collection of ApiErrors that indicate the
      // type and underlying cause of the exception. Every exception object in the admanager.axis
      // packages will return a meaningful value from toString
      //
      // ApiException extends RemoteException, so this catch block must appear before the
      // catch block for RemoteException.
      System.err.println("Request failed due to ApiException. Underlying ApiErrors:");
      if (apiException.getErrors() != null) {
        int i = 0;
        for (ApiError apiError : apiException.getErrors()) {
          System.err.printf("  Error %d: %s%n", i++, apiError);
        }
      }
    } catch (RemoteException re) {
      System.err.printf("Request failed unexpectedly due to RemoteException: %s%n", re);
    }
  }
}
| apache-2.0 |
RockeyHoo/wechat | wechat-mp-sdk/src/main/java/org/rockey/wechat/mp/sdk/vo/message/reply/MusicReply.java | 974 | package org.rockey.wechat.mp.sdk.vo.message.reply;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import org.rockey.wechat.mp.sdk.factory.ReplyEnumFactory;
import org.rockey.wechat.mp.sdk.vo.message.reply.detail.MusicDetail;
/**
*
* @author RockeyHoo
*/
@XmlRootElement(name = "xml")
@XmlAccessorType(XmlAccessType.FIELD)
public class MusicReply extends Reply {

    /** Music payload, marshalled as the "Music" element of the reply XML. */
    @XmlElement(name = "Music")
    private MusicDetail musicDetail;

    /**
     * Creates an empty music reply. The message type is always the music reply
     * type. (JAXB also uses this no-arg constructor during unmarshalling.)
     */
    public MusicReply() {
        super.setMsgType(ReplyEnumFactory.MUSIC.getReplyType());
    }

    /**
     * Creates a music reply carrying the given payload.
     *
     * @param musicDetail the music payload to send
     */
    public MusicReply(MusicDetail musicDetail) {
        this();
        this.musicDetail = musicDetail;
    }

    /** @return the music payload of this reply */
    public MusicDetail getMusicDetail() {
        return this.musicDetail;
    }

    /** @param musicDetail the music payload to send */
    public void setMusicDetail(MusicDetail musicDetail) {
        this.musicDetail = musicDetail;
    }
}
| apache-2.0 |
nemanja88/azure-powershell | src/ResourceManager/AzureBackup/Commands.AzureBackup.Test/Properties/AssemblyInfo.cs | 2148 | // ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using Xunit;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Microsoft.Azure.Commands.AzureBackup.Test")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Commands.AzureBackup.Test")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("49823b4f-deb2-4cf5-a8e7-5118fc6a05d6")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
[assembly: AssemblyVersion("2.2.0")]
[assembly: AssemblyFileVersion("2.2.0")]
[assembly: CollectionBehavior(DisableTestParallelization = true)]
| apache-2.0 |
nuest/worldviz | src/main/java/org/n52/v3d/worldviz/featurenet/shapes/Circle.java | 1337 | package org.n52.v3d.worldviz.featurenet.shapes;
import java.util.ArrayList;
import org.n52.v3d.triturus.gisimplm.GmPoint;
import org.n52.v3d.triturus.t3dutil.T3dVector;
import org.n52.v3d.triturus.vgis.VgPoint;
/**
*
* @author Adhitya Kamakshidasan
*/
public class Circle {

    // Points of the most recently generated circle. Public for backward
    // compatibility; also returned by generateCircle().
    public ArrayList<T3dVector> circlePoints = new ArrayList<T3dVector>();

    /**
     * Computes a point on a circle of the given radius centered at the origin.
     *
     * @param radius circle radius
     * @param angle  angle in radians
     * @return the point (radius*cos(angle), radius*sin(angle), 0)
     */
    public VgPoint generatePoint(double radius, double angle) {
        // Angle to be specified in radians
        double x = radius * Math.cos(angle);
        double y = radius * Math.sin(angle);
        double z = 0.0;
        VgPoint point = new GmPoint(x, y, z);
        return point;
    }

    /**
     * Wraps a point's coordinates into a vector.
     *
     * @param point the point to convert
     * @return a vector with the same x/y/z coordinates
     */
    public T3dVector generateVector(VgPoint point) {
        return new T3dVector(point.getX(), point.getY(), point.getZ());
    }

    /**
     * Generates 2*theta + 1 evenly spaced points on a full circle (the first and
     * last point coincide at angle 0 == 2*PI), stores them in
     * {@link #circlePoints} and returns that list.
     *
     * @param radius circle radius
     * @param theta  half the number of segments; angular step is PI/theta radians
     * @return the list of generated points
     */
    public ArrayList<T3dVector> generateCircle(double radius, int theta) {
        // Fix: clear the shared list first so repeated calls do not accumulate
        // points from previously generated circles.
        circlePoints.clear();
        for (long i = 0; i <= 2 * theta; i++) {
            double angle = (i * Math.PI) / theta;
            // generatePoint already sets z = 0, so no extra setZ call is needed.
            VgPoint point = generatePoint(radius, angle);
            circlePoints.add(generateVector(point));
        }
        return circlePoints;
    }
}
| apache-2.0 |
SAP/openui5 | src/sap.m/test/sap/m/demokit/sample/TextArea/Component.js | 197 | sap.ui.define(['sap/ui/core/UIComponent'],
function(UIComponent) {
"use strict";
return UIComponent.extend("sap.m.sample.TextArea.Component", {
metadata : {
manifest: "json"
}
});
});
| apache-2.0 |
lmjacksoniii/hazelcast | hazelcast/src/main/java/com/hazelcast/cache/impl/CacheMXBeanImpl.java | 1936 | /*
* Copyright (c) 2008-2016, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.cache.impl;
import com.hazelcast.config.CacheConfig;
import javax.cache.management.CacheMXBean;
/**
* Implementation of {@link CacheMXBean}.
*
* This class is published through MXBean. It's a simple wrapper around {@link CacheConfig} for readonly
* access to cache configuration.
* @see com.hazelcast.config.CacheConfig
*/
public class CacheMXBeanImpl
        implements CacheMXBean {

    // Final: this bean is a strictly read-only view over the configuration and
    // never replaces it after construction.
    private final CacheConfig cacheConfig;

    /**
     * Creates a read-only MXBean view over the given cache configuration.
     *
     * @param cacheConfig the configuration to expose; must not be null
     */
    public CacheMXBeanImpl(CacheConfig cacheConfig) {
        this.cacheConfig = cacheConfig;
    }

    /** @return the fully qualified class name of the cache's key type */
    @Override
    public String getKeyType() {
        return cacheConfig.getKeyType().getName();
    }

    /** @return the fully qualified class name of the cache's value type */
    @Override
    public String getValueType() {
        return cacheConfig.getValueType().getName();
    }

    @Override
    public boolean isReadThrough() {
        return cacheConfig.isReadThrough();
    }

    @Override
    public boolean isWriteThrough() {
        return cacheConfig.isWriteThrough();
    }

    @Override
    public boolean isStoreByValue() {
        return cacheConfig.isStoreByValue();
    }

    @Override
    public boolean isStatisticsEnabled() {
        return cacheConfig.isStatisticsEnabled();
    }

    @Override
    public boolean isManagementEnabled() {
        return cacheConfig.isManagementEnabled();
    }
}
| apache-2.0 |
fabioCollini/DaggerMock | daggermockTests/src/test/java/it/cosenonjaviste/daggermock/nesteddependency/MyComponent4.java | 795 | /*
* Copyright 2016 Fabio Collini.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.cosenonjaviste.daggermock.nesteddependency;
import dagger.Component;
@Component(modules = MyModule4.class)
public interface MyComponent4 {
    // Exposes the MyService4 binding provided by MyModule4 so callers (and the
    // DaggerMock tests) can retrieve it directly from the object graph.
    MyService4 myService4();
}
| apache-2.0 |
googleapis/google-api-ruby-client | google-api-client/generated/google/apis/speech_v2beta1/representations.rb | 5368 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
  module Apis
    module SpeechV2beta1
      # Generated JSON (de)serialization mappings for the Speech v2beta1 API.
      # First section: forward declarations of each model's Representation.
      # Second section: the actual field-to-JSON-name mappings.

      class ListOperationsResponse
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class LongRunningRecognizeMetadata
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class LongRunningRecognizeResponse
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Operation
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class SpeechRecognitionAlternative
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class SpeechRecognitionResult
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Status
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class WordInfo
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class ListOperationsResponse
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :next_page_token, as: 'nextPageToken'
          collection :operations, as: 'operations', class: Google::Apis::SpeechV2beta1::Operation, decorator: Google::Apis::SpeechV2beta1::Operation::Representation
        end
      end

      class LongRunningRecognizeMetadata
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :last_update_time, as: 'lastUpdateTime'
          property :progress_percent, as: 'progressPercent'
          property :start_time, as: 'startTime'
          property :uri, as: 'uri'
        end
      end

      class LongRunningRecognizeResponse
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :results, as: 'results', class: Google::Apis::SpeechV2beta1::SpeechRecognitionResult, decorator: Google::Apis::SpeechV2beta1::SpeechRecognitionResult::Representation
        end
      end

      class Operation
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :done, as: 'done'
          property :error, as: 'error', class: Google::Apis::SpeechV2beta1::Status, decorator: Google::Apis::SpeechV2beta1::Status::Representation
          hash :metadata, as: 'metadata'
          property :name, as: 'name'
          hash :response, as: 'response'
        end
      end

      class SpeechRecognitionAlternative
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :confidence, as: 'confidence'
          property :transcript, as: 'transcript'
          collection :words, as: 'words', class: Google::Apis::SpeechV2beta1::WordInfo, decorator: Google::Apis::SpeechV2beta1::WordInfo::Representation
        end
      end

      class SpeechRecognitionResult
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :alternatives, as: 'alternatives', class: Google::Apis::SpeechV2beta1::SpeechRecognitionAlternative, decorator: Google::Apis::SpeechV2beta1::SpeechRecognitionAlternative::Representation
          property :channel_tag, as: 'channelTag'
          property :language_code, as: 'languageCode'
        end
      end

      class Status
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :code, as: 'code'
          collection :details, as: 'details'
          property :message, as: 'message'
        end
      end

      class WordInfo
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :confidence, as: 'confidence'
          property :end_offset, as: 'endOffset'
          property :speaker_tag, as: 'speakerTag'
          property :start_offset, as: 'startOffset'
          property :word, as: 'word'
        end
      end
    end
  end
end
| apache-2.0 |
bisigc/art | ui/test/spec/controllers/userviewcontroller.js | 614 | 'use strict';
describe('Controller: UserviewcontrollerCtrl', function () {
// load the controller's module
beforeEach(module('uiApp'));
var UserviewcontrollerCtrl,
scope;
// Initialize the controller and a mock scope
beforeEach(inject(function ($controller, $rootScope) {
scope = $rootScope.$new();
UserviewcontrollerCtrl = $controller('UserviewcontrollerCtrl', {
$scope: scope
// place here mocked dependencies
});
}));
it('should attach a list of awesomeThings to the scope', function () {
expect(UserviewcontrollerCtrl.awesomeThings.length).toBe(3);
});
});
| apache-2.0 |
Shmuma/hbase | src/test/java/org/apache/hadoop/hbase/master/TestZKBasedOpenCloseRegion.java | 13784 | /**
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.master;
import java.io.IOException;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.executor.EventHandler;
import org.apache.hadoop.hbase.executor.EventHandler.EventHandlerListener;
import org.apache.hadoop.hbase.executor.EventHandler.EventType;
import org.apache.hadoop.hbase.master.handler.TotesHRegionInfo;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Threads;
import org.apache.hadoop.hbase.util.Writables;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
/**
 * Test open and close of regions using zk.
 *
 * <p>Spins up a two-node mini HBase cluster once for the whole class, creates a
 * multi-region table, and then drives region close/reopen through the master's
 * assignment manager, observing progress via {@link EventHandlerListener}s
 * registered on the master's executor service.
 */
public class TestZKBasedOpenCloseRegion {
  private static final Log LOG = LogFactory.getLog(TestZKBasedOpenCloseRegion.class);
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static final String TABLENAME = "TestZKBasedOpenCloseRegion";
  // Three column families; only the first is actually written to (see getTestFamily()).
  private static final byte [][] FAMILIES = new byte [][] {Bytes.toBytes("a"),
    Bytes.toBytes("b"), Bytes.toBytes("c")};
  // Number of regions created by createMultiRegions; used as the assignment target count.
  private static int countOfRegions;

  /**
   * Starts a 2-regionserver mini cluster, creates the test table, splits it
   * into multiple regions, waits for full assignment and seeds one row per region.
   */
  @BeforeClass public static void beforeAllTests() throws Exception {
    Configuration c = TEST_UTIL.getConfiguration();
    c.setBoolean("dfs.support.append", true);
    // Pick a random info-server port so parallel test runs do not collide.
    c.setInt("hbase.regionserver.info.port", 0);
    TEST_UTIL.startMiniCluster(2);
    TEST_UTIL.createTable(Bytes.toBytes(TABLENAME), FAMILIES);
    HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME);
    countOfRegions = TEST_UTIL.createMultiRegions(t, getTestFamily());
    waitUntilAllRegionsAssigned();
    addToEachStartKey(countOfRegions);
  }

  @AfterClass public static void afterAllTests() throws IOException {
    TEST_UTIL.shutdownMiniCluster();
  }

  /**
   * Ensures at least two live region servers before each test (earlier tests
   * may have killed one), then waits for all regions to be assigned.
   */
  @Before public void setup() throws IOException {
    if (TEST_UTIL.getHBaseCluster().getLiveRegionServerThreads().size() < 2) {
      // Need at least two servers.
      LOG.info("Started new server=" +
        TEST_UTIL.getHBaseCluster().startRegionServer());
    }
    waitUntilAllRegionsAssigned();
  }

  /**
   * Test we reopen a region once closed.
   * @throws Exception
   */
  @Test (timeout=300000) public void testReOpenRegion()
  throws Exception {
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
    LOG.info("Number of region servers = " +
      cluster.getLiveRegionServerThreads().size());

    int rsIdx = 0;
    HRegionServer regionServer =
      TEST_UTIL.getHBaseCluster().getRegionServer(rsIdx);
    HRegionInfo hri = getNonMetaRegion(regionServer.getOnlineRegions());
    LOG.debug("Asking RS to close region " + hri.getRegionNameAsString());

    // Flags flipped by the listeners below once the master has processed the
    // corresponding ZK event for our region.
    AtomicBoolean closeEventProcessed = new AtomicBoolean(false);
    AtomicBoolean reopenEventProcessed = new AtomicBoolean(false);

    EventHandlerListener closeListener =
      new ReopenEventListener(hri.getRegionNameAsString(),
        closeEventProcessed, EventType.RS_ZK_REGION_CLOSED);
    cluster.getMaster().executorService.
      registerListener(EventType.RS_ZK_REGION_CLOSED, closeListener);

    EventHandlerListener openListener =
      new ReopenEventListener(hri.getRegionNameAsString(),
        reopenEventProcessed, EventType.RS_ZK_REGION_OPENED);
    cluster.getMaster().executorService.
      registerListener(EventType.RS_ZK_REGION_OPENED, openListener);

    LOG.info("Unassign " + hri.getRegionNameAsString());
    cluster.getMaster().assignmentManager.unassign(hri);

    // Busy-wait until both the close and the subsequent reopen have been
    // observed; the @Test timeout bounds these loops.
    while (!closeEventProcessed.get()) {
      Threads.sleep(100);
    }

    while (!reopenEventProcessed.get()) {
      Threads.sleep(100);
    }

    LOG.info("Done with testReOpenRegion");
  }

  /** Returns the first non-meta region from {@code regions}, or null if none. */
  private HRegionInfo getNonMetaRegion(final Collection<HRegionInfo> regions) {
    HRegionInfo hri = null;
    for (HRegionInfo i: regions) {
      LOG.info(i.getRegionNameAsString());
      if (!i.isMetaRegion()) {
        hri = i;
        break;
      }
    }
    return hri;
  }

  /**
   * Listener that sets (and notifies on) {@code eventProcessed} once the
   * watched {@code eventType} has been processed for the named region.
   */
  public static class ReopenEventListener implements EventHandlerListener {
    private static final Log LOG = LogFactory.getLog(ReopenEventListener.class);
    String regionName;
    AtomicBoolean eventProcessed;
    EventType eventType;

    public ReopenEventListener(String regionName,
        AtomicBoolean eventProcessed, EventType eventType) {
      this.regionName = regionName;
      this.eventProcessed = eventProcessed;
      this.eventType = eventType;
    }

    @Override
    public void beforeProcess(EventHandler event) {
      if(event.getEventType() == eventType) {
        LOG.info("Received " + eventType + " and beginning to process it");
      }
    }

    @Override
    public void afterProcess(EventHandler event) {
      LOG.info("afterProcess(" + event + ")");
      if(event.getEventType() == eventType) {
        LOG.info("Finished processing " + eventType);
        String regionName = "";
        // Both OPENED and CLOSED events carry the region info the same way.
        if(eventType == EventType.RS_ZK_REGION_OPENED) {
          TotesHRegionInfo hriCarrier = (TotesHRegionInfo)event;
          regionName = hriCarrier.getHRegionInfo().getRegionNameAsString();
        } else if(eventType == EventType.RS_ZK_REGION_CLOSED) {
          TotesHRegionInfo hriCarrier = (TotesHRegionInfo)event;
          regionName = hriCarrier.getHRegionInfo().getRegionNameAsString();
        }
        // Only flip the flag when the event is for the region we watch.
        if(this.regionName.equals(regionName)) {
          eventProcessed.set(true);
        }
        synchronized(eventProcessed) {
          eventProcessed.notifyAll();
        }
      }
    }
  }

  /** Tests that a region close requested through the assignment manager completes. */
  @Test (timeout=300000) public void testCloseRegion()
  throws Exception {
    LOG.info("Running testCloseRegion");
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
    LOG.info("Number of region servers = " + cluster.getLiveRegionServerThreads().size());

    int rsIdx = 0;
    HRegionServer regionServer = TEST_UTIL.getHBaseCluster().getRegionServer(rsIdx);
    HRegionInfo hri = getNonMetaRegion(regionServer.getOnlineRegions());
    LOG.debug("Asking RS to close region " + hri.getRegionNameAsString());

    AtomicBoolean closeEventProcessed = new AtomicBoolean(false);
    EventHandlerListener listener =
      new CloseRegionEventListener(hri.getRegionNameAsString(),
        closeEventProcessed);
    cluster.getMaster().executorService.registerListener(EventType.RS_ZK_REGION_CLOSED, listener);

    cluster.getMaster().assignmentManager.unassign(hri);

    while (!closeEventProcessed.get()) {
      Threads.sleep(100);
    }
    LOG.info("Done with testCloseRegion");
  }

  /** Listener that flags {@code closeEventProcessed} once the named region is closed. */
  public static class CloseRegionEventListener implements EventHandlerListener {
    private static final Log LOG = LogFactory.getLog(CloseRegionEventListener.class);
    String regionToClose;
    AtomicBoolean closeEventProcessed;

    public CloseRegionEventListener(String regionToClose,
        AtomicBoolean closeEventProcessed) {
      this.regionToClose = regionToClose;
      this.closeEventProcessed = closeEventProcessed;
    }

    @Override
    public void afterProcess(EventHandler event) {
      LOG.info("afterProcess(" + event + ")");
      if(event.getEventType() == EventType.RS_ZK_REGION_CLOSED) {
        LOG.info("Finished processing CLOSE REGION");
        TotesHRegionInfo hriCarrier = (TotesHRegionInfo)event;
        if (regionToClose.equals(hriCarrier.getHRegionInfo().getRegionNameAsString())) {
          LOG.info("Setting closeEventProcessed flag");
          closeEventProcessed.set(true);
        } else {
          LOG.info("Region to close didn't match");
        }
      }
    }

    @Override
    public void beforeProcess(EventHandler event) {
      if(event.getEventType() == EventType.M_RS_CLOSE_REGION) {
        LOG.info("Received CLOSE RPC and beginning to process it");
      }
    }
  }

  /**
   * This test shows how a region won't be able to be assigned to a RS
   * if it's already "processing" it.
   * @throws Exception
   */
  @Test
  public void testRSAlreadyProcessingRegion() throws Exception {
    MiniHBaseCluster cluster = TEST_UTIL.getHBaseCluster();
    HRegionServer hr0 =
      cluster.getLiveRegionServerThreads().get(0).getRegionServer();
    HRegionServer hr1 =
      cluster.getLiveRegionServerThreads().get(1).getRegionServer();
    HRegionInfo hri = getNonMetaRegion(hr0.getOnlineRegions());

    // fake that hr1 is processing the region
    hr1.getRegionsInTransitionInRS().add(hri.getEncodedNameAsBytes());

    AtomicBoolean reopenEventProcessed = new AtomicBoolean(false);
    EventHandlerListener openListener =
      new ReopenEventListener(hri.getRegionNameAsString(),
        reopenEventProcessed, EventType.RS_ZK_REGION_OPENED);
    cluster.getMaster().executorService.
      registerListener(EventType.RS_ZK_REGION_OPENED, openListener);

    // now ask the master to move the region to hr1, will fail
    TEST_UTIL.getHBaseAdmin().move(hri.getEncodedNameAsBytes(),
      Bytes.toBytes(hr1.getServerName()));

    while (!reopenEventProcessed.get()) {
      Threads.sleep(100);
    }

    // make sure the region came back (i.e. it is NOT online on hr1)
    assertTrue(hr1.getOnlineRegion(hri.getEncodedNameAsBytes()) == null);

    // remove the block and reset the boolean
    hr1.getRegionsInTransitionInRS().remove(hri.getEncodedNameAsBytes());
    reopenEventProcessed.set(false);

    // move the region again, but this time it will work
    TEST_UTIL.getHBaseAdmin().move(hri.getEncodedNameAsBytes(),
      Bytes.toBytes(hr1.getServerName()));

    while (!reopenEventProcessed.get()) {
      Threads.sleep(100);
    }

    // make sure the region has moved from the original RS
    assertTrue(hr0.getOnlineRegion(hri.getEncodedNameAsBytes()) == null);
  }

  /**
   * Polls .META. until every one of the {@link #countOfRegions} regions has a
   * server assigned (its SERVER_QUALIFIER column is populated).
   */
  private static void waitUntilAllRegionsAssigned()
  throws IOException {
    HTable meta = new HTable(TEST_UTIL.getConfiguration(),
      HConstants.META_TABLE_NAME);
    while (true) {
      int rows = 0;
      Scan scan = new Scan();
      scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER);
      ResultScanner s = meta.getScanner(scan);
      for (Result r = null; (r = s.next()) != null;) {
        byte [] b =
          r.getValue(HConstants.CATALOG_FAMILY, HConstants.SERVER_QUALIFIER);
        if (b == null || b.length <= 0) {
          break;
        }
        rows++;
      }
      s.close();
      // If I get to here and all rows have a Server, then all have been assigned.
      if (rows >= countOfRegions) {
        break;
      }
      LOG.info("Found=" + rows);
      Threads.sleep(1000);
    }
  }

  /*
   * Add to each of the regions in .META. a value. Key is the startrow of the
   * region (except it's 'aaa' for the first region). Actual value is the row name.
   * @param expected
   * @return Count of regions written to (asserted equal to expected).
   * @throws IOException
   */
  private static int addToEachStartKey(final int expected) throws IOException {
    HTable t = new HTable(TEST_UTIL.getConfiguration(), TABLENAME);
    HTable meta = new HTable(TEST_UTIL.getConfiguration(),
      HConstants.META_TABLE_NAME);
    int rows = 0;
    Scan scan = new Scan();
    scan.addColumn(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
    ResultScanner s = meta.getScanner(scan);
    for (Result r = null; (r = s.next()) != null;) {
      byte [] b =
        r.getValue(HConstants.CATALOG_FAMILY, HConstants.REGIONINFO_QUALIFIER);
      if (b == null || b.length <= 0) {
        break;
      }
      HRegionInfo hri = Writables.getHRegionInfo(b);
      // If start key, add 'aaa'.
      byte [] row = getStartKey(hri);
      Put p = new Put(row);
      p.add(getTestFamily(), getTestQualifier(), row);
      t.put(p);
      rows++;
    }
    s.close();
    Assert.assertEquals(expected, rows);
    return rows;
  }

  /** Start key of {@code hri}, substituting 'aaa' for the empty first-region key. */
  private static byte [] getStartKey(final HRegionInfo hri) {
    return Bytes.equals(HConstants.EMPTY_START_ROW, hri.getStartKey())?
        Bytes.toBytes("aaa"): hri.getStartKey();
  }

  private static byte [] getTestFamily() {
    return FAMILIES[0];
  }

  // Qualifier reuses the family name; no separate qualifier constant needed.
  private static byte [] getTestQualifier() {
    return getTestFamily();
  }

  /** Convenience entry point for running testCloseRegion outside a JUnit runner. */
  public static void main(String args[]) throws Exception {
    TestZKBasedOpenCloseRegion.beforeAllTests();
    TestZKBasedOpenCloseRegion test = new TestZKBasedOpenCloseRegion();
    test.setup();
    test.testCloseRegion();
    TestZKBasedOpenCloseRegion.afterAllTests();
  }
}
| apache-2.0 |
concourse/concourse | skymarshal/token/middleware_test.go | 4015 | package token_test
import (
"time"
"net/http"
"net/http/httptest"
"github.com/concourse/concourse/skymarshal/token"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
// Token Middleware covers reading, writing and clearing the auth, CSRF and
// state token cookies managed by the skymarshal token middleware.
var _ = Describe("Token Middleware", func() {
	var (
		err        error
		expiry     time.Time
		r          *http.Request
		w          *httptest.ResponseRecorder
		middleware token.Middleware
	)

	BeforeEach(func() {
		expiry = time.Now().Add(time.Minute)

		r, err = http.NewRequest("GET", "http://example.come", nil)
		Expect(err).NotTo(HaveOccurred())

		w = httptest.NewRecorder()

		middleware = token.NewMiddleware(false)
	})

	Describe("Auth Tokens", func() {
		Describe("GetAuthToken", func() {
			var result string

			BeforeEach(func() {
				r.AddCookie(&http.Cookie{Name: "skymarshal_auth", Value: "blah"})
			})

			JustBeforeEach(func() {
				result = middleware.GetAuthToken(r)
			})

			It("gets the token from the request", func() {
				Expect(result).To(Equal("blah"))
			})
		})

		Describe("SetAuthToken", func() {
			JustBeforeEach(func() {
				err = middleware.SetAuthToken(w, "blah", expiry)
			})

			It("writes the token to a cookie", func() {
				// The write itself must succeed before the cookie is inspected;
				// previously this error was silently ignored.
				Expect(err).NotTo(HaveOccurred())

				cookies := w.Result().Cookies()
				Expect(cookies).To(HaveLen(1))
				Expect(cookies[0].Name).To(Equal("skymarshal_auth"))
				Expect(cookies[0].Expires.Unix()).To(Equal(expiry.Unix()))
				Expect(cookies[0].Value).To(Equal("blah"))
			})
		})

		Describe("UnsetAuthToken", func() {
			JustBeforeEach(func() {
				middleware.UnsetAuthToken(w)
			})

			It("clears the token from the cookie", func() {
				cookies := w.Result().Cookies()
				Expect(cookies).To(HaveLen(1))
				Expect(cookies[0].Name).To(Equal("skymarshal_auth"))
				Expect(cookies[0].Value).To(Equal(""))
			})
		})
	})

	Describe("CSRF Tokens", func() {
		Describe("GetCSRFToken", func() {
			var result string

			BeforeEach(func() {
				r.AddCookie(&http.Cookie{Name: "skymarshal_csrf", Value: "blah"})
			})

			JustBeforeEach(func() {
				result = middleware.GetCSRFToken(r)
			})

			It("gets the token from the request", func() {
				Expect(result).To(Equal("blah"))
			})
		})

		Describe("SetCSRFToken", func() {
			JustBeforeEach(func() {
				err = middleware.SetCSRFToken(w, "blah", expiry)
			})

			It("writes the token to a cookie", func() {
				// Fail fast if the middleware reported a write error.
				Expect(err).NotTo(HaveOccurred())

				cookies := w.Result().Cookies()
				Expect(cookies).To(HaveLen(1))
				Expect(cookies[0].Name).To(Equal("skymarshal_csrf"))
				Expect(cookies[0].Expires.Unix()).To(Equal(expiry.Unix()))
				Expect(cookies[0].Value).To(Equal("blah"))
			})
		})

		Describe("UnsetCSRFToken", func() {
			JustBeforeEach(func() {
				middleware.UnsetCSRFToken(w)
			})

			It("clears the token from the cookie", func() {
				cookies := w.Result().Cookies()
				Expect(cookies).To(HaveLen(1))
				Expect(cookies[0].Name).To(Equal("skymarshal_csrf"))
				Expect(cookies[0].Value).To(Equal(""))
			})
		})
	})

	Describe("State Tokens", func() {
		Describe("GetStateToken", func() {
			var result string

			BeforeEach(func() {
				r.AddCookie(&http.Cookie{Name: "skymarshal_state", Value: "blah"})
			})

			JustBeforeEach(func() {
				result = middleware.GetStateToken(r)
			})

			It("gets the token from the request", func() {
				Expect(result).To(Equal("blah"))
			})
		})

		Describe("SetStateToken", func() {
			JustBeforeEach(func() {
				err = middleware.SetStateToken(w, "blah", expiry)
			})

			It("writes the token to a cookie", func() {
				// Fail fast if the middleware reported a write error.
				Expect(err).NotTo(HaveOccurred())

				cookies := w.Result().Cookies()
				Expect(cookies).To(HaveLen(1))
				Expect(cookies[0].Name).To(Equal("skymarshal_state"))
				Expect(cookies[0].Expires.Unix()).To(Equal(expiry.Unix()))
				Expect(cookies[0].Value).To(Equal("blah"))
			})
		})

		Describe("UnsetStateToken", func() {
			JustBeforeEach(func() {
				middleware.UnsetStateToken(w)
			})

			It("clears the token from the cookie", func() {
				cookies := w.Result().Cookies()
				Expect(cookies).To(HaveLen(1))
				Expect(cookies[0].Name).To(Equal("skymarshal_state"))
				Expect(cookies[0].Value).To(Equal(""))
			})
		})
	})
})
| apache-2.0 |
vespa-engine/vespa | container-search/src/main/java/com/yahoo/search/dispatch/Dispatcher.java | 11288 | // Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.search.dispatch;
import com.google.inject.Inject;
import com.yahoo.component.AbstractComponent;
import com.yahoo.component.ComponentId;
import com.yahoo.compress.Compressor;
import com.yahoo.container.handler.VipStatus;
import com.yahoo.jdisc.Metric;
import com.yahoo.prelude.fastsearch.VespaBackEndSearcher;
import com.yahoo.processing.request.CompoundName;
import com.yahoo.search.Query;
import com.yahoo.search.Result;
import com.yahoo.search.cluster.ClusterMonitor;
import com.yahoo.search.dispatch.SearchPath.InvalidSearchPathException;
import com.yahoo.search.dispatch.rpc.RpcInvokerFactory;
import com.yahoo.search.dispatch.rpc.RpcPingFactory;
import com.yahoo.search.dispatch.rpc.RpcResourcePool;
import com.yahoo.search.dispatch.searchcluster.Group;
import com.yahoo.search.dispatch.searchcluster.Node;
import com.yahoo.search.dispatch.searchcluster.SearchCluster;
import com.yahoo.search.query.profile.types.FieldDescription;
import com.yahoo.search.query.profile.types.FieldType;
import com.yahoo.search.query.profile.types.QueryProfileType;
import com.yahoo.search.result.ErrorMessage;
import com.yahoo.vespa.config.search.DispatchConfig;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * A dispatcher communicates with search nodes to perform queries and fill hits.
 *
 * This class allocates {@link SearchInvoker} and {@link FillInvoker} objects based
 * on query properties and general system status. The caller can then use the provided
 * invocation object to execute the search or fill.
 *
 * This class is multithread safe.
 *
 * @author bratseth
 * @author ollvir
 */
public class Dispatcher extends AbstractComponent {

    public static final String DISPATCH = "dispatch";

    private static final String TOP_K_PROBABILITY = "topKProbability";
    private static final String INTERNAL_METRIC = "dispatch_internal";

    private static final int MAX_GROUP_SELECTION_ATTEMPTS = 3;

    /** If set will control computation of how many hits will be fetched from each partition.*/
    public static final CompoundName topKProbability = CompoundName.fromComponents(DISPATCH, TOP_K_PROBABILITY);

    /** A model of the search cluster this dispatches to */
    private final SearchCluster searchCluster;
    private final ClusterMonitor<Node> clusterMonitor;

    private final LoadBalancer loadBalancer;
    private final InvokerFactory invokerFactory;

    private final Metric metric;
    private final Metric.Context metricContext;
    private final int maxHitsPerNode;

    private static final QueryProfileType argumentType;

    static {
        argumentType = new QueryProfileType(DISPATCH);
        argumentType.setStrict(true);
        argumentType.setBuiltin(true);
        argumentType.addField(new FieldDescription(TOP_K_PROBABILITY, FieldType.doubleType));
        argumentType.freeze();
    }

    public static QueryProfileType getArgumentType() { return argumentType; }

    @Inject
    public Dispatcher(RpcResourcePool resourcePool,
                      ComponentId clusterId,
                      DispatchConfig dispatchConfig,
                      VipStatus vipStatus,
                      Metric metric) {
        this(resourcePool, new SearchCluster(clusterId.stringValue(), dispatchConfig,
                                             vipStatus, new RpcPingFactory(resourcePool)),
             dispatchConfig, metric);
    }

    private Dispatcher(RpcResourcePool resourcePool, SearchCluster searchCluster, DispatchConfig dispatchConfig, Metric metric) {
        this(new ClusterMonitor<>(searchCluster, true), searchCluster, dispatchConfig, new RpcInvokerFactory(resourcePool, searchCluster), metric);
    }

    /* Protected for simple mocking in tests. Beware that searchCluster is shutdown on in deconstruct() */
    protected Dispatcher(ClusterMonitor<Node> clusterMonitor,
                         SearchCluster searchCluster,
                         DispatchConfig dispatchConfig,
                         InvokerFactory invokerFactory,
                         Metric metric) {
        if (dispatchConfig.useMultilevelDispatch())
            throw new IllegalArgumentException(searchCluster + " is configured with multilevel dispatch, but this is not supported");

        this.searchCluster = searchCluster;
        this.clusterMonitor = clusterMonitor;
        this.loadBalancer = new LoadBalancer(searchCluster,
                                             dispatchConfig.distributionPolicy() == DispatchConfig.DistributionPolicy.ROUNDROBIN);
        this.invokerFactory = invokerFactory;
        this.metric = metric;
        this.metricContext = metric.createContext(null);
        this.maxHitsPerNode = dispatchConfig.maxHitsPerNode();
        searchCluster.addMonitoring(clusterMonitor);
        Thread warmup = new Thread(() -> warmup(dispatchConfig.warmuptime()));
        warmup.start();
        try {
            while ( ! searchCluster.hasInformationAboutAllNodes()) {
                Thread.sleep(1);
            }
            warmup.join();
        } catch (InterruptedException e) {
            // Restore the interrupt status instead of swallowing it, so callers
            // higher up the stack can still observe the cancellation request.
            Thread.currentThread().interrupt();
        }

        // Now we have information from all nodes and a ping iteration has completed.
        // Instead of waiting until next ping interval to update coverage and group state,
        // we should compute the state ourselves, so that when the dispatcher is ready the state
        // of its groups are also known.
        searchCluster.pingIterationCompleted();
    }

    /**
     * Will run important code in order to trigger JIT compilation and avoid cold start issues.
     * Currently warms up lz4 compression code.
     */
    private static long warmup(double seconds) {
        return new Compressor().warmup(seconds);
    }

    /** Returns the search cluster this dispatches to */
    public SearchCluster searchCluster() {
        return searchCluster;
    }

    @Override
    public void deconstruct() {
        // The clustermonitor must be shutdown first as it uses the invokerfactory through the searchCluster.
        clusterMonitor.shutdown();
        invokerFactory.release();
    }

    /** Returns an invoker which can fill the hits of the given result. */
    public FillInvoker getFillInvoker(Result result, VespaBackEndSearcher searcher) {
        return invokerFactory.createFillInvoker(searcher, result);
    }

    /**
     * Returns a search invoker for the given query: a search-path invoker if the
     * query specifies one, otherwise an internally load-balanced invoker.
     */
    public SearchInvoker getSearchInvoker(Query query, VespaBackEndSearcher searcher) {
        SearchInvoker invoker = getSearchPathInvoker(query, searcher).orElseGet(() -> getInternalInvoker(query, searcher));

        if (query.properties().getBoolean(com.yahoo.search.query.Model.ESTIMATE)) {
            query.setHits(0);
            query.setOffset(0);
        }
        metric.add(INTERNAL_METRIC, 1, metricContext);
        return invoker;
    }

    /** Builds an invoker based on searchpath */
    private Optional<SearchInvoker> getSearchPathInvoker(Query query, VespaBackEndSearcher searcher) {
        String searchPath = query.getModel().getSearchPath();
        if (searchPath == null) return Optional.empty();

        try {
            List<Node> nodes = SearchPath.selectNodes(searchPath, searchCluster);
            if (nodes.isEmpty()) return Optional.empty();

            query.trace(false, 2, "Dispatching with search path ", searchPath);
            return invokerFactory.createSearchInvoker(searcher,
                                                      query,
                                                      nodes,
                                                      true,
                                                      maxHitsPerNode);
        } catch (InvalidSearchPathException e) {
            return Optional.of(new SearchErrorInvoker(ErrorMessage.createIllegalQuery(e.getMessage())));
        }
    }

    /**
     * Builds an invoker by selecting a group through the load balancer, retrying
     * up to {@link #MAX_GROUP_SELECTION_ATTEMPTS} times before giving up.
     */
    private SearchInvoker getInternalInvoker(Query query, VespaBackEndSearcher searcher) {
        Optional<Node> directNode = searchCluster.localCorpusDispatchTarget();
        if (directNode.isPresent()) {
            Node node = directNode.get();
            query.trace(false, 2, "Dispatching to ", node);
            return invokerFactory.createSearchInvoker(searcher,
                                                      query,
                                                      List.of(node),
                                                      true,
                                                      maxHitsPerNode)
                                 .orElseThrow(() -> new IllegalStateException("Could not dispatch directly to " + node));
        }

        int covered = searchCluster.groupsWithSufficientCoverage();
        int groups = searchCluster.orderedGroups().size();
        int max = Integer.min(Integer.min(covered + 1, groups), MAX_GROUP_SELECTION_ATTEMPTS);
        Set<Integer> rejected = rejectGroupBlockingFeed(searchCluster.orderedGroups());
        for (int i = 0; i < max; i++) {
            Optional<Group> groupInCluster = loadBalancer.takeGroup(rejected);
            if (groupInCluster.isEmpty()) break; // No groups available

            Group group = groupInCluster.get();
            // Only the last attempt is allowed to accept a group with incomplete coverage.
            boolean acceptIncompleteCoverage = (i == max - 1);
            Optional<SearchInvoker> invoker = invokerFactory.createSearchInvoker(searcher,
                                                                                 query,
                                                                                 group.nodes(),
                                                                                 acceptIncompleteCoverage,
                                                                                 maxHitsPerNode);
            if (invoker.isPresent()) {
                query.trace(false, 2, "Dispatching to group ", group.id());
                query.getModel().setSearchPath("/" + group.id());
                invoker.get().teardown((success, time) -> loadBalancer.releaseGroup(group, success, time));
                return invoker.get();
            } else {
                loadBalancer.releaseGroup(group, false, 0);
                if (rejected == null) {
                    rejected = new HashSet<>();
                }
                rejected.add(group.id());
            }
        }
        throw new IllegalStateException("No suitable groups to dispatch query. Rejected: " + rejected);
    }

    /**
     * We want to avoid groups blocking feed because their data may be out of date.
     * If there is a single group blocking feed, we want to reject it.
     * If multiple groups are blocking feed we should use them anyway as we may not have remaining
     * capacity otherwise. Same if there are no other groups.
     *
     * @return a modifiable set containing the single group to reject, or null otherwise
     */
    private Set<Integer> rejectGroupBlockingFeed(List<Group> groups) {
        if (groups.size() == 1) return null;
        List<Group> groupsRejectingFeed = groups.stream().filter(Group::isBlockingWrites).collect(Collectors.toList());
        if (groupsRejectingFeed.size() != 1) return null;
        Set<Integer> rejected = new HashSet<>();
        rejected.add(groupsRejectingFeed.get(0).id());
        return rejected;
    }

}
| apache-2.0 |
leowh/colla | app/oa/block/view/index.html.php | 466 | <?php
/**
 * The index view file of block module of RanZhi.
 *
 * @copyright Copyright 2009-2015 青岛易软天创网络科技有限公司(QingDao Nature Easy Soft Network Technology Co,LTD, www.cnezsoft.com)
 * @license ZPL (http://zpl.pub/page/zplv12.html)
 * @author Yidong Wang <yidong@cnezsoft.com>
 * @package block
 * @version $Id$
 * @link http://www.ranzhico.com
 */
// Normalize the block code so the include below matches the lower-case
// "<code>block.html.php" view file naming convention.
$code = strtolower($code);
// NOTE(review): $code is interpolated directly into the include path. If it can
// ever carry user input this is a local-file-inclusion risk — confirm upstream
// callers only pass known, whitelisted block codes.
include "{$code}block.html.php";
?>
| apache-2.0 |
terrancesnyder/solr-analytics | solr/core/src/java/org/apache/solr/util/SolrPluginUtils.java | 31135 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.util;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.queryparser.classic.ParseException;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.*;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.params.CommonParams;
import org.apache.solr.common.params.SolrParams;
import org.apache.solr.common.params.UpdateParams;
import org.apache.solr.common.util.NamedList;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.common.util.StrUtils;
import org.apache.solr.core.SolrCore;
import org.apache.solr.handler.component.HighlightComponent;
import org.apache.solr.handler.component.ResponseBuilder;
import org.apache.solr.highlight.SolrHighlighter;
import org.apache.solr.request.SolrQueryRequest;
import org.apache.solr.response.SolrQueryResponse;
import org.apache.solr.schema.IndexSchema;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.*;
import org.apache.solr.update.DocumentBuilder;
import org.slf4j.Logger;
import java.io.IOException;
import java.util.*;
import java.util.regex.Pattern;
import java.lang.reflect.Method;
import java.lang.reflect.InvocationTargetException;
/**
* <p>Utilities that may be of use to RequestHandlers.</p>
*
* <p>
* Many of these functions have code that was stolen/mutated from
* StandardRequestHandler.
* </p>
*
* <p>:TODO: refactor StandardRequestHandler to use these utilities</p>
*
* <p>:TODO: Many "standard" functionality methods are not cognisant of
* default parameter settings.
*/
public class SolrPluginUtils {
/**
 * Installs the standard layering of request parameters on a SolrQueryRequest.
 *
 * RequestHandlers call this so that their configured defaults, appends and
 * invariants become visible to downstream components such as the response
 * writer. The resulting parameter resolution order is: invariants always win,
 * then the client's own params, then defaults; {@code appends} contributes
 * extra values for multi-valued params (and acts as another defaults layer for
 * single-valued ones).
 *
 * @param req the request whose params are rewritten in place
 * @param defaults values used when the request does not specify them
 * @param appends values appended to request (or default) values
 * @param invariants values that override everything else
 */
public static void setDefaults(SolrQueryRequest req, SolrParams defaults,
                               SolrParams appends, SolrParams invariants) {
  SolrParams merged =
      SolrParams.wrapDefaults(
          invariants,
          SolrParams.wrapAppended(
              SolrParams.wrapDefaults(req.getParams(), defaults),
              appends));
  req.setParams(merged);
}
/**
 * Counts documents matching {@code q}, optionally restricted by filter {@code f}.
 *
 * SolrIndexSearcher.numDocs(Query,Query) does not accept a null filtering
 * query, so this guards that case and falls back to the doc-set size.
 */
public static int numDocs(SolrIndexSearcher s, Query q, Query f)
  throws IOException {
  if (f == null) {
    return s.getDocSet(q).size();
  }
  return s.numDocs(q, f);
}
// Separator pattern: a single comma or a single space.
private final static Pattern splitList = Pattern.compile(",| ");

/** Splits a trimmed value on commas and/or single spaces. */
public static String[] split(String value) {
  final String trimmed = value.trim();
  return splitList.split(trimmed, 0);
}
/**
 * Pre-fetch documents into the index searcher's document cache.
 *
 * This is an entirely optional step which you might want to perform for
 * the following reasons:
 *
 * <ul>
 * <li>Locates the document-retrieval costs in one spot, which helps
 *     detailed performance measurement</li>
 *
 * <li>Determines a priori what fields will be needed to be fetched by
 *     various subtasks, like response writing and highlighting. This
 *     minimizes the chance that many needed fields will be loaded lazily.
 *     (it is more efficient to load all the field we require normally).</li>
 * </ul>
 *
 * If lazy field loading is disabled, this method does nothing.
 */
public static void optimizePreFetchDocs(ResponseBuilder rb,
                                        DocList docs,
                                        Query query,
                                        SolrQueryRequest req,
                                        SolrQueryResponse res) throws IOException {
  SolrIndexSearcher searcher = req.getSearcher();
  if(!searcher.enableLazyFieldLoading) {
    // nothing to do: without lazy loading every stored field is read anyway
    return;
  }

  // Only pre-fetch when an explicit field list was requested; with no field
  // restriction there is no narrower set worth warming the cache with.
  ReturnFields returnFields = res.getReturnFields();
  if(returnFields.getLuceneFieldNames() != null) {
    Set<String> fieldFilter = returnFields.getLuceneFieldNames();

    if (rb.doHighlights) {
      // copy return fields list (do not mutate the set owned by ReturnFields)
      fieldFilter = new HashSet<String>(fieldFilter);
      // add highlight fields, since highlighting will need them too
      SolrHighlighter highlighter = HighlightComponent.getHighlighter(req.getCore());
      for (String field: highlighter.getHighlightFields(query, req, null))
        fieldFilter.add(field);

      // fetch unique key if one exists.
      SchemaField keyField = req.getSearcher().getSchema().getUniqueKeyField();
      if(null != keyField)
        fieldFilter.add(keyField.getName());
    }

    // get documents: each doc() call populates the searcher's document cache
    DocIterator iter = docs.iterator();
    for (int i=0; i<docs.size(); i++) {
      searcher.doc(iter.nextDoc(), fieldFilter);
    }
  }
}
/**
 * Applies the requested debug interests ("all"/"true", timing, query, results)
 * to the given ResponseBuilder.
 *
 * NOTE(review): the returned set is allocated but never populated by this
 * method, so callers always receive an empty set — confirm whether any caller
 * actually relies on its contents before changing this.
 */
public static Set<String> getDebugInterests(String[] params, ResponseBuilder rb){
  Set<String> debugInterests = new HashSet<String>();
  if (params == null) {
    return debugInterests;
  }
  for (String param : params) {
    if (param.equalsIgnoreCase("all") || param.equalsIgnoreCase("true")) {
      rb.setDebug(true);
      // "all"/"true" enables everything, so remaining params are not examined.
      break;
    } else if (param.equals(CommonParams.TIMING)) {
      rb.setDebugTimings(true);
    } else if (param.equals(CommonParams.QUERY)) {
      rb.setDebugQuery(true);
    } else if (param.equals(CommonParams.RESULTS)) {
      rb.setDebugResults(true);
    }
  }
  return debugInterests;
}
/**
 * <p>
 * Returns a NamedList containing many "standard" pieces of debugging
 * information: the raw and preprocessed query strings, the parsed query in
 * two renderings, score explanations for the result set, and (when the
 * 'explainOther' param is set) explanations against that other query.
 * </p>
 *
 * @param req the request we are dealing with
 * @param userQuery the users query as a string, after any basic
 *                  preprocessing has been done
 * @param query the query built from the userQuery
 *              (and perhaps other clauses) that identifies the main
 *              result set of the response.
 * @param results the main result set of the response
 * @param dbgQuery whether query-related debug entries should be included
 * @param dbgResults whether result/explanation debug entries should be included
 * @return The debug info
 * @throws java.io.IOException if there was an IO error
 */
public static NamedList doStandardDebug(
    SolrQueryRequest req,
    String userQuery,
    Query query,
    DocList results,
    boolean dbgQuery,
    boolean dbgResults)
    throws IOException {
  NamedList debugInfo = new SimpleOrderedMap();
  doStandardQueryDebug(req, userQuery, query, dbgQuery, debugInfo);
  doStandardResultsDebug(req, query, results, dbgResults, debugInfo);
  return debugInfo;
}
public static void doStandardQueryDebug(
SolrQueryRequest req,
String userQuery,
Query query,
boolean dbgQuery,
NamedList dbg)
{
if (dbgQuery) {
/* userQuery may have been pre-processed .. expose that */
dbg.add("rawquerystring", req.getParams().get(CommonParams.Q));
dbg.add("querystring", userQuery);
/* QueryParsing.toString isn't perfect, use it to see converted
* values, use regular toString to see any attributes of the
* underlying Query it may have missed.
*/
dbg.add("parsedquery", QueryParsing.toString(query, req.getSchema()));
dbg.add("parsedquery_toString", query.toString());
}
}
  /**
   * Adds the "explain" section (and, when the explainOther param is present,
   * "otherQuery"/"explainOther") to the supplied debug NamedList when results
   * debugging is enabled.
   *
   * @param req the current request (supplies searcher, schema and params)
   * @param query the main query whose scoring is explained
   * @param results the documents to explain
   * @param dbgResults if false this method does nothing
   * @param dbg the debug NamedList appended to (side effect)
   * @throws IOException if reading from the index fails
   */
  public static void doStandardResultsDebug(
          SolrQueryRequest req,
          Query query,
          DocList results,
          boolean dbgResults,
          NamedList dbg) throws IOException
  {
    if (dbgResults) {
      SolrIndexSearcher searcher = req.getSearcher();
      IndexSchema schema = req.getSchema();
      // structured (NamedList) vs plain-string rendering of explanations
      boolean explainStruct = req.getParams().getBool(CommonParams.EXPLAIN_STRUCT, false);
      NamedList<Explanation> explain = getExplanations(query, results, searcher, schema);
      dbg.add("explain", explainStruct
              ? explanationsToNamedLists(explain)
              : explanationsToStrings(explain));
      String otherQueryS = req.getParams().get(CommonParams.EXPLAIN_OTHER);
      if (otherQueryS != null && otherQueryS.length() > 0) {
        // explain the main query against the top 10 docs of the "other" query
        DocList otherResults = doSimpleQuery(otherQueryS, req, 0, 10);
        dbg.add("otherQuery", otherQueryS);
        NamedList<Explanation> explainO = getExplanations(query, otherResults, searcher, schema);
        dbg.add("explainOther", explainStruct
                ? explanationsToNamedLists(explainO)
                : explanationsToStrings(explainO));
      }
    }
  }
public static NamedList<Object> explanationToNamedList(Explanation e) {
NamedList<Object> out = new SimpleOrderedMap<Object>();
out.add("match", e.isMatch());
out.add("value", e.getValue());
out.add("description", e.getDescription());
Explanation[] details = e.getDetails();
// short circut out
if (null == details || 0 == details.length) return out;
List<NamedList<Object>> kids
= new ArrayList<NamedList<Object>>(details.length);
for (Explanation d : details) {
kids.add(explanationToNamedList(d));
}
out.add("details", kids);
return out;
}
  /**
   * Converts a NamedList of Explanations into a NamedList of their
   * structured (NamedList) renderings, preserving keys and order.
   */
  public static NamedList<NamedList<Object>> explanationsToNamedLists
    (NamedList<Explanation> explanations) {
    NamedList<NamedList<Object>> out
      = new SimpleOrderedMap<NamedList<Object>>();
    for (Map.Entry<String,Explanation> entry : explanations) {
      out.add(entry.getKey(), explanationToNamedList(entry.getValue()));
    }
    return out;
  }
  /**
   * Generates an NamedList of Explanations for each item in a list of docs.
   *
   * @param query The Query you want explanations in the context of
   * @param docs The Documents you want explained relative that query
   * @param searcher the searcher used to score and explain the docs
   * @param schema the schema, used to derive a printable unique key per doc
   * @throws IOException if reading from the index fails
   */
  public static NamedList<Explanation> getExplanations
    (Query query,
     DocList docs,
     SolrIndexSearcher searcher,
     IndexSchema schema) throws IOException {
    NamedList<Explanation> explainList = new SimpleOrderedMap<Explanation>();
    DocIterator iterator = docs.iterator();
    for (int i=0; i<docs.size(); i++) {
      int id = iterator.nextDoc();
      Document doc = searcher.doc(id);
      // key each explanation by the doc's printable unique key
      String strid = schema.printableUniqueKey(doc);
      explainList.add(strid, searcher.explain(query, id) );
    }
    return explainList;
  }
  /**
   * Converts a NamedList of Explanations into a NamedList of their
   * plain-text renderings (each prefixed with a newline for readability).
   */
  private static NamedList<String> explanationsToStrings
    (NamedList<Explanation> explanations) {
    NamedList<String> out = new SimpleOrderedMap<String>();
    for (Map.Entry<String,Explanation> entry : explanations) {
      out.add(entry.getKey(), "\n"+entry.getValue().toString());
    }
    return out;
  }
/**
* Executes a basic query
*/
public static DocList doSimpleQuery(String sreq,
SolrQueryRequest req,
int start, int limit) throws IOException {
List<String> commands = StrUtils.splitSmart(sreq,';');
String qs = commands.size() >= 1 ? commands.get(0) : "";
try {
Query query = QParser.getParser(qs, null, req).getQuery();
// If the first non-query, non-filter command is a simple sort on an indexed field, then
// we can use the Lucene sort ability.
Sort sort = null;
if (commands.size() >= 2) {
sort = QueryParsing.parseSort(commands.get(1), req);
}
DocList results = req.getSearcher().getDocList(query,(DocSet)null, sort, start, limit);
return results;
} catch (ParseException e) {
throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing query: " + qs);
}
}
private static final Pattern whitespacePattern = Pattern.compile("\\s+");
private static final Pattern caratPattern = Pattern.compile("\\^");
private static final Pattern tildePattern = Pattern.compile("[~]");
/**
* Given a string containing fieldNames and boost info,
* converts it to a Map from field name to boost info.
*
* <p>
* Doesn't care if boost info is negative, you're on your own.
* </p>
* <p>
* Doesn't care if boost info is missing, again: you're on your own.
* </p>
*
* @param in a String like "fieldOne^2.3 fieldTwo fieldThree^-0.4"
* @return Map of fieldOne => 2.3, fieldTwo => null, fieldThree => -0.4
*/
public static Map<String,Float> parseFieldBoosts(String in) {
return parseFieldBoosts(new String[]{in});
}
/**
* Like <code>parseFieldBoosts(String)</code>, but parses all the strings
* in the provided array (which may be null).
*
* @param fieldLists an array of Strings eg. <code>{"fieldOne^2.3", "fieldTwo", fieldThree^-0.4}</code>
* @return Map of fieldOne => 2.3, fieldTwo => null, fieldThree => -0.4
*/
public static Map<String,Float> parseFieldBoosts(String[] fieldLists) {
if (null == fieldLists || 0 == fieldLists.length) {
return new HashMap<String,Float>();
}
Map<String, Float> out = new HashMap<String,Float>(7);
for (String in : fieldLists) {
if (null == in) {
continue;
}
in = in.trim();
if(in.length()==0) {
continue;
}
String[] bb = whitespacePattern.split(in);
for (String s : bb) {
String[] bbb = caratPattern.split(s);
out.put(bbb[0], 1 == bbb.length ? null : Float.valueOf(bbb[1]));
}
}
return out;
}
  /**
   * Like {@link #parseFieldBoosts}, but allows for an optional slop value prefixed by "~".
   *
   * @param fieldLists - an array of Strings eg. <code>{"fieldOne^2.3", "fieldTwo", fieldThree~5^-0.4}</code>
   * @param wordGrams - (0=all words, 2,3 = shingle size)
   * @param defaultSlop - the default slop for this param
   * @return - FieldParams containing the fieldname,boost,slop,and shingle size
   */
  public static List<FieldParams> parseFieldBoostsAndSlop(String[] fieldLists,int wordGrams,int defaultSlop) {
    if (null == fieldLists || 0 == fieldLists.length) {
      return new ArrayList<FieldParams>();
    }
    List<FieldParams> out = new ArrayList<FieldParams>();
    for (String in : fieldLists) {
      if (null == in) {
        continue;
      }
      in = in.trim();
      if(in.length()==0) {
        continue;
      }
      String[] fieldConfigs = whitespacePattern.split(in);
      for (String s : fieldConfigs) {
        // each token looks like "field[~slop][^boost]"
        String[] fieldAndSlopVsBoost = caratPattern.split(s);
        String[] fieldVsSlop = tildePattern.split(fieldAndSlopVsBoost[0]);
        String field = fieldVsSlop[0];
        // a missing "~slop" falls back to the supplied default
        int slop = (2 == fieldVsSlop.length) ? Integer.valueOf(fieldVsSlop[1]) : defaultSlop;
        // NOTE: unlike parseFieldBoosts, a missing boost defaults to 1 (not null)
        Float boost = (1 == fieldAndSlopVsBoost.length) ? 1 : Float.valueOf(fieldAndSlopVsBoost[1]);
        FieldParams fp = new FieldParams(field,wordGrams,slop,boost);
        out.add(fp);
      }
    }
    return out;
  }
  /**
   * Checks the number of optional clauses in the query, and compares it
   * with the specification string to determine the proper value to use.
   *
   * <p>
   * Details about the specification format can be found
   * <a href="doc-files/min-should-match.html">here</a>
   * </p>
   *
   * <p>A few important notes...</p>
   * <ul>
   * <li>
   * If the calculations based on the specification determine that no
   * optional clauses are needed, BooleanQuery.setMinimumNumberShouldMatch
   * will never be called, but the usual rules about BooleanQueries
   * still apply at search time (a BooleanQuery containing no required
   * clauses must still match at least one optional clause)
   * </li>
   * <li>
   * No matter what number the calculation arrives at,
   * BooleanQuery.setMinimumNumberShouldMatch() will never be called with a
   * value greater than the number of optional clauses (or less than 1)
   * </li>
   * </ul>
   *
   * <p>:TODO: should optimize the case where number is same
   * as clauses to just make them all "required"
   * </p>
   *
   * @param q the query whose minimum-should-match is adjusted (mutated)
   * @param spec the min-should-match specification string
   */
  public static void setMinShouldMatch(BooleanQuery q, String spec) {
    // only SHOULD clauses count as "optional"
    int optionalClauses = 0;
    for (BooleanClause c : q.clauses()) {
      if (c.getOccur() == Occur.SHOULD) {
        optionalClauses++;
      }
    }
    int msm = calculateMinShouldMatch(optionalClauses, spec);
    if (0 < msm) {
      q.setMinimumNumberShouldMatch(msm);
    }
  }
// private static Pattern spaceAroundLessThanPattern = Pattern.compile("\\s*<\\s*");
private static Pattern spaceAroundLessThanPattern = Pattern.compile("(\\s+<\\s*)|(\\s*<\\s+)");
private static Pattern spacePattern = Pattern.compile(" ");
private static Pattern lessThanPattern = Pattern.compile("<");
/**
* helper exposed for UnitTests
* @see #setMinShouldMatch
*/
static int calculateMinShouldMatch(int optionalClauseCount, String spec) {
int result = optionalClauseCount;
spec = spec.trim();
if (-1 < spec.indexOf("<")) {
/* we have conditional spec(s) */
spec = spaceAroundLessThanPattern.matcher(spec).replaceAll("<");
for (String s : spacePattern.split(spec)) {
String[] parts = lessThanPattern.split(s,0);
int upperBound = Integer.parseInt(parts[0]);
if (optionalClauseCount <= upperBound) {
return result;
} else {
result = calculateMinShouldMatch
(optionalClauseCount, parts[1]);
}
}
return result;
}
/* otherwise, simple expresion */
if (-1 < spec.indexOf('%')) {
/* percentage - assume the % was the last char. If not, let Integer.parseInt fail. */
spec = spec.substring(0,spec.length()-1);
int percent = Integer.parseInt(spec);
float calc = (result * percent) * (1/100f);
result = calc < 0 ? result + (int)calc : (int)calc;
} else {
int calc = Integer.parseInt(spec);
result = calc < 0 ? result + calc : calc;
}
return (optionalClauseCount < result ?
optionalClauseCount : (result < 0 ? 0 : result));
}
  /**
   * Recursively walks the "from" query pulling out sub-queries and
   * adding them to the "to" query.
   *
   * <p>
   * Boosts are multiplied as needed.  Sub-BooleanQueryies which are not
   * optional will not be flattened.  From will be mangled during the walk,
   * so do not attempt to reuse it.
   * </p>
   */
  public static void flattenBooleanQuery(BooleanQuery to, BooleanQuery from) {
    for (BooleanClause clause : from.clauses()) {
      Query cq = clause.getQuery();
      // push the parent's boost down onto each child (mutates the child)
      cq.setBoost(cq.getBoost() * from.getBoost());
      if (cq instanceof BooleanQuery
          && !clause.isRequired()
          && !clause.isProhibited()) {
        /* we can recurse */
        flattenBooleanQuery(to, (BooleanQuery)cq);
      } else {
        to.add(clause);
      }
    }
  }
/**
* Escapes all special characters except '"', '-', and '+'
*
* @see QueryParser#escape
*/
public static CharSequence partialEscape(CharSequence s) {
StringBuilder sb = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
if (c == '\\' || c == '!' || c == '(' || c == ')' ||
c == ':' || c == '^' || c == '[' || c == ']' || c == '/' ||
c == '{' || c == '}' || c == '~' || c == '*' || c == '?'
) {
sb.append('\\');
}
sb.append(c);
}
return sb;
}
// Pattern to detect dangling operator(s) at end of query
// \s+[-+\s]+$
private final static Pattern DANGLING_OP_PATTERN = Pattern.compile( "\\s+[-+\\s]+$" );
// Pattern to detect consecutive + and/or - operators
// \s+[+-](?:\s*[+-]+)+
private final static Pattern CONSECUTIVE_OP_PATTERN = Pattern.compile( "\\s+[+-](?:\\s*[+-]+)+" );
/**
* Strips operators that are used illegally, otherwise reuturns it's
* input. Some examples of illegal user queries are: "chocolate +-
* chip", "chocolate - - chip", and "chocolate chip -".
*/
public static CharSequence stripIllegalOperators(CharSequence s) {
String temp = CONSECUTIVE_OP_PATTERN.matcher( s ).replaceAll( " " );
return DANGLING_OP_PATTERN.matcher( temp ).replaceAll( "" );
}
/**
* Returns it's input if there is an even (ie: balanced) number of
* '"' characters -- otherwise returns a String in which all '"'
* characters are striped out.
*/
public static CharSequence stripUnbalancedQuotes(CharSequence s) {
int count = 0;
for (int i = 0; i < s.length(); i++) {
if (s.charAt(i) == '\"') { count++; }
}
if (0 == (count & 1)) {
return s;
}
return s.toString().replace("\"","");
}
  /**
   * Returns a copy of the given NamedList with every entry whose name is
   * null removed; if no entry has a null name the original list is returned
   * unchanged.  The copy preserves the concrete type (SimpleOrderedMap vs
   * NamedList) of the input.
   */
  public static NamedList removeNulls(NamedList nl) {
    for (int i=0; i<nl.size(); i++) {
      if (nl.getName(i)==null) {
        // found at least one null name: rebuild the list without them
        NamedList newList = nl instanceof SimpleOrderedMap ? new SimpleOrderedMap() : new NamedList();
        for (int j=0; j<nl.size(); j++) {
          String n = nl.getName(j);
          if (n != null) {
            newList.add(n, nl.getVal(j));
          }
        }
        return newList;
      }
    }
    return nl;
  }
  /**
   * A subclass of SolrQueryParser that supports aliasing fields for
   * constructing DisjunctionMaxQueries.
   */
  public static class DisjunctionMaxQueryParser extends SolrQueryParser {
    /** A simple container for storing alias info
     * @see #aliases
     */
    protected static class Alias {
      // tiebreaker used when building the DisjunctionMaxQuery
      public float tie;
      // real field name -> boost (a null boost means "no boost")
      public Map<String,Float> fields;
    }
    /**
     * Where we store a map from field name we expect to see in our query
     * string, to Alias object containing the fields to use in our
     * DisjunctionMaxQuery and the tiebreaker to use.
     */
    protected Map<String,Alias> aliases = new HashMap<String,Alias>(3);
    public DisjunctionMaxQueryParser(QParser qp, String defaultField) {
      super(qp,defaultField);
      // don't trust that our parent class won't ever change it's default
      setDefaultOperator(QueryParser.Operator.OR);
    }
    /**
     * Add an alias to this query parser.
     *
     * @param field the field name that should trigger alias mapping
     * @param fieldBoosts the mapping from fieldname to boost value that
     *                    should be used to build up the clauses of the
     *                    DisjunctionMaxQuery.
     * @param tiebreaker to the tiebreaker to be used in the
     *                   DisjunctionMaxQuery
     * @see SolrPluginUtils#parseFieldBoosts
     */
    public void addAlias(String field, float tiebreaker,
                         Map<String,Float> fieldBoosts) {
      Alias a = new Alias();
      a.tie = tiebreaker;
      a.fields = fieldBoosts;
      aliases.put(field, a);
    }
    /**
     * Delegates to the super class unless the field has been specified
     * as an alias -- in which case we recurse on each of
     * the aliased fields, and the results are composed into a
     * DisjunctionMaxQuery.  (so yes: aliases which point at other
     * aliases should work)
     */
    @Override
    protected Query getFieldQuery(String field, String queryText, boolean quoted)
      throws ParseException {
      if (aliases.containsKey(field)) {
        Alias a = aliases.get(field);
        DisjunctionMaxQuery q = new DisjunctionMaxQuery(a.tie);
        /* we might not get any valid queries from delegation,
         * in which case we should return null
         */
        boolean ok = false;
        for (String f : a.fields.keySet()) {
          // recurse: aliased fields may themselves be aliases
          Query sub = getFieldQuery(f,queryText,quoted);
          if (null != sub) {
            if (null != a.fields.get(f)) {
              sub.setBoost(a.fields.get(f));
            }
            q.add(sub);
            ok = true;
          }
        }
        return ok ? q : null;
      } else {
        try {
          return super.getFieldQuery(field, queryText, quoted);
        } catch (Exception e) {
          // NOTE(review): any failure from the superclass is silently turned
          // into null (the clause is dropped) -- confirm this is intended
          return null;
        }
      }
    }
  }
  /**
   * Determines the correct Sort based on the request parameter "sort"
   *
   * @return null if no sort is specified.
   */
  public static Sort getSort(SolrQueryRequest req) {
    String sort = req.getParams().get(CommonParams.SORT);
    if (null == sort || sort.equals("")) {
      return null;
    }
    SolrException sortE = null;
    Sort ss = null;
    try {
      ss = QueryParsing.parseSort(sort, req);
    } catch (SolrException e) {
      sortE = e;
    }
    if ((null == ss) || (null != sortE)) {
      /* we definitely had some sort of sort string from the user,
       * but no SortSpec came out of it
       */
      // invalid sorts are logged and ignored rather than failing the request
      SolrCore.log.warn("Invalid sort \""+sort+"\" was specified, ignoring", sortE);
      return null;
    }
    return ss;
  }
  /** Turns an array of query strings into a List of Query objects.
   *
   * @param req the request, used to resolve the query parser
   * @param queries the raw query strings; null/blank entries are skipped,
   *                so an all-blank array yields an empty list
   * @return null if no queries are generated
   * @throws ParseException if any query string fails to parse
   */
  public static List<Query> parseQueryStrings(SolrQueryRequest req,
                                              String[] queries) throws ParseException {
    if (null == queries || 0 == queries.length) return null;
    List<Query> out = new ArrayList<Query>(queries.length);
    for (String q : queries) {
      if (null != q && 0 != q.trim().length()) {
        out.add(QParser.getParser(q, null, req).getQuery());
      }
    }
    return out;
  }
  /**
   * A CacheRegenerator that can be used whenever the items in the cache
   * are not dependent on the current searcher.
   *
   * <p>
   * Flat out copies the oldKey=&gt;oldVal pair into the newCache
   * </p>
   */
  public static class IdentityRegenerator implements CacheRegenerator {
    public boolean regenerateItem(SolrIndexSearcher newSearcher,
                                  SolrCache newCache,
                                  SolrCache oldCache,
                                  Object oldKey,
                                  Object oldVal)
      throws IOException {
      // the item is searcher independent, so the old entry is reused as-is
      newCache.put(oldKey,oldVal);
      return true;
    }
  }
/**
* Convert a DocList to a SolrDocumentList
*
* The optional param "ids" is populated with the lucene document id
* for each SolrDocument.
*
* @param docs The {@link org.apache.solr.search.DocList} to convert
* @param searcher The {@link org.apache.solr.search.SolrIndexSearcher} to use to load the docs from the Lucene index
* @param fields The names of the Fields to load
* @param ids A map to store the ids of the docs
* @return The new {@link org.apache.solr.common.SolrDocumentList} containing all the loaded docs
* @throws java.io.IOException if there was a problem loading the docs
* @since solr 1.4
*/
public static SolrDocumentList docListToSolrDocumentList(
DocList docs,
SolrIndexSearcher searcher,
Set<String> fields,
Map<SolrDocument, Integer> ids ) throws IOException
{
IndexSchema schema = searcher.getSchema();
SolrDocumentList list = new SolrDocumentList();
list.setNumFound(docs.matches());
list.setMaxScore(docs.maxScore());
list.setStart(docs.offset());
DocIterator dit = docs.iterator();
while (dit.hasNext()) {
int docid = dit.nextDoc();
Document luceneDoc = searcher.doc(docid, fields);
SolrDocument doc = new SolrDocument();
for( IndexableField field : luceneDoc) {
if (null == fields || fields.contains(field.name())) {
SchemaField sf = schema.getField( field.name() );
doc.addField( field.name(), sf.getType().toObject( field ) );
}
}
if (docs.hasScores() && (null == fields || fields.contains("score"))) {
doc.addField("score", dit.score());
}
list.add( doc );
if( ids != null ) {
ids.put( doc, new Integer(docid) );
}
}
return list;
}
public static void invokeSetters(Object bean, NamedList initArgs) {
if (initArgs == null) return;
Class clazz = bean.getClass();
Method[] methods = clazz.getMethods();
Iterator<Map.Entry<String, Object>> iterator = initArgs.iterator();
while (iterator.hasNext()) {
Map.Entry<String, Object> entry = iterator.next();
String key = entry.getKey();
String setterName = "set" + String.valueOf(Character.toUpperCase(key.charAt(0))) + key.substring(1);
Method method = null;
try {
for (Method m : methods) {
if (m.getName().equals(setterName) && m.getParameterTypes().length == 1) {
method = m;
break;
}
}
if (method == null) {
throw new RuntimeException("no setter corrresponding to '" + key + "' in " + clazz.getName());
}
Class pClazz = method.getParameterTypes()[0];
Object val = entry.getValue();
method.invoke(bean, val);
} catch (InvocationTargetException e1) {
throw new RuntimeException("Error invoking setter " + setterName + " on class : " + clazz.getName(), e1);
} catch (IllegalAccessException e1) {
throw new RuntimeException("Error invoking setter " + setterName + " on class : " + clazz.getName(), e1);
}
}
}
}
| apache-2.0 |
iSergio/gwt-cs | cesiumjs4gwt-main/src/main/java/org/cesiumjs/cs/scene/TileDiscardPolicy.java | 1735 | /*
* Copyright 2018 iserge.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.cesiumjs.cs.scene;
import com.google.gwt.user.client.ui.Image;
import jsinterop.annotations.JsConstructor;
import jsinterop.annotations.JsMethod;
import jsinterop.annotations.JsType;
/**
 * GWT JsInterop binding for Cesium's {@code TileDiscardPolicy}: a policy for
 * discarding tile images according to some criteria.  This type describes an
 * interface and is not intended to be instantiated directly.
 *
 * @author Serge Silaev aka iSergio
 */
@JsType(isNative = true, namespace = "Cesium", name = "TileDiscardPolicy")
public class TileDiscardPolicy {
    /**
     * A policy for discarding tile images according to some criteria. This type
     * describes an interface and is not intended to be instantiated directly.
     */
    @JsConstructor
    public TileDiscardPolicy() {
    }

    /**
     * Determines if the discard policy is ready to process images.
     *
     * @return True if the discard policy is ready to process images; otherwise,
     * false.
     */
    @JsMethod
    public native boolean isReady();

    /**
     * Given a tile imagery, decide whether to discard that imagery.
     *
     * @param image An imagery to test.
     * @return True if the imagery should be discarded; otherwise, false.
     */
    @SuppressWarnings("unusable-by-js")
    @JsMethod
    public native boolean shouldDiscardImage(Image image);
}
| apache-2.0 |
WeTheInternet/collide | client/src/main/java/collide/demo/shared/SharedClass.java | 1206 | package collide.demo.shared;
import xapi.annotation.compile.MagicMethod;
import xapi.log.X_Log;
import xapi.util.X_Runtime;
import com.google.gwt.reflect.client.strategy.ReflectionStrategy;
import java.io.File;
/**
 * This is a test class for our reflection api;
 * it is annotated for gwt to pull out reflection data,
 * plus it includes separate implementations for the JVM
 * and JavaScript runtimes.
 *
 * @author "James X. Nelson (james@wetheinter.net)"
 *
 */
@ReflectionStrategy(keepEverything=true, keepCodeSource=true, annotationRetention=ReflectionStrategy.ALL_ANNOTATIONS, debug=ReflectionStrategy.ALL_ANNOTATIONS)
public class SharedClass {

  // sample state exposed so the reflection data has fields to describe
  public int sharedInt = 10;
  public String sharedString = "stuff";

  /** Dispatches to the JVM or JavaScript implementation depending on runtime. */
  public void doStuff() throws Exception {
    if (X_Runtime.isJava())
      doJavaStuff();
    else
      doJavascriptStuff();
  }

  // JSNI method: the /*-{ ... }-*/ block below is executable JavaScript
  // (not a comment); it makes the whole browser document editable
  private native void doJavascriptStuff()
  /*-{
    $doc.body.contentEditable=true;
  }-*/;

  // Exercises reflection (Class.forName/getMethod/getConstructor) and logs
  // the canonical path of "." -- i.e. the JVM's current working directory
  @MagicMethod(doNotVisit = true)
  private void doJavaStuff() throws Exception{
    X_Log.info("Running in "+
      Class.forName("java.io.File").getMethod("getCanonicalPath").invoke(
        Class.forName("java.io.File").getConstructor(String.class).newInstance(".")
        )
      );
  }
}
| apache-2.0 |
jshvarts/OfflineSampleApp | app/src/main/java/com/example/offline/domain/SyncCommentUseCase.java | 553 | package com.example.offline.domain;
import com.example.offline.model.Comment;
import io.reactivex.Completable;
/**
* Responsible for syncing a comment with remote repository.
*/
public class SyncCommentUseCase {

    // collaborator that performs the actual remote sync
    private final RemoteCommentRepository remoteCommentRepository;

    /**
     * @param remoteCommentRepository the remote repository used to push comments
     */
    public SyncCommentUseCase(RemoteCommentRepository remoteCommentRepository) {
        this.remoteCommentRepository = remoteCommentRepository;
    }

    /**
     * Pushes the given comment to the remote repository.
     *
     * @param comment the comment to sync
     * @return a Completable that completes when the remote sync finishes
     */
    public Completable syncComment(Comment comment) {
        return remoteCommentRepository.sync(comment);
    }
}
| apache-2.0 |
rameshthoomu/fabric | examples/chaincode/java/RangeExample/src/main/java/example/RangeExample.java | 2528 | /*
Copyright DTCC 2016 All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package example;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hyperledger.fabric.shim.ChaincodeBase;
import org.hyperledger.fabric.shim.ChaincodeStub;
import java.util.Map;
/**
* Created by cadmin on 6/30/16.
*/
public class RangeExample extends ChaincodeBase {

    private static Log log = LogFactory.getLog(RangeExample.class);

    /**
     * Invoke entry point.  Supported functions:
     * "put" (args are alternating key/value pairs written to the ledger) and
     * "del" (each arg is a key deleted from the ledger).  Unknown functions
     * are only logged.  Always returns null.
     */
    @java.lang.Override
    public String run(ChaincodeStub stub, String function, String[] args) {
        log.info("In run, function:"+function);
        switch (function) {
            case "put":
                // args come in key,value pairs; an odd trailing key would
                // throw ArrayIndexOutOfBoundsException -- callers must pass
                // an even number of args
                for (int i = 0; i < args.length; i += 2)
                    stub.putState(args[i], args[i + 1]);
                break;
            case "del":
                for (String arg : args)
                    stub.delState(arg);
                break;
            default:
                log.error("No matching case for function:"+function);
        }
        return null;
    }

    /**
     * Query entry point.  "get" returns the value stored under args[0];
     * "keys" returns the key set of a range query (args[0]..args[1], or the
     * full range when fewer than two args are given).  Unknown functions are
     * logged and yield "".
     */
    @java.lang.Override
    public String query(ChaincodeStub stub, String function, String[] args) {
        log.info("query");
        switch (function){
            case "get": {
                return stub.getState(args[0]);
            }
            case "keys":{
                Map<String, String> keysIter = null;
                if (args.length >= 2) {
                    keysIter = stub.getStateByRange(args[0], args[1]);
                }else{
                    // empty bounds request the whole key range
                    keysIter = stub.getStateByRange("","");
                }
                return keysIter.keySet().toString();
            }
            default:
                log.error("No matching case for function:"+function);
                return "";
        }
    }

    /** @return the fixed chaincode identifier for this example. */
    @java.lang.Override
    public String getChaincodeID() {
        return "RangeExample";
    }

    /** Starts the chaincode shim main loop. */
    public static void main(String[] args) throws Exception {
        log.info("starting");
        new RangeExample().start(args);
    }
}
| apache-2.0 |
MobileManAG/KURAVIS | src/main/java/com/mobileman/kuravis/core/domain/treatment_review/statistics/TreatmentCostStatistics.java | 4535 | /*******************************************************************************
* Copyright 2015 MobileMan GmbH
* www.mobileman.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
/**
* TreatmentCostStat.java
*
* Project: Kuravis
*
* @author MobileMan GmbH
* @date 29.3.2014
* @version 1.0
*
* (c) 2013 MobileMan GmbH, www.mobileman.com
*/
package com.mobileman.kuravis.core.domain.treatment_review.statistics;
import java.math.BigDecimal;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.springframework.data.mongodb.core.mapping.Document;
import com.mobileman.kuravis.core.domain.Entity;
import com.mobileman.kuravis.core.domain.Pair;
/**
* @author MobileMan GmbH
*
*/
@Document(collection=TreatmentCostStatistics.ENTITY_NAME)
public class TreatmentCostStatistics extends Entity implements TreatmentCostStatisticsAttributes {

	/**
	 * Marker category returned when no cost (and therefore no bucket) is known.
	 */
	public static final int CATEGORY_UNDEFINED = -1;

	// index of the cost bucket this row belongs to (see CATEGORIES_BOUNDS)
	private int category;
	// number of entries counted in this bucket
	private int count;
	private String diseaseId;
	private String treatmentId;
	private String summaryId;

	// Cost ranges backing the category buckets; the last pair's duplicated
	// bound makes every cost at or above it fall into the final bucket.
	private static final List<Pair<BigDecimal, BigDecimal>> CATEGORIES_BOUNDS;
	private static final Set<Integer> CATEGORIES_SET;

	static {
		CATEGORIES_BOUNDS = Arrays.asList(
				Pair.create(BigDecimal.ZERO, new BigDecimal(25.d)),
				Pair.create(new BigDecimal(25.d), new BigDecimal(51.d)),
				Pair.create(new BigDecimal(51.d), new BigDecimal(101.d)),
				Pair.create(new BigDecimal(101.d), new BigDecimal(201.d)),
				Pair.create(new BigDecimal(201.d), new BigDecimal(201.d))
				);

		// one category id per bounds entry: 0 .. size-1
		Set<Integer> categoriesSet = new HashSet<>();
		for (int i = 0; i < CATEGORIES_BOUNDS.size(); i++) {
			categoriesSet.add(Integer.valueOf(i));
		}

		CATEGORIES_SET = Collections.unmodifiableSet(categoriesSet);
	}

	/**
	 * @return CATEGORIES_BOUNDS
	 */
	public static List<Pair<BigDecimal, BigDecimal>> getCategoriesBounds() {
		return CATEGORIES_BOUNDS;
	}

	/**
	 * @return CATEGORIES_SET
	 */
	public static Set<Integer> getCategoriesSet() {
		return CATEGORIES_SET;
	}

	/**
	 * Default constructor (required for persistence mapping).
	 */
	public TreatmentCostStatistics() {
		super();
	}

	/**
	 * @param group the cost category index this row represents
	 * @param count the number of entries in that category
	 */
	public TreatmentCostStatistics(int group, int count) {
		super();
		this.category = group;
		this.count = count;
	}

	/**
	 *
	 * @return group
	 */
	public int getCategory() {
		return this.category;
	}

	/**
	 *
	 * @param group group
	 */
	public void setCategory(int group) {
		this.category = group;
	}

	/**
	 *
	 * @return count
	 */
	public int getCount() {
		return this.count;
	}

	/**
	 *
	 * @param count count
	 */
	public void setCount(int count) {
		this.count = count;
	}

	/**
	 *
	 * @return summaryId
	 */
	public String getSummaryId() {
		return this.summaryId;
	}

	/**
	 *
	 * @param summaryId summaryId
	 */
	public void setSummaryId(String summaryId) {
		this.summaryId = summaryId;
	}

	/**
	 * @param cost
	 * @return cost category ID based on input cost
	 */
	public static int computeCategoryId(BigDecimal cost) {
		if (cost == null) {
			return TreatmentCostStatistics.CATEGORY_UNDEFINED;
		}

		for (byte i = 0; i < getCategoriesBounds().size(); i++) {
			Pair<BigDecimal, BigDecimal> bound = getCategoriesBounds().get(i);
			// below this bucket's lower bound (only possible for the first
			// bucket / negative costs): it belongs here
			if (cost.compareTo(bound.getFirst()) == -1) {
				return i;
			}

			// below the upper bound: this is the bucket
			if (cost.compareTo(bound.getSecond()) == -1) {
				return i;
			}
		}

		// at or above the last upper bound: the final (open-ended) bucket
		return getCategoriesBounds().size() - 1;
	}

	/**
	 *
	 * @return diseaseId
	 */
	public String getDiseaseId() {
		return this.diseaseId;
	}

	/**
	 *
	 * @param diseaseId diseaseId
	 */
	public void setDiseaseId(String diseaseId) {
		this.diseaseId = diseaseId;
	}

	/**
	 *
	 * @return treatmentId
	 */
	public String getTreatmentId() {
		return this.treatmentId;
	}

	/**
	 *
	 * @param treatmentId treatmentId
	 */
	public void setTreatmentId(String treatmentId) {
		this.treatmentId = treatmentId;
	}
}
| apache-2.0 |
#!/usr/bin/env python3
#-*- coding:utf-8 -*-

import argparse
import re

## syllable breaking tool for Myanmar language
## usage: sylbreak3.py -i input-file
## e.g. usage1: python ./sylbreak3.py -i ../data/my-input.txt
## usage2: ./sylbreak3.py -i ../data/my-input.txt -o out.txt -s " "
##
## Date: 21 July 2016
## Written by Ye Kyaw Thu, Visiting Researcher, Waseda University
## HP:https://sites.google.com/site/yekyawthunlp/
##
## last updated: 29 Sep 2021
## Add support for python3 by sengkyaut
##
## Reference of Myanmar Unicode: http://unicode.org/charts/PDF/U1000.pdf

parser = argparse.ArgumentParser(description='Syllable segmentation for Myanmar language')
parser.add_argument('-i', '--input', type=str, help='input file', required=True)
parser.add_argument('-o', '--output', type=str, default='sylbreak_out.txt', help='output file')
parser.add_argument('-s', '--separator', type=str, default=r'|', help='the separator option for syllable (e.g. -s "/"), default is "|"')
# NOTE(review): argparse's type=bool treats ANY non-empty value (even the
# string "False") as True; switching to action='store_true' would change the
# CLI, so the quirk is kept and only documented here.
parser.add_argument('-p', '--print', type=bool, default=0, help='printing both input and syllable segmented sentences')

args = parser.parse_args()
inputFile = getattr(args, 'input')
outFile = getattr(args, 'output')
sOption = getattr(args, 'separator')
pOption = getattr(args, 'print')

# Character classes of the Myanmar script (see the Unicode chart above)
myConsonant = r"က-အ"
enChar = r"a-zA-Z0-9"
otherChar = r"ဣဤဥဦဧဩဪဿ၌၍၏၀-၉၊။!-/:-@[-`{-~\s"
ssSymbol = r'္'  # virama / subscript-consonant marker
aThat = r'်'     # asat (vowel killer)

# Regular expression pattern for Myanmar syllable breaking:
# break before a consonant that is not part of a stacked pair (i.e. not after
# a subscript symbol and not followed by asat/subscript), or before any
# English/digit/other character.
BreakPattern = re.compile(r"((?<!" + ssSymbol + r")["+ myConsonant + r"](?![" + aThat + ssSymbol + r"])" + r"|[" + enChar + otherChar + r"])")

data = ""
# Read the input and insert the separator at each syllable boundary.
# Only OSError is caught: a genuine processing bug should surface with its
# own traceback instead of being misreported as a file-open failure
# (the original bare `except:` swallowed everything, including Ctrl-C).
try:
    with open(inputFile, encoding='utf-8') as file:
        for line in file:
            if pOption:
                print("input:\t" + line)
            # spaces carry no information for syllable segmentation
            line = line.replace(" ", '')
            # start breaking: prepend the separator at every boundary
            line = BreakPattern.sub(sOption + r"\1", line)
            data += line
            if pOption:
                print("syl breaked:\t" + line)
except OSError:
    exit('Input file cannot be opened!')

# Write the segmented text.  outFile always has a value (argparse default),
# so the final branch is effectively unreachable; kept for safety.
if outFile:
    try:
        with open(outFile, 'w', encoding='utf-8') as file:
            file.write(data)
        print(f"Sylbreak succcessfully done. Write data to {outFile}")
    except OSError:
        exit('Output file cannot be opened!')
else:
    exit('Output file not provided!')
rlugojr/incubator-trafodion | core/sql/exp/exp_comp.cpp | 38033 | /**********************************************************************
// @@@ START COPYRIGHT @@@
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
// @@@ END COPYRIGHT @@@
**********************************************************************/
/* -*-C++-*-
*****************************************************************************
*
* File: <file>
* Description:
*
*
* Created: 7/10/95
* Language: C++
*
*
*
*
*****************************************************************************
*/
#include "Platform.h"
#include "exp_stdh.h"
#include "exp_clause_derived.h"
#include "exp_datetime.h"
#include "unicode_char_set.h"
#include "wstr.h"
#include "ex_globals.h"
///////////////////////////////////////////////////////////////////////
// Null handling that runs before ex_comp_clause::eval().
//
// Returns:
//   ex_expr::EXPR_NULL - the comparison result has already been stored
//                        in the result slot (op_data[2 * MAX_OPERANDS]);
//                        eval() must not run for this row.
//   ex_expr::EXPR_OK   - no operand was null; continue with eval().
//
// Two modes:
//  * "special nulls" (isSpecialNulls()): nulls are treated as ordinary
//    comparable values, with NULL == NULL and every non-null value
//    sorting below NULL.  The result slot gets 1 (TRUE) or 0 (FALSE).
//  * default SQL semantics: any null operand makes the predicate
//    unknown; the result slot gets -1 (NULL).
///////////////////////////////////////////////////////////////////////
ex_expr::exp_return_type ex_comp_clause::processNulls(char *op_data[],
                                                      CollHeap *heap,
                                                      ComDiagsArea **diagsArea)
{
  if (isSpecialNulls())
    {
      // special nulls. Nulls are values.
      // Null = Null, non-null-value < NULL, etc.
      short left_is_null = 0;
      short right_is_null = 0;

      // A null operand is indicated by a missing (NULL) data pointer.
      if (getOperand(1)->getNullFlag() && (!op_data[1]))
        left_is_null = -1;

      if (getOperand(2)->getNullFlag() && (!op_data[2]))
        right_is_null = -1;

      Lng32 result = 0;

      if ((left_is_null) || (right_is_null))
        {
          // result != 0 below means the predicate is TRUE.  The cases
          // encode "non-null < NULL": e.g. for ITM_GREATER the predicate
          // is TRUE whenever the right side is the (only) null.
          switch (getOperType())
            {
            case ITM_EQUAL:
              result = (left_is_null && right_is_null ? -1 : 0);
              break;

            case ITM_NOT_EQUAL:
              result = (left_is_null && right_is_null ? 0 : -1);
              break;

            case ITM_GREATER:
              result = (right_is_null ? 0 : -1);
              break;

            case ITM_LESS:
              result = (left_is_null ? 0 : -1);
              break;

            case ITM_GREATER_EQ:
              result = (left_is_null ? -1 : 0);
              break;

            case ITM_LESS_EQ:
              result = (right_is_null ? -1 : 0);
              break;
            }

          if (result)
            {
              // the actual result of this operation is pointed to
              // by op_data[2 * MAX_OPERANDS].
              *(Lng32 *)op_data[2 * MAX_OPERANDS] = 1; // result is TRUE
            }
          else
            {
              *(Lng32 *)op_data[2 * MAX_OPERANDS] = 0; // result is FALSE

              // NOTE(review): presumably this records, for GROUP BY
              // ROLLUP processing, which grouping-column comparison
              // came out FALSE -- confirm against the callers that
              // read setRollupColumnNum().
              if ((getRollupColumnNum() >= 0) &&
                  (getExeGlobals()))
                {
                  getExeGlobals()->setRollupColumnNum(getRollupColumnNum());
                }
            }

          return ex_expr::EXPR_NULL;
        } // one of the operands is a null value.
    } // nulls are to be treated as values

  // Default SQL semantics: a missing value on either side makes the
  // boolean result "unknown" (-1).
  for (short i = 1; i < getNumOperands(); i++)
    {
      // if value is missing,
      // then move boolean unknown value to result and return.
      if (getOperand(i)->getNullFlag() && (!op_data[i])) // missing value
        {
          // move null value to result.
          *(Lng32 *)op_data[2 * MAX_OPERANDS] = -1;
          return ex_expr::EXPR_NULL;
        }
    }

  return ex_expr::EXPR_OK;
}
///////////////////////////////////////////////////////////////////////
// Maps a three-way comparison outcome (compare_code < 0, == 0, > 0)
// onto the boolean result of this clause's operator.
//
// result gets 1 (TRUE) when the operator holds for compare_code and
// 0 (FALSE) otherwise.  An unrecognized operator raises
// EXE_INTERNAL_ERROR (with result left at 0) and returns EXPR_ERROR.
///////////////////////////////////////////////////////////////////////
ex_expr::exp_return_type
ex_comp_clause::processResult(Int32 compare_code, Lng32* result,
                              CollHeap *heap,
                              ComDiagsArea** diagsArea)
{
  Lng32 truth;

  switch (getOperType())
    {
    case ITM_EQUAL:      truth = (compare_code == 0); break;
    case ITM_NOT_EQUAL:  truth = (compare_code != 0); break;
    case ITM_LESS:       truth = (compare_code < 0);  break;
    case ITM_LESS_EQ:    truth = (compare_code <= 0); break;
    case ITM_GREATER:    truth = (compare_code > 0);  break;
    case ITM_GREATER_EQ: truth = (compare_code >= 0); break;

    default:
      // LCOV_EXCL_START
      // Unknown comparison operator: internal error.
      *result = 0;
      ExRaiseSqlError(heap, diagsArea, EXE_INTERNAL_ERROR);
      return ex_expr::EXPR_ERROR;
      // LCOV_EXCL_STOP
    }

  *result = truth;
  return ex_expr::EXPR_OK;
}
/////////////////////////////////////////////////////////////////
// Compares operand 1 and operand 2. Moves boolean result to
// operand 0. Result is a boolean datatype.
// Result values: 1, TRUE. 0, FALSE.
//                -1, NULL (but this shouldn't happen here.
//                Nulls have already been processed
//                before coming here).
//
// getInstruction() encodes the datatype pairing of the two operands,
// so the entire runtime dispatch is this single switch.  Fixed-length
// decimal/character cases use byte-wise str_cmp / wc_str_cmp; the
// *_COMP cases near the bottom handle comparisons that need runtime
// lengths or padding and delegate the final mapping to processResult().
////////////////////////////////////////////////////////////////
ex_expr::exp_return_type ex_comp_clause::eval(char *op_data[],
                                              CollHeap *heap,
                                              ComDiagsArea** diagsArea)
{
  ex_expr::exp_return_type retcode = ex_expr::EXPR_OK;

  switch (getInstruction())
    {
      // EQUAL opcode
    case EQ_BIN8S_BIN8S:
      *(Lng32 *)op_data[0] = (*(Int8 *)op_data[1] == *(Int8 *)op_data[2]);
      break;

    case EQ_BIN8U_BIN8U:
      *(Lng32 *)op_data[0] = (*(UInt8 *)op_data[1] == *(UInt8 *)op_data[2]);
      break;

    case EQ_BIN16S_BIN16S:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] == *(short *)op_data[2]);
      break;

    case EQ_BIN16S_BIN32S:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] == *(Lng32 *)op_data[2]);
      break;

    case EQ_BIN16S_BIN16U:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] == *(unsigned short *)op_data[2]);
      break;

      // LCOV_EXCL_START
    case EQ_BIN16S_BIN32U:
      // NT_PORT (BD 7/11/96) cast to unsigned long
      // NOTE(review): casting a negative short to ULng32 wraps, so -1
      // would compare equal to 0xFFFFFFFF here (same concern for the
      // NE_ and BIN32S_BIN32U variants).  Verify codegen never selects
      // this opcode where the signed side can be negative.
      *(Lng32 *)op_data[0] = ((ULng32)*(short *)op_data[1] == *(ULng32 *)op_data[2]);
      break;

    case EQ_BIN16U_BIN16S:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] == *(short *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case EQ_BIN16U_BIN32S:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] == *(Lng32 *)op_data[2]);
      break;

    case EQ_BIN16U_BIN16U:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] == *(unsigned short *)op_data[2]);
      break;

    case EQ_BIN16U_BIN32U:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] == *(ULng32 *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case EQ_BIN32S_BIN16S:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] == *(short *)op_data[2]);
      break;

    case EQ_BIN32S_BIN32S:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] == *(Lng32 *)op_data[2]);
      break;

    case EQ_BIN32S_BIN16U:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] == *(unsigned short *)op_data[2]);
      break;

    case EQ_BIN32S_BIN32U:
      // NT_PORT (BD 7/11/96) cast to unsigned long
      // NOTE(review): see EQ_BIN16S_BIN32U above -- negative values wrap.
      *(Lng32 *)op_data[0] = ((ULng32)*(Lng32 *)op_data[1] == *(ULng32 *)op_data[2]);
      break;

    case EQ_BIN32U_BIN16S:
      // NT_PORT (BD 7/11/96) cast to unsigned long
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] == (ULng32)*(short *)op_data[2]);
      break;

    case EQ_BIN32U_BIN32S:
      // NT_PORT (BD 7/11/96) cast to unsigned long
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] == (ULng32)*(Lng32 *)op_data[2]);
      break;

    case EQ_BIN32U_BIN16U:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] == *(unsigned short *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case EQ_BIN32U_BIN32U:
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] == *(ULng32 *)op_data[2]);
      break;

    case EQ_BIN64S_BIN64S:
      *(Lng32 *)op_data[0] = (*(Int64 *)op_data[1] == *(Int64 *)op_data[2]);
      break;

    case EQ_BIN64U_BIN64U:
      *(Lng32 *)op_data[0] = (*(UInt64 *)op_data[1] == *(UInt64 *)op_data[2]);
      break;

    case EQ_BIN64U_BIN64S:
      *(Lng32 *)op_data[0] = (*(UInt64 *)op_data[1] == *(Int64 *)op_data[2]);
      break;

    case EQ_BIN64S_BIN64U:
      *(Lng32 *)op_data[0] = (*(Int64 *)op_data[1] == *(UInt64 *)op_data[2]);
      break;

      // Fixed-length decimal and character equality: byte-wise compare
      // over the (identical) declared lengths.
    case EQ_DECU_DECU:
    case EQ_DECS_DECS:
    case EQ_ASCII_F_F:
    case EQ_UNICODE_F_F: // 11/3/97 added for Unicode support
      if (str_cmp(op_data[1], op_data[2], (Int32)getOperand(1)->getLength()) == 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

    case EQ_FLOAT32_FLOAT32:
      *(Lng32 *)op_data[0] = (*(float *)op_data[1] == *(float *)op_data[2]);
      break;

    case EQ_FLOAT64_FLOAT64:
      *(Lng32 *)op_data[0] = (*(double *)op_data[1] == *(double *)op_data[2]);
      break;

    case EQ_DATETIME_DATETIME:
      if (((ExpDatetime *) getOperand(1))->compDatetimes(op_data[1],
                                                         op_data[2]) == 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

      // NOT EQUAL operator
    case NE_BIN8S_BIN8S:
      *(Lng32 *)op_data[0] = (*(Int8 *)op_data[1] != *(Int8 *)op_data[2]);
      break;

    case NE_BIN8U_BIN8U:
      *(Lng32 *)op_data[0] = (*(UInt8 *)op_data[1] != *(UInt8 *)op_data[2]);
      break;

    case NE_BIN16S_BIN16S:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] != *(short *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case NE_BIN16S_BIN32S:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] != *(Lng32 *)op_data[2]);
      break;

    case NE_BIN16S_BIN16U:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] != *(unsigned short *)op_data[2]);
      break;

    case NE_BIN16S_BIN32U:
      // NT_PORT (BD 7/11/96) cast to unsigned long
      // NOTE(review): negative-to-unsigned wrap, see EQ_BIN16S_BIN32U.
      *(Lng32 *)op_data[0] = ((ULng32)*(short *)op_data[1] != *(ULng32 *)op_data[2]);
      break;

    case NE_BIN16U_BIN16S:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] != *(short *)op_data[2]);
      break;

    case NE_BIN16U_BIN32S:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] != *(Lng32 *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case NE_BIN16U_BIN16U:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] != *(unsigned short *)op_data[2]);
      break;

    case NE_BIN16U_BIN32U:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] != *(ULng32 *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case NE_BIN32S_BIN16S:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] != *(short *)op_data[2]);
      break;

    case NE_BIN32S_BIN32S:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] != *(Lng32 *)op_data[2]);
      break;

    case NE_BIN32S_BIN16U:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] != *(unsigned short *)op_data[2]);
      break;

    case NE_BIN32S_BIN32U:
      // NT_PORT (BD 7/11/96) cast to unsigned long
      *(Lng32 *)op_data[0] = ((ULng32)*(Lng32 *)op_data[1] != *(ULng32 *)op_data[2]);
      break;

    case NE_BIN32U_BIN16S:
      // NT_PORT (BD 7/11/96) cast to unsigned long
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] != (ULng32)*(short *)op_data[2]);
      break;

    case NE_BIN32U_BIN32S:
      // NT_PORT (BD 7/11/96) cast to unsigned long
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] != (ULng32)*(Lng32 *)op_data[2]);
      break;

    case NE_BIN32U_BIN16U:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] != *(unsigned short *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case NE_BIN32U_BIN32U:
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] != *(ULng32 *)op_data[2]);
      break;

    case NE_BIN64S_BIN64S:
      *(Lng32 *)op_data[0] = (*(Int64 *)op_data[1] != *(Int64 *)op_data[2]);
      break;

    case NE_BIN64U_BIN64U:
      *(Lng32 *)op_data[0] = (*(UInt64 *)op_data[1] != *(UInt64 *)op_data[2]);
      break;

    case NE_BIN64U_BIN64S:
      *(Lng32 *)op_data[0] = (*(UInt64 *)op_data[1] != *(Int64 *)op_data[2]);
      break;

    case NE_BIN64S_BIN64U:
      *(Lng32 *)op_data[0] = (*(Int64 *)op_data[1] != *(UInt64 *)op_data[2]);
      break;

    case NE_DECU_DECU:
    case NE_DECS_DECS:
    case NE_ASCII_F_F:
      if (str_cmp(op_data[1], op_data[2], (Int32)getOperand(1)->getLength()) != 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

    case NE_UNICODE_F_F: // 11/3/97: Added for Unicode support
      // LCOV_EXCL_START
      // length is in bytes; >> 1 converts to NAWchar count.
      if (wc_str_cmp((NAWchar*)op_data[1], (NAWchar*)op_data[2],
                     (Int32)getOperand(1)->getLength() >> 1) != 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;
      // LCOV_EXCL_STOP

    case NE_FLOAT32_FLOAT32:
      *(Lng32 *)op_data[0] = (*(float *)op_data[1] != *(float *)op_data[2]);
      break;

    case NE_FLOAT64_FLOAT64:
      *(Lng32 *)op_data[0] = (*(double *)op_data[1] != *(double *)op_data[2]);
      break;

    case NE_DATETIME_DATETIME:
      if (((ExpDatetime *) getOperand(1))->compDatetimes(op_data[1],
                                                         op_data[2]) != 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

      // LESS THAN opcode
    case LT_BIN8S_BIN8S:
      *(Lng32 *)op_data[0] = (*(Int8 *)op_data[1] < *(Int8 *)op_data[2]);
      break;

    case LT_BIN8U_BIN8U:
      *(Lng32 *)op_data[0] = (*(UInt8 *)op_data[1] < *(UInt8 *)op_data[2]);
      break;

    case LT_BIN16S_BIN16S:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] < *(short *)op_data[2]);
      break;

    case LT_BIN16S_BIN32S:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] < *(Lng32 *)op_data[2]);
      break;

    case LT_BIN16S_BIN16U:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] < *(unsigned short *)op_data[2]);
      break;

    case LT_BIN16S_BIN32U:
      // NT_PORT (BD 7/11/96) cast to long in order to handle negative values
      // (widening both sides to Int64 keeps signed semantics correct)
      *(Lng32 *)op_data[0] = ((Int64)*(short *)op_data[1] < (Int64)*(ULng32 *)op_data[2]);
      break;

    case LT_BIN16U_BIN16S:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] < *(short *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case LT_BIN16U_BIN32S:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] < *(Lng32 *)op_data[2]);
      break;

      // LCOV_EXCL_START
    case LT_BIN16U_BIN16U:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] < *(unsigned short *)op_data[2]);
      break;

    case LT_BIN16U_BIN32U:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] < *(ULng32 *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case LT_BIN32S_BIN16S:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] < *(short *)op_data[2]);
      break;

    case LT_BIN32S_BIN32S:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] < *(Lng32 *)op_data[2]);
      break;

    case LT_BIN32S_BIN16U:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] < *(unsigned short *)op_data[2]);
      break;

    case LT_BIN32S_BIN32U:
      // NT_PORT (BD 7/11/96) cast to long
      *(Lng32 *)op_data[0] = ((Int64)*(Lng32 *)op_data[1] < (Int64)*(ULng32 *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case LT_BIN32U_BIN16S:
      // NT_PORT (BD 7/11/96) cast to long in order to handle negative values
      *(Lng32 *)op_data[0] = ((Int64)*(ULng32 *)op_data[1] < *(short *)op_data[2]);
      break;

    case LT_BIN32U_BIN32S:
      // NT_PORT (BD 7/11/96) cast to long
      *(Lng32 *)op_data[0] = ((Int64)*(ULng32 *)op_data[1] < (Int64)*(Lng32 *)op_data[2]);
      break;

    case LT_BIN32U_BIN16U:
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] < *(unsigned short *)op_data[2]);
      break;

    case LT_BIN32U_BIN32U:
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] < *(ULng32 *)op_data[2]);
      break;

    case LT_BIN64S_BIN64S:
      *(Lng32 *)op_data[0] = (*(Int64 *)op_data[1] < *(Int64 *)op_data[2]);
      break;

    case LT_BIN64U_BIN64U:
      *(Lng32 *)op_data[0] = (*(UInt64 *)op_data[1] < *(UInt64 *)op_data[2]);
      break;

      // Mixed signed/unsigned 64-bit: decide the negative case up front
      // (no 128-bit type available to widen into).
    case LT_BIN64U_BIN64S:
      *(Lng32 *)op_data[0] =
        ((*(Int64*)op_data[2] < 0) ? 0 :
         (*(UInt64 *)op_data[1] < *(Int64 *)op_data[2]));
      break;

    case LT_BIN64S_BIN64U:
      *(Lng32 *)op_data[0] =
        ((*(Int64*)op_data[1] < 0) ? 1 :
         (*(Int64 *)op_data[1] < *(UInt64 *)op_data[2]));
      break;

    case LT_DECS_DECS:
      {
        // Signed decimal: bit 0200 (0x80) of the first byte is the sign
        // bit.  Compare signs first; equal-sign magnitudes compare
        // byte-wise, with the sense inverted when both are negative.
        if ((op_data[1][0] & 0200) == 0)
          {
            // first operand is positive
            if ((op_data[2][0] & 0200) == 0)
              {
                // second operand is positive
                if (str_cmp(op_data[1], op_data[2],
                            (Int32)getOperand(1)->getLength()) < 0) // l < r
                  *(Lng32 *)op_data[0] = 1;
                else
                  *(Lng32 *)op_data[0] = 0;
              }
            else
              {
                // second operand is negative
                *(Lng32 *)op_data[0] = 0; // +ve not < -ve
              }
          }
        else
          {
            // first operand is negative
            if ((op_data[2][0] & 0200) == 0)
              {
                // second operand is positive
                *(Lng32 *)op_data[0] = 1; // -ve negative always < +ve
              }
            else
              {
                // second operand is negative
                if (str_cmp(op_data[1], op_data[2],
                            (Int32)getOperand(1)->getLength()) <= 0) // l <= r
                  *(Lng32 *)op_data[0] = 0;
                else
                  *(Lng32 *)op_data[0] = 1;
              }
          } // first operand is negative
      }
      break;

    case LT_DECU_DECU:
    case LT_ASCII_F_F:
      if (str_cmp(op_data[1], op_data[2], (Int32)getOperand(1)->getLength()) < 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

    case LT_UNICODE_F_F: // 11/5/97: added for Unicode support
      if (wc_str_cmp((NAWchar*)op_data[1], (NAWchar*)op_data[2],
                     (Int32)getOperand(1)->getLength() >> 1) < 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

    case LT_FLOAT32_FLOAT32:
      *(Lng32 *)op_data[0] = (*(float *)op_data[1] < *(float *)op_data[2]);
      break;

    case LT_FLOAT64_FLOAT64:
      *(Lng32 *)op_data[0] = (*(double *)op_data[1] < *(double *)op_data[2]);
      break;

    case LT_DATETIME_DATETIME:
      if (((ExpDatetime *) getOperand(1))->compDatetimes(op_data[1],
                                                         op_data[2]) < 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

      // LESS THAN OR EQUAL TO opcode
    case LE_BIN8S_BIN8S:
      *(Lng32 *)op_data[0] = (*(Int8 *)op_data[1] <= *(Int8 *)op_data[2]);
      break;

    case LE_BIN8U_BIN8U:
      *(Lng32 *)op_data[0] = (*(UInt8 *)op_data[1] <= *(UInt8 *)op_data[2]);
      break;

    case LE_BIN16S_BIN16S:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] <= *(short *)op_data[2]);
      break;

    case LE_BIN16S_BIN32S:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] <= *(Lng32 *)op_data[2]);
      break;

    case LE_BIN16S_BIN16U:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] <= *(unsigned short *)op_data[2]);
      break;

    case LE_BIN16S_BIN32U:
      // NT_PORT (BD 7/11/96) cast to long in order to handle negative values
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] <= (Int64)*(ULng32 *)op_data[2]);
      break;

    case LE_BIN16U_BIN16S:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] <= *(short *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case LE_BIN16U_BIN32S:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] <= *(Lng32 *)op_data[2]);
      break;

    case LE_BIN16U_BIN16U:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] <= *(unsigned short *)op_data[2]);
      break;

    case LE_BIN16U_BIN32U:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] <= *(ULng32 *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case LE_BIN32S_BIN16S:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] <= *(short *)op_data[2]);
      break;

    case LE_BIN32S_BIN32S:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] <= *(Lng32 *)op_data[2]);
      break;

    case LE_BIN32S_BIN16U:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] <= *(unsigned short *)op_data[2]);
      break;

    case LE_BIN32S_BIN32U:
      // NT_PORT (BD 7/11/96) cast to long
      *(Lng32 *)op_data[0] = ((Int64)*(Lng32 *)op_data[1] <= (Int64)*(ULng32 *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case LE_BIN32U_BIN16S:
      // NT_PORT (BD 7/11/96) cast to long in order to handle negative values
      *(Lng32 *)op_data[0] = ((Int64)*(ULng32 *)op_data[1] <= *(short *)op_data[2]);
      break;

    case LE_BIN32U_BIN32S:
      // NT_PORT (BD 7/11/96) cast to unsigned long
      *(Lng32 *)op_data[0] = ((Int64)*(ULng32 *)op_data[1] <= (Int64)*(Lng32 *)op_data[2]);
      break;

    case LE_BIN32U_BIN16U:
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] <= *(unsigned short *)op_data[2]);
      break;

    case LE_BIN32U_BIN32U:
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] <= *(ULng32 *)op_data[2]);
      break;

    case LE_BIN64S_BIN64S:
      *(Lng32 *)op_data[0] = (*(Int64 *)op_data[1] <= *(Int64 *)op_data[2]);
      break;

    case LE_BIN64U_BIN64U:
      *(Lng32 *)op_data[0] = (*(UInt64 *)op_data[1] <= *(UInt64 *)op_data[2]);
      break;

    case LE_BIN64U_BIN64S:
      *(Lng32 *)op_data[0] =
        ((*(Int64*)op_data[2] < 0) ? 0 :
         (*(UInt64 *)op_data[1] <= *(Int64 *)op_data[2]));
      break;

    case LE_BIN64S_BIN64U:
      *(Lng32 *)op_data[0] =
        ((*(Int64*)op_data[1] < 0) ? 1 :
         (*(Int64 *)op_data[1] <= *(UInt64 *)op_data[2]));
      break;

    case LE_DECS_DECS:
      {
        // Signed decimal; see LT_DECS_DECS for the sign-bit convention.
        if ((op_data[1][0] & 0200) == 0)
          {
            // first operand is positive
            if ((op_data[2][0] & 0200) == 0)
              {
                // second operand is positive
                if (str_cmp(op_data[1], op_data[2],
                            (Int32)getOperand(1)->getLength()) <= 0) // l <= r
                  *(Lng32 *)op_data[0] = 1;
                else
                  *(Lng32 *)op_data[0] = 0;
              }
            else
              {
                // second operand is negative
                *(Lng32 *)op_data[0] = 0; // +ve not < -ve
              }
          }
        else
          {
            // first operand is negative
            // LCOV_EXCL_START
            if ((op_data[2][0] & 0200) == 0)
              {
                // second operand is positive
                *(Lng32 *)op_data[0] = 1; // -ve negative always < +ve
              }
            else
              {
                // second operand is negative
                if (str_cmp(op_data[1], op_data[2],
                            (Int32)getOperand(1)->getLength()) < 0) // l < r
                  *(Lng32 *)op_data[0] = 0;
                else
                  *(Lng32 *)op_data[0] = 1;
              }
            // LCOV_EXCL_STOP
          } // first operand is negative
      }
      break;

    case LE_DECU_DECU:
    case LE_ASCII_F_F:
      if (str_cmp(op_data[1], op_data[2], (Int32)getOperand(1)->getLength()) <= 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

    case LE_UNICODE_F_F: // 11/5/97: added for Unicode support
      if (wc_str_cmp((NAWchar*)op_data[1], (NAWchar*)op_data[2],
                     (Int32)getOperand(1)->getLength() >> 1) <= 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

    case LE_FLOAT32_FLOAT32:
      *(Lng32 *)op_data[0] = (*(float *)op_data[1] <= *(float *)op_data[2]);
      break;

    case LE_FLOAT64_FLOAT64:
      *(Lng32 *)op_data[0] = (*(double *)op_data[1] <= *(double *)op_data[2]);
      break;

    case LE_DATETIME_DATETIME:
      if (((ExpDatetime *) getOperand(1))->compDatetimes(op_data[1],
                                                         op_data[2]) <= 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

      // GREATER THAN opcode
    case GT_BIN8S_BIN8S:
      *(Lng32 *)op_data[0] = (*(Int8 *)op_data[1] > *(Int8 *)op_data[2]);
      break;

    case GT_BIN8U_BIN8U:
      *(Lng32 *)op_data[0] = (*(UInt8 *)op_data[1] > *(UInt8 *)op_data[2]);
      break;

    case GT_BIN16S_BIN16S:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] > *(short *)op_data[2]);
      break;

    case GT_BIN16S_BIN32S:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] > *(Lng32 *)op_data[2]);
      break;

    case GT_BIN16S_BIN16U:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] > *(unsigned short *)op_data[2]);
      break;

    case GT_BIN16S_BIN32U:
      // NT_PORT (BD 7/11/96) cast to long in order to handle negative values
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] > (Int64)*(ULng32 *)op_data[2]);
      break;

    case GT_BIN16U_BIN16S:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] > *(short *)op_data[2]);
      break;

    case GT_BIN16U_BIN32S:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] > *(Lng32 *)op_data[2]);
      break;

    case GT_BIN16U_BIN16U:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] > *(unsigned short *)op_data[2]);
      break;

    case GT_BIN16U_BIN32U:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] > *(ULng32 *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case GT_BIN32S_BIN16S:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] > *(short *)op_data[2]);
      break;

    case GT_BIN32S_BIN32S:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] > *(Lng32 *)op_data[2]);
      break;

    case GT_BIN32S_BIN16U:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] > *(unsigned short *)op_data[2]);
      break;

    case GT_BIN32S_BIN32U:
      // NT_PORT (BD 7/11/96) cast to long
      *(Lng32 *)op_data[0] = ((Int64)*(Lng32 *)op_data[1] > (Int64)*(ULng32 *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case GT_BIN32U_BIN16S:
      // NT_PORT (BD 7/11/96) cast to long in order to handle negative values
      *(Lng32 *)op_data[0] = ((Int64)*(ULng32 *)op_data[1] > *(short *)op_data[2]);
      break;

    case GT_BIN32U_BIN32S:
      // NT_PORT (BD 7/11/96) cast to long
      *(Lng32 *)op_data[0] = ((Int64)*(ULng32 *)op_data[1] > (Int64)*(Lng32 *)op_data[2]);
      break;

    case GT_BIN32U_BIN16U:
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] > *(unsigned short *)op_data[2]);
      break;

    case GT_BIN32U_BIN32U:
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] > *(ULng32 *)op_data[2]);
      break;

    case GT_BIN64S_BIN64S:
      *(Lng32 *)op_data[0] = (*(Int64 *)op_data[1] > *(Int64 *)op_data[2]);
      break;

    case GT_BIN64U_BIN64U:
      *(Lng32 *)op_data[0] = (*(UInt64 *)op_data[1] > *(UInt64 *)op_data[2]);
      break;

    case GT_BIN64U_BIN64S:
      *(Lng32 *)op_data[0] =
        ((*(Int64*)op_data[2] < 0) ? 1 :
         (*(UInt64 *)op_data[1] > *(Int64 *)op_data[2]));
      break;

    case GT_BIN64S_BIN64U:
      *(Lng32 *)op_data[0] =
        ((*(Int64*)op_data[1] < 0) ? 0 :
         (*(Int64 *)op_data[1] > *(UInt64 *)op_data[2]));
      break;

    case GT_DECS_DECS:
      {
        // Signed decimal; see LT_DECS_DECS for the sign-bit convention.
        if ((op_data[1][0] & 0200) == 0)
          {
            // first operand is positive
            if ((op_data[2][0] & 0200) == 0)
              {
                // second operand is positive
                if (str_cmp(op_data[1], op_data[2],
                            (Int32)getOperand(1)->getLength()) > 0) // l > r
                  *(Lng32 *)op_data[0] = 1;
                else
                  *(Lng32 *)op_data[0] = 0;
              }
            else
              {
                // second operand is negative
                *(Lng32 *)op_data[0] = 1; // +ve always > -ve
              }
          }
        else
          {
            // first operand is negative
            if ((op_data[2][0] & 0200) == 0)
              {
                // second operand is positive
                *(Lng32 *)op_data[0] = 0; // -ve always <= +ve
              }
            else
              {
                // second operand is negative
                if (str_cmp(op_data[1], op_data[2],
                            (Int32)getOperand(1)->getLength()) >= 0) // l >= r
                  *(Lng32 *)op_data[0] = 0;
                else
                  *(Lng32 *)op_data[0] = 1;
              }
          } // first operand is negative
      }
      break;

    case GT_DECU_DECU:
    case GT_ASCII_F_F:
      if (str_cmp(op_data[1], op_data[2], (Int32)getOperand(1)->getLength()) > 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

    case GT_UNICODE_F_F:
      // 11/3/97: added for Unicode
      // LCOV_EXCL_START
      if (wc_str_cmp((NAWchar*)op_data[1], (NAWchar*)op_data[2],
                     (Int32)(getOperand(1)->getLength()) >>1) > 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;
      // LCOV_EXCL_STOP

    case GT_FLOAT32_FLOAT32:
      *(Lng32 *)op_data[0] = (*(float *)op_data[1] > *(float *)op_data[2]);
      break;

    case GT_FLOAT64_FLOAT64:
      *(Lng32 *)op_data[0] = (*(double *)op_data[1] > *(double *)op_data[2]);
      break;

    case GT_DATETIME_DATETIME:
      if (((ExpDatetime *) getOperand(1))->compDatetimes(op_data[1],
                                                         op_data[2]) > 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

      // GREATER THAN OR EQUAL TO
    case GE_BIN8S_BIN8S:
      *(Lng32 *)op_data[0] = (*(Int8 *)op_data[1] >= *(Int8 *)op_data[2]);
      break;

    case GE_BIN8U_BIN8U:
      *(Lng32 *)op_data[0] = (*(UInt8 *)op_data[1] >= *(UInt8 *)op_data[2]);
      break;

    case GE_BIN16S_BIN16S:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] >= *(short *)op_data[2]);
      break;

    case GE_BIN16S_BIN32S:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] >= *(Lng32 *)op_data[2]);
      break;

    case GE_BIN16S_BIN16U:
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] >= *(unsigned short *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case GE_BIN16S_BIN32U:
      // NT_PORT (BD 7/11/96) cast to long
      *(Lng32 *)op_data[0] = (*(short *)op_data[1] >= (Int64)*(ULng32 *)op_data[2]);
      break;

    case GE_BIN16U_BIN16S:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] >= *(short *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case GE_BIN16U_BIN32S:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] >= *(Lng32 *)op_data[2]);
      break;

    case GE_BIN16U_BIN16U:
      // LCOV_EXCL_START
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] >= *(unsigned short *)op_data[2]);
      break;

    case GE_BIN16U_BIN32U:
      *(Lng32 *)op_data[0] = (*(unsigned short *)op_data[1] >= *(ULng32 *)op_data[2]);
      break;
      // LCOV_EXCL_STOP

    case GE_BIN32S_BIN16S:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] >= *(short *)op_data[2]);
      break;

    case GE_BIN32S_BIN32S:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] >= *(Lng32 *)op_data[2]);
      break;

    case GE_BIN32S_BIN16U:
      *(Lng32 *)op_data[0] = (*(Lng32 *)op_data[1] >= *(unsigned short *)op_data[2]);
      break;

    case GE_BIN32S_BIN32U:
      // NT_PORT (BD 7/11/96) cast to long
      *(Lng32 *)op_data[0] = ((Int64)*(Lng32 *)op_data[1] >= (Int64)*(ULng32 *)op_data[2]);
      break;

    case GE_BIN32U_BIN16S:
      // NT_PORT (BD 7/11/96) cast to long in order to handle negative values
      *(Lng32 *)op_data[0] = ((Int64)*(ULng32 *)op_data[1] >= *(short *)op_data[2]);
      break;

    case GE_BIN32U_BIN32S:
      // NT_PORT (BD 7/11/96) cast to long
      *(Lng32 *)op_data[0] = ((Int64)*(ULng32 *)op_data[1] >= (Int64)*(Lng32 *)op_data[2]);
      break;

    case GE_BIN32U_BIN16U:
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] >= *(unsigned short *)op_data[2]);
      break;

    case GE_BIN32U_BIN32U:
      *(Lng32 *)op_data[0] = (*(ULng32 *)op_data[1] >= *(ULng32 *)op_data[2]);
      break;

    case GE_BIN64S_BIN64S:
      *(Lng32 *)op_data[0] = (*(Int64 *)op_data[1] >= *(Int64 *)op_data[2]);
      break;

    case GE_BIN64U_BIN64U:
      *(Lng32 *)op_data[0] = (*(UInt64 *)op_data[1] >= *(UInt64 *)op_data[2]);
      break;

    case GE_BIN64U_BIN64S:
      *(Lng32 *)op_data[0] =
        ((*(Int64*)op_data[2] < 0) ? 1 :
         (*(UInt64 *)op_data[1] >= *(Int64 *)op_data[2]));
      break;

    case GE_BIN64S_BIN64U:
      *(Lng32 *)op_data[0] =
        ((*(Int64*)op_data[1] < 0) ? 0 :
         (*(Int64 *)op_data[1] >= *(UInt64 *)op_data[2]));
      break;

    case GE_DECS_DECS:
      {
        // Signed decimal; see LT_DECS_DECS for the sign-bit convention.
        if ((op_data[1][0] & 0200) == 0)
          {
            // first operand is positive
            if ((op_data[2][0] & 0200) == 0)
              {
                // second operand is positive
                if (str_cmp(op_data[1], op_data[2],
                            (Int32)getOperand(1)->getLength()) >= 0) // l >= r
                  *(Lng32 *)op_data[0] = 1;
                else
                  *(Lng32 *)op_data[0] = 0;
              }
            else
              {
                // second operand is negative
                *(Lng32 *)op_data[0] = 1; // +ve always >= -ve
              }
          }
        else
          {
            // first operand is negative
            if ((op_data[2][0] & 0200) == 0)
              {
                // second operand is positive
                *(Lng32 *)op_data[0] = 0; // -ve always < +ve
              }
            else
              {
                // second operand is negative
                if (str_cmp(op_data[1], op_data[2],
                            (Int32)getOperand(1)->getLength()) > 0) // l > r
                  *(Lng32 *)op_data[0] = 0;
                else
                  *(Lng32 *)op_data[0] = 1;
              }
          } // first operand is negative
      }
      break;

    case GE_DECU_DECU:
    case GE_ASCII_F_F:
      if (str_cmp(op_data[1], op_data[2], (Int32)getOperand(1)->getLength()) >= 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

    case GE_UNICODE_F_F:
      // 11/3/97: added for Unicode
      if (wc_str_cmp((NAWchar*)op_data[1], (NAWchar*)op_data[2],
                     (Int32)(getOperand(1)->getLength()) >> 1) >= 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

    case GE_FLOAT32_FLOAT32:
      *(Lng32 *)op_data[0] = (*(float *)op_data[1] >= *(float *)op_data[2]);
      break;

    case GE_FLOAT64_FLOAT64:
      *(Lng32 *)op_data[0] = (*(double *)op_data[1] >= *(double *)op_data[2]);
      break;

    case GE_DATETIME_DATETIME:
      if (((ExpDatetime *) getOperand(1))->compDatetimes(op_data[1],
                                                         op_data[2]) >= 0)
        *(Lng32 *)op_data[0] = 1;
      else
        *(Lng32 *)op_data[0] = 0;
      break;

      // Character comparisons that need runtime lengths and blank (or
      // NUL, for collation-encoded keys) padding of the shorter value.
    case ASCII_COMP:
    case EQ_ASCII_COMP:
    case GT_ASCII_COMP:
    case GE_ASCII_COMP:
    case LT_ASCII_COMP:
    case LE_ASCII_COMP:
    case NE_ASCII_COMP:
      {
#pragma nowarn(1506)   // warning elimination
        // op_data[-MAX_OPERANDS + i] points at the operand's varlen
        // indicator area; getLength() decodes the actual length.
        Lng32 length1 = getOperand(1)->getLength(op_data[-MAX_OPERANDS + 1]);
        Lng32 length2 = getOperand(2)->getLength(op_data[-MAX_OPERANDS + 2]) ;

        char padChar = ' ';

        if (getCollationEncodeComp())
          {
            padChar = 0;
          }

        Int32 compare_code =
          charStringCompareWithPad( op_data[1], length1, op_data[2], length2, padChar);
#pragma warn(1506)  // warning elimination

        retcode = processResult(compare_code, (Lng32 *)op_data[0],
                                heap, diagsArea);
        break;
      }

    case UNICODE_COMP: // 11/3/95: Unicode
      {
#pragma nowarn(1506)   // warning elimination
        Lng32 length1 = getOperand(1)->getLength(op_data[-MAX_OPERANDS + 1]);
        Lng32 length2 = getOperand(2)->getLength(op_data[-MAX_OPERANDS + 2]);

        // lengths are bytes; >> 1 converts to NAWchar counts.
        Int32 compare_code =
          wcharStringCompareWithPad((NAWchar*)op_data[1], length1>>1,
                                    (NAWchar*)op_data[2], length2>>1,
                                    unicode_char_set::space_char()
                                    );
#pragma warn(1506)  // warning elimination

        retcode = processResult(compare_code, (Lng32 *)op_data[0],
                                heap, diagsArea);
        break;
      }

      // boolean comparison
    case EQ_BOOL_BOOL:
      {
        *(Lng32*)op_data[0] = (*(Int8 *)op_data[1] == *(Int8 *)op_data[2]);
      }
      break;

    case NE_BOOL_BOOL:
      {
        *(Lng32*)op_data[0] = (*(Int8 *)op_data[1] != *(Int8 *)op_data[2]);
      }
      break;

    case COMP_COMPLEX:
      // Big num / complex datatypes delegate to the type's own comp().
      *(Lng32 *)op_data[0] =
        ((ComplexType *)getOperand(1))->comp(getOperType(), getOperand(2), op_data);
      break;

    case COMP_NOT_SUPPORTED:
      {
        // this comparison operation not supported.
        // See if it could still be evaluated by doing some intermediate
        // operations.
        // LCOV_EXCL_START
        if (evalUnsupportedOperations(op_data, heap, diagsArea) !=
            ex_expr::EXPR_OK)
          return ex_expr::EXPR_ERROR;
      }
      break;

    default:
      ExRaiseSqlError(heap, diagsArea, EXE_INTERNAL_ERROR);
      retcode = ex_expr::EXPR_ERROR;
      break;
      // LCOV_EXCL_STOP
    }

  // NOTE(review): presumably records, for GROUP BY ROLLUP, which
  // grouping-column comparison evaluated to FALSE -- confirm against
  // the consumers of setRollupColumnNum().
  if ((getRollupColumnNum() >= 0) &&
      (*(Lng32*)op_data[0] == 0) &&
      (getExeGlobals()))
    {
      getExeGlobals()->setRollupColumnNum(getRollupColumnNum());
    }

  return retcode;
}
// LCOV_EXCL_START
///////////////////////////////////////////////////////////////////////
// Fallback for comparison type pairings that have no direct opcode
// (reached via case COMP_NOT_SUPPORTED in eval()).  The intent is to
// convert both operands to a common intermediate datatype and compare
// those; that conversion path is not implemented, so this method
// always raises EXE_INTERNAL_ERROR and returns EXPR_ERROR.
///////////////////////////////////////////////////////////////////////
ex_expr::exp_return_type ex_comp_clause::evalUnsupportedOperations(
     char *op_data[],
     CollHeap *heap,
     ComDiagsArea** diagsArea)
{
  // Removed dead code: the operand datatypes were fetched into locals
  // here but never used.  Re-fetch them via
  // getOperand(i)->getDatatype() when the conversion path is
  // actually implemented.
  ExRaiseSqlError(heap, diagsArea, EXE_INTERNAL_ERROR);
  return ex_expr::EXPR_ERROR;
}
// LCOV_EXCL_STOP
| apache-2.0 |
johngmyers/airlift | jaxrs/src/main/java/io/airlift/jaxrs/testing/MockRequest.java | 9446 | /*
* Copyright 2010 Proofpoint, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.airlift.jaxrs.testing;
import com.google.common.base.Preconditions;
import javax.ws.rs.core.EntityTag;
import javax.ws.rs.core.Request;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.Response.ResponseBuilder;
import javax.ws.rs.core.Variant;
import java.util.Date;
import java.util.List;
import static java.util.Objects.requireNonNull;
public class MockRequest
implements Request
{
    /**
     * Creates a builder for a mock {@code HEAD} request with no selected variant.
     *
     * @return builder used to attach the request's precondition headers
     */
    public static ConditionalRequestBuilder head()
    {
        return new ConditionalRequestBuilder("HEAD");
    }
    /**
     * Creates a builder for a mock {@code HEAD} request whose
     * {@code selectVariant} result is the given variant.
     *
     * @param selectVariant variant the mock request will report as selected
     * @return builder used to attach the request's precondition headers
     */
    public static ConditionalRequestBuilder head(Variant selectVariant)
    {
        return new ConditionalRequestBuilder("HEAD", selectVariant);
    }
    /**
     * Creates a builder for a mock {@code GET} request with no selected variant.
     *
     * @return builder used to attach the request's precondition headers
     */
    public static ConditionalRequestBuilder get()
    {
        return new ConditionalRequestBuilder("GET");
    }
public static ConditionalRequestBuilder get(Variant selectVariant)
{
return new ConditionalRequestBuilder("GET", selectVariant);
}
public static ConditionalRequestBuilder post()
{
return new ConditionalRequestBuilder("POST");
}
public static ConditionalRequestBuilder post(Variant selectVariant)
{
return new ConditionalRequestBuilder("POST", selectVariant);
}
public static ConditionalRequestBuilder put()
{
return new ConditionalRequestBuilder("PUT");
}
public static ConditionalRequestBuilder put(Variant selectVariant)
{
return new ConditionalRequestBuilder("PUT", selectVariant);
}
public static ConditionalRequestBuilder delete()
{
return new ConditionalRequestBuilder("DELETE");
}
public static ConditionalRequestBuilder delete(Variant selectVariant)
{
return new ConditionalRequestBuilder("DELETE", selectVariant);
}
public static class ConditionalRequestBuilder
{
private final String method;
private final Variant selectVariant;
private ConditionalRequestBuilder(String method)
{
this.method = method;
this.selectVariant = null;
}
private ConditionalRequestBuilder(String method, Variant selectVariant)
{
this.method = method;
this.selectVariant = selectVariant;
}
public MockRequest ifMatch(EntityTag ifMatch)
{
return new MockRequest(method, selectVariant, ifMatch, null, null, null);
}
public MockRequest ifNoneMatch(EntityTag ifNoneMatch)
{
return new MockRequest(method, selectVariant, null, ifNoneMatch, null, null);
}
public MockRequest ifModifiedSince(Date ifModifiedSince)
{
return new MockRequest(method, selectVariant, null, null, ifModifiedSince, null);
}
public MockRequest ifUnmodifiedSince(Date ifUnmodifiedSince)
{
return new MockRequest(method, selectVariant, null, null, null, ifUnmodifiedSince);
}
public MockRequest unconditionally()
{
return new MockRequest(method, selectVariant, null, null, null, null);
}
@Override
public String toString()
{
final StringBuilder sb = new StringBuilder();
sb.append(method);
if (selectVariant != null) {
sb.append("{").append(selectVariant).append('}');
}
return sb.toString();
}
}
private final String method;
private final Variant selectVariant;
private final EntityTag ifMatch;
private final EntityTag ifNoneMatch;
private final Date ifModifiedSince;
private final Date ifUnmodifiedSince;
private MockRequest(String method, Variant selectVariant, EntityTag ifMatch, EntityTag ifNoneMatch, Date ifModifiedSince, Date ifUnmodifiedSince)
{
this.method = method;
this.selectVariant = selectVariant;
this.ifMatch = ifMatch;
this.ifNoneMatch = ifNoneMatch;
this.ifModifiedSince = ifModifiedSince;
this.ifUnmodifiedSince = ifUnmodifiedSince;
}
@Override
public String getMethod()
{
return method;
}
@Override
public Variant selectVariant(List<Variant> variants)
throws IllegalArgumentException
{
requireNonNull(variants, "variants is null");
Preconditions.checkArgument(!variants.isEmpty(), "variants is empty");
return selectVariant;
}
// a call into this method is an indicator that the resource does not exist
// see C007
// http://jcp.org/aboutJava/communityprocess/maintenance/jsr311/311ChangeLog.html
@Override
public ResponseBuilder evaluatePreconditions()
{
// the resource does not exist yet so any If-Match header would result
// in a precondition failed
if (ifMatch != null) {
// we won't find a match. To be consistent with evaluateIfMatch, we
// return a built response
return Response.status(Response.Status.PRECONDITION_FAILED);
}
// since the resource does not exist yet if there is a If-None-Match
// header, then this should return null. if there is no If-None-Match
// header, this should still return null
return null;
}
@Override
public ResponseBuilder evaluatePreconditions(EntityTag eTag)
{
requireNonNull(eTag, "eTag is null");
return firstNonNull(evaluateIfMatch(eTag), evaluateIfNoneMatch(eTag));
}
@Override
public ResponseBuilder evaluatePreconditions(Date lastModified)
{
requireNonNull(lastModified, "lastModified is null");
return firstNonNull(evaluateIfModifiedSince(lastModified), evaluateIfUnmodifiedSince(lastModified));
}
@Override
public ResponseBuilder evaluatePreconditions(Date lastModified, EntityTag eTag)
{
requireNonNull(eTag, "eTag is null");
requireNonNull(lastModified, "lastModified is null");
return firstNonNull(evaluatePreconditions(lastModified), evaluatePreconditions(eTag));
}
private ResponseBuilder evaluateIfMatch(EntityTag eTag)
{
// if request ifMatch is not set, process the request
if (ifMatch == null) {
return null;
}
// if-match is not allowed with weak eTags
if (eTag.isWeak()) {
return Response.status(Response.Status.PRECONDITION_FAILED).tag(eTag);
}
// if the request ifMatch eTag matches the supplied eTag, process the request
if ("*".equals(ifMatch.getValue()) || eTag.getValue().equals(ifMatch.getValue())) {
return null;
}
return Response.status(Response.Status.PRECONDITION_FAILED).tag(eTag);
}
private ResponseBuilder evaluateIfNoneMatch(EntityTag tag)
{
// if request ifNoneMatch is not set, process the request
if (ifNoneMatch == null) {
return null;
}
// if the request ifNoneMatch eTag does NOT match the supplied eTag, process the request
if (!("*".equals(ifNoneMatch.getValue()) || tag.getValue().equals(ifNoneMatch.getValue()))) {
return null;
}
// if this is a GET or HEAD, return not modified otherwise return precondition failed
if ("GET".equalsIgnoreCase(getMethod()) || "HEAD".equalsIgnoreCase(getMethod())) {
return Response.notModified(tag);
}
else {
return Response.status(Response.Status.PRECONDITION_FAILED).tag(tag);
}
}
private ResponseBuilder evaluateIfModifiedSince(Date lastModified)
{
// if request ifModifiedSince is not set, process the request
if (ifModifiedSince == null) {
return null;
}
// if modified since only applies to GET and HEAD; otherwise it process request
if (!("GET".equalsIgnoreCase(method) || "HEAD".equalsIgnoreCase(method))) {
return null;
}
// if the request ifModifiedSince is after last modified, process the request
if (lastModified.after(ifModifiedSince)) {
return null;
}
return Response.notModified();
}
private ResponseBuilder evaluateIfUnmodifiedSince(Date lastModified)
{
// if request ifUnmodifiedSince is not set, process the request
if (ifUnmodifiedSince == null) {
return null;
}
// if the request ifUnmodifiedSince is NOT after last modified, process the request
if (!lastModified.after(ifUnmodifiedSince)) {
return null;
}
return Response.status(Response.Status.PRECONDITION_FAILED);
}
// Guava's version does not allow second to be null
private static <T> T firstNonNull(T first, T second)
{
return (first != null) ? first : second;
}
}
| apache-2.0 |
mifos/1.5.x | application/src/main/java/org/mifos/customers/group/persistence/GroupPersistence.java | 11822 | /*
* Copyright (c) 2005-2010 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.customers.group.persistence;
import java.sql.Connection;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.mifos.application.NamedQueryConstants;
import org.mifos.config.ClientRules;
import org.mifos.customers.center.business.CenterBO;
import org.mifos.customers.center.persistence.CenterPersistence;
import org.mifos.customers.exceptions.CustomerException;
import org.mifos.customers.group.GroupTemplate;
import org.mifos.customers.group.business.GroupBO;
import org.mifos.customers.group.util.helpers.GroupConstants;
import org.mifos.customers.office.persistence.OfficePersistence;
import org.mifos.customers.persistence.CustomerPersistence;
import org.mifos.customers.personnel.business.PersonnelBO;
import org.mifos.customers.personnel.persistence.PersonnelPersistence;
import org.mifos.customers.personnel.util.helpers.PersonnelLevel;
import org.mifos.customers.util.helpers.CustomerConstants;
import org.mifos.customers.util.helpers.CustomerLevel;
import org.mifos.customers.util.helpers.CustomerSearchConstants;
import org.mifos.customers.util.helpers.Param;
import org.mifos.framework.exceptions.HibernateSearchException;
import org.mifos.framework.exceptions.PersistenceException;
import org.mifos.framework.exceptions.ValidationException;
import org.mifos.framework.hibernate.helper.QueryFactory;
import org.mifos.framework.hibernate.helper.QueryInputs;
import org.mifos.framework.hibernate.helper.QueryResult;
import org.mifos.framework.hibernate.helper.StaticHibernateUtil;
import org.mifos.framework.persistence.Persistence;
import org.mifos.framework.util.DateTimeService;
import org.mifos.security.util.UserContext;
/**
 * Persistence/query operations for {@link GroupBO} customers: creation,
 * lookup by system id, duplicate-name checks, paged search queries, and a
 * raw-JDBC batch update used by the portfolio-at-risk job.
 */
public class GroupPersistence extends Persistence {
    private CenterPersistence centerPersistence = new CenterPersistence();
    // NOTE(review): this field is shadowed by a new local instance in
    // saveGroup(); it appears unused otherwise — confirm before removing.
    private CustomerPersistence customerPersistence = new CustomerPersistence();
    private PersonnelPersistence personnelPersistence = new PersonnelPersistence();
    /**
     * Creates and persists a group from the supplied template. Resolves the
     * optional parent center and loan officer before constructing the BO.
     *
     * @throws ValidationException if the template references a center id that
     *         does not resolve to an existing center
     */
    public GroupBO createGroup(UserContext userContext, GroupTemplate template) throws CustomerException,
            PersistenceException, ValidationException {
        CenterBO center = null;
        if (template.getParentCenterId() != null) {
            center = getCenterPersistence().getCenter(template.getParentCenterId());
            if (center == null) {
                throw new ValidationException(GroupConstants.PARENT_OFFICE_ID);
            }
        }
        PersonnelBO loanOfficer = null;
        if (template.getLoanOfficerId() != null) {
            loanOfficer = personnelPersistence.getPersonnel(template.getLoanOfficerId());
        }
        GroupBO group = new GroupBO(userContext, template.getDisplayName(), template.getCustomerStatus(), template
                .getExternalId(), template.isTrained(), template.getTrainedDate(), template.getAddress(), template
                .getCustomFieldViews(), template.getFees(), loanOfficer, center, new GroupPersistence(), new OfficePersistence());
        saveGroup(group);
        return group;
    }
    /** Looks up a group by its global customer number; returns null when absent. */
    public GroupBO findBySystemId(String globalCustNum) throws PersistenceException {
        Map<String, String> queryParameters = new HashMap<String, String>();
        GroupBO group = null;
        queryParameters.put("globalCustNum", globalCustNum);
        List<GroupBO> queryResult = executeNamedQuery(NamedQueryConstants.GET_GROUP_BY_SYSTEMID, queryParameters);
        if (null != queryResult && queryResult.size() > 0) {
            group = queryResult.get(0);
        }
        return group;
    }
    /** Returns true if a group with this display name already exists in the office. */
    public boolean isGroupExists(String name, Short officeId) throws PersistenceException {
        Map<String, Object> queryParameters = new HashMap<String, Object>();
        queryParameters.put(CustomerConstants.DISPLAY_NAME, name);
        queryParameters.put(CustomerConstants.OFFICE_ID, officeId);
        List queryResult = executeNamedQuery(NamedQueryConstants.GET_GROUP_COUNT_BY_NAME, queryParameters);
        return ((Number) queryResult.get(0)).intValue() > 0;
    }
    /**
     * Builds a paged search over groups visible to the given user. The named
     * query (and result column aliases) differ depending on whether the
     * configured client hierarchy includes centers.
     */
    public QueryResult search(String searchString, Short userId) throws PersistenceException {
        String[] namedQuery = new String[2];
        List<Param> paramList = new ArrayList<Param>();
        QueryInputs queryInputs = new QueryInputs();
        QueryResult queryResult = QueryFactory.getQueryResult(CustomerSearchConstants.GROUPLIST);
        PersonnelBO personnel = new PersonnelPersistence().getPersonnel(userId);
        String officeSearchId = personnel.getOffice().getSearchId();
        if (ClientRules.getCenterHierarchyExists()) {
            namedQuery[0] = NamedQueryConstants.GROUP_SEARCH_COUNT_WITH_CENTER;
            namedQuery[1] = NamedQueryConstants.GROUP_SEARCHWITH_CENTER;
            String[] aliasNames = { "officeName", "groupName", "centerName", "groupId" };
            queryInputs.setAliasNames(aliasNames);
        } else {
            namedQuery[0] = NamedQueryConstants.GROUP_SEARCH_COUNT_WITHOUT_CENTER;
            namedQuery[1] = NamedQueryConstants.GROUP_SEARCH_WITHOUT_CENTER;
            String[] aliasNames = { "officeName", "groupName", "groupId" };
            queryInputs.setAliasNames(aliasNames);
        }
        // '%' suffixes make both office scope and the name search prefix matches.
        paramList.add(typeNameValue("String", "SEARCH_ID", officeSearchId + "%"));
        paramList.add(typeNameValue("String", "SEARCH_STRING", searchString + "%"));
        paramList.add(typeNameValue("Short", "LEVEL_ID", CustomerLevel.GROUP.getValue()));
        paramList.add(typeNameValue("Short", "USER_ID", userId));
        paramList.add(typeNameValue("Short", "USER_LEVEL_ID", personnel.getLevelEnum().getValue()));
        paramList.add(typeNameValue("Short", "LO_LEVEL_ID", PersonnelLevel.LOAN_OFFICER.getValue()));
        queryInputs.setQueryStrings(namedQuery);
        queryInputs.setPath("org.mifos.customers.group.util.helpers.GroupSearchResults");
        queryInputs.setParamList(paramList);
        try {
            queryResult.setQueryInputs(queryInputs);
        } catch (HibernateSearchException e) {
            throw new PersistenceException(e);
        }
        return queryResult;
    }
    /**
     * Same as {@link #search(String, Short)} but uses the
     * "...FOR_ADDING_GROUPMEMBER" named queries, which restrict results to
     * groups a client may be added to.
     * NOTE(review): this duplicates search() except for the query constants —
     * a shared private helper would remove the duplication.
     */
    public QueryResult searchForAddingClientToGroup(String searchString, Short userId) throws PersistenceException {
        String[] namedQuery = new String[2];
        List<Param> paramList = new ArrayList<Param>();
        QueryInputs queryInputs = new QueryInputs();
        QueryResult queryResult = QueryFactory.getQueryResult(CustomerSearchConstants.GROUPLIST);
        PersonnelBO personnel = new PersonnelPersistence().getPersonnel(userId);
        String officeSearchId = personnel.getOffice().getSearchId();
        if (ClientRules.getCenterHierarchyExists()) {
            namedQuery[0] = NamedQueryConstants.GROUP_SEARCH_COUNT_WITH_CENTER_FOR_ADDING_GROUPMEMBER;
            namedQuery[1] = NamedQueryConstants.GROUP_SEARCHWITH_CENTER_FOR_ADDING_GROUPMEMBER;
            String[] aliasNames = { "officeName", "groupName", "centerName", "groupId" };
            queryInputs.setAliasNames(aliasNames);
        } else {
            namedQuery[0] = NamedQueryConstants.GROUP_SEARCH_COUNT_WITHOUT_CENTER_FOR_ADDING_GROUPMEMBER;
            namedQuery[1] = NamedQueryConstants.GROUP_SEARCH_WITHOUT_CENTER_FOR_ADDING_GROUPMEMBER;
            String[] aliasNames = { "officeName", "groupName", "groupId" };
            queryInputs.setAliasNames(aliasNames);
        }
        paramList.add(typeNameValue("String", "SEARCH_ID", officeSearchId + "%"));
        paramList.add(typeNameValue("String", "SEARCH_STRING", searchString + "%"));
        paramList.add(typeNameValue("Short", "LEVEL_ID", CustomerLevel.GROUP.getValue()));
        paramList.add(typeNameValue("Short", "USER_ID", userId));
        paramList.add(typeNameValue("Short", "USER_LEVEL_ID", personnel.getLevelEnum().getValue()));
        paramList.add(typeNameValue("Short", "LO_LEVEL_ID", PersonnelLevel.LOAN_OFFICER.getValue()));
        queryInputs.setQueryStrings(namedQuery);
        queryInputs.setPath("org.mifos.customers.group.util.helpers.GroupSearchResults");
        queryInputs.setParamList(paramList);
        try {
            queryResult.setQueryInputs(queryInputs);
        } catch (HibernateSearchException e) {
            throw new PersistenceException(e);
        }
        return queryResult;
    }
    public CenterPersistence getCenterPersistence() {
        return centerPersistence;
    }
    public GroupBO getGroupByCustomerId(Integer customerId) throws PersistenceException {
        return (GroupBO) getPersistentObject(GroupBO.class, customerId);
    }
    // this code is used in the PAR task to improve performance
    /**
     * Raw-JDBC update used by the portfolio-at-risk batch job: stamps the
     * CUSTOMER row (audit columns) and writes the new PORTFOLIO_AT_RISK value
     * into GROUP_PERF_HISTORY inside a single manual transaction.
     * NOTE(review): uses string-built SQL via Statement; values are a double
     * and an Integer so injection is not possible here, but PreparedStatement
     * would still be the conventional choice. The Statement objects are also
     * not closed on the exception path — consider try-with-resources.
     */
    public boolean updateGroupInfoAndGroupPerformanceHistoryForPortfolioAtRisk(double portfolioAtRisk, Integer groupId)
            throws Exception {
        boolean result = false;
        Connection connection = null;
        try {
            connection = StaticHibernateUtil.getSessionTL().connection();
            connection.setAutoCommit(false);
            Statement statement = connection.createStatement();
            short userId = 1; // this is bach job, so no user
            java.sql.Date currentDate = new DateTimeService().getCurrentJavaSqlDate();
            int rows = statement.executeUpdate("UPDATE CUSTOMER SET UPDATED_BY = " + userId + ", UPDATED_DATE='"
                    + currentDate + "' WHERE CUSTOMER_ID=" + groupId.toString());
            statement.close();
            if (rows != 1) {
                throw new PersistenceException("Unable to update group table for group id " + groupId.toString());
            }
            statement = connection.createStatement();
            rows = statement.executeUpdate("UPDATE GROUP_PERF_HISTORY SET PORTFOLIO_AT_RISK = " + portfolioAtRisk
                    + " WHERE CUSTOMER_ID=" + groupId.toString());
            statement.close();
            if (rows != 1) {
                throw new PersistenceException("Unable to update group performance history for group id "
                        + groupId.toString());
            }
            connection.commit();
            result = true;
        } catch (Exception ex) {
            if (connection != null) {
                connection.rollback();
            }
            throw new PersistenceException(ex);
        } finally {
            if (connection != null) {
                connection.close();
                StaticHibernateUtil.closeSession();
            }
        }
        return result;
    }
    /** Persists the group and, when present, its parent customer. */
    public void saveGroup(GroupBO groupBo) throws CustomerException {
        CustomerPersistence customerPersistence = new CustomerPersistence();
        customerPersistence.saveCustomer(groupBo);
        try {
            if (groupBo.getParentCustomer() != null) {
                customerPersistence.createOrUpdate(groupBo.getParentCustomer());
            }
        } catch (PersistenceException pe) {
            throw new CustomerException(CustomerConstants.CREATE_FAILED_EXCEPTION, pe);
        }
    }
}
| apache-2.0 |
knowmetools/km-api | km_api/functional_tests/know_me/subscriptions/test_get_apple_subscription.py | 1403 | from rest_framework import status
from rest_framework.reverse import reverse
from test_utils import serialized_time
URL = reverse("know-me:apple-subscription-detail")
def test_get_anonymous(api_client):
    """
    Anonymous users should receive a permissions error if they send a
    GET request to the view.
    """
    # No login step: the client is unauthenticated, so the view must
    # respond with HTTP 403.
    resp = api_client.get(URL)
    assert resp.status_code == status.HTTP_403_FORBIDDEN
def test_get_existing_subscription(
    api_client, apple_receipt_factory, user_factory
):
    """
    Users should be able to fetch information about their existing
    Apple subscription.

    Uses the ``user_factory`` and ``apple_receipt_factory`` fixtures to
    build an authenticated user owning an Apple receipt, then asserts
    the endpoint serializes that receipt.
    """
    # Given James, an authenticated user...
    password = "password"
    user = user_factory(first_name="James", password=password)
    api_client.log_in(user.primary_email.email, password)
    # ...who has an existing Apple receipt...
    receipt = apple_receipt_factory(subscription__user=user)
    # ...then he should be able to view information about his Apple
    # receipt.
    response = api_client.get(URL)
    assert response.status_code == status.HTTP_200_OK
    # The serialized payload must expose every receipt field, with
    # timestamps rendered through the shared serialized_time helper.
    assert response.json() == {
        "expiration_time": serialized_time(receipt.expiration_time),
        "id": str(receipt.pk),
        "receipt_data": receipt.receipt_data,
        "time_created": serialized_time(receipt.time_created),
        "time_updated": serialized_time(receipt.time_updated),
    }
| apache-2.0 |
aws/aws-sdk-java | aws-java-sdk-simpleworkflow/src/main/java/com/amazonaws/services/simpleworkflow/model/transform/RequestCancelExternalWorkflowExecutionInitiatedEventAttributesJsonUnmarshaller.java | 4240 | /*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.simpleworkflow.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* RequestCancelExternalWorkflowExecutionInitiatedEventAttributes JSON Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class RequestCancelExternalWorkflowExecutionInitiatedEventAttributesJsonUnmarshaller implements
        Unmarshaller<RequestCancelExternalWorkflowExecutionInitiatedEventAttributes, JsonUnmarshallerContext> {
    /**
     * Walks the JSON token stream and populates a
     * {@link RequestCancelExternalWorkflowExecutionInitiatedEventAttributes},
     * matching the fields workflowId, runId, decisionTaskCompletedEventId and
     * control at one nesting level below the current depth. Returns null for
     * a JSON null value.
     */
    public RequestCancelExternalWorkflowExecutionInitiatedEventAttributes unmarshall(JsonUnmarshallerContext context) throws Exception {
        RequestCancelExternalWorkflowExecutionInitiatedEventAttributes requestCancelExternalWorkflowExecutionInitiatedEventAttributes = new RequestCancelExternalWorkflowExecutionInitiatedEventAttributes();
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;
        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            return null;
        }
        while (true) {
            if (token == null)
                break;
            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("workflowId", targetDepth)) {
                    context.nextToken();
                    requestCancelExternalWorkflowExecutionInitiatedEventAttributes.setWorkflowId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("runId", targetDepth)) {
                    context.nextToken();
                    requestCancelExternalWorkflowExecutionInitiatedEventAttributes.setRunId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("decisionTaskCompletedEventId", targetDepth)) {
                    context.nextToken();
                    requestCancelExternalWorkflowExecutionInitiatedEventAttributes.setDecisionTaskCompletedEventId(context.getUnmarshaller(Long.class)
                            .unmarshall(context));
                }
                if (context.testExpression("control", targetDepth)) {
                    context.nextToken();
                    requestCancelExternalWorkflowExecutionInitiatedEventAttributes.setControl(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have closed back out to the depth we started at.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }
        return requestCancelExternalWorkflowExecutionInitiatedEventAttributes;
    }
    // Lazily-created singleton; generated SDK callers use getInstance().
    private static RequestCancelExternalWorkflowExecutionInitiatedEventAttributesJsonUnmarshaller instance;
    public static RequestCancelExternalWorkflowExecutionInitiatedEventAttributesJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new RequestCancelExternalWorkflowExecutionInitiatedEventAttributesJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
akzeitions/HerokuTeste | traccar-web/web/app/view/Report.js | 2933 | /*
* Copyright 2015 - 2016 Anton Tananaev (anton@traccar.org)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
// Report panel: a card layout that toggles between a results grid and a
// cartesian chart, driven by the toolbar's report-type combobox and the
// handlers on Traccar.view.ReportController.
Ext.define('Traccar.view.Report', {
    extend: 'Ext.panel.Panel',
    xtype: 'reportView',

    requires: [
        'Traccar.view.ReportController'
    ],

    controller: 'report',

    title: Strings.reportTitle,
    tools: [{
        type: 'close',
        tooltip: Strings.sharedHide,
        handler: 'hideReports'
    }],

    // Toolbar: report type selector plus configure/show/export/clear actions.
    // Show and Export start disabled until a type is chosen (onTypeChange).
    tbar: {
        scrollable: true,
        items: [{
            xtype: 'tbtext',
            html: Strings.sharedType
        }, {
            xtype: 'combobox',
            reference: 'reportTypeField',
            store: 'ReportTypes',
            displayField: 'name',
            valueField: 'key',
            editable: false,
            listeners: {
                change: 'onTypeChange'
            }
        }, '-', {
            text: Strings.reportConfigure,
            handler: 'onConfigureClick'
        }, '-', {
            text: Strings.reportShow,
            reference: 'showButton',
            disabled: true,
            handler: 'onReportClick'
        }, {
            text: Strings.reportExport,
            reference: 'exportButton',
            disabled: true,
            handler: 'onReportClick'
        }, {
            text: Strings.reportClear,
            handler: 'onClearClick'
        }]
    },

    layout: 'card',

    items: [{
        // Card 0: tabular report results; columns are populated at runtime
        // by the controller depending on the selected report type.
        xtype: 'grid',
        itemId: 'grid',
        listeners: {
            selectionchange: 'onSelectionChange'
        },
        forceFit: true,
        columns: {
            defaults: {
                minWidth: Traccar.Style.columnWidthNormal
            },
            items: [
            ]
        },
        style: Traccar.Style.reportGridStyle
    }, {
        // Card 1: time-series chart of route data (x axis = fixTime).
        xtype: 'cartesian',
        itemId: 'chart',
        plugins: {
            ptype: 'chartitemevents',
            moveEvents: true
        },
        store: 'ReportRoute',
        axes: [{
            title: Strings.reportChart,
            type: 'numeric',
            position: 'left'
        }, {
            type: 'time',
            position: 'bottom',
            fields: ['fixTime']
        }],
        listeners: {
            itemclick: 'onChartMarkerClick'
        },
        insetPadding: Traccar.Style.chartPadding
    }]
});
| apache-2.0 |
achoraev/RunnerMeter | SportsMeter/src/main/java/com/runner/sportsmeter/models/Coordinates.java | 1227 | package com.runner.sportsmeter.models;
import com.parse.*;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* Created by angelr on 22-Oct-15.
*/
/**
 * Parse-backed model for the "Coordinates" class: associates a user with a
 * start/end geo point and the full list of start/end coordinates of a run.
 */
@ParseClassName("Coordinates")
public class Coordinates extends ParseObject {
    // Required zero-arg constructor for the Parse SDK.
    public Coordinates(){}
    public ParseUser getCurrentUser() {
        return getParseUser("user");
    }
    public void setCurrentUser(ParseUser currentUser) {
        put("user", currentUser);
    }
    public ParseGeoPoint getStartAndEndPoint() {
        return getParseGeoPoint("startAndEndPoint");
    }
    public void setStartAndEndPoint(ParseGeoPoint startAndEndPoint) {
        put("startAndEndPoint", startAndEndPoint);
    }
    public List<ParseGeoPoint> getStartAndEndCoordinates() {
        return getList("startAndEndCoordinates");
    }
    // NOTE(review): addAll APPENDS to the stored array rather than replacing
    // it, unlike the other setters which use put — confirm this accumulation
    // is intended by callers.
    public void setStartAndEndCoordinates(ArrayList<ParseGeoPoint> geoPoints) {
        addAll("startAndEndCoordinates", geoPoints);
    }
    public void setAcl(ParseACL acl) {
        put("ACL", acl);
    }
    public Date getCreatedAt(){
        return getDate("createdAt");
    }
    /** Convenience factory for a typed ParseQuery over this class. */
    public static ParseQuery<Coordinates> getQuery() {
        return ParseQuery.getQuery(Coordinates.class);
    }
}
| apache-2.0 |
0x1mason/appium-dotnet-driver | appium-dotnet-driver/Appium/Interfaces/IFindByIosUIAutomation.cs | 891 | using System.Collections.ObjectModel;
namespace OpenQA.Selenium.Appium.src.Appium.Interfaces
{
    /// <summary>
    /// Element-lookup capability using iOS UIAutomation selector strings.
    /// </summary>
    interface IFindByIosUIAutomation
    {
        /// <summary>
        /// Finds the first of elements that match the Ios UIAutomation selector supplied
        /// </summary>
        /// <param name="selector">an Ios UIAutomation selector</param>
        /// <returns>IWebElement object so that you can interact that object</returns>
        IWebElement FindElementByIosUIAutomation(string selector);
        /// <summary>
        /// Finds a list of elements that match the Ios UIAutomation selector supplied
        /// </summary>
        /// <param name="selector">an Ios UIAutomation selector</param>
        /// <returns>a read-only collection of IWebElement objects so that you can interact with them</returns>
        ReadOnlyCollection<IWebElement> FindElementsByIosUIAutomation(string selector);
    }
}
| apache-2.0 |
OSUCartography/JMapProjLib | src/com/jhlabs/map/proj/Eckert6Projection.java | 1950 | /*
Copyright 2010 Bernhard Jenny
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.jhlabs.map.proj;
import com.jhlabs.map.MapMath;
import java.awt.geom.Point2D;
/**
 * Eckert VI pseudocylindrical equal-area projection.
 *
 * Forward projection solves the transcendental equation
 * theta + sin(theta) = n * sin(phi) for theta by Newton-Raphson iteration,
 * then maps x = C_x * lam * (1 + cos(theta)), y = C_y * theta.
 */
public class Eckert6Projection extends PseudoCylindricalProjection {
    // n = 1 + pi/2, the constant in theta + sin(theta) = n * sin(phi).
    private static final double n = 2.570796326794896619231321691;
    private static final double C_y = Math.sqrt((2) / n);
    private static final double C_x = C_y / 2;
    // Newton-Raphson iteration cap and convergence tolerance.
    private static final int MAX_ITER = 8;
    private static final double LOOP_TOL = 1e-7;
    public Point2D.Double project(double lam, double phi, Point2D.Double xy) {
        int i;
        double k, V;
        k = n * Math.sin(phi);
        // Newton-Raphson: phi converges to theta such that
        // theta + sin(theta) = k; V is the correction term per step.
        for (i = MAX_ITER; i > 0;) {
            phi -= V = (phi + Math.sin(phi) - k) / (1 + Math.cos(phi));
            if (Math.abs(V) < LOOP_TOL) {
                break;
            }
            --i;
        }
        if (i == 0) {
            // iteration failed to converge within MAX_ITER steps
            throw new ProjectionException("F_ERROR");
        }
        xy.x = C_x * lam * (1 + Math.cos(phi));
        xy.y = C_y * phi;
        return xy;
    }
    // Inverse is closed-form: recover theta from y, then phi and lam.
    public Point2D.Double projectInverse(double x, double y, Point2D.Double lp) {
        y /= C_y;
        lp.y = MapMath.asin((y + Math.sin(y)) / n);
        lp.x = x / (C_x * (1 + Math.cos(y)));
        return lp;
    }
    public boolean hasInverse() {
        return true;
    }
    public boolean isEqualArea() {
        return true;
    }
    public String toString() {
        return "Eckert VI";
    }
}
| apache-2.0 |
brettfo/roslyn | src/Features/Core/Portable/FindUsages/IRemoteFindUsagesService.cs | 8549 | // Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
#nullable disable
using System;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using System.Runtime.Serialization;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.FindSymbols;
using Microsoft.CodeAnalysis.Remote;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.FindUsages
{
    /// <summary>
    /// Remote (out-of-process) find-usages service contract. The nested
    /// <see cref="ICallback"/> is implemented on the host side and receives
    /// streamed progress, definitions and references from the remote process.
    /// </summary>
    internal interface IRemoteFindUsagesService
    {
        internal interface ICallback
        {
            ValueTask AddItemsAsync(int count);
            ValueTask ItemCompletedAsync();
            ValueTask ReportMessageAsync(string message);
            ValueTask ReportProgressAsync(int current, int maximum);
            ValueTask SetSearchTitleAsync(string title);
            ValueTask OnDefinitionFoundAsync(SerializableDefinitionItem definition);
            ValueTask OnReferenceFoundAsync(SerializableSourceReferenceItem reference);
        }
        ValueTask FindReferencesAsync(
            PinnedSolutionInfo solutionInfo,
            SerializableSymbolAndProjectId symbolAndProjectId,
            FindReferencesSearchOptions options,
            CancellationToken cancellationToken);
        ValueTask FindImplementationsAsync(
            PinnedSolutionInfo solutionInfo,
            SerializableSymbolAndProjectId symbolAndProjectId,
            CancellationToken cancellationToken);
    }
internal sealed class FindUsagesServerCallback : IRemoteFindUsagesService.ICallback
{
private readonly Solution _solution;
private readonly IFindUsagesContext _context;
private readonly Dictionary<int, DefinitionItem> _idToDefinition = new();
public FindUsagesServerCallback(Solution solution, IFindUsagesContext context)
{
_solution = solution;
_context = context;
}
public ValueTask AddItemsAsync(int count)
=> _context.ProgressTracker.AddItemsAsync(count);
public ValueTask ItemCompletedAsync()
=> _context.ProgressTracker.ItemCompletedAsync();
public ValueTask ReportMessageAsync(string message)
=> _context.ReportMessageAsync(message);
[Obsolete]
public ValueTask ReportProgressAsync(int current, int maximum)
=> _context.ReportProgressAsync(current, maximum);
public ValueTask SetSearchTitleAsync(string title)
=> _context.SetSearchTitleAsync(title);
public ValueTask OnDefinitionFoundAsync(SerializableDefinitionItem definition)
{
var id = definition.Id;
var rehydrated = definition.Rehydrate(_solution);
lock (_idToDefinition)
{
_idToDefinition.Add(id, rehydrated);
}
return _context.OnDefinitionFoundAsync(rehydrated);
}
public ValueTask OnReferenceFoundAsync(SerializableSourceReferenceItem reference)
=> _context.OnReferenceFoundAsync(reference.Rehydrate(_solution, GetDefinition(reference.DefinitionId)));
private DefinitionItem GetDefinition(int definitionId)
{
lock (_idToDefinition)
{
Contract.ThrowIfFalse(_idToDefinition.ContainsKey(definitionId));
return _idToDefinition[definitionId];
}
}
}
[DataContract]
internal readonly struct SerializableDocumentSpan
{
[DataMember(Order = 0)]
public readonly DocumentId DocumentId;
[DataMember(Order = 1)]
public readonly TextSpan SourceSpan;
public SerializableDocumentSpan(DocumentId documentId, TextSpan sourceSpan)
{
DocumentId = documentId;
SourceSpan = sourceSpan;
}
public static SerializableDocumentSpan Dehydrate(DocumentSpan documentSpan)
=> new(documentSpan.Document.Id, documentSpan.SourceSpan);
public DocumentSpan Rehydrate(Solution solution)
=> new(solution.GetDocument(DocumentId), SourceSpan);
}
[DataContract]
internal readonly struct SerializableDefinitionItem
{
[DataMember(Order = 0)]
public readonly int Id;
[DataMember(Order = 1)]
public readonly ImmutableArray<string> Tags;
[DataMember(Order = 2)]
public readonly ImmutableArray<TaggedText> DisplayParts;
[DataMember(Order = 3)]
public readonly ImmutableArray<TaggedText> NameDisplayParts;
[DataMember(Order = 4)]
public readonly ImmutableArray<TaggedText> OriginationParts;
[DataMember(Order = 5)]
public readonly ImmutableArray<SerializableDocumentSpan> SourceSpans;
[DataMember(Order = 6)]
public readonly ImmutableDictionary<string, string> Properties;
[DataMember(Order = 7)]
public readonly ImmutableDictionary<string, string> DisplayableProperties;
[DataMember(Order = 8)]
public readonly bool DisplayIfNoReferences;
public SerializableDefinitionItem(
int id,
ImmutableArray<string> tags,
ImmutableArray<TaggedText> displayParts,
ImmutableArray<TaggedText> nameDisplayParts,
ImmutableArray<TaggedText> originationParts,
ImmutableArray<SerializableDocumentSpan> sourceSpans,
ImmutableDictionary<string, string> properties,
ImmutableDictionary<string, string> displayableProperties,
bool displayIfNoReferences)
{
Id = id;
Tags = tags;
DisplayParts = displayParts;
NameDisplayParts = nameDisplayParts;
OriginationParts = originationParts;
SourceSpans = sourceSpans;
Properties = properties;
DisplayableProperties = displayableProperties;
DisplayIfNoReferences = displayIfNoReferences;
}
public static SerializableDefinitionItem Dehydrate(int id, DefinitionItem item)
=> new(id,
item.Tags,
item.DisplayParts,
item.NameDisplayParts,
item.OriginationParts,
item.SourceSpans.SelectAsArray(ss => SerializableDocumentSpan.Dehydrate(ss)),
item.Properties,
item.DisplayableProperties,
item.DisplayIfNoReferences);
public DefinitionItem Rehydrate(Solution solution)
=> new DefinitionItem.DefaultDefinitionItem(
Tags,
DisplayParts,
NameDisplayParts,
OriginationParts,
SourceSpans.SelectAsArray(ss => ss.Rehydrate(solution)),
Properties,
DisplayableProperties,
DisplayIfNoReferences);
}
/// <summary>
/// Wire-format counterpart of <c>SourceReferenceItem</c>. The definition itself is not
/// serialized; <see cref="DefinitionId"/> refers to a previously transmitted
/// <c>SerializableDefinitionItem</c>. DataMember orders are part of the serialization
/// contract — do not renumber.
/// </summary>
[DataContract]
internal readonly struct SerializableSourceReferenceItem
{
    [DataMember(Order = 0)]
    public readonly int DefinitionId;
    [DataMember(Order = 1)]
    public readonly SerializableDocumentSpan SourceSpan;
    [DataMember(Order = 2)]
    public readonly SymbolUsageInfo SymbolUsageInfo;
    [DataMember(Order = 3)]
    public readonly ImmutableDictionary<string, string> AdditionalProperties;
    public SerializableSourceReferenceItem(
        int definitionId,
        SerializableDocumentSpan sourceSpan,
        SymbolUsageInfo symbolUsageInfo,
        ImmutableDictionary<string, string> additionalProperties)
    {
        DefinitionId = definitionId;
        SourceSpan = sourceSpan;
        SymbolUsageInfo = symbolUsageInfo;
        AdditionalProperties = additionalProperties;
    }
    /// <summary>Converts a live <c>SourceReferenceItem</c> into its serializable form, keyed to <paramref name="definitionId"/>.</summary>
    public static SerializableSourceReferenceItem Dehydrate(int definitionId, SourceReferenceItem item)
        => new(definitionId,
               SerializableDocumentSpan.Dehydrate(item.SourceSpan),
               item.SymbolUsageInfo,
               item.AdditionalProperties);
    /// <summary>Reconstitutes a <c>SourceReferenceItem</c> against the already-rehydrated <paramref name="definition"/>.</summary>
    public SourceReferenceItem Rehydrate(Solution solution, DefinitionItem definition)
        // Pass AdditionalProperties straight through: it is already an
        // ImmutableDictionary, so the previous
        // ToImmutableDictionary(t => t.Key, t => t.Value) round-trip only
        // allocated a copy — and rebuilt it with the default key comparer,
        // dropping whatever comparer the original dictionary used.
        => new(definition,
               SourceSpan.Rehydrate(solution),
               SymbolUsageInfo,
               AdditionalProperties);
}
}
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.tools.ant;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.Hashtable;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.apache.tools.ant.types.EnumeratedAttribute;
import org.apache.tools.ant.taskdefs.PreSetDef;
/**
* Helper class that collects the methods a task or nested element
* holds to set attributes, create nested elements or hold PCDATA
* elements.
* The class is final as it has a private constructor.
*/
public final class IntrospectionHelper {
/**
 * EMPTY_MAP was added in java 1.3 (EMPTY_SET and EMPTY_LIST
 * is in java 1.2!)
 */
private static final Map EMPTY_MAP
    = Collections.unmodifiableMap(new HashMap(0));
/**
 * Helper instances we've already created (Class.getName() to IntrospectionHelper).
 */
private static final Map HELPERS = new Hashtable();
/**
 * Map from primitive types to wrapper classes for use in
 * createAttributeSetter (Class to Class). Note that char
 * and boolean are in here even though they get special treatment
 * - this way we only need to test for the wrapper class.
 */
private static final Map PRIMITIVE_TYPE_MAP = new HashMap(8);
// Set up PRIMITIVE_TYPE_MAP: pair each primitive TYPE token with its wrapper class.
static {
    Class[] primitives = {Boolean.TYPE, Byte.TYPE, Character.TYPE,
                          Short.TYPE, Integer.TYPE, Long.TYPE,
                          Float.TYPE, Double.TYPE};
    Class[] wrappers = {Boolean.class, Byte.class, Character.class,
                        Short.class, Integer.class, Long.class,
                        Float.class, Double.class};
    for (int i = 0; i < primitives.length; i++) {
        PRIMITIVE_TYPE_MAP.put (primitives[i], wrappers[i]);
    }
}
// Maximum number of characters of nested text echoed back in error messages.
private static final int MAX_REPORT_NESTED_TEXT = 20;
// Suffix appended when nested text is truncated in an error message.
private static final String ELLIPSIS = "...";
/**
 * Map from attribute names to attribute types
 * (String to Class).
 */
private Hashtable attributeTypes = new Hashtable();
/**
 * Map from attribute names to attribute setter methods
 * (String to AttributeSetter).
 */
private Hashtable attributeSetters = new Hashtable();
/**
 * Map from attribute names to nested types
 * (String to Class).
 */
private Hashtable nestedTypes = new Hashtable();
/**
 * Map from attribute names to methods to create nested types
 * (String to NestedCreator).
 */
private Hashtable nestedCreators = new Hashtable();
/**
 * Vector of methods matching add[Configured](Class) pattern.
 */
private List addTypeMethods = new ArrayList();
/**
 * The method to invoke to add PCDATA.
 */
private Method addText = null;
/**
 * The class introspected by this instance.
 */
private Class bean;
/**
* Sole constructor, which is private to ensure that all
* IntrospectionHelpers are created via {@link #getHelper(Class) getHelper}.
* Introspects the given class for bean-like methods.
* Each method is examined in turn, and the following rules are applied:
* <p>
* <ul>
* <li>If the method is <code>Task.setLocation(Location)</code>,
* <code>Task.setTaskType(String)</code>
* or <code>TaskContainer.addTask(Task)</code>, it is ignored. These
* methods are handled differently elsewhere.
* <li><code>void addText(String)</code> is recognised as the method for
* adding PCDATA to a bean.
* <li><code>void setFoo(Bar)</code> is recognised as a method for
* setting the value of attribute <code>foo</code>, so long as
* <code>Bar</code> is non-void and is not an array type. Non-String
* parameter types always overload String parameter types, but that is
* the only guarantee made in terms of priority.
* <li><code>Foo createBar()</code> is recognised as a method for
* creating a nested element called <code>bar</code> of type
* <code>Foo</code>, so long as <code>Foo</code> is not a primitive or
* array type.
* <li><code>void addConfiguredFoo(Bar)</code> is recognised as a
* method for storing a pre-configured element called
* <code>foo</code> and of type <code>Bar</code>, so long as
* <code>Bar</code> is not an array, primitive or String type.
* <code>Bar</code> must have an accessible constructor taking no
* arguments.
* <li><code>void addFoo(Bar)</code> is recognised as a method for storing
* an element called <code>foo</code> and of type <code>Bar</code>, so
* long as <code>Bar</code> is not an array, primitive or String type.
* <code>Bar</code> must have an accessible constructor taking no
* arguments. This is distinct from the 'addConfigured' idiom in that
* the nested element is added to the parent immediately after it is
* constructed; in practice this means that <code>addFoo(Bar)</code> should
* do little or nothing with its argument besides storing it for later use.
* </ul>
* Note that only one method is retained to create/set/addConfigured/add
* any element or attribute.
*
* @param bean The bean type to introspect.
* Must not be <code>null</code>.
*
* @see #getHelper(Class)
*/
private IntrospectionHelper(final Class bean) {
    this.bean = bean;
    Method[] methods = bean.getMethods();
    // Classify every public method into one of the introspection tables:
    // addText, attribute setters, element creators, or element adders.
    for (int i = 0; i < methods.length; i++) {
        final Method m = methods[i];
        final String name = m.getName();
        Class returnType = m.getReturnType();
        Class[] args = m.getParameterTypes();
        // check of add[Configured](Class) pattern
        if (args.length == 1 && java.lang.Void.TYPE.equals(returnType)
            && ("add".equals(name) || "addConfigured".equals(name))) {
            insertAddTypeMethod(m);
            continue;
        }
        // not really user settable properties on tasks/project components
        if (org.apache.tools.ant.ProjectComponent.class.isAssignableFrom(
                bean)
            && args.length == 1 && isHiddenSetMethod(name, args[0])) {
            continue;
        }
        // hide addTask for TaskContainers
        if (isContainer() && args.length == 1 && "addTask".equals(name)
            && org.apache.tools.ant.Task.class.equals(args[0])) {
            continue;
        }
        if ("addText".equals(name) && java.lang.Void.TYPE.equals(returnType)
            && args.length == 1 && java.lang.String.class.equals(args[0])) {
            // void addText(String) receives nested PCDATA.
            addText = methods[i];
        } else if (name.startsWith("set")
                   && java.lang.Void.TYPE.equals(returnType)
                   && args.length == 1 && !args[0].isArray()) {
            // Attribute setter: setFoo(NonArrayType).
            String propName = getPropertyName(name, "set");
            if (attributeSetters.get(propName) != null) {
                if (java.lang.String.class.equals(args[0])) {
                    /*
                        Ignore method m, as there is an overloaded
                        form of this method that takes in a
                        non-string argument, which gains higher
                        priority.
                    */
                    continue;
                }
                /*
                    If the argument is not a String or Location,
                    and if there
                    is an overloaded form of this method already defined,
                    we just override that with the new one.
                    This mechanism does not guarantee any specific order
                    in which the methods will be selected: so any code
                    that depends on the order in which "set" methods have
                    been defined, is not guaranteed to be selected in any
                    particular order.
                */
            }
            AttributeSetter as = createAttributeSetter(m, args[0], propName);
            if (as != null) {
                attributeTypes.put(propName, args[0]);
                attributeSetters.put(propName, as);
            }
        } else if (name.startsWith("create") && !returnType.isArray()
                   && !returnType.isPrimitive() && args.length == 0) {
            // Nested-element factory: NonPrimitiveType createFoo().
            String propName = getPropertyName(name, "create");
            // Check if a create of this property is already present
            // add takes preference over create for CB purposes
            if (nestedCreators.get(propName) == null) {
                nestedTypes.put(propName, returnType);
                nestedCreators.put(propName, new CreateNestedCreator(m));
            }
        } else if (name.startsWith("addConfigured")
                   && java.lang.Void.TYPE.equals(returnType) && args.length == 1
                   && !java.lang.String.class.equals(args[0])
                   && !args[0].isArray() && !args[0].isPrimitive()) {
            // addConfiguredFoo(Bar): child is fully configured before storage.
            try {
                Constructor constructor = null;
                try {
                    // Prefer a no-arg constructor; fall back to Bar(Project).
                    constructor = args[0].getConstructor(new Class[] {});
                } catch (NoSuchMethodException ex) {
                    constructor =
                        args[0].getConstructor(new Class[] {Project.class});
                }
                String propName = getPropertyName(name, "addConfigured");
                nestedTypes.put(propName, args[0]);
                nestedCreators.put(propName, new AddNestedCreator(m,
                    constructor, AddNestedCreator.ADD_CONFIGURED));
            } catch (NoSuchMethodException nse) {
                // ignore: no usable constructor, so not a nested element
            }
        } else if (name.startsWith("add")
                   && java.lang.Void.TYPE.equals(returnType) && args.length == 1
                   && !java.lang.String.class.equals(args[0])
                   && !args[0].isArray() && !args[0].isPrimitive()) {
            // addFoo(Bar): child is stored immediately after construction.
            try {
                Constructor constructor = null;
                try {
                    // Prefer a no-arg constructor; fall back to Bar(Project).
                    constructor = args[0].getConstructor(new Class[] {});
                } catch (NoSuchMethodException ex) {
                    constructor =
                        args[0].getConstructor(new Class[] {Project.class});
                }
                String propName = getPropertyName(name, "add");
                if (nestedTypes.get(propName) != null) {
                    /*
                     * Ignore this method as there is an addConfigured
                     * form of this method that has a higher
                     * priority
                     */
                    continue;
                }
                nestedTypes.put(propName, args[0]);
                nestedCreators.put(propName, new AddNestedCreator(m,
                    constructor, AddNestedCreator.ADD));
            } catch (NoSuchMethodException nse) {
                // ignore: no usable constructor, so not a nested element
            }
        }
    }
}
/**
 * Determines whether a set method belongs to the Ant core task
 * interface and must therefore be hidden from introspection.
 *
 * @param name the name of the set method
 * @param type the type of the set method's single parameter
 * @return true if the given set method is to be hidden.
 */
private boolean isHiddenSetMethod(String name, Class type) {
    boolean hidesLocation = "setLocation".equals(name)
        && org.apache.tools.ant.Location.class.equals(type);
    boolean hidesTaskType = "setTaskType".equals(name)
        && java.lang.String.class.equals(type);
    return hidesLocation || hidesTaskType;
}
/**
 * Returns a helper for the given class, either from the cache
 * or by creating a new instance. Convenience form of
 * {@link #getHelper(Project, Class)} with no project, so the
 * result is never cached.
 *
 * @param c The class for which a helper is required.
 *          Must not be <code>null</code>.
 *
 * @return a helper for the specified class
 */
public static synchronized IntrospectionHelper getHelper(Class c) {
    return getHelper(null, c);
}
/**
 * Returns a helper for the given class, either from the cache
 * or by creating a new instance. Only one cached instance exists
 * per class name, and nothing is cached when no project is given
 * (so the cache can always be cleaned up).
 *
 * @param p the project instance; may be <code>null</code>.
 * @param c The class for which a helper is required.
 *          Must not be <code>null</code>.
 *
 * @return a helper for the specified class
 */
public static IntrospectionHelper getHelper(Project p, Class c) {
    IntrospectionHelper helper =
        (IntrospectionHelper) HELPERS.get(c.getName());
    // A cached helper is only reusable when it was built for this exact
    // Class object; a same-named class from another classloader misses.
    if (helper == null || helper.bean != c) {
        helper = new IntrospectionHelper(c);
        if (p != null) {
            // #30162: do *not* cache this if there is no project, as we
            // cannot guarantee that the cache will be cleared.
            HELPERS.put(c.getName(), helper);
        }
    }
    return helper;
}
/**
* Sets the named attribute in the given element, which is part of the
* given project.
*
* @param p The project containing the element. This is used when files
* need to be resolved. Must not be <code>null</code>.
* @param element The element to set the attribute in. Must not be
* <code>null</code>.
* @param attributeName The name of the attribute to set. Must not be
* <code>null</code>.
* @param value The value to set the attribute to. This may be interpreted
* or converted to the necessary type if the setter method
* doesn't just take a string. Must not be <code>null</code>.
*
* @exception BuildException if the introspected class doesn't support
* the given attribute, or if the setting
* method fails.
*/
public void setAttribute(Project p, Object element, String attributeName,
                         String value) throws BuildException {
    // Attribute names were stored lower-cased during introspection.
    AttributeSetter as
        = (AttributeSetter) attributeSetters.get(
            attributeName.toLowerCase(Locale.US));
    if (as == null) {
        // No reflective setter: fall back to the dynamic-attribute hooks.
        if (element instanceof DynamicAttributeNS) {
            DynamicAttributeNS dc = (DynamicAttributeNS) element;
            // NOTE(review): extractUriFromComponentName is applied twice
            // here — presumably to strip a prefix and then the uri; confirm
            // against ProjectHelper before relying on this.
            String uriPlusPrefix =
                ProjectHelper.extractUriFromComponentName(attributeName);
            String uri =
                ProjectHelper.extractUriFromComponentName(uriPlusPrefix);
            String localName =
                ProjectHelper.extractNameFromComponentName(attributeName);
            String qName = ("".equals(uri)
                ? localName : (uri + ":" + localName));
            dc.setDynamicAttribute(uri, localName, qName, value);
            return;
        } else if (element instanceof DynamicAttribute) {
            DynamicAttribute dc = (DynamicAttribute) element;
            dc.setDynamicAttribute(attributeName.toLowerCase(Locale.US), value);
            return;
        } else {
            if (attributeName.indexOf(':') != -1) {
                return; // Ignore attribute from unknown uri's
            }
            String msg = getElementName(p, element)
                + " doesn't support the \"" + attributeName
                + "\" attribute.";
            throw new UnsupportedAttributeException(msg, attributeName);
        }
    }
    try {
        as.set(p, element, value);
    } catch (IllegalAccessException ie) {
        // impossible as getMethods should only return public methods
        throw new BuildException(ie);
    } catch (InvocationTargetException ite) {
        // Unwrap the target exception; re-throw BuildExceptions as-is.
        Throwable t = ite.getTargetException();
        if (t instanceof BuildException) {
            throw (BuildException) t;
        }
        throw new BuildException(t);
    }
}
/**
 * Adds PCDATA to an element via its <code>void addText(String)</code>
 * method, if it has one. Without such a method, whitespace-only text is
 * silently ignored and any other text triggers a BuildException.
 *
 * @param project The project which the element is part of.
 *                Must not be <code>null</code>.
 * @param element The element to add the text to.
 *                Must not be <code>null</code>.
 * @param text The text to add.
 *                Must not be <code>null</code>.
 *
 * @exception BuildException if non-whitespace text is provided and no
 *                           method is available to handle it, or if
 *                           the handling method fails.
 */
public void addText(Project project, Object element, String text)
    throws BuildException {
    if (addText == null) {
        // Element doesn't handle text content.
        String trimmed = text.trim();
        if (trimmed.length() == 0) {
            // Only whitespace - ignore
            return;
        }
        // Not whitespace - fail, echoing a condensed sample of the text.
        throw new BuildException(project.getElementName(element)
            + " doesn't support nested text data (\""
            + condenseText(trimmed) + "\").");
    }
    try {
        addText.invoke(element, new Object[] {text});
    } catch (IllegalAccessException ie) {
        // impossible as getMethods should only return public methods
        throw new BuildException(ie);
    } catch (InvocationTargetException ite) {
        // Unwrap the target exception; re-throw BuildExceptions as-is.
        Throwable t = ite.getTargetException();
        if (t instanceof BuildException) {
            throw (BuildException) t;
        }
        throw new BuildException(t);
    }
}
/**
 * Utility method that always throws an UnsupportedElementException
 * naming the parent and the unsupported child element.
 *
 * @param project the Project instance.
 * @param parent the object which doesn't support a requested element
 * @param elementName the name of the Element which is trying to be created.
 */
public void throwNotSupported(Project project, Object parent,
    String elementName) {
    throw new UnsupportedElementException(
        project.getElementName(parent)
        + " doesn't support the nested \"" + elementName + "\" element.",
        elementName);
}
/**
 * Finds a NestedCreator for a child element, trying in order:
 * an introspected create/add method (when the uri matches the parent's
 * or is empty), an add[Configured](Class) extension-point method, then
 * the DynamicElementNS and DynamicElement hooks on the parent.
 *
 * @param project the current project (passed through to creators).
 * @param parentUri namespace uri of the parent element.
 * @param parent the parent object that will receive the child.
 * @param elementName the (possibly uri-qualified) child element name.
 * @param child the UnknownElement for the child; may be null.
 * @return a NestedCreator for the child; never null.
 * @throws BuildException (UnsupportedElementException) if no strategy
 *         can create the child.
 */
private NestedCreator getNestedCreator(
    Project project, String parentUri, Object parent,
    String elementName, UnknownElement child) throws BuildException {
    String uri = ProjectHelper.extractUriFromComponentName(elementName);
    String name = ProjectHelper.extractNameFromComponentName(elementName);
    // The core Ant uri is treated the same as the empty (default) uri.
    if (uri.equals(ProjectHelper.ANT_CORE_URI)) {
        uri = "";
    }
    if (parentUri.equals(ProjectHelper.ANT_CORE_URI)) {
        parentUri = "";
    }
    NestedCreator nc = null;
    // Introspected creators only apply within the parent's namespace.
    if (uri.equals(parentUri) || uri.equals("")) {
        nc = (NestedCreator) nestedCreators.get(
            name.toLowerCase(Locale.US));
    }
    if (nc == null) {
        nc = createAddTypeCreator(project, parent, elementName);
    }
    if (nc == null && parent instanceof DynamicElementNS) {
        DynamicElementNS dc = (DynamicElementNS) parent;
        String qName = (child == null ? name : child.getQName());
        final Object nestedElement =
            dc.createDynamicElement(
                (child == null ? "" : child.getNamespace()),
                name, qName);
        if (nestedElement != null) {
            // Wrap the already-created object in a creator that just
            // hands it back.
            nc = new NestedCreator(null) {
                Object create(
                    Project project, Object parent, Object ignore) {
                    return nestedElement;
                }
            };
        }
    }
    if (nc == null && parent instanceof DynamicElement) {
        DynamicElement dc = (DynamicElement) parent;
        final Object nestedElement =
            dc.createDynamicElement(name.toLowerCase(Locale.US));
        if (nestedElement != null) {
            nc = new NestedCreator(null) {
                Object create(
                    Project project, Object parent, Object ignore) {
                    return nestedElement;
                }
            };
        }
    }
    if (nc == null) {
        throwNotSupported(project, parent, elementName);
    }
    return nc;
}
/**
* Creates a named nested element. Depending on the results of the
* initial introspection, either a method in the given parent instance
* or a simple no-arg constructor is used to create an instance of the
* specified element type.
*
* @param project Project to which the parent object belongs.
* Must not be <code>null</code>. If the resulting
* object is an instance of ProjectComponent, its
* Project reference is set to this parameter value.
* @param parent Parent object used to create the instance.
* Must not be <code>null</code>.
* @param elementName Name of the element to create an instance of.
* Must not be <code>null</code>.
*
* @return an instance of the specified element type
* @deprecated since 1.6.x.
* This is not a namespace aware method.
*
* @exception BuildException if no method is available to create the
* element instance, or if the creating method
* fails.
*/
public Object createElement(Project project, Object parent,
                            String elementName) throws BuildException {
    // Namespace-unaware: resolve the creator with an empty parent uri.
    NestedCreator nc = getNestedCreator(project, "", parent, elementName, null);
    try {
        Object nestedElement = nc.create(project, parent, null);
        if (project != null) {
            project.setProjectReference(nestedElement);
        }
        return nestedElement;
    } catch (IllegalAccessException ie) {
        // impossible as getMethods should only return public methods
        throw new BuildException(ie);
    } catch (InstantiationException ine) {
        // impossible as getMethods should only return public methods
        throw new BuildException(ine);
    } catch (InvocationTargetException ite) {
        // Unwrap the target exception; re-throw BuildExceptions as-is.
        Throwable t = ite.getTargetException();
        if (t instanceof BuildException) {
            throw (BuildException) t;
        }
        throw new BuildException(t);
    }
}
/**
 * Builds a Creator that can instantiate and store an object
 * for a child element of the given parent.
 *
 * @param project Project to which the parent object belongs.
 * @param parentUri The namespace uri of the parent object.
 * @param parent Parent object used to create the creator object to
 *               create and store and instance of a subelement.
 * @param elementName Name of the element to create an instance of.
 * @param ue The unknown element associated with the element.
 * @return a creator object to create and store the element instance.
 */
public Creator getElementCreator(
    Project project, String parentUri, Object parent, String elementName,
    UnknownElement ue) {
    return new Creator(project, parent,
        getNestedCreator(project, parentUri, parent, elementName, ue));
}
/**
 * Indicates whether the introspected class is a dynamic one,
 * supporting arbitrary nested elements and/or attributes.
 *
 * @return <code>true</code> if the introspected class is dynamic;
 *         <code>false</code> otherwise.
 * @since Ant 1.6.3
 *
 * @see DynamicElement
 * @see DynamicElementNS
 */
public boolean isDynamic() {
    boolean plainDynamic = DynamicElement.class.isAssignableFrom(bean);
    boolean nsAwareDynamic = DynamicElementNS.class.isAssignableFrom(bean);
    return plainDynamic || nsAwareDynamic;
}
/**
 * Indicates whether the introspected class is a task container,
 * supporting arbitrary nested tasks/types.
 *
 * @return <code>true</code> if the introspected class is a container;
 *         <code>false</code> otherwise.
 * @since Ant 1.6.3
 *
 * @see TaskContainer
 */
public boolean isContainer() {
    Class introspected = bean;
    return TaskContainer.class.isAssignableFrom(introspected);
}
/**
 * Indicates if this element supports a nested element of the
 * given name: either an introspected creator exists, or the class
 * is dynamic, or it exposes add[Configured](Class) extension points.
 *
 * @param elementName the name of the nested element being checked
 *
 * @return true if the given nested element is supported
 */
public boolean supportsNestedElement(String elementName) {
    if (nestedCreators.containsKey(elementName.toLowerCase(Locale.US))) {
        return true;
    }
    return isDynamic() || addTypeMethods.size() != 0;
}
/**
 * Indicates if this element supports a nested element of the
 * given name within the given parent namespace.
 *
 * @param parentUri the uri of the parent
 * @param elementName the name of the nested element being checked
 *
 * @return true if the given nested element is supported
 */
public boolean supportsNestedElement(String parentUri, String elementName) {
    // The core Ant uri counts as the default (empty) namespace.
    if (parentUri.equals(ProjectHelper.ANT_CORE_URI)) {
        parentUri = "";
    }
    String uri = ProjectHelper.extractUriFromComponentName(elementName);
    if (uri.equals(ProjectHelper.ANT_CORE_URI)) {
        uri = "";
    }
    String name = ProjectHelper.extractNameFromComponentName(elementName);
    boolean creatorMatches =
        nestedCreators.containsKey(name.toLowerCase(Locale.US))
        && (uri.equals(parentUri) || "".equals(uri));
    return creatorMatches || isDynamic() || addTypeMethods.size() != 0;
}
/**
* Stores a named nested element using a storage method determined
* by the initial introspection. If no appropriate storage method
* is available, this method returns immediately.
*
* @param project Ignored in this implementation.
* May be <code>null</code>.
*
* @param parent Parent instance to store the child in.
* Must not be <code>null</code>.
*
* @param child Child instance to store in the parent.
* Should not be <code>null</code>.
*
* @param elementName Name of the child element to store.
* May be <code>null</code>, in which case
* this method returns immediately.
*
* @exception BuildException if the storage method fails.
*/
public void storeElement(Project project, Object parent, Object child,
                         String elementName) throws BuildException {
    // Nothing to do without a name to look the creator up by.
    if (elementName == null) {
        return;
    }
    NestedCreator ns = (NestedCreator) nestedCreators.get(
        elementName.toLowerCase(Locale.US));
    // No appropriate storage method: return silently by design.
    if (ns == null) {
        return;
    }
    try {
        ns.store(parent, child);
    } catch (IllegalAccessException ie) {
        // impossible as getMethods should only return public methods
        throw new BuildException(ie);
    } catch (InstantiationException ine) {
        // impossible as getMethods should only return public methods
        throw new BuildException(ine);
    } catch (InvocationTargetException ite) {
        // Unwrap the target exception; re-throw BuildExceptions as-is.
        Throwable t = ite.getTargetException();
        if (t instanceof BuildException) {
            throw (BuildException) t;
        }
        throw new BuildException(t);
    }
}
/**
 * Looks up the type of a named nested element.
 *
 * @param elementName The name of the element to find the type of.
 *                    Must not be <code>null</code>.
 *
 * @return the type of the nested element with the specified name.
 *         This will never be <code>null</code>.
 *
 * @exception BuildException if the introspected class does not
 *                           support the named nested element.
 */
public Class getElementType(String elementName)
    throws BuildException {
    Class type = (Class) nestedTypes.get(elementName);
    if (type != null) {
        return type;
    }
    throw new UnsupportedElementException("Class "
        + bean.getName() + " doesn't support the nested \""
        + elementName + "\" element.", elementName);
}
/**
 * Looks up the type of a named attribute.
 *
 * @param attributeName The name of the attribute to find the type of.
 *                      Must not be <code>null</code>.
 *
 * @return the type of the attribute with the specified name.
 *         This will never be <code>null</code>.
 *
 * @exception BuildException if the introspected class does not
 *                           support the named attribute.
 */
public Class getAttributeType(String attributeName)
    throws BuildException {
    Class type = (Class) attributeTypes.get(attributeName);
    if (type != null) {
        return type;
    }
    throw new UnsupportedAttributeException("Class "
        + bean.getName() + " doesn't support the \""
        + attributeName + "\" attribute.", attributeName);
}
/**
 * Returns the addText method when the introspected
 * class supports nested text.
 *
 * @return the method on this introspected class that adds nested text.
 *         Cannot be <code>null</code>.
 * @throws BuildException if the introspected class does not
 *                        support the nested text.
 * @since Ant 1.6.3
 */
public Method getAddTextMethod()
    throws BuildException {
    if (supportsCharacters()) {
        return addText;
    }
    throw new BuildException("Class " + bean.getName()
        + " doesn't support nested text data.");
}
/**
 * Returns the adder or creator method of a named nested element.
 *
 * @param elementName The name of the attribute to find the setter
 *                    method of. Must not be <code>null</code>.
 * @return the method on this introspected class that adds or creates this
 *         nested element. Can be <code>null</code> when the introspected
 *         class is a dynamic configurator!
 * @throws BuildException if the introspected class does not
 *                        support the named nested element.
 * @since Ant 1.6.3
 */
public Method getElementMethod(String elementName)
    throws BuildException {
    NestedCreator creator = (NestedCreator) nestedCreators.get(elementName);
    if (creator == null) {
        throw new UnsupportedElementException("Class "
            + bean.getName() + " doesn't support the nested \""
            + elementName + "\" element.", elementName);
    }
    return creator.method;
}
/**
 * Returns the setter method of a named attribute.
 *
 * @param attributeName The name of the attribute to find the setter
 *                      method of. Must not be <code>null</code>.
 * @return the method on this introspected class that sets this attribute.
 *         This will never be <code>null</code>.
 * @throws BuildException if the introspected class does not
 *                        support the named attribute.
 * @since Ant 1.6.3
 */
public Method getAttributeMethod(String attributeName)
    throws BuildException {
    AttributeSetter setter = (AttributeSetter) attributeSetters.get(attributeName);
    if (setter == null) {
        throw new UnsupportedAttributeException("Class "
            + bean.getName() + " doesn't support the \""
            + attributeName + "\" attribute.", attributeName);
    }
    return setter.method;
}
/**
 * Returns whether or not the introspected class supports PCDATA,
 * i.e. whether an addText(String) method was found.
 *
 * @return whether or not the introspected class supports PCDATA.
 */
public boolean supportsCharacters() {
    return !(addText == null);
}
/**
 * Returns an enumeration of the names of the attributes supported
 * by the introspected class (the keys of the setter table).
 *
 * @return an enumeration of the names of the attributes supported
 *         by the introspected class.
 * @see #getAttributeMap
 */
public Enumeration getAttributes() {
    Hashtable setters = attributeSetters;
    return setters.keys();
}
/**
 * Returns a read-only map of attributes supported
 * by the introspected class.
 *
 * @return an attribute name to attribute <code>Class</code>
 *         unmodifiable map. Can be empty, but never <code>null</code>.
 * @since Ant 1.6.3
 */
public Map getAttributeMap() {
    if (attributeTypes.size() < 1) {
        return EMPTY_MAP;
    }
    return Collections.unmodifiableMap(attributeTypes);
}
/**
 * Returns an enumeration of the names of the nested elements supported
 * by the introspected class (the keys of the nested-type table).
 *
 * @return an enumeration of the names of the nested elements supported
 *         by the introspected class.
 * @see #getNestedElementMap
 */
public Enumeration getNestedElements() {
    Hashtable types = nestedTypes;
    return types.keys();
}
/**
 * Returns a read-only map of nested elements supported
 * by the introspected class.
 *
 * @return a nested-element name to nested-element <code>Class</code>
 *         unmodifiable map. Can be empty, but never <code>null</code>.
 * @since Ant 1.6.3
 */
public Map getNestedElementMap() {
    if (nestedTypes.size() < 1) {
        return EMPTY_MAP;
    }
    return Collections.unmodifiableMap(nestedTypes);
}
/**
 * Returns a read-only list of extension points supported
 * by the introspected class: every <em>void add[Configured](Type)</em>
 * method found during introspection, sorted so that a method whose
 * argument type derives from another method's argument type appears
 * before it.
 *
 * @return a list of void, single argument add() or addConfigured()
 *         <code>Method</code>s of all supported extension points.
 *         Can be empty, but never <code>null</code>.
 * @since Ant 1.6.3
 */
public List getExtensionPoints() {
    if (addTypeMethods.size() < 1) {
        return Collections.EMPTY_LIST;
    }
    return Collections.unmodifiableList(addTypeMethods);
}
    /**
     * Creates an implementation of AttributeSetter for the given
     * attribute type. Conversions (where necessary) are automatically
     * made for the following types:
     * <ul>
     * <li>String (left as it is)
     * <li>Character/char (first character is used)
     * <li>Boolean/boolean
     * ({@link Project#toBoolean(String) Project.toBoolean(String)} is used)
     * <li>Class (Class.forName is used)
     * <li>File (resolved relative to the appropriate project)
     * <li>Path (resolved relative to the appropriate project)
     * <li>EnumeratedAttribute (uses its own
     * {@link EnumeratedAttribute#setValue(String) setValue} method)
     * <li>Other primitive types (wrapper classes are used with constructors
     * taking String)
     * </ul>
     *
     * If none of the above covers the given parameters, a constructor for the
     * appropriate class taking a String parameter is used if it is available.
     *
     * @param m The method to invoke on the bean when the setter is invoked.
     *          Must not be <code>null</code>.
     * @param arg The type of the single argument of the bean's method.
     *            Must not be <code>null</code>.
     * @param attrName the name of the attribute for which the setter is being
     *                 created.
     *
     * @return an appropriate AttributeSetter instance, or <code>null</code>
     *         if no appropriate conversion is available.
     */
    private AttributeSetter createAttributeSetter(final Method m,
                                                  Class arg,
                                                  final String attrName) {
        // use wrappers for primitive classes, e.g. int and
        // Integer are treated identically
        final Class reflectedArg = PRIMITIVE_TYPE_MAP.containsKey(arg)
            ? (Class) PRIMITIVE_TYPE_MAP.get(arg) : arg;
        // simplest case - setAttribute expects String
        if (java.lang.String.class.equals(reflectedArg)) {
            return new AttributeSetter(m) {
                public void set(Project p, Object parent, String value)
                    throws InvocationTargetException, IllegalAccessException {
                    m.invoke(parent, (Object[]) (new String[] {value}));
                }
            };
        // char and Character get special treatment - take the first character
        } else if (java.lang.Character.class.equals(reflectedArg)) {
            return new AttributeSetter(m) {
                public void set(Project p, Object parent, String value)
                    throws InvocationTargetException, IllegalAccessException {
                    // An empty string has no first character to take.
                    if (value.length() == 0) {
                        throw new BuildException("The value \"\" is not a "
                            + "legal value for attribute \"" + attrName + "\"");
                    }
                    m.invoke(parent, (Object[])
                        (new Character[] {new Character(value.charAt(0))}));
                }
            };
        // boolean and Boolean get special treatment because we
        // have a nice method in Project
        } else if (java.lang.Boolean.class.equals(reflectedArg)) {
            return new AttributeSetter(m) {
                public void set(Project p, Object parent, String value)
                    throws InvocationTargetException, IllegalAccessException {
                    m.invoke(parent, (Object[]) (
                        new Boolean[] {Project.toBoolean(value)
                            ? Boolean.TRUE : Boolean.FALSE}));
                }
            };
        // Class doesn't have a String constructor but a decent factory method
        } else if (java.lang.Class.class.equals(reflectedArg)) {
            return new AttributeSetter(m) {
                public void set(Project p, Object parent, String value)
                    throws InvocationTargetException, IllegalAccessException, BuildException {
                    try {
                        m.invoke(parent, new Object[] {Class.forName(value)});
                    } catch (ClassNotFoundException ce) {
                        // Report the unloadable class as a build problem.
                        throw new BuildException(ce);
                    }
                }
            };
        // resolve relative paths through Project
        } else if (java.io.File.class.equals(reflectedArg)) {
            return new AttributeSetter(m) {
                public void set(Project p, Object parent, String value)
                    throws InvocationTargetException, IllegalAccessException {
                    m.invoke(parent, new Object[] {p.resolveFile(value)});
                }
            };
        // EnumeratedAttributes have their own helper class
        } else if (EnumeratedAttribute.class.isAssignableFrom(reflectedArg)) {
            return new AttributeSetter(m) {
                public void set(Project p, Object parent, String value)
                    throws InvocationTargetException, IllegalAccessException, BuildException {
                    try {
                        // A fresh instance per set() call; setValue validates
                        // the string against the attribute's allowed values.
                        EnumeratedAttribute ea =
                            (EnumeratedAttribute) reflectedArg.newInstance();
                        ea.setValue(value);
                        m.invoke(parent, new Object[] {ea});
                    } catch (InstantiationException ie) {
                        throw new BuildException(ie);
                    }
                }
            };
        // worst case. look for a public String constructor and use it
        // also supports new Whatever(Project, String) as for Path or Reference
        // This is used (deliberately) for all primitives/wrappers other than
        // char and boolean
        } else {
            boolean includeProject;
            Constructor c;
            try {
                // First try with Project.
                c = reflectedArg.getConstructor(new Class[] {Project.class, String.class});
                includeProject = true;
            } catch (NoSuchMethodException nme) {
                // OK, try without.
                try {
                    c = reflectedArg.getConstructor(new Class[] {String.class});
                    includeProject = false;
                } catch (NoSuchMethodException nme2) {
                    // Well, no matching constructor.
                    return null;
                }
            }
            // Copies must be final so the anonymous class below can capture them.
            final boolean finalIncludeProject = includeProject;
            final Constructor finalConstructor = c;
            return new AttributeSetter(m) {
                public void set(Project p, Object parent, String value)
                    throws InvocationTargetException, IllegalAccessException, BuildException {
                    try {
                        Object[] args = (finalIncludeProject)
                            ? new Object[] {p, value} : new Object[] {value};
                        Object attribute = finalConstructor.newInstance(args);
                        if (p != null) {
                            p.setProjectReference(attribute);
                        }
                        m.invoke(parent, new Object[] {attribute});
                    } catch (InstantiationException ie) {
                        throw new BuildException(ie);
                    }
                }
            };
        }
    }
    /**
     * Returns a description of the type of the given element in
     * relation to a given project. This is used for logging purposes
     * when the element is asked to cope with some data it has no
     * way of handling. Simply delegates to
     * {@link Project#getElementName(Object)}.
     *
     * @param project The project the element is defined in.
     *                Must not be <code>null</code>.
     *
     * @param element The element to describe.
     *                Must not be <code>null</code>.
     *
     * @return a description of the element type
     */
    protected String getElementName(Project project, Object element) {
        return project.getElementName(element);
    }
/**
* Extracts the name of a property from a method name by subtracting
* a given prefix and converting into lower case. It is up to calling
* code to make sure the method name does actually begin with the
* specified prefix - no checking is done in this method.
*
* @param methodName The name of the method in question.
* Must not be <code>null</code>.
* @param prefix The prefix to remove.
* Must not be <code>null</code>.
*
* @return the lower-cased method name with the prefix removed.
*/
private String getPropertyName(String methodName, String prefix) {
return methodName.substring(prefix.length()).toLowerCase(Locale.US);
}
    /**
     * creator - allows use of create/store external
     * to IntrospectionHelper.
     * The class is final as it has a private constructor.
     */
    public static final class Creator {
        // Strategy that knows how to instantiate and store the element.
        private NestedCreator nestedCreator;
        // The object the nested element is being created in.
        private Object parent;
        // The project the element is created for; may be null.
        private Project project;
        // The created (possibly polymorphically overridden) element.
        private Object nestedObject;
        // Optional Ant component type name overriding the default type.
        private String polyType;
        /**
         * Creates a new Creator instance.
         * This object is given to the UnknownElement to create
         * objects for sub-elements. UnknownElement calls
         * create to create an object, the object then gets
         * configured and then UnknownElement calls store.
         * SetPolyType may be used to override the type used
         * to create the object with. SetPolyType gets called
         * before create.
         *
         * @param project the current project
         * @param parent the parent object to create the object in
         * @param nestedCreator the nested creator object to use
         */
        private Creator(
            Project project, Object parent, NestedCreator nestedCreator) {
            this.project = project;
            this.parent = parent;
            this.nestedCreator = nestedCreator;
        }
        /**
         * Used to override the class used to create the object.
         *
         * @param polyType an Ant component type name
         */
        public void setPolyType(String polyType) {
            this.polyType = polyType;
        }
        /**
         * Create an object using this creator, which is determined
         * by introspection.
         *
         * @return the created object
         */
        public Object create() {
            if (polyType != null) {
                // A polymorphic override is only legal for add/addConfigured
                // style extension points, not for createXXX methods.
                if (!nestedCreator.isPolyMorphic()) {
                    throw new BuildException(
                        "Not allowed to use the polymorphic form"
                        + " for this element");
                }
                ComponentHelper helper =
                    ComponentHelper.getComponentHelper(project);
                nestedObject = helper.createComponent(polyType);
                if (nestedObject == null) {
                    throw new BuildException(
                        "Unable to create object of type " + polyType);
                }
            }
            try {
                nestedObject = nestedCreator.create(
                    project, parent, nestedObject);
                if (project != null) {
                    project.setProjectReference(nestedObject);
                }
                return nestedObject;
            } catch (IllegalAccessException ex) {
                throw new BuildException(ex);
            } catch (InstantiationException ex) {
                throw new BuildException(ex);
            } catch (IllegalArgumentException ex) {
                // With a polymorphic override, an argument mismatch most
                // likely means the overridden type was incompatible.
                if (polyType != null) {
                    throw new BuildException(
                        "Invalid type used " + polyType);
                }
                throw ex;
            } catch (InvocationTargetException ex) {
                // Unwrap the reflective wrapper; rethrow BuildExceptions as-is.
                Throwable t = ex.getTargetException();
                if (t instanceof BuildException) {
                    throw (BuildException) t;
                }
                throw new BuildException(t);
            }
        }
        /**
         * @return the real object (used currently only
         * for preset def).
         */
        public Object getRealObject() {
            return nestedCreator.getRealObject();
        }
        /**
         * Stores the nested element object using a storage method
         * determined by introspection.
         *
         */
        public void store() {
            try {
                nestedCreator.store(parent, nestedObject);
            } catch (IllegalAccessException ex) {
                throw new BuildException(ex);
            } catch (InstantiationException ex) {
                throw new BuildException(ex);
            } catch (IllegalArgumentException ex) {
                // Mirrors the handling in create() above.
                if (polyType != null) {
                    throw new BuildException(
                        "Invalid type used " + polyType);
                }
                throw ex;
            } catch (InvocationTargetException ex) {
                Throwable t = ex.getTargetException();
                if (t instanceof BuildException) {
                    throw (BuildException) t;
                }
                throw new BuildException(t);
            }
        }
    }
    /**
     * Internal interface used to create nested elements. Not documented
     * in detail for reasons of source code readability.
     */
    private abstract static class NestedCreator {
        Method method; // the method called to add/create the nested element
        NestedCreator(Method m) {
            this.method = m;
        }
        // Whether the element type may be overridden via setPolyType();
        // subclasses backed by add/addConfigured return true.
        boolean isPolyMorphic() {
            return false;
        }
        // The "real" object behind the created one (only overridden where
        // preset definitions wrap the actual instance).
        Object getRealObject() {
            return null;
        }
        abstract Object create(Project project, Object parent, Object child)
            throws InvocationTargetException,
                   IllegalAccessException,
                   InstantiationException;
        // Called after configuration; the default is a no-op since most
        // creators attach the child to the parent during create().
        void store(Object parent, Object child)
            throws InvocationTargetException,
                   IllegalAccessException,
                   InstantiationException {
            // DO NOTHING
        }
    }
    /**
     * Version to use for createXXX methods: the parent builds the child
     * itself, so the third argument to create is ignored and polymorphic
     * overrides are not supported (isPolyMorphic stays false).
     */
    private class CreateNestedCreator extends NestedCreator {
        CreateNestedCreator(Method m) {
            super(m);
        }
        Object create(Project project, Object parent, Object ignore)
            throws InvocationTargetException, IllegalAccessException {
            return method.invoke(parent, new Object[] {});
        }
    }
    /** Version to use for addXXX and addConfiguredXXX */
    private class AddNestedCreator extends NestedCreator {
        // Behavior: pass the child to the method before configuration...
        static final int ADD = 1;
        // ...or after configuration (addConfiguredXXX semantics).
        static final int ADD_CONFIGURED = 2;
        // Constructor used to instantiate the child when none is supplied.
        protected Constructor constructor;
        protected int behavior;
        AddNestedCreator(Method m, Constructor c, int behavior) {
            super(m);
            this.constructor = c;
            this.behavior = behavior;
        }
        boolean isPolyMorphic() {
            return true;
        }
        Object create(Project project, Object parent, Object child)
                throws InvocationTargetException,
                IllegalAccessException, InstantiationException {
            if (child == null) {
                // Prefer a (Project) constructor when one exists, else no-arg.
                child = constructor.newInstance(
                    (constructor.getParameterTypes().length == 0)
                    ? new Object[] {} : new Object[] {project});
            }
            if (child instanceof PreSetDef.PreSetDefinition) {
                child = ((PreSetDef.PreSetDefinition) child)
                    .createObject(project);
            }
            // addXXX methods receive the child before it is configured.
            if (behavior == ADD) {
                istore(parent, child);
            }
            return child;
        }
        void store(Object parent, Object child)
                throws InvocationTargetException,
                IllegalAccessException, InstantiationException {
            // addConfiguredXXX methods receive the child after configuration.
            if (behavior == ADD_CONFIGURED) {
                istore(parent, child);
            }
        }
        // Shared reflective invocation used by both behaviors.
        private void istore(Object parent, Object child)
                throws InvocationTargetException,
                IllegalAccessException, InstantiationException {
            method.invoke(parent, new Object[] {child});
        }
    }
    /**
     * Internal interface used to set element attributes. Not documented
     * in detail for reasons of source code readability.
     */
    private abstract static class AttributeSetter {
        Method method; // the method called to set the attribute
        AttributeSetter(Method m) {
            this.method = m;
        }
        // Converts the string value as appropriate for the attribute type
        // and invokes the setter method on parent.
        abstract void set(Project p, Object parent, String value)
            throws InvocationTargetException,
                   IllegalAccessException,
                   BuildException;
    }
    /**
     * Clears the static cache of helpers; invoked when a build has
     * finished so that introspection data does not leak between builds.
     */
    public static void clearCache() {
        HELPERS.clear();
    }
    /**
     * Creates a NestedCreator for the named element using the registered
     * add/addConfigured extension-point methods (addTypeMethods). Returns
     * <code>null</code> when the element cannot be handled this way: no
     * add methods are registered, the component class is unknown, no
     * method accepts it, or the component could not be instantiated.
     * Note that the add method is invoked during create() for plain add
     * methods, but deferred to store() for addConfigured methods.
     *
     * @param project the current project.
     * @param parent the object the add method will be invoked on.
     * @param elementName the name of the nested element.
     * @return a NestedCreator, or <code>null</code> if not applicable.
     */
    private NestedCreator createAddTypeCreator(
        Project project, Object parent, String elementName)
        throws BuildException {
        if (addTypeMethods.size() == 0) {
            return null;
        }
        ComponentHelper helper = ComponentHelper.getComponentHelper(project);
        Object addedObject = null;
        Method addMethod = null;
        Class clazz = helper.getComponentClass(elementName);
        if (clazz == null) {
            return null;
        }
        addMethod = findMatchingMethod(clazz, addTypeMethods);
        if (addMethod == null) {
            return null;
        }
        addedObject = helper.createComponent(elementName);
        if (addedObject == null) {
            return null;
        }
        // Preset definitions wrap the real instance; unwrap it for invocation.
        Object rObject = addedObject;
        if (addedObject instanceof PreSetDef.PreSetDefinition) {
            rObject = ((PreSetDef.PreSetDefinition) addedObject).createObject(
                project);
        }
        final Object nestedObject = addedObject;
        final Object realObject = rObject;
        return new NestedCreator(addMethod) {
            Object create(Project project, Object parent, Object ignore)
                throws InvocationTargetException, IllegalAccessException {
                // Plain add methods get the object before configuration.
                if (!method.getName().endsWith("Configured")) {
                    method.invoke(parent, new Object[] {realObject});
                }
                return nestedObject;
            }
            Object getRealObject() {
                return realObject;
            }
            void store(Object parent, Object child)
                throws InvocationTargetException, IllegalAccessException,
                InstantiationException {
                // addConfigured methods get the object after configuration.
                if (method.getName().endsWith("Configured")) {
                    method.invoke(parent, new Object[] {realObject});
                }
            }
        };
    }
/**
* Inserts an add or addConfigured method into
* the addTypeMethods array. The array is
* ordered so that the more derived classes
* are first.
* If both add and addConfigured are present, the addConfigured
* will take priority.
* @param method the <code>Method</code> to insert.
*/
private void insertAddTypeMethod(Method method) {
Class argClass = method.getParameterTypes()[0];
for (int c = 0; c < addTypeMethods.size(); ++c) {
Method current = (Method) addTypeMethods.get(c);
if (current.getParameterTypes()[0].equals(argClass)) {
if (method.getName().equals("addConfigured")) {
// add configured replaces the add method
addTypeMethods.set(c, method);
}
return; // Already present
}
if (current.getParameterTypes()[0].isAssignableFrom(
argClass)) {
addTypeMethods.add(c, method);
return; // higher derived
}
}
addTypeMethods.add(method);
}
/**
* Search the list of methods to find the first method
* that has a parameter that accepts the nested element object.
* @param paramClass the <code>Class</code> type to search for.
* @param methods the <code>List</code> of methods to search.
* @return a matching <code>Method</code>; null if none found.
*/
private Method findMatchingMethod(Class paramClass, List methods) {
Class matchedClass = null;
Method matchedMethod = null;
for (int i = 0; i < methods.size(); ++i) {
Method method = (Method) methods.get(i);
Class methodClass = method.getParameterTypes()[0];
if (methodClass.isAssignableFrom(paramClass)) {
if (matchedClass == null) {
matchedClass = methodClass;
matchedMethod = method;
} else {
if (!methodClass.isAssignableFrom(matchedClass)) {
throw new BuildException("ambiguous: types "
+ matchedClass.getName() + " and "
+ methodClass.getName() + " match "
+ paramClass.getName());
}
}
}
}
return matchedMethod;
}
private String condenseText(final String text) {
if (text.length() <= MAX_REPORT_NESTED_TEXT) {
return text;
}
int ends = (MAX_REPORT_NESTED_TEXT - ELLIPSIS.length()) / 2;
return new StringBuffer(text).replace(ends, text.length() - ends,
ELLIPSIS).toString();
}
}
| apache-2.0 |
prady00/Laravel-Swagger-REST | vendor/jlapp/swaggervel/src/Jlapp/Swaggervel/routes.php | 3587 | <?php
use Swagger\Swagger;
Route::any(Config::get('swaggervel::app.doc-route').'/{page?}', function($page='api-docs.json') {
    // Serves a generated swagger resource file. The optional {page}
    // parameter names the JSON document to return. Laravel route
    // parameters cannot contain "/", which limits traversal here, but
    // the value is still user-controlled -- NOTE(review): consider
    // validating $page against the set of generated files.
    $filePath = Config::get('swaggervel::app.doc-dir') . "/{$page}";
    // Default to a .json extension when none was supplied.
    if (File::extension($filePath) === "") {
        $filePath .= ".json";
    }
    // Fixed non-standard casing (was File::Exists, which only worked
    // because PHP method names are case-insensitive); now consistent
    // with File::extension()/File::get() above and below.
    if (!File::exists($filePath)) {
        App::abort(404, "Cannot find {$filePath}");
    }
    $content = File::get($filePath);
    return Response::make($content, 200, array(
        'Content-Type' => 'application/json'
    ));
});
// Renders the swagger-ui page and (optionally) regenerates the swagger
// JSON documents from annotated source on every request.
Route::get('api-docs', function() {
    if (Config::get('swaggervel::app.generateAlways')) {
        $appDir = base_path()."/".Config::get('swaggervel::app.app-dir');
        $docDir = Config::get('swaggervel::app.doc-dir');
        // NOTE(review): this condition also proceeds when $docDir does not
        // exist yet -- confirm is_writable() on the parent was intended.
        if (!File::exists($docDir) || is_writable($docDir)) {
            // delete all existing documentation
            if (File::exists($docDir)) {
                File::deleteDirectory($docDir);
            }
            File::makeDirectory($docDir);
            $defaultBasePath = Config::get('swaggervel::app.default-base-path');
            $defaultApiVersion = Config::get('swaggervel::app.default-api-version');
            $defaultSwaggerVersion = Config::get('swaggervel::app.default-swagger-version');
            $excludeDirs = Config::get('swaggervel::app.excludes');
            // Scan the application directory for swagger annotations.
            $swagger = new Swagger($appDir, $excludeDirs);
            $resourceList = $swagger->getResourceList(array(
                'output' => 'array',
                'apiVersion' => $defaultApiVersion,
                'swaggerVersion' => $defaultSwaggerVersion,
            ));
            $resourceOptions = array(
                'output' => 'json',
                'defaultSwaggerVersion' => $resourceList['swaggerVersion'],
                'defaultBasePath' => $defaultBasePath
            );
            $output = array();
            // Render each discovered resource to JSON, flattening any
            // directory separators in resource names into dashes.
            foreach ($swagger->getResourceNames() as $resourceName) {
                $json = $swagger->getResource($resourceName, $resourceOptions);
                $resourceName = str_replace(DIRECTORY_SEPARATOR, '-', ltrim($resourceName, DIRECTORY_SEPARATOR));
                $output[$resourceName] = $json;
            }
            // Write the index document, then one file per resource.
            $filename = $docDir . '/api-docs.json';
            file_put_contents($filename, Swagger::jsonEncode($resourceList, true));
            foreach ($output as $name => $json) {
                $name = str_replace(DIRECTORY_SEPARATOR, '-', ltrim($name, DIRECTORY_SEPARATOR));
                $filename = $docDir . '/'.$name . '.json';
                file_put_contents($filename, $json);
            }
        }
    }
    // Trust the immediate upstream proxy so Request::secure() and URLs
    // reflect the original client scheme.
    if (Config::get('swaggervel::app.behind-reverse-proxy')) {
        $proxy = Request::server('REMOTE_ADDR');
        Request::setTrustedProxies(array($proxy));
    }
    // Restore default Blade tags in case the host app customized them.
    Blade::setEscapedContentTags('{{{', '}}}');
    Blade::setContentTags('{{', '}}');
    //need the / at the end to avoid CORS errors on Homestead systems.
    $response = Response::make(
        View::make('swaggervel::index', array(
            'secure' => Request::secure(),
            'urlToDocs' => url(Config::get('swaggervel::app.doc-route')),
            'requestHeaders' => Config::get('swaggervel::app.requestHeaders') )
        ),
        200
    );
    // Attach any extra response headers configured by the host app.
    if (Config::has('swaggervel::app.viewHeaders')) {
        foreach (Config::get('swaggervel::app.viewHeaders') as $key => $value) {
            $response->header($key, $value);
        }
    }
    return $response;
});
| apache-2.0 |
JNOSQL/artemis | artemis-document/src/main/java/org/jnosql/artemis/document/DocumentRepositoryAsyncSupplier.java | 980 | /*
* Copyright (c) 2019 Otávio Santana and others
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
* The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html
* and the Apache License v2.0 is available at http://www.opensource.org/licenses/apache2.0.php.
*
* You may elect to redistribute this code under either of these licenses.
*
* Contributors:
*
* Otavio Santana
*/
package org.jnosql.artemis.document;
import org.jnosql.artemis.RepositoryAsync;
import java.util.function.Supplier;
/**
 * A {@link Supplier} of {@link RepositoryAsync} implementations for the
 * document NoSQL type, injected via {@link org.jnosql.artemis.ConfigurationUnit}.
 *
 * @param <R> a repository type
 */
public interface DocumentRepositoryAsyncSupplier<R extends RepositoryAsync<?, ?>> extends Supplier<R> {
}
| apache-2.0 |
phosphene/rails4.x-test-demo | spec/support/devise.rb | 135 | RSpec.configure do |config|
config.include Devise::TestHelpers, :type => :controller
config.use_transactional_fixtures = false
end
| apache-2.0 |
axbaretto/beam | bistro/remote/RemoteWorkers.cpp | 31148 | /*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
#include "bistro/bistro/remote/RemoteWorkers.h"
#include <thrift/lib/cpp2/protocol/DebugProtocol.h>
#include "bistro/bistro/if/gen-cpp2/common_types_custom_protocol.h"
#include "bistro/bistro/remote/RemoteWorker.h"
#include "bistro/bistro/remote/RemoteWorkerUpdate.h"
#include "bistro/bistro/remote/WorkerSetID.h"
#include "bistro/bistro/utils/Exception.h"
// Startup wait before exiting "initial wait"; -1 derives a safe value
// from the healthcheck/loss timeouts (see the help string for caveats).
DEFINE_int32(
  CAUTION_startup_wait_for_workers, -1,
  "At startup, the scheduler has to wait for workers to connect, so that "
  "we do not accidentally re-start tasks that are already running elsewhere. "
  "The default of -1 computes a 'minimum safe wait' from your healthcheck "
  "and worker loss timeouts. DANGER 1: If you reduce these timeouts from one "
  "scheduler run to the next, the new default ('safe') wait may not be long "
  "enough, so either increase this when reducing timeouts, or take down "
  "all your workers. DANGER 2: Do not use an 'unsafe' value. Imagine a "
  "network partition (e.g. your discovery service is down), which makes some "
  "workers unable to connect longer than your unsafe initial wait. Then, "
  "it's a virual certainty that the scheduler will at some point be (a) out "
  "of initial wait, (b) will not have all of the workers that have running "
  "tasks -- and therefore it will double-start some already-running tasks. "
  "NOTE 1: As a precaution, the scheduler will not exit initial wait while "
  "it has 'new' workers that have not yet replied with their running tasks. "
  "NOTE 2: Once the initial wait time passes, the scheduler becomes "
  "eligible to exit initial wait, even if the healthy workers's worker set "
  "consensus does not perfectly match our non-MUST_DIE worker set -- at the "
  "time of writing, I see no benefit to waiting longer. For more detail, "
  "see README.worker_set_consensus."
);
// Extra slack added to wait/backoff computations to cover the worker's
// TERM-wait-KILL task shutdown sequence not being instantaneous.
DEFINE_int32(
  CAUTION_worker_suicide_backoff_safety_margin_sec, 60,
  "When a worker commits suicide, it will TERM-wait-KILLs its tasks. Since "
  "task termination is not instant, the scheduler needs a safety margin to "
  "add into its 'safe initial wait' calculation, and also into the "
  "'minimum backoff for tasks of lost workers'. This margin must also "
  "indirectly account for the fact that Bistro's workers poll running tasks "
  "tasks only once every 'poll_ms' -- so keep this margin large."
);
// Scheduler-side TERM-to-KILL delay stamped onto every started task;
// the worker itself ignores this flag.
DEFINE_int32(
  CAUTION_worker_suicide_task_kill_wait_ms, 5000,
  "The scheduler (NOT the worker -- the worker ignores this flag) includes "
  "this value with every task it starts. When, in the future, the worker "
  "commits suicide (i.e. due to a too-long network partition, or a scheduler "
  "request), it will SIGTERM the task, wait this long, and then SIGKILL. "
  "The scheduler needs this value to add into its 'safe initial wait' "
  "calculation, and into the 'minimum backoff for tasks of lost workers'. "
  "CAUTION: Therefore, if you lower this delay on a scheduler with running "
  "tasks, you will significantly increase the risk that the scheduler "
  "will start a second copy of a task before the previous one has exited."
);
namespace facebook { namespace bistro {
using namespace std;
using apache::thrift::debugString;
namespace {
// Helper for consensusPermitsBecomingHealthy(): logs the reason a worker
// cannot yet reach consensus (only until it has been healthy once, to
// limit logspam) and always returns false so callers can write
// `return consensusFail(w, ...)`.
template <typename... Args>
bool consensusFail(const RemoteWorker& w, Args&&... args) {
  if (!w.hasBeenHealthy()) {
    LOG(WARNING) << "Worker " << w.getBistroWorker().shard
      << " is not yet able to achieve consensus: "
      << folly::to<std::string>(std::forward<Args>(args)...);
  }
  return false;
}
}  // anonymous namespace
// A section of remote/README.worker_set_consensus documents this call.
//
// Decides whether WorkerSetID consensus allows worker `w` to become
// healthy: `w` must echo the scheduler's current non-MUST_DIE
// WorkerSetID, every non-MUST_DIE worker must have an indirect version,
// and all of those versions must indirectly require `w`. Returns false
// (via consensusFail, which logs the reason) otherwise.
bool RemoteWorkers::consensusPermitsBecomingHealthy(const RemoteWorker& w)
    const {
  // Sanity: one initial WorkerSetID per non-MUST_DIE worker.
  CHECK_EQ(
    nonMustDieWorkerSetID_.hash.numWorkers, initialWorkerSetIDs_.size()
  );
  CHECK_GE(
    initialWorkerSetIDs_.size(), indirectVersionsOfNonMustDieWorkers_.size()
  );
  // Is this worker aware of every non-MUST_DIE worker?
  if (!w.workerSetID().has_value()) {
    return consensusFail(w, "It has no WorkerSetID");
  }
  // Future: think if this can be relaxed to test indirectWorkerSetID().
  if (*w.workerSetID() != nonMustDieWorkerSetID_) {
    return consensusFail(
      w, "It has WorkerSetID ", debugString(*w.workerSetID()), " while "
      "the scheduler has ", debugString(nonMustDieWorkerSetID_)
    );
  }
  // Does every non-MUST_DIE worker have an indirect version? -- we must
  // test this, since the version is set only after a worker first echoes
  // this scheduler's WorkerSetID. If any worker lacks it, we don't have
  // a safe consensus -- that worker might declare consensus with itself.
  if (indirectVersionsOfNonMustDieWorkers_.size()
      != nonMustDieWorkerSetID_.hash.numWorkers) {
    return consensusFail(
      w, "Scheduler has ", indirectVersionsOfNonMustDieWorkers_.size(),
      " non-MUST_DIE workers with indirect versions versus ",
      nonMustDieWorkerSetID_.hash.numWorkers, " non-MUST_DIE workers"
    );
  }
  // Does each non-MUST_DIE worker's WorkerSetID indirectly require this
  // worker? "first associated" == "first WorkerSetID containing this
  // worker", since RemoteWorkers::processHeartbeat guarantees it.
  if (!w.firstAssociatedWorkerSetID().has_value()) {
    return consensusFail(
      w, "It has not yet echoed any WorkerSetID from the scheduler"
    );
  }
  // The set is version-ordered, so begin() holds the smallest indirect
  // version among all non-MUST_DIE workers.
  if (WorkerSetIDEarlierThan()(
    indirectVersionsOfNonMustDieWorkers_.begin()->first,
    w.firstAssociatedWorkerSetID()->version
  )) {
    return consensusFail(
      w, "It first appeared in the following WorkerSetID: ",
      w.firstAssociatedWorkerSetID()->version, " but an earlier one "
      "is indirectly required by all non-MUST_DIE workers: ",
      indirectVersionsOfNonMustDieWorkers_.begin()->first
    );
  }
  if (!w.hasBeenHealthy()) {
    LOG(INFO) << "Worker " << w.getBistroWorker().shard << " has not been "
      << "healthy, but WorkerSetID consensus allows it.";
  }
  return true;
}
namespace {
// Inserts a (version, shard) entry; CHECK-fails on duplicates so that
// the denormalized index can never silently drift from the worker state.
void addToVersionShardSet(
    RemoteWorkers::VersionShardSet* vss,
    const cpp2::WorkerSetID& id,
    const std::string& shard) {
  CHECK(vss->emplace(id.version, shard).second)
    << "Duplicate: v" << id.version << " in shard " << shard;
}
// Removes a (version, shard) entry; CHECK-fails if it was absent.
void removeFromVersionShardSet(
    RemoteWorkers::VersionShardSet* vss,
    const cpp2::WorkerSetID& id,
    const std::string& shard) {
  CHECK_NE(0, vss->erase({id.version, shard}))
    << "Not found: v" << id.version << " in shard " << shard;
}
}  // anonymous namespace
// Advances worker `w`'s indirect WorkerSetID to `new_id` if it is newer
// (per WorkerSetIDEarlierThan), keeping the denormalized
// indirectVersionsOfNonMustDieWorkers_ index in sync. No-op when the
// stored version is already at least as new.
void RemoteWorkers::updateIndirectWorkerSetVersion(
    RemoteWorker* w,
    const cpp2::WorkerSetID& new_id) {
  auto& maybe_id = w->indirectWorkerSetID();
  // Indirect IDs must always come from this scheduler instance.
  CHECK(!maybe_id.has_value() || maybe_id->schedulerID == schedulerID_);
  if (maybe_id.has_value() &&
      !WorkerSetIDEarlierThan()(maybe_id->version, new_id.version)) {
    return;  // Nothing to update
  }
  const auto& bw = w->getBistroWorker();
  if (maybe_id.has_value()) {
    // Remove the current denormalized entry, since the version has changed.
    removeFromVersionShardSet(
      &indirectVersionsOfNonMustDieWorkers_, *maybe_id, bw.shard
    );
  }
  // Denormalize w's indirect version for quickly finding the smallest one.
  addToVersionShardSet(
    &indirectVersionsOfNonMustDieWorkers_, new_id, bw.shard
  );
  // Update the RemoteWorker.
  maybe_id = new_id;
}
// Ingests one worker heartbeat: rejects protocol mismatches, creates the
// RemoteWorker on first contact (wiring up the WorkerSetID bookkeeping
// callbacks), keeps the per-hostname pools in sync, and -- if the
// heartbeat was accepted -- stamps the reply with the current
// non-MUST_DIE WorkerSetID for the worker to echo back.
folly::Optional<cpp2::SchedulerHeartbeatResponse>
RemoteWorkers::processHeartbeat(
    RemoteWorkerUpdate* update,
    const cpp2::BistroWorker& worker,
    const cpp2::WorkerSetID& worker_set_id) {
  // It's best not to add the bad worker to the pool, so check outside of
  // RemoteWorker. At present, we do not tell that worker to commit
  // suicide, so version mismatches will cause lots of logspam.
  enforceWorkerSchedulerProtocolVersion(
    worker.protocolVersion, cpp2::common_constants::kProtocolVersion()
  );
  const auto& shard = worker.shard;
  auto worker_it = workerPool_.find(shard);
  // First heartbeat from this shard: create and register a RemoteWorker.
  if (worker_it == workerPool_.end()) {
    // Even though we're adding another element, the "nextShard_"
    // round-robin iterators need not be updated, since new elements should
    // be distributed randomly throughout the hash tables.
    auto res = workerPool_.emplace(shard, std::make_shared<RemoteWorker>(
      update->curTime(),
      worker,
      worker_set_id,
      schedulerID_,
      // The following 3 callbacks maintain WorkerSetID-related state.
      //
      // New worker -- first heartbeat from this instance ID.
      [this](const RemoteWorker& w) {
        initialWorkerSetIDs_.emplace(w.initialWorkerSetID());
        // Cannot update indirectVersionsOfNonMustDieWorkers_ until
        // w.workerSetID() gets set -- wait for the worker to echo it.
        CHECK(!w.workerSetID().has_value());
        CHECK(!w.indirectWorkerSetID().has_value());
        CHECK(!w.firstAssociatedWorkerSetID().has_value());
        // Add the new worker to the non-MUST_DIE set. This would cause a
        // consensus to emerge while some workers are in the NEW state, but
        // that's not a problem because updateInitialWait() explicitly
        // prohibits leaving initial wait while any workers are NEW.
        const auto& bw = w.getBistroWorker();
        addWorkerIDToHash(&nonMustDieWorkerSetID_.hash, bw.id);
        ++nonMustDieWorkerSetID_.version;
        // Update history_ with this new set.
        HistoryStep hist_step;
        hist_step.added.emplace(bw.shard);
        CHECK(history_.emplace(
          nonMustDieWorkerSetID_.version, std::move(hist_step)
        ).second);
      },
      // Dead worker: either became MUST_DIE or got bumped by another worker.
      [this](const RemoteWorker& w) {
        auto ws_it = initialWorkerSetIDs_.find(w.initialWorkerSetID());
        CHECK(ws_it != initialWorkerSetIDs_.end());
        initialWorkerSetIDs_.erase(ws_it);
        const auto& bw = w.getBistroWorker();
        // If `w` had an indirect version, delete its denormalization.
        if (auto id_ptr = w.indirectWorkerSetID().get_pointer()) {
          CHECK(id_ptr->schedulerID == schedulerID_);
          removeFromVersionShardSet(
            &indirectVersionsOfNonMustDieWorkers_, *id_ptr, bw.shard
          );
        }
        // Remove the dead worker from the non-MUST_DIE set, update history_.
        removeWorkerIDFromHash(&nonMustDieWorkerSetID_.hash, bw.id);
        ++nonMustDieWorkerSetID_.version;
        HistoryStep hist_step;
        hist_step.removed = bw.shard;
        CHECK(history_.emplace(
          nonMustDieWorkerSetID_.version, std::move(hist_step)
        ).second);
      },
      // A worker's WorkerSetID is getting a new version (a new echo arrived).
      [this](RemoteWorker& w, const cpp2::WorkerSetID& w_set_id) {
        // Guaranteed by RemoteWorker
        CHECK(schedulerID_ == w_set_id.schedulerID)
          << debugString(schedulerID_) << " != "
          << debugString(w_set_id.schedulerID);
        // Update indirectVersionsOfNonMustDieWorkers_ and
        // w.indirectWorkerSetID_, if this update changes this worker's
        // indirect version.
        updateIndirectWorkerSetVersion(&w, w_set_id);
        // No need to update initialWorkerSetIDs_ or nonMustDieWorkerSetID_
        // or history_, since the scheduler's worker set has not changed.
      }
    ));
    // Add the same pointer to the right host worker pool
    CHECK(mutableHostWorkerPool(worker.machineLock.hostname).emplace(
      shard, res.first->second
    ).second) << "Worker pool for hostname " << worker.machineLock.hostname
      << " already had " << " shard " << shard;
    worker_it = res.first;
  }
  // If the hostname changes, we will move the worker to the new host pool
  const auto old_hostname =
    worker_it->second->getBistroWorker().machineLock.hostname;
  // Update the worker's state (also update the hostname if needed)
  auto response = worker_it->second->processHeartbeat(
    update,
    worker,
    worker_set_id,
    consensusPermitsBecomingHealthy(*worker_it->second)
  );
  // Check for hostname changes **after** handling the heartbeat, since the
  // heartbeat could have been rejected. NB: I could also have ensured
  // res.hasValue() here, but it seems fine to just test the symptom.
  const auto& new_hostname =
    worker_it->second->getBistroWorker().machineLock.hostname;
  if (new_hostname != old_hostname) {
    // This might "invalidate" the nextShard_ iterator, but it's okay
    // since the getNextWorker() implementation is robust.
    CHECK_EQ(1, mutableHostWorkerPool(old_hostname).erase(shard))
      << "Inconsistency: did not find shard " << shard
      << " in the worker pool for its hostname " << old_hostname;
    CHECK(mutableHostWorkerPool(new_hostname).emplace(
      shard, worker_it->second
    ).second)
      << "Changing hostname " << old_hostname << " to " << new_hostname
      << ": target already had shard " << shard;
  }
  // NB: We cannot call updateInitialWait() here since it relies on knowing
  // whether any of the workers are new, and doing that here makes each
  // heartbeat take O(# workers).
  if (response.has_value()) {
    // The above callbacks maintain a key invariant: The worker itself must
    // always be part of the WorkerSetID in our reply -- this ensures that
    // RemoteWorker::firstAssociatedWorkerSetID_ is always the first
    // WorkerSetID that contains this worker.
    response->workerSetID = nonMustDieWorkerSetID_;
  }
  return response;
}
namespace {
// Folds one history step into the running set of added shards. Used when
// collapsing a prefix of history_: since we never merge steps from the
// middle of history, every removal must cancel a prior addition, and no
// shard may be added twice -- both invariants are CHECKed.
void mergeHistoryStep(
    const RemoteWorkers::HistoryStep& step,
    std::unordered_set<std::string>* added) {
  if (step.removed.has_value()) {
    auto it = added->find(*step.removed);
    // We never merge steps from mid-history, so removed must always cancel.
    CHECK(it != added->end()) << "Worker was never added: " << *step.removed;
    added->erase(it);
  }
  for (const auto& shard : step.added) {
    CHECK(added->insert(shard).second) << "Worker exists: " << shard;
  }
}
}  // anonymous namespace
// Compacts history_ by merging every version older than the earliest
// version still referenced by a non-MUST_DIE worker's workerSetID() into
// that first referenced version. Bails out (without pruning) while any
// "limbo" worker has yet to echo its first WorkerSetID from this
// scheduler, since such a worker may hold an in-flight reference to a
// version that currently looks unreferenced.
void RemoteWorkers::pruneUnusedHistoryVersions() {
  // Here is a typical timeline of a worker's first few seconds:
  //  - It delivers the first heartbeat (the WorkerSetID is typically that
  //    of another scheduler, with which it was previously associated).
  //  - The scheduler replies with the first WorkerSetID, which contains
  //    this worker. This is to be echoed in the next heartbeat.
  //
  // Before the worker's next heartbeat arrives, the scheduler may try to
  // prune its history. However, there is no way to prune safely at this
  // point, since the "in-flight" WorkerSetID can easily have a history
  // version which is currently unreferenced. There is no good workaround,
  // either, since we don't want to count as referenced all the versions we
  // had ever sent in replies, and pruning those would double the code
  // complexity. So, instead, we just postpone pruning history until all
  // current workers are established.
  CHECK_GE(
    initialWorkerSetIDs_.size(), indirectVersionsOfNonMustDieWorkers_.size()
  );
  // "Limbo" workers: have an initial WorkerSetID recorded, but no entry yet
  // in indirectVersionsOfNonMustDieWorkers_ (i.e. no echoed WorkerSetID).
  auto limbo_workers =
    initialWorkerSetIDs_.size() - indirectVersionsOfNonMustDieWorkers_.size();
  if (limbo_workers > 0) {
    LOG(WARNING) << "Not pruning history until " << limbo_workers
      << " workers echo their first WorkerSetID from this scheduler";
    return;
  }
  // Find the smallest workerSetID() version referenced by any non-MUST_DIE
  // worker; everything before it is prunable.
  folly::Optional<int64_t> first_referenced_version;
  for (const auto& p : workerPool_) {
    // It's easier to exclude MUST_DIE workers, since our other logic
    // excludes MUST_DIE, too (see the history_.clear() clause below).
    // README.worker_set_consensus explains why it is safe to exclude them.
    if (p.second->getState() == RemoteWorkerState::State::MUST_DIE) {
      continue;
    }
    if (auto wsid_ptr = p.second->workerSetID().get_pointer()) {
      // We tested for limbo workers above, so this must be true.
      CHECK(wsid_ptr);
      if (!first_referenced_version.has_value() ||
          WorkerSetIDEarlierThan()(
            wsid_ptr->version, *first_referenced_version)) {
        first_referenced_version = wsid_ptr->version;
      }
    }
  }
  if (!first_referenced_version) {
    // There are no non-MUST_DIE workers with a workerSetID(), and there are
    // no "limbo workers" (checked at start of function), so there must be
    // none at all.
    CHECK(initialWorkerSetIDs_.size() == 0);
    history_.clear(); // No non-MUST_DIE workers, no need for a history.
    return;
  }
  // Tally up all the removed / added workers from unused versions, and
  // stuff them into the first referenced version.
  std::unordered_set<std::string> added;
  for (
    // IMPORTANT: We rely on std::map's robustness to iterator invalidation.
    // The iterator type is explicit, so that this breaks on any changes to
    // the declared type of history_.
    std::map<int64_t, HistoryStep, WorkerSetIDEarlierThan>::iterator it
      = history_.begin();
    it != history_.end();
  ) {
    // This lets us safely delete cur_it. `it` might now be at .end().
    auto cur_it = it++;
    auto& p = *cur_it;
    if (WorkerSetIDEarlierThan()(p.first, *first_referenced_version)) {
      mergeHistoryStep(p.second, &added);
      // Remove the unused version entry. This is safe, since std::map's
      // iterators are stable, and we already incremented `it`.
      history_.erase(cur_it);
      // cur_it and p are now invalid, so hurry out of this loop iteration.
      continue;
    }
    // Found the first version that should not be pruned.
    CHECK(first_referenced_version == p.first) << "First referenced "
      << "WorkerSetID version was not found in the history"; // See below.
    // Fold the accumulated adds/removes into this (still referenced) step,
    // which now represents the full worker set at its version.
    mergeHistoryStep(p.second, &added);
    p.second.added = std::move(added);
    p.second.removed.reset();
    break; // Unused versions pruned and merged.
  }
  // Versions are added to history_ by a RemoteWorker callback just as the
  // worker connects (and well before it echoes the new WorkerSetID back,
  // setting w.workerSetID()). In that gap of time, the version would be at
  // risk of being pruned -- but the `limbo_workers` check at the start
  // prevents it. Then, once w.workerSetID() is set, its version only
  // increases. Therefore, we will never prune a version that is unused
  // now, but will be used in the future, and this check will never fire.
  CHECK(added.empty()) << "First referenced WorkerSetID "
    << " version was not found in the history";
}
// For each worker, find the highest version required by any worker in its
// indirect set. This is one step of an iterative label propagation
// algorithm, whose goal is to lower-bound the transitive closure of
// `RemoteWorker::workerSetID` as `RemoteWorker::indirectWorkerSetID`. This
// transitive closure is the recursive union of the closures of each of the
// workers in my `workerSetID` -- i.e. we follow all the `workerSetID`
// paths we can find that start at the current worker. See
// README.worker_set_consensus for more details.
//
// To do this efficiently, we go through all available worker set versions
// from oldest to newest, while simultaneously walking through non-MUST_DIE
// workers from oldest `indirectWorkerSetID` to the newest.
//
// Since both traversals are sorted by version, we end up with all the
// workers that match the given history version. We can then do one update
// step for each of the workers, potentially increasing its
// `indirectWorkerSetID`. This is safe, since `std::map` has stable
// iterators. As a side effect, we can end up updating a single worker
// multiple times, as increasing its `indirectWorkerSetID` will cause it to
// match a history version after the one it just matched.
void RemoteWorkers::propagateIndirectWorkerSets() {
  // Iterated in increasing version order, in lockstep with history_ below.
  auto indir_it = indirectVersionsOfNonMustDieWorkers_.begin();
  // As we move forward in history, store the latest versions of the current
  // workers' indirect worker sets. This can transiently include MUST_DIE
  // workers (they are currently MUST_DIE, but we are not yet at the step in
  // history_ where they are removed), but the `first` (version) field will
  // always be current.
  VersionShardSet vss;
  for (const auto& hp : history_) {
    // We should only rarely run out of workers -- the latest version would
    // have to be unreferenced by any worker, meaning that a worker `w` just
    // became MUST_DIE, and no other workers' `indirectWorkerSetID` has
    // caught up to the version where `w` became MUST_DIE.
    if (indir_it == indirectVersionsOfNonMustDieWorkers_.end()) {
      break; // No more workers to update.
    }
    CHECK(!WorkerSetIDEarlierThan()(indir_it->first, hp.first))
      << "Worker refers to version not in history -- history v" << hp.first
      << " came before v" << indir_it->first << " from " << indir_it->second;
    // Compute the highest indirect version referenced by workers in the
    // current history step. Note that this will propagate workerSetID
    // dependencies through MUST_DIE workers, but this is harmless as per
    // the note in README.worker_set_consensus.
    if (auto shard_p = hp.second.removed.get_pointer()) { // Remove, then add
      const auto& w = *mutableWorkerOrAbort(*shard_p); // Look up shard
      // It is harmless to leave out from `vss` these "in-limbo" workers
      // that have not yet received their first WorkerSetID from this
      // scheduler. Firstly, we are propagating *conservative* estimates
      // of indirect WorkerSetID versions -- and if we magically knew the
      // value for this worker, it could at best *increase* the maximum
      // version in `vss`. Secondly, once the worker gets a version, the
      // next iteration will propagate it properly, fixing the omission.
      if (w.indirectWorkerSetID().has_value()) {
        CHECK(w.indirectWorkerSetID()->schedulerID == schedulerID_);
        removeFromVersionShardSet(&vss, *w.indirectWorkerSetID(), *shard_p);
      }
    }
    for (const auto& shard : hp.second.added) {
      const auto& w = *mutableWorkerOrAbort(shard); // Look up shard
      if (!w.indirectWorkerSetID().has_value()) {
        continue; // See comment above
      }
      CHECK(w.indirectWorkerSetID()->schedulerID == schedulerID_);
      addToVersionShardSet(&vss, *w.indirectWorkerSetID(), shard);
    }
    // For all workers having the current version (from `hp`) as their
    // `indirectWorkerSet`, replace this set with the highest-versioned
    // `indirectWorkerSet` in `vss`.
    while (
      indir_it != indirectVersionsOfNonMustDieWorkers_.end() &&
      !WorkerSetIDEarlierThan()(hp.first, indir_it->first)
    ) {
      // Empty vss means that this is a version in history with *no* workers
      // connected. A worker can clearly never get such a version as its
      // workerSetID(), which means that when vss is empty, there must be no
      // workers to update and we don't enter the loop.
      CHECK(!vss.empty());
      // We never delete intermediate versions from history_
      CHECK_EQ(hp.first, indir_it->first);
      auto it = indir_it; // Only use `it` and not `indir_it` below.
      ++indir_it; // So we can safely remove `it`.
      // Get a WorkerSetID for the highest-version `indirectWorkerSet`
      // assigned to any worker at the current step in history (`hp`). The
      // easiest way to look up that worker is by shard.
      auto new_id =
        mutableWorkerOrAbort(vss.rbegin()->second)->indirectWorkerSetID();
      // Can't end up in `vss` without having an indirectWorkerSetID version.
      CHECK(new_id.has_value());
      CHECK_EQ(vss.rbegin()->first, new_id->version);
      CHECK(new_id->schedulerID == schedulerID_);
      // Find the worker to update.
      auto& w = *mutableWorkerOrAbort(it->second);
      // Can't end up in indirectVersionsOfNonMustDieWorkers_ without having
      // an indirectWorkerSetID version.
      CHECK_EQ(it->first, w.indirectWorkerSetID()->version);
      CHECK_EQ(it->second, w.getBistroWorker().shard);
      // Since we're about to update `w`, we must also do `vss` -- the `vss`
      // update step above searches for the latest `indirectWorkerSetID`.
      auto vss_it = vss.find({it->first, it->second});
      if (vss_it != vss.end()) {
        vss.erase(vss_it);
        addToVersionShardSet(&vss, *new_id, it->second);
      }
      // Propagate to the current worker the highest-version
      // `indirectWorkerSetID` of all the workers in its current
      // `indirectWorkerSetID`. Also updates
      // `indirectVersionsOfNonMustDieWorkers_`
      //
      // CAUTION: This invalidates `it`.
      updateIndirectWorkerSetVersion(&w, *new_id);
      CHECK(new_id->version == w.indirectWorkerSetID()->version)
        << debugString(new_id->version) << " != "
        << debugString(w.indirectWorkerSetID()->version);
    }
  }
  // Every worker entry must have been consumed by the lockstep walk above;
  // a leftover would reference a version newer than anything in history_.
  CHECK(indir_it == indirectVersionsOfNonMustDieWorkers_.end())
    << "indirectWorkerSetID version " << indir_it->first << " in shard "
    << indir_it->second << " exceeds the maximum history version of "
    << (history_.empty() ? -1 : history_.rbegin()->first);
}
// Decides whether the scheduler may leave its "initial wait" (the startup
// period during which no tasks are started), and records a human-readable
// status message on `update` explaining why it is, or is not, still
// waiting. An empty message means the wait is over.
void RemoteWorkers::updateInitialWait(RemoteWorkerUpdate* update) {
  // Future: Maybe remove everything in update->suicideWorkers_ from the
  // worker pools?
  std::string msg;
  if (!inInitialWait_) {
    // Already out of initial wait; publish the empty message and stop.
    update->setInitialWaitMessage(std::move(msg));
    return;
  }
  // The smallest wait that guarantees any pre-restart worker has either
  // reconnected, been lost, or committed suicide (killing its tasks).
  const time_t kMinSafeWait =
    RemoteWorkerState::maxHealthcheckGap() +
    RemoteWorkerState::loseUnhealthyWorkerAfter() +
    RemoteWorkerState::workerCheckInterval() + // extra safety gap
    RemoteWorkerState::workerSuicideBackoffSafetyMarginSec() +
    (RemoteWorkerState::workerSuicideTaskKillWaitMs() / 1000) + 1;
  time_t min_start_time = update->curTime();
  if (FLAGS_CAUTION_startup_wait_for_workers < 0) {
    // Negative flag value means "use the computed safe default".
    min_start_time -= kMinSafeWait;
  } else {
    min_start_time -= FLAGS_CAUTION_startup_wait_for_workers;
    // Warn (but obey) when the operator-chosen wait is dangerously short.
    if (RemoteWorkerState::maxHealthcheckGap()
        > FLAGS_CAUTION_startup_wait_for_workers) {
      msg += folly::to<std::string>(
        "DANGER! DANGER! Your --CAUTION_startup_wait_for_workers ",
        "of ", FLAGS_CAUTION_startup_wait_for_workers,
        " is lower than the max healthcheck gap of ",
        RemoteWorkerState::maxHealthcheckGap(), ", which makes it very ",
        "likely that you will start second copies of tasks that are ",
        "already running (unless your heartbeat interval is much smaller). "
      );
    } else if (kMinSafeWait > FLAGS_CAUTION_startup_wait_for_workers) {
      msg += folly::to<std::string>(
        "Your custom --CAUTION_startup_wait_for_workers is ",
        "less than the minimum safe value of ", kMinSafeWait,
        " -- this increases the risk of starting second copies of tasks ",
        "that were already running. "
      );
    }
  }
  // Are exactly the same workers connected to the scheduler now, as before
  // the restart?
  bool initial_worker_set_id_consensus =
    // The initial worker set ID is the same for all non-MUST_DIE workers,
    initialWorkerSetIDs_.end()
      == initialWorkerSetIDs_.upper_bound(*initialWorkerSetIDs_.begin())
    // ... and it matches our non-MUST_DIE worker set, meaning that exactly
    // the same workers are connected now as the scheduler had before its
    // restart.
    && nonMustDieWorkerSetID_.hash == initialWorkerSetIDs_.begin()->hash;
  if (!initial_worker_set_id_consensus) {
    msg += "No initial worker set ID consensus. ";
  }
  // The scheduler is eligible to exit initial wait if:
  //   (i) there are no NEW workers, AND
  //   (ii) --min_startup_wait_for_workers has expired, AND
  //   (iii) EITHER the wait expired, OR all connected workers have the same
  //        initial WorkerSetID, which matches the non-MUST_DIE worker set.
  //
  // If the wait expires, we deliberately do not wait for the WorkerSetID
  // consensus, for two reasons. Firstly, people "who know what they are
  // doing" need to be able to manually shorten the initial wait. Secondly,
  // if the initial wait is safe, there is really no benefit to waiting for
  // the consensus -- but it *can* needlessly slow down startup if some
  // workers become unhealthy.
  if (min_start_time < startTime_ && !initial_worker_set_id_consensus) {
    msg += "Waiting for all workers to connect before running tasks.";
  // If we are eligible to exit initial wait, but are still querying running
  // tasks, then one of the 'new' workers (while transiently unresponsive)
  // might be running tasks the scheduler does not know about. To be safe,
  // stay in initial wait until all getRunningTasks succeed.
  //
  // This test is why we cannot call updateInitialWait from processHeartbeat.
  } else if (!update->newWorkers().empty()) {
    msg += folly::to<std::string>(
      "Ready to exit initial wait, but not all workers' running tasks were "
      "fetched; not allowing tasks to start until all are fetched."
    );
  } else {
    // All conditions met: leave initial wait with an empty status message.
    inInitialWait_ = false;
    msg = "";
  }
  update->setInitialWaitMessage(std::move(msg));
}
// Periodic bookkeeping pass: prunes and propagates WorkerSetID state, then
// refreshes every worker's health state, and finally re-evaluates the
// scheduler's initial-wait status.
void RemoteWorkers::updateState(RemoteWorkerUpdate* update) {
  // Validate the flag once, up front -- re-checking it per worker inside
  // the loop would be pointless.
  CHECK(FLAGS_healthcheck_period > 0)
      << "--healthcheck_period must be positive";
  // Prune/propagate order barely matters, but running both before the
  // per-worker updateState() lets workers become healthy sooner.
  pruneUnusedHistoryVersions();
  propagateIndirectWorkerSets();
  for (auto& shard_and_worker : workerPool_) {
    auto& worker = *shard_and_worker.second;
    worker.updateState(update, consensusPermitsBecomingHealthy(worker));
  }
  // Depends on update->newWorkers(), so it must follow the loop above.
  updateInitialWait(update);
}
// Seeds a freshly-connected worker with the running tasks it reported.
// Only workers still in the NEW state accept the snapshot.
void RemoteWorkers::initializeRunningTasks(
    const cpp2::BistroWorker& w,
    const std::vector<cpp2::RunningTask>& running_tasks) {
  auto remote_worker = mutableWorkerOrAbort(w.shard);
  // A concurrent applyUpdate may already have advanced this worker past
  // NEW (#5176536) -- in that case the snapshot must be dropped.
  if (remote_worker->getState() == RemoteWorkerState::State::NEW) {
    remote_worker->initializeRunningTasks(running_tasks);
  } else {
    LOG(WARNING) << "Ignoring running tasks for non-new " << w.shard;
  }
}
// Round-robin selection: returns the worker at the remembered cursor shard
// (or the first worker if that shard is gone), then advances the cursor.
// Returns nullptr when the pool is empty.
const RemoteWorker*
RemoteWorkers::RoundRobinWorkerPool::getNextWorker() {
  if (this->empty()) {
    LOG(WARNING) << "No workers in the '" << name_ << "' pool";
    return nullptr;
  }
  // Resume from the remembered shard; if it vanished, restart at the front.
  auto cur = this->find(nextShard_);
  if (cur == this->end()) {
    cur = this->begin();
  }
  const auto* chosen = cur->second.get();
  // Advance the cursor for the next call, wrapping around at the end.
  ++cur;
  nextShard_ = (cur == this->end()) ? this->begin()->first : cur->first;
  return chosen;
}
// Returns the per-hostname worker pool, lazily creating an empty pool the
// first time a hostname is seen.
RemoteWorkers::RoundRobinWorkerPool&
RemoteWorkers::mutableHostWorkerPool(const string& host) {
  auto it = hostToWorkerPool_.find(host);
  if (it != hostToWorkerPool_.end()) {
    return it->second;
  }
  // Deliberately construct the pool only when absent, so a lookup of an
  // existing host never builds a throwaway RoundRobinWorkerPool.
  return hostToWorkerPool_.emplace(host, RoundRobinWorkerPool(host))
      .first->second;
}
}}
| apache-2.0 |
ysb33r/asciidoctorj | asciidoctorj-core/src/test/java/org/asciidoctor/converter/WhenConverterIsRegistered.java | 5946 | package org.asciidoctor.converter;
import org.asciidoctor.Asciidoctor;
import org.asciidoctor.MemoryLogHandler;
import org.asciidoctor.OptionsBuilder;
import org.asciidoctor.SafeMode;
import org.asciidoctor.arquillian.api.Unshared;
import org.asciidoctor.jruby.internal.JRubyAsciidoctor;
import org.asciidoctor.util.ClasspathResources;
import org.jboss.arquillian.junit.Arquillian;
import org.jboss.arquillian.test.api.ArquillianResource;
import org.junit.After;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.junit.runner.RunWith;
import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.logging.Logger;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasSize;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
/**
 * Tests registration, lookup and execution of custom Java converters.
 *
 * <p>Each test runs against an unshared {@link Asciidoctor} instance injected
 * by Arquillian; {@link #cleanUp()} unregisters all converters after every
 * test so registrations cannot leak between test methods.
 */
@RunWith(Arquillian.class)
public class WhenConverterIsRegistered {

    @ArquillianResource(Unshared.class)
    private Asciidoctor asciidoctor;

    @ArquillianResource
    private ClasspathResources classpath;

    @ArquillianResource
    private TemporaryFolder tmp;

    /** Resets the converter registry so tests stay independent. */
    @After
    public void cleanUp() {
        asciidoctor.javaConverterRegistry().unregisterAll();
    }

    @Test
    public void shouldCleanUpRegistry() {
        asciidoctor.javaConverterRegistry().unregisterAll();

        assertThat(asciidoctor.javaConverterRegistry().converters().keySet(), empty());
    }

    @Test
    public void shouldRegisterAndExecuteGivenConverter() {
        asciidoctor.javaConverterRegistry().register(TextConverter.class, "test");

        String result = asciidoctor.convert("== Hello\n\nWorld!\n\n- a\n- b", OptionsBuilder.options().backend("test"));

        assertThat(result, is("== Hello ==\n\nWorld!\n\n-> a\n-> b\n"));
    }

    @Test
    public void shouldRegisterWithBackendNameFromAnnotation() {
        // Register as default converter
        asciidoctor.javaConverterRegistry().register(TextConverter.class);
        asciidoctor.javaConverterRegistry().register(DummyConverter.class);

        String result = asciidoctor.convert("== Hello\n\nWorld!\n\n- a\n- b", OptionsBuilder.options().backend(TextConverter.DEFAULT_FORMAT));

        assertThat(result, is("== Hello ==\n\nWorld!\n\n-> a\n-> b\n"));
    }

    @Test
    public void shouldUseDefaultBackend() {
        // Register as default converter
        asciidoctor.javaConverterRegistry().register(DummyConverter.class);

        // An unknown backend name must fall back to the default converter.
        String result = asciidoctor.convert("== Hello\n\nWorld!\n\n- a\n- b", OptionsBuilder.options().backend("Undefined"));

        assertThat(result, is("Dummy"));
    }

    /** Attaches an in-memory handler to the shared "asciidoctor" logger. */
    private MemoryLogHandler registerMemoryLogHandler() {
        final Logger logger = Logger.getLogger("asciidoctor");
        final MemoryLogHandler handler = new MemoryLogHandler();
        logger.addHandler(handler);
        return handler;
    }

    @Test
    public void shouldBeAbleToLog() {
        MemoryLogHandler handler = registerMemoryLogHandler();
        try {
            asciidoctor.javaConverterRegistry().register(TextConverter.class);

            // The conversion result is irrelevant here; we only assert on the
            // log record that the converter emits while running.
            asciidoctor.convert("== Hello\n\nWorld!\n\n- a\n- b", OptionsBuilder.options().backend(TextConverter.DEFAULT_FORMAT));

            assertThat(handler.getLogRecords(), hasSize(1));
            assertThat(handler.getLogRecords().get(0).getMessage(), is("Now we're logging"));
        } finally {
            // Always detach the handler so other tests see a clean logger.
            final Logger logger = Logger.getLogger("asciidoctor");
            logger.removeHandler(handler);
        }
    }

    @Test
    public void shouldReturnConverterRegisteredWithAnnotation() {
        asciidoctor.javaConverterRegistry().register(TextConverter.class);
        assertEquals(TextConverter.class, asciidoctor.javaConverterRegistry().converters().get(TextConverter.DEFAULT_FORMAT));
    }

    @Test
    public void shouldReturnRegisteredConverter() {
        asciidoctor.javaConverterRegistry().register(TextConverter.class, "test2");
        assertEquals(TextConverter.class, asciidoctor.javaConverterRegistry().converters().get("test2"));
    }

    @Test
    public void shouldRegisterConverterViaConverterRegistryExecutor() throws Exception {
        // Point the thread-context classloader at a classpath that carries a
        // ConverterRegistry service entry, then build a fresh Asciidoctor so
        // the ServiceLoader mechanism picks the converter up.
        ClassLoader oldTCCL = Thread.currentThread().getContextClassLoader();
        try {
            Thread.currentThread().setContextClassLoader(new URLClassLoader(new URL[]{classpath.getResource("serviceloadertest/3").toURI().toURL()}));
            asciidoctor = JRubyAsciidoctor.create();
            String result = asciidoctor.convert("== Hello\n\nWorld!\n\n- a\n- b", OptionsBuilder.options().backend("extensiontext"));
            assertThat(result, is("== Hello ==\n\nWorld!\n\n-> a\n-> b\n"));
        } finally {
            Thread.currentThread().setContextClassLoader(oldTCCL);
        }
    }

    @Test
    public void shouldWriteFileWithSuffixFromConverterWithAnnotation() throws Exception {
        asciidoctor.javaConverterRegistry().register(TextConverter.class);

        File todir = tmp.newFolder();
        asciidoctor.convertFile(classpath.getResource("simple.adoc"), OptionsBuilder.options().backend(TextConverter.DEFAULT_FORMAT).toDir(todir).safe(SafeMode.UNSAFE));

        // The @ConverterFor suffix must win over the default ".html".
        assertThat(new File(todir, "simple.html").exists(), is(false));
        assertThat(new File(todir, "simple.txt").exists(), is(true));
    }

    @Test
    public void shouldWriteFileWithSuffixFromConverterThatInvokesSetOutfileSuffix() throws Exception {
        asciidoctor.javaConverterRegistry().register(TextConverterWithSuffix.class);

        File todir = tmp.newFolder();
        asciidoctor.convertFile(classpath.getResource("simple.adoc"), OptionsBuilder.options().backend(TextConverterWithSuffix.DEFAULT_FORMAT).toDir(todir).safe(SafeMode.UNSAFE));

        // The suffix set programmatically by the converter must be honored.
        assertThat(new File(todir, "simple.html").exists(), is(false));
        assertThat(new File(todir, "simple.text").exists(), is(true));
    }
}
| apache-2.0 |
yahoo/pulsar | pulsar-client/src/main/java/org/apache/pulsar/client/impl/TypedMessageBuilderImpl.java | 11134 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.client.impl;
import static com.google.common.base.Preconditions.checkArgument;
import static org.apache.pulsar.client.util.TypeCheckUtil.checkType;
import com.google.common.base.Preconditions;
import java.nio.ByteBuffer;
import java.util.Base64;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import org.apache.pulsar.client.api.Message;
import org.apache.pulsar.client.api.MessageId;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.client.api.Schema;
import org.apache.pulsar.client.api.TypedMessageBuilder;
import org.apache.pulsar.client.impl.schema.KeyValueSchema;
import org.apache.pulsar.client.impl.transaction.TransactionImpl;
import org.apache.pulsar.common.api.proto.MessageMetadata;
import org.apache.pulsar.common.schema.KeyValueEncodingType;
import org.apache.pulsar.common.schema.SchemaType;
/**
 * Default {@link TypedMessageBuilder} implementation: accumulates message
 * metadata and an encoded payload, then hands the assembled message to the
 * owning producer, optionally as part of a transaction.
 *
 * @param <T> the schema-typed payload type
 */
public class TypedMessageBuilderImpl<T> implements TypedMessageBuilder<T> {

    private static final long serialVersionUID = 0L;

    private static final ByteBuffer EMPTY_CONTENT = ByteBuffer.allocate(0);

    // Producer that will ultimately send the built message.
    private transient final ProducerBase<?> producer;
    // Metadata accumulated by the builder setters below.
    private transient final MessageMetadata msgMetadata = new MessageMetadata();
    private transient final Schema<T> schema;
    // Encoded payload; EMPTY_CONTENT until value(...) is called.
    private transient ByteBuffer content;
    // Transaction this message belongs to, or null for non-transactional sends.
    private transient final TransactionImpl txn;

    public TypedMessageBuilderImpl(ProducerBase<?> producer, Schema<T> schema) {
        this(producer, schema, null);
    }

    public TypedMessageBuilderImpl(ProducerBase<?> producer,
                                   Schema<T> schema,
                                   TransactionImpl txn) {
        this.producer = producer;
        this.schema = schema;
        this.content = EMPTY_CONTENT;
        this.txn = txn;
    }

    /**
     * Stamps transaction ids into the metadata (no-op outside a transaction).
     * The return value is a fixed sentinel; callers ignore it.
     */
    private long beforeSend() {
        if (txn == null) {
            return -1L;
        }
        msgMetadata.setTxnidLeastBits(txn.getTxnIdLeastBits());
        msgMetadata.setTxnidMostBits(txn.getTxnIdMostBits());
        return -1L;
    }

    @Override
    public MessageId send() throws PulsarClientException {
        try {
            // enqueue the message to the buffer
            CompletableFuture<MessageId> sendFuture = sendAsync();

            if (!sendFuture.isDone()) {
                // the send request wasn't completed yet (e.g. not failing at enqueuing), then attempt to triggerFlush it out
                producer.triggerFlush();
            }

            return sendFuture.get();
        } catch (Exception e) {
            throw PulsarClientException.unwrap(e);
        }
    }

    @Override
    public CompletableFuture<MessageId> sendAsync() {
        Message<T> message = getMessage();
        CompletableFuture<MessageId> sendFuture;
        if (txn != null) {
            // Route through the transactional path and register the pending
            // send so the transaction can await it at commit time.
            sendFuture = producer.internalSendWithTxnAsync(message, txn);
            txn.registerSendOp(sendFuture);
        } else {
            sendFuture = producer.internalSendAsync(message);
        }
        return sendFuture;
    }

    @Override
    public TypedMessageBuilder<T> key(String key) {
        // Guard with `instanceof` as well (matching keyBytes/value) so that a
        // schema merely *reporting* the KEY_VALUE type but not implemented by
        // KeyValueSchema cannot trigger a ClassCastException here.
        if (schema instanceof KeyValueSchema && schema.getSchemaInfo().getType() == SchemaType.KEY_VALUE) {
            KeyValueSchema kvSchema = (KeyValueSchema) schema;
            checkArgument(!(kvSchema.getKeyValueEncodingType() == KeyValueEncodingType.SEPARATED),
                    "This method is not allowed to set keys when in encoding type is SEPARATED");
            if (key == null) {
                msgMetadata.setNullPartitionKey(true);
                return this;
            }
        }
        msgMetadata.setPartitionKey(key);
        msgMetadata.setPartitionKeyB64Encoded(false);
        return this;
    }

    @Override
    public TypedMessageBuilder<T> keyBytes(byte[] key) {
        if (schema instanceof KeyValueSchema && schema.getSchemaInfo().getType() == SchemaType.KEY_VALUE) {
            KeyValueSchema kvSchema = (KeyValueSchema) schema;
            checkArgument(!(kvSchema.getKeyValueEncodingType() == KeyValueEncodingType.SEPARATED),
                    "This method is not allowed to set keys when in encoding type is SEPARATED");
            if (key == null) {
                msgMetadata.setNullPartitionKey(true);
                return this;
            }
        }
        // Raw key bytes are carried base64-encoded in the string key field.
        msgMetadata.setPartitionKey(Base64.getEncoder().encodeToString(key));
        msgMetadata.setPartitionKeyB64Encoded(true);
        return this;
    }

    @Override
    public TypedMessageBuilder<T> orderingKey(byte[] orderingKey) {
        msgMetadata.setOrderingKey(orderingKey);
        return this;
    }

    @Override
    public TypedMessageBuilder<T> value(T value) {
        if (value == null) {
            msgMetadata.setNullValue(true);
            return this;
        }
        // For SEPARATED KeyValue schemas, split the pair: the key becomes the
        // (base64) partition key and only the value is encoded as payload.
        if (value instanceof org.apache.pulsar.common.schema.KeyValue
                && schema.getSchemaInfo() != null && schema.getSchemaInfo().getType() == SchemaType.KEY_VALUE) {
            KeyValueSchema kvSchema = (KeyValueSchema) schema;
            org.apache.pulsar.common.schema.KeyValue kv = (org.apache.pulsar.common.schema.KeyValue) value;
            if (kvSchema.getKeyValueEncodingType() == KeyValueEncodingType.SEPARATED) {
                // set key as the message key
                if (kv.getKey() != null) {
                    msgMetadata.setPartitionKey(
                            Base64.getEncoder().encodeToString(kvSchema.getKeySchema().encode(kv.getKey())));
                    msgMetadata.setPartitionKeyB64Encoded(true);
                } else {
                    this.msgMetadata.setNullPartitionKey(true);
                }

                // set value as the payload
                if (kv.getValue() != null) {
                    this.content = ByteBuffer.wrap(kvSchema.getValueSchema().encode(kv.getValue()));
                } else {
                    this.msgMetadata.setNullValue(true);
                }
                return this;
            }
        }
        this.content = ByteBuffer.wrap(schema.encode(value));
        return this;
    }

    @Override
    public TypedMessageBuilder<T> property(String name, String value) {
        checkArgument(name != null, "Need Non-Null name");
        checkArgument(value != null, "Need Non-Null value for name: " + name);
        msgMetadata.addProperty()
                .setKey(name)
                .setValue(value);
        return this;
    }

    @Override
    public TypedMessageBuilder<T> properties(Map<String, String> properties) {
        for (Map.Entry<String, String> entry : properties.entrySet()) {
            checkArgument(entry.getKey() != null, "Need Non-Null key");
            checkArgument(entry.getValue() != null, "Need Non-Null value for key: " + entry.getKey());
            msgMetadata.addProperty()
                    .setKey(entry.getKey())
                    .setValue(entry.getValue());
        }

        return this;
    }

    @Override
    public TypedMessageBuilder<T> eventTime(long timestamp) {
        checkArgument(timestamp > 0, "Invalid timestamp : '%s'", timestamp);
        msgMetadata.setEventTime(timestamp);
        return this;
    }

    @Override
    public TypedMessageBuilder<T> sequenceId(long sequenceId) {
        checkArgument(sequenceId >= 0);
        msgMetadata.setSequenceId(sequenceId);
        return this;
    }

    @Override
    public TypedMessageBuilder<T> replicationClusters(List<String> clusters) {
        Preconditions.checkNotNull(clusters);
        msgMetadata.clearReplicateTo();
        msgMetadata.addAllReplicateTos(clusters);
        return this;
    }

    @Override
    public TypedMessageBuilder<T> disableReplication() {
        msgMetadata.clearReplicateTo();
        // "__local__" is the sentinel cluster name meaning "do not replicate".
        msgMetadata.addReplicateTo("__local__");
        return this;
    }

    @Override
    public TypedMessageBuilder<T> deliverAfter(long delay, TimeUnit unit) {
        return deliverAt(System.currentTimeMillis() + unit.toMillis(delay));
    }

    @Override
    public TypedMessageBuilder<T> deliverAt(long timestamp) {
        msgMetadata.setDeliverAtTime(timestamp);
        return this;
    }

    /**
     * Applies a map of builder settings (see the {@code CONF_*} keys declared
     * on {@link TypedMessageBuilder}); unknown keys are rejected.
     */
    @SuppressWarnings("unchecked")
    @Override
    public TypedMessageBuilder<T> loadConf(Map<String, Object> config) {
        config.forEach((key, value) -> {
            switch (key) {
                case CONF_KEY:
                    this.key(checkType(value, String.class));
                    break;
                case CONF_PROPERTIES:
                    this.properties(checkType(value, Map.class));
                    break;
                case CONF_EVENT_TIME:
                    this.eventTime(checkType(value, Long.class));
                    break;
                case CONF_SEQUENCE_ID:
                    this.sequenceId(checkType(value, Long.class));
                    break;
                case CONF_REPLICATION_CLUSTERS:
                    this.replicationClusters(checkType(value, List.class));
                    break;
                case CONF_DISABLE_REPLICATION:
                    boolean disableReplication = checkType(value, Boolean.class);
                    if (disableReplication) {
                        this.disableReplication();
                    }
                    break;
                case CONF_DELIVERY_AFTER_SECONDS:
                    this.deliverAfter(checkType(value, Long.class), TimeUnit.SECONDS);
                    break;
                case CONF_DELIVERY_AT:
                    this.deliverAt(checkType(value, Long.class));
                    break;
                default:
                    throw new RuntimeException("Invalid message config key '" + key + "'");
            }
        });
        return this;
    }

    public MessageMetadata getMetadataBuilder() {
        return msgMetadata;
    }

    /** Finalizes transaction metadata and materializes the message. */
    public Message<T> getMessage() {
        beforeSend();
        return MessageImpl.create(msgMetadata, content, schema);
    }

    public long getPublishTime() {
        return msgMetadata.getPublishTime();
    }

    public boolean hasKey() {
        return msgMetadata.hasPartitionKey();
    }

    public String getKey() {
        return msgMetadata.getPartitionKey();
    }

    public ByteBuffer getContent() {
        return content;
    }
}
| apache-2.0 |
Fokko/incubator-airflow | airflow/operators/papermill_operator.py | 2297 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import Dict, Optional
import attr
import papermill as pm
from airflow.lineage.entities import File
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
@attr.s(auto_attribs=True)
class NoteBook(File):
    """Jupyter-notebook lineage entity used as a papermill inlet/outlet.

    Attributes:
        type_hint: lineage type discriminator for this entity.
        parameters: papermill parameters injected into the notebook.
        meta_schema: fully qualified schema name used for serialization.
    """

    type_hint: Optional[str] = "jupyter_notebook"
    # A plain ``{}`` default would be a single dict shared by every NoteBook
    # instance created without explicit parameters; use a per-instance
    # factory instead.
    parameters: Dict = attr.ib(factory=dict)

    meta_schema: str = __name__ + '.NoteBook'
class PapermillOperator(BaseOperator):
    """
    Executes a jupyter notebook through papermill that is annotated with parameters

    :param input_nb: input notebook (can also be a NoteBook or a File inlet)
    :type input_nb: str
    :param output_nb: output notebook (can also be a NoteBook or File outlet)
    :type output_nb: str
    :param parameters: the notebook parameters to set
    :type parameters: dict
    """

    @apply_defaults
    def __init__(self,
                 input_nb: str,
                 output_nb: str,
                 parameters: Dict,
                 *args, **kwargs) -> None:
        super().__init__(*args, **kwargs)

        # Record the notebooks as lineage entities; execute() pairs each
        # inlet with the outlet at the same position.
        self.inlets.append(NoteBook(url=input_nb,
                                    parameters=parameters))
        self.outlets.append(NoteBook(url=output_nb))

    def execute(self, context):
        # Pair each input notebook with its corresponding output slot
        # instead of indexing with range(len(...)).
        for input_nb, output_nb in zip(self.inlets, self.outlets):
            pm.execute_notebook(input_nb.url, output_nb.url,
                                parameters=input_nb.parameters,
                                progress_bar=False, report_mode=True)
| apache-2.0 |
appbels/Org | Org/Schema/Thing/Place/CivicStructure/PlaceOfWorship.class.php | 1060 | <?php
namespace Org\Schema\Thing\Place\CivicStructure;
/**
* Class PlaceOfWorship
* Place of worship, such as a church, synagogue, or mosque.
* @author AppBels <app.bels@gmail.com>
* @name PlaceOfWorship
* @namespace Org\Schema\Thing\Place\CivicStructure
* @package Org\Schema
* @see https://schema.org/PlaceOfWorship
* Date 10/04/2017
*/
class PlaceOfWorship extends \Org\Schema\Thing\Place\CivicStructure
{
    /**
     * PlaceOfWorship constructor.
     * Delegates entirely to the CivicStructure parent; kept explicit so the
     * class mirrors the structure of its siblings in the schema hierarchy.
     * @access public
     * @see \Org\Schema\Thing\Place\CivicStructure::__construct()
     */
    public function __construct ()
    {
        parent::__construct();
    }
    /**
     * PlaceOfWorship toString.
     * Returns the parent's string representation unchanged.
     * @access public
     * @see \Org\Schema\Thing\Place\CivicStructure::__toString()
     *
     * @return string
     */
    public function __toString ()
    {
        return parent::__toString();
    }
    /**
     * PlaceOfWorship destructor.
     * Delegates cleanup to the parent destructor.
     * @access public
     * @see \Org\Schema\Thing\Place\CivicStructure::__destruct()
     */
    public function __destruct ()
    {
        parent::__destruct();
    }
}
?> | apache-2.0 |
iamjarvo/couchbase-jruby-client | test/mock.rb | 2310 | class CouchbaseServer
attr_accessor :host, :port, :num_nodes, :buckets_spec
def real?
true
end
def initialize(params = {})
@host, @port = ENV['COUCHBASE_SERVER'].split(':')
@port = @port.to_i
if @host.nil? || @host.empty? || @port == 0
raise ArgumentError, 'Check COUCHBASE_SERVER variable. It should be hostname:port'
end
@config = MultiJson.load(open("http://#{@host}:#{@port}/pools/default"))
@num_nodes = @config['nodes'].size
@buckets_spec = params[:buckets_spec] || 'default:' # "default:,protected:secret,cache::memcache"
end
def start
# flush all buckets
@buckets_spec.split(',') do |bucket|
name, password, _ = bucket.split(':')
connection = Couchbase.new(:hostname => @host,
:port => @port,
:username => name,
:bucket => name,
:password => password)
connection.flush
end
end
def stop; end
end
require "#{File.dirname(__FILE__)}/CouchbaseMock.jar"
# Thin JRuby wrapper around the Java CouchbaseMock server (loaded from
# CouchbaseMock.jar) used when no real cluster is configured.
class CouchbaseMock
  attr_accessor :host, :port, :num_nodes, :buckets_spec, :num_vbuckets

  # False: this is the in-process mock, not a real cluster.
  def real?
    false
  end

  def initialize(params = {})
    # Defaults below can be overridden via +params+ (each key is assigned
    # through its attr_accessor writer) or by mutating the yielded instance.
    @host = 'localhost'
    @port = 8091
    @num_nodes = 1
    @num_vbuckets = 4096
    @buckets_spec = 'default:' # "default:,protected:secret,cache::memcache"
    params.each do |key, value|
      send("#{key}=", value)
    end
    yield self if block_given?
    # vbucket count must be a positive power of two (n & (n - 1) == 0 test).
    if @num_vbuckets < 1 || (@num_vbuckets & (@num_vbuckets - 1) != 0)
      raise ArgumentError, 'Number of vbuckets should be a power of two and greater than zero'
    end
    @mock = Java::OrgCouchbaseMock::CouchbaseMock.new(@host, @port, @num_nodes, @num_vbuckets, @buckets_spec)
  end

  def start
    @mock.start
    # Block until the Java mock is actually accepting connections.
    @mock.waitForStartup
  end

  def stop
    @mock.stop
  end
end
# Builds, starts and returns the Couchbase backend for a test: a real
# CouchbaseServer when COUCHBASE_SERVER is set, otherwise an in-process
# CouchbaseMock. Skips the test when the real cluster cannot honour +params+.
def start_mock(params = {})
  mock =
    if ENV['COUCHBASE_SERVER']
      server = CouchbaseServer.new(params)
      mismatch = (params[:port] && server.port != params[:port]) ||
                 (params[:host] && server.host != params[:host]) ||
                 server.buckets_spec != 'default:'
      skip("Unable to configure real cluster. Requested config is: #{params.inspect}") if mismatch
      server
    else
      CouchbaseMock.new(params)
    end
  mock.start
  mock
end
| apache-2.0 |
Esri/arcgis-pro-sdk-community-samples | Map-Exploration/OverlayGroundSurface/ExportGeometry.cs | 2772 | /*
Copyright 2020 Esri
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using ArcGIS.Core.CIM;
using ArcGIS.Core.Data;
using ArcGIS.Core.Geometry;
using ArcGIS.Desktop.Catalog;
using ArcGIS.Desktop.Core;
using ArcGIS.Desktop.Editing;
using ArcGIS.Desktop.Extensions;
using ArcGIS.Desktop.Framework;
using ArcGIS.Desktop.Framework.Contracts;
using ArcGIS.Desktop.Framework.Dialogs;
using ArcGIS.Desktop.Framework.Threading.Tasks;
using ArcGIS.Desktop.Mapping;
namespace OverlayGroundSurface
{
internal class ExportGeometries : Button
{
    /// <summary>
    /// Prompts for a target JSON file and serializes the currently rendered
    /// geometries (Module1.Geometries) into it as a single GeometryBag.
    /// </summary>
    protected override void OnClick()
    {
        try
        {
            // Nothing to export until a geometry has been rendered.
            if (Module1.Geometries == null || Module1.Geometries.Count <= 0)
            {
                MessageBox.Show($@"You have to first render a geometry before you can export the Geometry");
                return;
            }
            // Ask the user where the JSON export should be written.
            var browseFilter = new BrowseProjectFilter("esri_browseDialogFilters_json_file")
            {
                Name = "Specify JSON file to export Geometries to"
            };
            var dialog = new SaveItemDialog { BrowseFilter = browseFilter };
            var dialogResult = dialog.ShowDialog();
            if (dialogResult.Value == false) return;
            var jsonPath = $@"{dialog.FilePath}.json";
            // Ensure the destination directory exists before writing.
            var folder = System.IO.Path.GetDirectoryName(jsonPath);
            if (!System.IO.Directory.Exists(folder)) System.IO.Directory.CreateDirectory(folder);
            if (System.IO.File.Exists(jsonPath))
            {
                // Confirm overwrite, then delete so WriteAllText starts fresh.
                var overwrite = MessageBox.Show($@"The export will write over the existing file {jsonPath}", "Override File", System.Windows.MessageBoxButton.YesNo);
                if (overwrite != System.Windows.MessageBoxResult.Yes) return;
                System.IO.File.Delete(jsonPath);
            }
            // All geometries go into one bag, using the first geometry's
            // spatial reference for the whole collection.
            GeometryBag bag = GeometryBagBuilder.CreateGeometryBag(Module1.Geometries,
                Module1.Geometries[0].SpatialReference);
            System.IO.File.WriteAllText(jsonPath, bag.ToJson());
            MessageBox.Show($@"Export saved to {jsonPath}");
        }
        catch (Exception ex)
        {
            MessageBox.Show($@"Export Exception: {ex}");
        }
    }
}
}
| apache-2.0 |
vitorgv/spring-security | messaging/src/main/java/org/springframework/security/messaging/util/matcher/SimpMessageTypeMatcher.java | 2677 | /*
* Copyright 2002-2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.springframework.security.messaging.util.matcher;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHeaders;
import org.springframework.messaging.simp.SimpMessageHeaderAccessor;
import org.springframework.messaging.simp.SimpMessageType;
import org.springframework.util.Assert;
import org.springframework.util.ObjectUtils;
/**
* A {@link MessageMatcher} that matches if the provided {@link Message} has a
* type that is the same as the {@link SimpMessageType} that was specified in
* the constructor.
*
* @since 4.0
* @author Rob Winch
*
*/
public class SimpMessageTypeMatcher implements MessageMatcher<Object> {
private final SimpMessageType typeToMatch;
/**
* Creates a new instance
*
* @param typeToMatch
* the {@link SimpMessageType} that will result in a match.
* Cannot be null.
*/
public SimpMessageTypeMatcher(SimpMessageType typeToMatch) {
Assert.notNull(typeToMatch, "typeToMatch cannot be null");
this.typeToMatch = typeToMatch;
}
public boolean matches(Message<? extends Object> message) {
MessageHeaders headers = message.getHeaders();
SimpMessageType messageType = SimpMessageHeaderAccessor
.getMessageType(headers);
return typeToMatch == messageType;
}
@Override
public boolean equals(Object other) {
if (this == other) {
return true;
}
if (!(other instanceof SimpMessageTypeMatcher)) {
return false;
}
SimpMessageTypeMatcher otherMatcher = (SimpMessageTypeMatcher) other;
return ObjectUtils.nullSafeEquals(this.typeToMatch,
otherMatcher.typeToMatch);
}
public int hashCode() {
// Using nullSafeHashCode for proper array hashCode handling
return ObjectUtils.nullSafeHashCode(this.typeToMatch);
}
@Override
public String toString() {
return "SimpMessageTypeMatcher [typeToMatch=" + typeToMatch + "]";
}
} | apache-2.0 |
edom/web | concurrent/lock/src/main/java/com/spacetimecat/concurrent/semaphore/Semaphore.java | 553 | package com.spacetimecat.concurrent.semaphore;
/**
 * <p>
 * Counting semaphore: a pool of identical permits ("units") that callers
 * acquire and release. Implementations define the pool size and whether
 * {@link #acquire()} blocks or fails fast; this interface only fixes the
 * acquire/release contract.
 * </p>
 */
public interface Semaphore
{
    /**
     * <p>
     * Acquire a unit. This call does not distinguish between callers; the
     * caller becomes responsible for a matching {@link #release()} only
     * when the method returns true.
     * </p>
     *
     * @return
     * true if we now own a unit;
     * false if the semaphore did not have any unit left to give to us.
     */
    boolean acquire ();
    /**
     * <p>
     * Release a unit previously obtained with a successful {@link #acquire()}.
     * </p>
     *
     * @throws SemaphoreException
     * if this detects a {@code release} without matching {@link #acquire()}
     */
    void release ();
}
| apache-2.0 |
wiacekm/gatling | gatling-app/src/main/scala/io/gatling/app/classloader/FileSystemBackedClassLoader.scala | 4079 | /*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.app.classloader
import java.io.InputStream
import java.net.{ URL, URLConnection }
import java.nio.file.Path
import java.security.cert.Certificate
import java.security.{ CodeSource, ProtectionDomain }
import scala.collection.mutable
import io.gatling.commons.util.Io._
import io.gatling.commons.util.PathHelper._
// ClassLoader that serves classes and resources from a directory tree rooted
// at `root`, falling back to `parent` where the standard delegation model
// requires it. String-to-Path conversions below rely on the implicit helpers
// imported from PathHelper._.
private[classloader] class FileSystemBackedClassLoader(root: Path, parent: ClassLoader)
  extends ClassLoader(parent) {

  // "a.b.C" -> "a/b/C.class"; a name already ending in ".class" is used as-is.
  def classNameToPath(name: String): Path =
    if (name endsWith ".class") name
    else name.replace('.', '/') + ".class"

  // "a.b" -> "a/b" (package name to directory path).
  def dirNameToPath(name: String): Path =
    name.replace('.', '/')

  // Resolves `path` against the root directory; None when nothing exists there.
  def findPath(path: Path): Option[Path] = {
    val fullPath = root / path
    if (fullPath.exists) Some(fullPath) else None
  }

  // Wraps a found file in a URL with a custom no-op connection whose stream
  // reads straight from the file (custom "repldir:" pseudo-protocol).
  override def findResource(name: String): URL = findPath(name).map { path =>
    new URL(null, "repldir:" + path, (url: URL) => new URLConnection(url) {
      override def connect(): Unit = ()
      override def getInputStream: InputStream = path.inputStream
    })
  }.orNull

  // Prefers a file under `root`; otherwise defers to the parent loader.
  override def getResourceAsStream(name: String): InputStream = findPath(name) match {
    case Some(path) => path.inputStream
    case None => super.getResourceAsStream(name)
  }

  // Looks the class file up through the resource mechanism (parent included).
  private def classAsStream(className: String): Option[InputStream] =
    Option(getResourceAsStream(className.replaceAll("""\.""", "/") + ".class"))

  // Raw bytes of the class file, or an empty array when it cannot be found
  // either under `root` or via the resource fallback.
  def classBytes(name: String): Array[Byte] = findPath(classNameToPath(name)) match {
    case Some(path) => path.inputStream.toByteArray()
    case None => classAsStream(name) match {
      case Some(stream) => stream.toByteArray()
      case None => Array.empty
    }
  }

  override def findClass(name: String): Class[_] = {
    val bytes = classBytes(name)
    if (bytes.length == 0) throw new ClassNotFoundException(name)
    else defineClass(name, bytes, 0, bytes.length, protectionDomain)
  }

  // Cache of Package objects created reflectively in getPackage below.
  private val pckgs = mutable.Map[String, Package]()

  // Derives a ProtectionDomain from the jar containing the Scala runtime on
  // the context classloader; null when that cannot be determined (defineClass
  // then falls back to its default domain).
  private lazy val protectionDomain = {
    val cl = Thread.currentThread.getContextClassLoader
    val resource = cl.getResource("scala/runtime/package.class")
    if (resource == null || resource.getProtocol != "jar") null else {
      val s = resource.getPath
      val n = s.lastIndexOf('!')
      if (n < 0) null else {
        val path = s.substring(0, n)
        new ProtectionDomain(new CodeSource(new URL(path), null.asInstanceOf[Array[Certificate]]), null, this, null)
      }
    }
  }

  // Deliberately unsupported: packages are materialized lazily in getPackage.
  override def definePackage(name: String, specTitle: String,
                             specVersion: String, specVendor: String,
                             implTitle: String, implVersion: String,
                             implVendor: String, sealBase: URL): Package = {
    throw new UnsupportedOperationException()
  }

  // A directory under `root` matching the package name yields a synthetic
  // Package instance (constructed via reflection, since the constructor is
  // not public); anything else defers to the parent.
  override def getPackage(name: String): Package = findPath(dirNameToPath(name)) match {
    case None => super.getPackage(name)
    case _ => pckgs.getOrElseUpdate(name, {
      val constructor = classOf[Package].getDeclaredConstructor(
        classOf[String], classOf[String], classOf[String],
        classOf[String], classOf[String], classOf[String],
        classOf[String], classOf[URL], classOf[ClassLoader]
      )
      constructor.setAccessible(true)
      constructor.newInstance(name, null, null, null, null, null, null, null, this)
    })
  }

  // Every directory below `root` is reported as a package.
  override def getPackages: Array[Package] =
    root.deepDirs().map(path => getPackage(path.toString)).toArray
}
| apache-2.0 |
MartinBechtle/jcanary | jcanary-api/src/main/java/com/martinbechtle/jcanary/api/Dependency.java | 1908 | package com.martinbechtle.jcanary.api;
import java.util.Objects;

import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;

import static com.martinbechtle.jrequire.Require.notEmpty;
import static com.martinbechtle.jrequire.Require.notNull;
/**
* Represents a dependency of the system. It's identified with a name, a {@link DependencyType} and {@link DependencyImportance}.
*
* @author Martin Bechtle
*/
/**
 * Represents a dependency of the system. It's identified with a name, a
 * {@link DependencyType} and {@link DependencyImportance}.
 *
 * <p>Instances are immutable (all fields final, validated in the
 * constructor) and therefore safe to share between threads.
 *
 * @author Martin Bechtle
 */
public class Dependency {

    private final DependencyImportance importance;

    private final DependencyType type;

    private final String name;

    /**
     * @param importance how critical the dependency is, not null
     * @param type the kind of dependency, not null
     * @param name identifier of the dependency, not empty
     */
    public Dependency(DependencyImportance importance, DependencyType type, String name) {
        this.importance = notNull(importance);
        this.type = notNull(type);
        this.name = notEmpty(name);
    }

    public DependencyImportance getImportance() {
        return importance;
    }

    public DependencyType getType() {
        return type;
    }

    public String getName() {
        return name;
    }

    // equals/hashCode use java.util.Objects instead of the commons-lang
    // builders: same semantics, no third-party dependency.
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof Dependency)) {
            return false;
        }
        Dependency that = (Dependency) o;
        return Objects.equals(importance, that.importance)
                && Objects.equals(type, that.type)
                && Objects.equals(name, that.name);
    }

    @Override
    public int hashCode() {
        return Objects.hash(importance, type, name);
    }

    @Override
    public String toString() {
        return "Dependency{" +
                "importance=" + importance +
                ", type=" + type +
                ", name='" + name + '\'' +
                '}';
    }
}
| apache-2.0 |
scorpionvicky/elasticsearch | server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/FieldHighlightContext.java | 2034 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.fetch.subphase.highlight;
import org.apache.lucene.search.Query;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.search.fetch.FetchContext;
import org.elasticsearch.search.fetch.FetchSubPhase;
/**
 * Immutable value object bundling everything a highlighter needs to
 * highlight one field of one hit: the field name and its mapping, the
 * per-field highlight settings, the fetch context, the current hit and the
 * query to highlight against. All state is exposed as public final fields.
 */
public class FieldHighlightContext {

    public final String fieldName;
    public final SearchHighlightContext.Field field;
    public final MappedFieldType fieldType;
    public final FetchContext context;
    public final FetchSubPhase.HitContext hitContext;
    public final Query query;
    // NOTE(review): presumably forces loading the field value from _source
    // even when a stored field is available — confirm against callers.
    public final boolean forceSource;

    public FieldHighlightContext(String fieldName,
                                 SearchHighlightContext.Field field,
                                 MappedFieldType fieldType,
                                 FetchContext context,
                                 FetchSubPhase.HitContext hitContext,
                                 Query query,
                                 boolean forceSource) {
        this.fieldName = fieldName;
        this.field = field;
        this.fieldType = fieldType;
        this.context = context;
        this.hitContext = hitContext;
        this.query = query;
        this.forceSource = forceSource;
    }
}
| apache-2.0 |
OpenFeign/feign | core/src/main/java/feign/template/QueryTemplate.java | 6953 | /*
* Copyright 2012-2022 The Feign Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package feign.template;
import feign.CollectionFormat;
import feign.Util;
import feign.template.Template.EncodingOptions;
import feign.template.Template.ExpansionOptions;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
* Template for a Query String parameter.
*/
/**
 * Template for a Query String parameter. Holds the parameter name and its
 * values as {@link Template}s so both sides may contain URI-template
 * variables that are expanded later.
 */
public final class QueryTemplate {

  // Dead constant removed: the original declared
  // `private static final String UNDEF = "undef";` which was never
  // referenced anywhere in this (final) class.

  /** Value templates; empty for a "pure" parameter (name with no values). */
  private List<Template> values;
  private final Template name;
  private final CollectionFormat collectionFormat;
  // True when the parameter has no values at all, e.g. "?flag".
  private boolean pure = false;

  /**
   * Create a new Query Template.
   *
   * @param name of the query parameter.
   * @param values in the template.
   * @param charset for the template.
   * @return a QueryTemplate.
   */
  public static QueryTemplate create(String name, Iterable<String> values, Charset charset) {
    return create(name, values, charset, CollectionFormat.EXPLODED, true);
  }

  public static QueryTemplate create(String name,
                                     Iterable<String> values,
                                     Charset charset,
                                     CollectionFormat collectionFormat) {
    return create(name, values, charset, collectionFormat, true);
  }

  /**
   * Create a new Query Template.
   *
   * @param name of the query parameter.
   * @param values in the template.
   * @param charset for the template.
   * @param collectionFormat to use.
   * @param decodeSlash if slash characters should be decoded
   * @return a QueryTemplate
   * @throws IllegalArgumentException if the name is blank or values is null.
   */
  public static QueryTemplate create(String name,
                                     Iterable<String> values,
                                     Charset charset,
                                     CollectionFormat collectionFormat,
                                     boolean decodeSlash) {
    if (Util.isBlank(name)) {
      throw new IllegalArgumentException("name is required.");
    }
    if (values == null) {
      throw new IllegalArgumentException("values are required");
    }

    /* remove all empty values from the array */
    Collection<String> remaining = StreamSupport.stream(values.spliterator(), false)
        .filter(Util::isNotBlank)
        .collect(Collectors.toList());

    return new QueryTemplate(name, remaining, charset, collectionFormat, decodeSlash);
  }

  /**
   * Append a value to the Query Template.
   *
   * @param queryTemplate to append to.
   * @param values to append.
   * @return a new QueryTemplate with value appended.
   */
  public static QueryTemplate append(QueryTemplate queryTemplate,
                                     Iterable<String> values,
                                     CollectionFormat collectionFormat,
                                     boolean decodeSlash) {
    List<String> queryValues = new ArrayList<>(queryTemplate.getValues());
    queryValues.addAll(StreamSupport.stream(values.spliterator(), false)
        .filter(Util::isNotBlank)
        .collect(Collectors.toList()));
    return create(queryTemplate.getName(), queryValues, StandardCharsets.UTF_8,
        collectionFormat, decodeSlash);
  }

  /**
   * Create a new Query Template.
   *
   * @param name of the query parameter.
   * @param values for the parameter.
   * @param collectionFormat to use.
   */
  private QueryTemplate(
      String name,
      Iterable<String> values,
      Charset charset,
      CollectionFormat collectionFormat,
      boolean decodeSlash) {
    this.values = new CopyOnWriteArrayList<>();
    this.name = new Template(name, ExpansionOptions.ALLOW_UNRESOLVED, EncodingOptions.REQUIRED,
        !decodeSlash, charset);
    this.collectionFormat = collectionFormat;

    /* parse each value into a template chunk for resolution later */
    for (String value : values) {
      if (value.isEmpty()) {
        /* skip */
        continue;
      }
      this.values.add(
          new Template(
              value,
              ExpansionOptions.REQUIRED,
              EncodingOptions.REQUIRED,
              !decodeSlash,
              charset));
    }

    if (this.values.isEmpty()) {
      /* in this case, we have a pure parameter */
      this.pure = true;
    }
  }

  public List<String> getValues() {
    return Collections.unmodifiableList(this.values.stream()
        .map(Template::toString)
        .collect(Collectors.toList()));
  }

  public List<String> getVariables() {
    List<String> variables = new ArrayList<>(this.name.getVariables());
    for (Template template : this.values) {
      variables.addAll(template.getVariables());
    }
    return Collections.unmodifiableList(variables);
  }

  public String getName() {
    return name.toString();
  }

  @Override
  public String toString() {
    return this.queryString(this.name.toString(), this.getValues());
  }

  /**
   * Expand this template. Unresolved variables are removed. If all values remain unresolved, the
   * result is an empty string.
   *
   * @param variables containing the values for expansion.
   * @return the expanded template.
   */
  public String expand(Map<String, ?> variables) {
    String name = this.name.expand(variables);

    if (this.pure) {
      return name;
    }

    List<String> expanded = new ArrayList<>();
    for (Template template : this.values) {
      String result = template.expand(variables);

      if (result == null) {
        continue;
      }

      /*
       * check for an iterable result, and if one is there, we need to split it into individual
       * values. NOTE: this means a single expanded value containing a literal
       * comma is also split — long-standing behavior callers may rely on.
       */
      if (result.contains(",")) {
        /* we need to split it */
        expanded.addAll(Arrays.asList(result.split(",")));
      } else {
        expanded.add(result);
      }
    }

    return this.queryString(name, Collections.unmodifiableList(expanded));
  }

  private String queryString(String name, List<String> values) {
    if (this.pure) {
      return name;
    }

    if (!values.isEmpty()) {
      return this.collectionFormat.join(name, values, StandardCharsets.UTF_8).toString();
    }

    /* nothing to return, all values are unresolved */
    return null;
  }
}
| apache-2.0 |
sandymariscal22/BrandsCsCart | public_html/app/schemas/exim/products.php | 16418 | <?php
/***************************************************************************
* *
* (c) 2004 Vladimir V. Kalynyak, Alexey V. Vinokurov, Ilya M. Shalnev *
* *
* This is commercial software, only users who have purchased a valid *
* license and accept to the terms of the License Agreement can install *
* and use this program. *
* *
****************************************************************************
* PLEASE READ THE FULL TEXT OF THE SOFTWARE LICENSE AGREEMENT IN THE *
* "copyright.txt" FILE PROVIDED WITH THIS DISTRIBUTION PACKAGE. *
****************************************************************************/
use Tygh\Registry;
include_once(Registry::get('config.dir.schemas') . 'exim/products.functions.php');
include_once(Registry::get('config.dir.schemas') . 'exim/features.functions.php');
// Import/export (exim) schema for products: describes the main table, the
// joined reference tables, the user-selectable options and every exportable
// field with its get/put conversion callbacks.
$schema = array(
    'section' => 'products',
    'name' => __('products'),
    'pattern_id' => 'products',
    'key' => array('product_id'),
    'order' => 0,
    'table' => 'products',
    // Tables joined to `products` during export/import. '#key' refers to the
    // primary key of the current record, '#lang_code' to the selected language.
    'references' => array(
        'product_descriptions' => array(
            'reference_fields' => array('product_id' => '#key', 'lang_code' => '#lang_code'),
            'join_type' => 'LEFT'
        ),
        'product_prices' => array(
            'reference_fields' => array('product_id' => '#key', 'lower_limit' => 1, 'usergroup_id' => 0),
            'join_type' => 'LEFT'
        ),
        'images_links' => array(
            'reference_fields' => array('object_id' => '#key', 'object_type' => 'product', 'type' => 'M'),
            'join_type' => 'LEFT',
            'import_skip_db_processing' => true
        ),
        'companies' => array(
            'reference_fields' => array('company_id' => '&company_id'),
            'join_type' => 'LEFT',
            'import_skip_db_processing' => true
        )
    ),
    'condition' => array(
        'use_company_condition' => true,
    ),
    // Callbacks executed once before the import/export run starts.
    'pre_processing' => array(
        'reset_inventory' => array(
            'function' => 'fn_exim_reset_inventory',
            'args' => array('@reset_inventory'),
        ),
        'check_product_code' => array(
            'function' => 'fn_check_product_code',
            'args' => array('$import_data'),
            'import_only' => true,
        )
    ),
    // Callbacks executed once after all records have been processed.
    'post_processing' => array(
        'send_product_notifications' => array(
            'function' => 'fn_exim_send_product_notifications',
            'args' => array('$primary_object_ids', '$import_data', '$auth'),
            'import_only' => true,
        ),
    ),
    'import_get_primary_object_id' => array(
        'fill_products_alt_keys' => array(
            'function' => 'fn_import_fill_products_alt_keys',
            'args' => array('$pattern', '$alt_keys', '$object', '$skip_get_primary_object_id'),
            'import_only' => true,
        ),
    ),
    'import_process_data' => array(
        'unset_product_id' => array(
            'function' => 'fn_import_unset_product_id',
            'args' => array('$object'),
            'import_only' => true,
        ),
    ),
    'range_options' => array(
        'selector_url' => 'products.manage',
        'object_name' => __('products'),
    ),
    'notes' => array(
        'text_exim_import_options_note',
        'text_exim_import_features_note',
        'text_exim_import_images_note',
        'text_exim_import_files_note',
    ),
    // User-tunable options shown on the import/export screens.
    'options' => array(
        'lang_code' => array(
            'title' => 'language',
            'type' => 'languages',
            'default_value' => array(DEFAULT_LANGUAGE),
        ),
        'category_delimiter' => array(
            'title' => 'category_delimiter',
            'description' => 'text_category_delimiter',
            'type' => 'input',
            'default_value' => '///'
        ),
        'features_delimiter' => array(
            'title' => 'features_delimiter',
            'description' => 'text_features_delimiter',
            'type' => 'input',
            'default_value' => '///'
        ),
        'images_path' => array(
            'title' => 'images_directory',
            'description' => 'text_images_directory',
            'type' => 'input',
            'default_value' => 'exim/backup/images/',
            'notes' => __('text_file_editor_notice', array('[href]' => fn_url('file_editor.manage?active_section=files&selected_path=/'))),
        ),
        'files_path' => array(
            'title' => 'files_directory',
            'description' => 'text_files_directory',
            'type' => 'input',
            'default_value' => 'exim/backup/downloads/',
            'notes' => __('text_file_editor_notice', array('[href]' => fn_url('file_editor.manage?active_section=files&selected_path=/'))),
        ),
        'delete_files' => array(
            'title' => 'drop_existing_data',
            'type' => 'checkbox',
            'import_only' => true
        ),
        'reset_inventory' => array(
            'title' => 'reset_inventory',
            'description' => 'text_reset_inventory_description',
            'type' => 'checkbox',
            'import_only' => true
        ),
        'price_dec_sign_delimiter' => array(
            'title' => 'price_dec_sign_delimiter',
            'description' => 'text_price_dec_sign_delimiter',
            'type' => 'input',
            'default_value' => '.'
        ),
    ),
    // Exportable columns. 'process_get' formats a value on export,
    // 'process_put' stores it on import; 'linked' => false means the field
    // is handled entirely by its callbacks rather than by the DB layer.
    'export_fields' => array(
        // 'alt_key' marks the business key used to find existing records.
        'Product code' => array(
            'db_field' => 'product_code',
            'alt_key' => true,
            'required' => true,
            'alt_field' => 'product_id'
        ),
        'Language' => array(
            'table' => 'product_descriptions',
            'db_field' => 'lang_code',
            'type' => 'languages',
            'required' => true,
            'multilang' => true
        ),
        'Product id' => array(
            'db_field' => 'product_id'
        ),
        'Category' => array(
            'process_get' => array('fn_exim_get_product_categories', '#key', 'M', '@category_delimiter', '#lang_code'),
            'process_put' => array('fn_exim_set_product_categories', '#key', 'M', '#this', '@category_delimiter'),
            'multilang' => true,
            'linked' => false, // this field is not linked during import-export
            'default' => 'Products' // default value applies only when we creating new record
        ),
        'List price' => array(
            'db_field' => 'list_price',
            'convert_put' => array('fn_exim_import_price', '#this', '@price_dec_sign_delimiter'),
            'process_get' => array('fn_exim_export_price', '#this', '@price_dec_sign_delimiter'),
        ),
        'Price' => array(
            'table' => 'product_prices',
            'db_field' => 'price',
            'convert_put' => array('fn_exim_import_price', '#this', '@price_dec_sign_delimiter'),
            'process_put' => array('fn_import_product_price', '#key', '#this', '#new'),
            'process_get' => array('fn_exim_export_price', '#this', '@price_dec_sign_delimiter'),
        ),
        'Status' => array(
            'db_field' => 'status'
        ),
        'Quantity' => array(
            'db_field' => 'amount'
        ),
        'Weight' => array(
            'db_field' => 'weight'
        ),
        'Min quantity' => array(
            'db_field' => 'min_qty'
        ),
        'Max quantity' => array(
            'db_field' => 'max_qty'
        ),
        'Quantity step' => array(
            'db_field' => 'qty_step'
        ),
        'List qty count' => array(
            'db_field' => 'list_qty_count'
        ),
        'Shipping freight' => array(
            'db_field' => 'shipping_freight',
            'convert_put' => array('fn_exim_import_price', '#this', '@price_dec_sign_delimiter'),
            'process_get' => array('fn_exim_export_price', '#this', '@price_dec_sign_delimiter'),
        ),
        'Date added' => array(
            'db_field' => 'timestamp',
            'process_get' => array('fn_timestamp_to_date', '#this'),
            'convert_put' => array('fn_date_to_timestamp', '#this'),
            'return_result' => true
        ),
        'Downloadable' => array(
            'db_field' => 'is_edp',
        ),
        'Files' => array(
            'process_get' => array('fn_exim_export_file', '#key', '@files_path'),
            'process_put' => array('fn_exim_import_file', '#key', '#this', '@files_path', '@delete_files'),
            'linked' => false, // this field is not linked during import-export
        ),
        'Ship downloadable' => array(
            'db_field' => 'edp_shipping',
        ),
        'Inventory tracking' => array(
            'db_field' => 'tracking',
        ),
        'Out of stock actions' => array(
            'db_field' => 'out_of_stock_actions',
        ),
        'Free shipping' => array(
            'db_field' => 'free_shipping',
        ),
        'Feature comparison' => array(
            'db_field' => 'feature_comparison',
        ),
        'Zero price action' => array(
            'db_field' => 'zero_price_action',
        ),
        'Thumbnail' => array(
            'table' => 'images_links',
            'db_field' => 'image_id',
            'use_put_from' => '%Detailed image%',
            'process_get' => array('fn_export_image', '#this', 'product', '@images_path')
        ),
        'Detailed image' => array(
            'db_field' => 'detailed_id',
            'table' => 'images_links',
            'process_get' => array('fn_export_image', '#this', 'detailed', '@images_path'),
            'process_put' => array('fn_import_images', '@images_path', '%Thumbnail%', '#this', '0', 'M', '#key', 'product')
        ),
        'Product name' => array(
            'table' => 'product_descriptions',
            'db_field' => 'product',
            'multilang' => true
        ),
        'Description' => array(
            'table' => 'product_descriptions',
            'db_field' => 'full_description',
            'multilang' => true
        ),
        'Short description' => array(
            'table' => 'product_descriptions',
            'db_field' => 'short_description',
            'multilang' => true
        ),
        'Meta keywords' => array(
            'table' => 'product_descriptions',
            'db_field' => 'meta_keywords',
            'multilang' => true
        ),
        'Meta description' => array(
            'table' => 'product_descriptions',
            'db_field' => 'meta_description',
            'multilang' => true
        ),
        'Search words' => array(
            'table' => 'product_descriptions',
            'db_field' => 'search_words',
            'multilang' => true
        ),
        'Page title' => array(
            'table' => 'product_descriptions',
            'db_field' => 'page_title',
            'multilang' => true
        ),
        'Taxes' => array(
            'db_field' => 'tax_ids',
            'process_get' => array('fn_exim_get_taxes', '#this', '#lang_code'),
            'process_put' => array('fn_exim_set_taxes', '#key', '#this'),
            'multilang' => true,
            'return_result' => true
        ),
        'Features' => array(
            'process_get' => array('fn_exim_get_product_features', '#key', '@features_delimiter', '#lang_code'),
            'process_put' => array('fn_exim_set_product_features', '#key', '#this', '@features_delimiter', '#lang_code'),
            'linked' => false, // this field is not linked during import-export
        ),
        'Options' => array(
            'process_get' => array('fn_exim_get_product_options', '#key', '#lang_code'),
            'process_put' => array('fn_exim_set_product_options', '#key', '#this', '#lang_code'),
            'linked' => false, // this field is not linked during import-export
        ),
        'Secondary categories' => array(
            'process_get' => array('fn_exim_get_product_categories', '#key', 'A', '@category_delimiter', '#lang_code'),
            'process_put' => array('fn_exim_set_product_categories', '#key', 'A', '#this', '@category_delimiter'),
            'multilang' => true,
            'linked' => false, // this field is not linked during import-export
        ),
        // The URL fields below are export-only convenience columns.
        'Product URL' => array(
            'process_get' => array('fn_exim_get_product_url', '#key', '#lang_code'),
            'multilang' => true,
            'linked' => false,
            'export_only' => true,
        ),
        'Image URL' => array(
            'process_get' => array('fn_exim_get_image_url', '#key', 'product', 'M', true, false, '#lang_code'),
            'multilang' => true,
            'db_field' => 'image_id',
            'table' => 'images_links',
            'export_only' => true,
        ),
        'Detailed image URL' => array(
            'process_get' => array('fn_exim_get_detailed_image_url', '#key', 'product', 'M', '#lang_code'),
            'db_field' => 'detailed_id',
            'table' => 'images_links',
            'export_only' => true,
        ),
        'Items in box' => array(
            'process_get' => array('fn_exim_get_items_in_box', '#key'),
            'process_put' => array('fn_exim_put_items_in_box', '#key', '#this'),
            'linked' => false, // this field is not linked during import-export
        ),
        'Box size' => array(
            'process_get' => array('fn_exim_get_box_size', '#key'),
            'process_put' => array('fn_exim_put_box_size', '#key', '#this'),
            'linked' => false, // this field is not linked during import-export
        ),
    ),
);

// Localizations are unavailable in the free edition or when disabled by tweak.
if (!fn_allowed_for('ULTIMATE:FREE') && Registry::get('config.tweaks.disable_localizations') == false) {
    $schema['export_fields']['Localizations'] = array(
        'db_field' => 'localization',
        'process_get' => array('fn_exim_get_localizations', '#this', '#lang_code'),
        'process_put' => array('fn_exim_set_localizations', '#key', '#this'),
        'return_result' => true,
        'multilang' => true,
    );
}

// Shared column definition for the product owner company, exposed as
// "Store" (ULTIMATE) or "Vendor" (MULTIVENDOR) below.
$company_schema = array(
    'table' => 'companies',
    'db_field' => 'company',
    'process_put' => array('fn_exim_set_product_company', '#key', '#this')
);

if (fn_allowed_for('ULTIMATE')) {
    $schema['export_fields']['Store'] = $company_schema;
    $schema['export_fields']['Price']['process_put'] = array('fn_import_product_price', '#key', '#this', '#new', '%Store%');

    // Root admin (no runtime company) must specify the store explicitly and
    // the category/feature setters become store-aware.
    if (!Registry::get('runtime.company_id')) {
        $schema['export_fields']['Store']['required'] = true;
        $schema['export_fields']['Category']['process_put'] = array('fn_exim_set_product_categories', '#key', 'M', '#this', '@category_delimiter', '%Store%');
        $schema['export_fields']['Features']['process_put'] = array('fn_exim_set_product_features', '#key', '#this', '@features_delimiter', '#lang_code', '%Store%');
        $schema['export_fields']['Secondary categories']['process_put'] = array('fn_exim_set_product_categories', '#key', 'A', '#this', '@category_delimiter', '%Store%');
    }

    $schema['import_process_data']['check_product_company_id'] = array(
        'function' => 'fn_import_check_product_company_id',
        'args' => array('$primary_object_id', '$object', '$pattern', '$options', '$processed_data', '$processing_groups', '$skip_record'),
        'import_only' => true,
    );
}

if (fn_allowed_for('MULTIVENDOR')) {
    $schema['export_fields']['Vendor'] = $company_schema;

    if (!Registry::get('runtime.company_id')) {
        $schema['export_fields']['Vendor']['required'] = true;
    } else {
        // A logged-in vendor may only import their own products; these
        // checks enforce ownership per record.
        $schema['import_process_data']['mve_import_check_product_data'] = array(
            'function' => 'fn_mve_import_check_product_data',
            'args' => array('$object', '$primary_object_id','$options', '$processed_data', '$skip_record'),
            'import_only' => true,
        );
        $schema['import_process_data']['mve_import_check_object_id'] = array(
            'function' => 'fn_mve_import_check_object_id',
            'args' => array('$primary_object_id', '$processed_data', '$skip_record'),
            'import_only' => true,
        );
    }
}

return $schema;
| apache-2.0 |
webadvancedservicescom/magento | app/code/Magento/Newsletter/Controller/Adminhtml/Subscriber/ExportXml.php | 836 | <?php
/**
*
* @copyright Copyright (c) 2014 X.commerce, Inc. (http://www.magentocommerce.com)
*/
namespace Magento\Newsletter\Controller\Adminhtml\Subscriber;
use Magento\Framework\App\ResponseInterface;
use Magento\Framework\App\Filesystem\DirectoryList;
class ExportXml extends \Magento\Newsletter\Controller\Adminhtml\Subscriber
{
    /**
     * Export the subscriber grid to an Excel-XML file download.
     *
     * Renders the admin layout, fetches the export child block of the
     * subscriber grid, and streams its Excel-XML rendering as a download
     * rooted in the var/ directory.
     *
     * NOTE(review): the block name 'adminhtml.newslettrer.subscriber.grid'
     * contains a typo ("newslettrer"); presumably it matches the identical
     * name in the layout XML, so it must not be "fixed" here alone — verify
     * against the layout definition before renaming.
     *
     * @return ResponseInterface file-download response built by the file factory
     */
    public function execute()
    {
        $this->_view->loadLayout();
        $fileName = 'subscribers.xml';
        // 'grid.export' selects the export sub-block of the subscriber grid.
        $content = $this->_view->getLayout()->getChildBlock('adminhtml.newslettrer.subscriber.grid', 'grid.export');
        return $this->_fileFactory->create(
            $fileName,
            $content->getExcelFile($fileName),
            DirectoryList::VAR_DIR
        );
    }
}
| apache-2.0 |
Craftware/Kornell | kornell-gwt/src/main/java/kornell/gui/client/KornellConstantsHelper.java | 702 | package kornell.gui.client;
import java.util.MissingResourceException;
import com.google.gwt.core.client.GWT;
import kornell.core.error.KornellErrorTO;
/**
 * Static helpers for resolving localized message strings by key.
 */
public class KornellConstantsHelper {

    /** GWT-generated constants bundle backing all lookups. */
    private static KornellConstants constants = GWT.create(KornellConstants.class);

    /**
     * Looks up the message registered under {@code key}, falling back to a
     * diagnostic placeholder when the bundle has no entry for the key.
     */
    public static String getMessage(String key) {
        try {
            return constants.getString(key);
        } catch (MissingResourceException e) {
            // No resource registered for this key: surface the key itself.
            return "Message not set for key [" + key + "]";
        }
    }

    /**
     * Resolves the human-readable message for a transferred error object.
     */
    public static String getErrorMessage(KornellErrorTO kornellErrorTO) {
        return getMessage(kornellErrorTO.getMessageKey());
    }
}
| apache-2.0 |
tokee/lucene | src/test/org/apache/lucene/index/TestIndexWriterMergePolicy.java | 9702 | package org.apache.lucene.index;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import org.apache.lucene.analysis.WhitespaceAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriterConfig.OpenMode;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util._TestUtil;
import org.apache.lucene.util.LuceneTestCase;
/**
 * Exercises {@link LogDocMergePolicy}: after adds the test asserts that the
 * writer's buffered documents and on-disk segments still satisfy the merge
 * policy's structural invariants, across plain adds, per-document flushes,
 * and changes to mergeFactor / maxBufferedDocs.
 */
public class TestIndexWriterMergePolicy extends LuceneTestCase {

  // Test the normal case
  public void testNormalCase() throws IOException {
    Directory dir = new RAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
    writer.setMergePolicy(new LogDocMergePolicy(writer));

    for (int i = 0; i < 100; i++) {
      addDoc(writer);
      checkInvariants(writer);
    }

    writer.close();
  }

  // Test to see if there is over merge
  public void testNoOverMerge() throws IOException {
    Directory dir = new RAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
    writer.setMergePolicy(new LogDocMergePolicy(writer));

    boolean noOverMerge = false;
    for (int i = 0; i < 100; i++) {
      addDoc(writer);
      checkInvariants(writer);
      // Once enough buffered docs plus segments accumulate without being
      // merged away, the policy provably did not merge too aggressively.
      if (writer.getNumBufferedDocuments() + writer.getSegmentCount() >= 18) {
        noOverMerge = true;
      }
    }
    assertTrue(noOverMerge);

    writer.close();
  }

  // Test the case where flush is forced after every addDoc
  public void testForceFlush() throws IOException {
    Directory dir = new RAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
    LogDocMergePolicy mp = new LogDocMergePolicy(writer);
    mp.setMinMergeDocs(100);
    writer.setMergePolicy(mp);

    for (int i = 0; i < 100; i++) {
      addDoc(writer);
      // Closing after every add forces a flush of the single buffered doc.
      writer.close();

      writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
          .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
      writer.setMergePolicy(mp);
      mp.setMinMergeDocs(100);
      ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);
      checkInvariants(writer);
    }

    writer.close();
  }

  // Test the case where mergeFactor changes
  public void testMergeFactorChange() throws IOException {
    Directory dir = new RAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);
    writer.setMergePolicy(new LogDocMergePolicy(writer));

    for (int i = 0; i < 250; i++) {
      addDoc(writer);
      checkInvariants(writer);
    }

    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(5);

    // merge policy only fixes segments on levels where merges
    // have been triggered, so check invariants after all adds
    for (int i = 0; i < 10; i++) {
      addDoc(writer);
    }
    checkInvariants(writer);

    writer.close();
  }

  // Test the case where both mergeFactor and maxBufferedDocs change
  public void testMaxBufferedDocsChange() throws IOException {
    Directory dir = new RAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(101));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(101);
    writer.setMergePolicy(new LogDocMergePolicy(writer));

    // leftmost* segment has 1 doc
    // rightmost* segment has 100 docs
    for (int i = 1; i <= 100; i++) {
      for (int j = 0; j < i; j++) {
        addDoc(writer);
        checkInvariants(writer);
      }
      writer.close();

      writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
          .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(101));
      ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(101);
      writer.setMergePolicy(new LogDocMergePolicy(writer));
    }

    writer.close();
    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
        .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
    writer.setMergePolicy(new LogDocMergePolicy(writer));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(10);

    // merge policy only fixes segments on levels where merges
    // have been triggered, so check invariants after all adds
    for (int i = 0; i < 100; i++) {
      addDoc(writer);
    }
    checkInvariants(writer);

    for (int i = 100; i < 1000; i++) {
      addDoc(writer);
    }
    // Wait for any background merges to finish before checking invariants.
    writer.commit();
    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
    writer.commit();
    checkInvariants(writer);

    writer.close();
  }

  // Test the case where a merge results in no doc at all
  public void testMergeDocCount0() throws IOException {
    Directory dir = new RAMDirectory();

    IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT)).setMaxBufferedDocs(10));
    writer.setMergePolicy(new LogDocMergePolicy(writer));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(100);

    for (int i = 0; i < 250; i++) {
      addDoc(writer);
      checkInvariants(writer);
    }
    writer.close();

    // Delete every previously added document so subsequent merges can
    // produce segments with zero live docs.
    IndexReader reader = IndexReader.open(dir, false);
    reader.deleteDocuments(new Term("content", "aaa"));
    reader.close();

    writer = new IndexWriter(dir, new IndexWriterConfig(TEST_VERSION_CURRENT, new WhitespaceAnalyzer(TEST_VERSION_CURRENT))
        .setOpenMode(OpenMode.APPEND).setMaxBufferedDocs(10));
    writer.setMergePolicy(new LogDocMergePolicy(writer));
    ((LogMergePolicy) writer.getMergePolicy()).setMergeFactor(5);

    // merge factor is changed, so check invariants after all adds
    for (int i = 0; i < 10; i++) {
      addDoc(writer);
    }
    writer.commit();
    ((ConcurrentMergeScheduler) writer.getConfig().getMergeScheduler()).sync();
    writer.commit();
    checkInvariants(writer);
    // Only the 10 freshly added docs remain; all deleted docs merged away.
    assertEquals(10, writer.maxDoc());

    writer.close();
  }

  /** Adds a minimal single-field document ("content":"aaa") to the writer. */
  private void addDoc(IndexWriter writer) throws IOException {
    Document doc = new Document();
    doc.add(new Field("content", "aaa", Field.Store.NO, Field.Index.ANALYZED));
    writer.addDocument(doc);
  }

  /**
   * Asserts the structural invariants of the log merge policy: fewer buffered
   * docs than maxBufferedDocs, fewer than mergeFactor segments per size
   * "level", and one .cfs file per segment.
   */
  private void checkInvariants(IndexWriter writer) throws IOException {
    _TestUtil.syncConcurrentMerges(writer);

    int maxBufferedDocs = writer.getConfig().getMaxBufferedDocs();
    int mergeFactor = ((LogMergePolicy) writer.getMergePolicy()).getMergeFactor();
    int maxMergeDocs = ((LogMergePolicy) writer.getMergePolicy()).getMaxMergeDocs();

    int ramSegmentCount = writer.getNumBufferedDocuments();
    assertTrue(ramSegmentCount < maxBufferedDocs);

    // Walk segments from smallest (newest) to largest, grouping them into
    // exponential levels (lowerBound, upperBound]; a mergeable level must
    // hold fewer than mergeFactor segments, otherwise a merge was missed.
    int lowerBound = -1;
    int upperBound = maxBufferedDocs;
    int numSegments = 0;

    int segmentCount = writer.getSegmentCount();
    for (int i = segmentCount - 1; i >= 0; i--) {
      int docCount = writer.getDocCount(i);
      assertTrue(docCount > lowerBound);

      if (docCount <= upperBound) {
        numSegments++;
      } else {
        // Entering a larger level: the level just finished must be legal.
        if (upperBound * mergeFactor <= maxMergeDocs) {
          assertTrue("maxMergeDocs=" + maxMergeDocs + "; numSegments=" + numSegments + "; upperBound=" + upperBound + "; mergeFactor=" + mergeFactor + "; segs=" + writer.segString(), numSegments < mergeFactor);
        }

        // Advance level bounds until this segment's size fits.
        do {
          lowerBound = upperBound;
          upperBound *= mergeFactor;
        } while (docCount > upperBound);
        numSegments = 1;
      }
    }
    // Check the last (largest) level too.
    if (upperBound * mergeFactor <= maxMergeDocs) {
      assertTrue(numSegments < mergeFactor);
    }

    // Each segment is expected to produce exactly one .cfs file on disk.
    String[] files = writer.getDirectory().listAll();
    int segmentCfsCount = 0;
    for (int i = 0; i < files.length; i++) {
      if (files[i].endsWith(".cfs")) {
        segmentCfsCount++;
      }
    }
    assertEquals(segmentCount, segmentCfsCount);
  }

  /*
  private void printSegmentDocCounts(IndexWriter writer) {
    int segmentCount = writer.getSegmentCount();
    System.out.println("" + segmentCount + " segments total");

    for (int i = 0; i < segmentCount; i++) {
      System.out.println("  segment " + i + " has " + writer.getDocCount(i)
          + " docs");
    }
  }
  */
}
| apache-2.0 |
datenstrudel/bulbs-core | src/test/java/net/datenstrudel/bulbs/core/domain/model/bulb/CmdHwExecutorTest.java | 5677 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package net.datenstrudel.bulbs.core.domain.model.bulb;
import net.datenstrudel.bulbs.core.application.messaging.BulbsCoreEventProcessor;
import net.datenstrudel.bulbs.core.application.messaging.eventStore.DomainEventStore;
import net.datenstrudel.bulbs.core.domain.model.identity.BulbsPrincipal;
import net.datenstrudel.bulbs.core.domain.model.identity.BulbsPrincipalState;
import net.datenstrudel.bulbs.core.domain.model.infrastructure.DomainServiceLocator;
import net.datenstrudel.bulbs.core.domain.model.messaging.DomainEvent;
import net.datenstrudel.bulbs.core.domain.model.messaging.DomainEventPublisherDeferrer;
import net.datenstrudel.bulbs.core.infrastructure.services.hardwareadapter.bulb.BulbBridgeHardwareAdapter;
import net.datenstrudel.bulbs.shared.domain.model.bulb.BulbBridgeAddress;
import net.datenstrudel.bulbs.shared.domain.model.bulb.BulbState;
import net.datenstrudel.bulbs.shared.domain.model.bulb.BulbsPlatform;
import net.datenstrudel.bulbs.shared.domain.model.bulb.CommandPriority;
import net.datenstrudel.bulbs.shared.domain.model.color.ColorRGB;
import net.datenstrudel.bulbs.shared.domain.model.identity.AppId;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.test.util.ReflectionTestUtils;
import java.util.LinkedList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
/**
 * Unit test for {@code CmdHwExecutor}: verifies that running an actuator
 * command replays every contained {@code BulbState} against the hardware
 * adapter. Mockito mocks replace the hardware adapter, event store and
 * domain service locator.
 *
 * @author Thomas Wendzinski
 */
@RunWith(MockitoJUnitRunner.class)
public class CmdHwExecutorTest {

    @Mock
    BulbBridgeHardwareAdapter mk_hwAdapter;
    @Mock
    DomainEventStore mk_eventStore;

    // Fixed test bridge address; port 0 because no real connection is made.
    BulbBridgeAddress T_BRIDGE_ADDRESS = new BulbBridgeAddress("localhost", 0);
    BulbsPlatform mk_BulbsPlatform = BulbsPlatform._EMULATED;

    @Mock
    DomainServiceLocator mk_domainServiceLocator;
    // Real locator whose "instance" field is swapped for the mock per test.
    DomainServiceLocator serviceLocator;

    public CmdHwExecutorTest() {
    }

    @Before
    public void setUp() {
//        mk_hwAdapter = createMock(BulbBridgeHardwareAdapter.class);
//        mk_eventStore = createMock(DomainEventStore.class);
        // Inject the mocked event store / locator via reflection because the
        // production wiring happens through fields, not constructors.
        ReflectionTestUtils.setField(new BulbsCoreEventProcessor(), "eventStore",
                mk_eventStore);
        serviceLocator = new DomainServiceLocator();
        ReflectionTestUtils.setField(serviceLocator, "instance", mk_domainServiceLocator);
    }

    @After
    public void tearDown(){
        // Undo the locator swap so other tests see a clean state.
        ReflectionTestUtils.setField(serviceLocator, "instance", null);
    }

    // NOTE(review): disabled generated prototype — would NPE on the null
    // instance if the @Test annotation were restored without completing it.
//    @Test
    public void testCancelExecution() {
        System.out.println("cancelExecution");
        CmdHwExecutor instance = null;
        instance.cancelExecution();
        // TODO review the generated test code and remove the default call to fail.
        fail("The test case is a prototype.");
    }

    // NOTE(review): disabled generated prototype — same null-instance issue.
//    @Test
    public void testIsExecutionFinished() {
        System.out.println("isExecutionFinished");
        CmdHwExecutor instance = null;
        boolean expResult = false;
        boolean result = instance.isExecutionFinished();
        assertEquals(expResult, result);
        // TODO review the generated test code and remove the default call to fail.
        fail("The test case is a prototype.");
    }

    @Test
    public void testRun() throws Exception{
        System.out.println("run");
        CmdHwExecutor instance;

        // Command carrying COUNT_STATES states with rising RGB components.
        final int COUNT_STATES = 50;
        BulbActuatorCommand command = new BulbActuatorCommand(
                new BulbId(new BulbBridgeId("brId"), "1"),
                new AppId("testApp"), "testUserApiKey", CommandPriority.standard(),
                new LinkedList<BulbState>(){{
                    for (int i = 0; i < COUNT_STATES; i++) {
                        add(new BulbState(new ColorRGB(
                                (255/COUNT_STATES)*i, (255/COUNT_STATES)*i, (255/COUNT_STATES)*i), true));
                    }
                }},
                false
        );
        BulbsPrincipal principal = new BulbsPrincipal(
                "testPrincipalUsernam", new AppId("testCore"), "brId",
                BulbsPrincipalState.REGISTERED);
        BulbState prevState = new BulbState(false);
        instance = new CmdHwExecutor(T_BRIDGE_ADDRESS, principal, command, prevState, mk_hwAdapter, mk_BulbsPlatform);

        when(mk_domainServiceLocator.getBeanInternal(DomainEventStore.class)).thenReturn(mk_eventStore);
        when(mk_domainServiceLocator.getBeanInternal(DomainEventPublisherDeferrer.class)).thenReturn(null);
        when(mk_eventStore.store(any(DomainEvent.class))).thenReturn(-1l);
//        replay(mk_hwAdapter, mk_eventStore, mk_domainServiceLocator);

        instance.run();

        // The initial previous state may be applied at most once ...
        verify(mk_hwAdapter, atMost(1)).applyBulbState(
                any(BulbId.class), eq(T_BRIDGE_ADDRESS),
                eq(principal),
                any(BulbState.class),
                any(BulbsPlatform.class),
                eq(prevState)
        );
        // ... while every state of the command must reach the adapter.
        verify(mk_hwAdapter, atLeast(COUNT_STATES)).applyBulbState(
                any(BulbId.class), eq(T_BRIDGE_ADDRESS),
                eq(principal),
                any(BulbState.class),
                any(BulbsPlatform.class),
                any(BulbState.class)
        );
    }

    public void testRun_WithLoop() throws Exception{
        //TODO: Implement Me!
    }
}
| apache-2.0 |
komamj/KomaMusic | app/src/main/java/com/koma/music/data/model/MusicPlaybackTrack.java | 3026 | /*
* Copyright (C) 2017 Koma MJ
*
* Licensed under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package com.koma.music.data.model;
import android.os.Parcel;
import android.os.Parcelable;
/**
* Created by koma on 3/21/17.
*/
/**
* This is used by the music playback service to track the music tracks it is playing
* It has extra meta data to determine where the track came from so that we can show the appropriate
* song playing indicator
*/
public class MusicPlaybackTrack implements Parcelable {
    /**
     * The track id
     */
    public long mId;

    /**
     * Where was this track added from? Artist id/Album id/Playlist id
     */
    public long mSourceId;

    /**
     * This is only used for playlists since it is possible that a playlist can contain the same
     * song multiple times. So to prevent the song indicator showing up multiple times, we need
     * to keep track of the position
     */
    public int mSourcePosition;

    /**
     * Parcelable creator
     */
    public static final Creator<MusicPlaybackTrack> CREATOR = new Creator<MusicPlaybackTrack>() {
        @Override
        public MusicPlaybackTrack createFromParcel(Parcel source) {
            return new MusicPlaybackTrack(source);
        }

        @Override
        public MusicPlaybackTrack[] newArray(int size) {
            return new MusicPlaybackTrack[size];
        }
    };

    public MusicPlaybackTrack(long id, long sourceId, int sourcePosition) {
        mId = id;
        mSourceId = sourceId;
        mSourcePosition = sourcePosition;
    }

    /** Reconstructs a track from a parcel; read order must match writeToParcel. */
    public MusicPlaybackTrack(Parcel in) {
        mId = in.readLong();
        mSourceId = in.readLong();
        mSourcePosition = in.readInt();
    }

    @Override
    public int describeContents() {
        return 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeLong(mId);
        dest.writeLong(mSourceId);
        dest.writeInt(mSourcePosition);
    }

    /**
     * Two tracks are equal when id, source id and source position all match.
     * (The previous fall-through to super.equals was reference equality,
     * which is unreachable for a non-instance argument — simplified to false.)
     */
    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (!(o instanceof MusicPlaybackTrack)) {
            return false;
        }
        MusicPlaybackTrack other = (MusicPlaybackTrack) o;
        return mId == other.mId
                && mSourceId == other.mSourceId
                && mSourcePosition == other.mSourcePosition;
    }

    /**
     * Restores the equals/hashCode contract: the class previously overrode
     * only equals, which breaks hash-based collections.
     */
    @Override
    public int hashCode() {
        int result = (int) (mId ^ (mId >>> 32));
        result = 31 * result + (int) (mSourceId ^ (mSourceId >>> 32));
        result = 31 * result + mSourcePosition;
        return result;
    }
}
| apache-2.0 |
mF2C/COMPSs | compss/runtime/adaptors/agent/RESTagent/commons/src/main/java/es/bsc/compss/agent/rest/types/messages/LostNodeNotification.java | 1357 | /*
* Copyright 2002-2019 Barcelona Supercomputing Center (www.bsc.es)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package es.bsc.compss.agent.rest.types.messages;
import javax.xml.bind.annotation.XmlRootElement;
/**
 * REST message notifying that a worker node has been lost for a given
 * application.
 */
@XmlRootElement(name = "lostNode")
public class LostNodeNotification {

    // Name of the worker that became unreachable.
    private String workerName;

    // Identifier of the application affected by the loss.
    private Long appId;

    /** No-arg constructor required for XML (un)marshalling. */
    public LostNodeNotification() {
    }

    public LostNodeNotification(String workerName, Long appId) {
        this.workerName = workerName;
        this.appId = appId;
    }

    public String getWorkerName() {
        return this.workerName;
    }

    public void setWorkerName(String workerName) {
        this.workerName = workerName;
    }

    public Long getAppId() {
        return this.appId;
    }

    public void setAppId(Long appId) {
        this.appId = appId;
    }
}
| apache-2.0 |
SowaLabs/OpenNLP | opennlp-tools/src/main/java/opennlp/tools/util/featuregen/GeneratorFactory.java | 19040 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.util.featuregen;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import opennlp.tools.dictionary.Dictionary;
import opennlp.tools.util.InvalidFormatException;
import opennlp.tools.util.ext.ExtensionLoader;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
/**
* Creates a set of feature generators based on a provided XML descriptor.
*
* Example of an XML descriptor:
*
* <generators>
* <charngram min = "2" max = "5"/>
* <definition/>
* <cache>
* <window prevLength = "3" nextLength = "3">
* <generators>
* <prevmap/>
* <sentence/>
* <tokenclass/>
* <tokenpattern/>
* </generators>
* </window>
* </cache>
* </generators>
*
* Each XML element is mapped to a {@link GeneratorFactory.XmlFeatureGeneratorFactory} which
* is responsible to process the element and create the specified
* {@link AdaptiveFeatureGenerator}. Elements can contain other
* elements in this case it is the responsibility of the mapped factory to process
* the child elements correctly. In some factories this leads to recursive
* calls the
* {@link GeneratorFactory.XmlFeatureGeneratorFactory#create(Element, FeatureGeneratorResourceProvider)}
* method.
*
* In the example above the generators element is mapped to the
* {@link GeneratorFactory.AggregatedFeatureGeneratorFactory} which then
* creates all the aggregated {@link AdaptiveFeatureGenerator}s to
* accomplish this it evaluates the mapping with the same mechanism
* and gives the child element to the corresponding factories. All
* created generators are added to a new instance of the
* {@link AggregatedFeatureGenerator} which is then returned.
*/
public class GeneratorFactory {
  /**
   * An {@link XmlFeatureGeneratorFactory} constructs an
   * {@link AdaptiveFeatureGenerator} from the XML {@link Element} that
   * describes it, including any nested configuration.
   */
  static interface XmlFeatureGeneratorFactory {

    /**
     * Creates an {@link AdaptiveFeatureGenerator} from the describing
     * XML element.
     *
     * @param generatorElement the element which contains the configuration
     * @param resourceManager the resource manager which could be used
     *     to access referenced resources
     *
     * @return the configured {@link AdaptiveFeatureGenerator}
     * @throws InvalidFormatException if the element or its children are malformed
     */
    AdaptiveFeatureGenerator create(Element generatorElement,
        FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException;
  }
/**
* @see AggregatedFeatureGenerator
*/
static class AggregatedFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
public AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {
Collection<AdaptiveFeatureGenerator> aggregatedGenerators =
new LinkedList<AdaptiveFeatureGenerator>();
NodeList childNodes = generatorElement.getChildNodes();
for (int i = 0; i < childNodes.getLength(); i++) {
Node childNode = childNodes.item(i);
if (childNode instanceof Element) {
Element aggregatedGeneratorElement = (Element) childNode;
aggregatedGenerators.add(
GeneratorFactory.createGenerator(aggregatedGeneratorElement, resourceManager));
}
}
return new AggregatedFeatureGenerator(aggregatedGenerators.toArray(
new AdaptiveFeatureGenerator[aggregatedGenerators.size()]));
}
static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
factoryMap.put("generators", new AggregatedFeatureGeneratorFactory());
}
}
  /**
   * Wraps the single nested generator element in a
   * {@link CachedFeatureGenerator}.
   *
   * @see CachedFeatureGenerator
   */
  static class CachedFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

    private CachedFeatureGeneratorFactory() {
    }

    public AdaptiveFeatureGenerator create(Element generatorElement,
        FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

      Element cachedGeneratorElement = null;

      // Locate the first (and only expected) child element; other node
      // types such as text and comments are skipped.
      NodeList kids = generatorElement.getChildNodes();

      for (int i = 0; i < kids.getLength(); i++) {
        Node childNode = kids.item(i);

        if (childNode instanceof Element) {
          cachedGeneratorElement = (Element) childNode;
          break;
        }
      }

      if (cachedGeneratorElement == null) {
        throw new InvalidFormatException("Could not find containing generator element!");
      }

      AdaptiveFeatureGenerator cachedGenerator =
          GeneratorFactory.createGenerator(cachedGeneratorElement, resourceManager);

      return new CachedFeatureGenerator(cachedGenerator);
    }

    static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
      factoryMap.put("cache", new CachedFeatureGeneratorFactory());
    }
  }
/**
* @see CharacterNgramFeatureGenerator
*/
static class CharacterNgramFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
public AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {
String minString = generatorElement.getAttribute("min");
int min;
try {
min = Integer.parseInt(minString);
} catch (NumberFormatException e) {
throw new InvalidFormatException("min attribute '" + minString + "' is not a number!", e);
}
String maxString = generatorElement.getAttribute("max");
int max;
try {
max = Integer.parseInt(maxString);
} catch (NumberFormatException e) {
throw new InvalidFormatException("max attribute '" + maxString + "' is not a number!", e);
}
return new CharacterNgramFeatureGenerator(min, max);
}
static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
factoryMap.put("charngram", new CharacterNgramFeatureGeneratorFactory());
}
}
  /**
   * Handles the {@code <definition/>} element.
   *
   * NOTE(review): despite its name this factory returns an
   * {@link OutcomePriorFeatureGenerator}, not a dedicated "definition"
   * generator — presumably intentional, but worth confirming against the
   * descriptor documentation.
   */
  static class DefinitionFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

    /** XML element name this factory is registered under. */
    private static final String ELEMENT_NAME = "definition";

    private DefinitionFeatureGeneratorFactory() {
    }

    public AdaptiveFeatureGenerator create(Element generatorElement,
        FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {
      return new OutcomePriorFeatureGenerator();
    }

    static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
      factoryMap.put(ELEMENT_NAME, new DefinitionFeatureGeneratorFactory());
    }
  }
/**
* @see DictionaryFeatureGenerator
*/
static class DictionaryFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
public AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {
String dictResourceKey = generatorElement.getAttribute("dict");
Object dictResource = resourceManager.getResource(dictResourceKey);
if (!(dictResource instanceof Dictionary)) {
throw new InvalidFormatException("No dictionary resource for key: " + dictResourceKey);
}
String prefix = generatorElement.getAttribute("prefix");
return new DictionaryFeatureGenerator(prefix, (Dictionary) dictResource);
}
static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
factoryMap.put("dictionary", new DictionaryFeatureGeneratorFactory());
}
}
/**
* @see PreviousMapFeatureGenerator
*/
static class PreviousMapFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
public AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) {
return new PreviousMapFeatureGenerator();
}
static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
factoryMap.put("prevmap", new PreviousMapFeatureGeneratorFactory());
}
}
// TODO: Add parameters ...
/**
* @see SentenceFeatureGenerator
*/
static class SentenceFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
public AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) {
String beginFeatureString = generatorElement.getAttribute("begin");
boolean beginFeature = true;
if (beginFeatureString.length() != 0)
beginFeature = Boolean.parseBoolean(beginFeatureString);
String endFeatureString = generatorElement.getAttribute("end");
boolean endFeature = true;
if (endFeatureString.length() != 0)
endFeature = Boolean.parseBoolean(endFeatureString);
return new SentenceFeatureGenerator(beginFeature, endFeature);
}
static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
factoryMap.put("sentence", new SentenceFeatureGeneratorFactory());
}
}
/**
* @see TokenClassFeatureGenerator
*/
static class TokenClassFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
public AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) {
// TODO: Make it configurable ...
return new TokenClassFeatureGenerator(true);
}
static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
factoryMap.put("tokenclass", new TokenClassFeatureGeneratorFactory());
}
}
static class TokenFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
public AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) {
return new TokenFeatureGenerator();
}
static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
factoryMap.put("token", new TokenFeatureGeneratorFactory());
}
}
static class BigramNameFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
public AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) {
return new BigramNameFeatureGenerator();
}
static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
factoryMap.put("bigram", new BigramNameFeatureGeneratorFactory());
}
}
/**
* @see TokenPatternFeatureGenerator
*/
static class TokenPatternFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
public AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) {
return new TokenPatternFeatureGenerator();
}
static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
factoryMap.put("tokenpattern", new TokenPatternFeatureGeneratorFactory());
}
}
  /**
   * Wraps a single nested generator in a {@link WindowFeatureGenerator}
   * spanning {@code prevLength} previous and {@code nextLength} following
   * tokens.
   *
   * @see WindowFeatureGenerator
   */
  static class WindowFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {

    public AdaptiveFeatureGenerator create(Element generatorElement,
        FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {

      Element nestedGeneratorElement = null;

      // Find the first child element: the generator to be windowed.
      NodeList kids = generatorElement.getChildNodes();

      for (int i = 0; i < kids.getLength(); i++) {
        Node childNode = kids.item(i);

        if (childNode instanceof Element) {
          nestedGeneratorElement = (Element) childNode;
          break;
        }
      }

      if (nestedGeneratorElement == null) {
        throw new InvalidFormatException("window feature generator must contain" +
            " an aggregator element");
      }

      AdaptiveFeatureGenerator nestedGenerator = GeneratorFactory.createGenerator(nestedGeneratorElement, resourceManager);

      // Both window lengths are mandatory numeric attributes.
      String prevLengthString = generatorElement.getAttribute("prevLength");

      int prevLength;

      try {
        prevLength = Integer.parseInt(prevLengthString);
      } catch (NumberFormatException e) {
        throw new InvalidFormatException("prevLength attribute '" + prevLengthString + "' is not a number!", e);
      }

      String nextLengthString = generatorElement.getAttribute("nextLength");

      int nextLength;

      try {
        nextLength = Integer.parseInt(nextLengthString);
      } catch (NumberFormatException e) {
        throw new InvalidFormatException("nextLength attribute '" + nextLengthString + "' is not a number!", e);
      }

      return new WindowFeatureGenerator(nestedGenerator, prevLength, nextLength);
    }

    static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
      factoryMap.put("window", new WindowFeatureGeneratorFactory());
    }
  }
/**
 * Factory for the prefix feature generator; the element takes no attributes.
 *
 * @see PrefixFeatureGenerator
 */
static class PrefixFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
public AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) {
return new PrefixFeatureGenerator();
}
/** Registers this factory in the lookup table under the {@code prefix} element name. */
static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
factoryMap.put("prefix", new PrefixFeatureGeneratorFactory());
}
}
/**
 * Factory for the suffix feature generator; the element takes no attributes.
 *
 * @see SuffixFeatureGenerator
 */
static class SuffixFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
public AdaptiveFeatureGenerator create(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) {
return new SuffixFeatureGenerator();
}
/** Registers this factory in the lookup table under the {@code suffix} element name. */
static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
factoryMap.put("suffix", new SuffixFeatureGeneratorFactory());
}
}
/**
 * Factory for user-supplied feature generators. The implementation class is
 * named by the {@code class} attribute and loaded via the extension mechanism.
 */
static class CustomFeatureGeneratorFactory implements XmlFeatureGeneratorFactory {
  public AdaptiveFeatureGenerator create(Element generatorElement,
      FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {
    // The fully qualified implementation class name comes from the element itself.
    String className = generatorElement.getAttribute("class");
    return ExtensionLoader.instantiateExtension(AdaptiveFeatureGenerator.class, className);
  }

  /** Registers this factory in the lookup table under the {@code custom} element name. */
  static void register(Map<String, XmlFeatureGeneratorFactory> factoryMap) {
    factoryMap.put("custom", new CustomFeatureGeneratorFactory());
  }
}
// Lookup table from XML element name to the factory that builds the
// corresponding feature generator. Populated once at class-load time.
private static Map<String, XmlFeatureGeneratorFactory> factories =
new HashMap<String, XmlFeatureGeneratorFactory>();
// Each built-in factory registers itself under its element name here;
// createGenerator consults this map to dispatch on the element tag.
static {
AggregatedFeatureGeneratorFactory.register(factories);
CachedFeatureGeneratorFactory.register(factories);
CharacterNgramFeatureGeneratorFactory.register(factories);
DefinitionFeatureGeneratorFactory.register(factories);
DictionaryFeatureGeneratorFactory.register(factories);
PreviousMapFeatureGeneratorFactory.register(factories);
SentenceFeatureGeneratorFactory.register(factories);
TokenClassFeatureGeneratorFactory.register(factories);
TokenFeatureGeneratorFactory.register(factories);
BigramNameFeatureGeneratorFactory.register(factories);
TokenPatternFeatureGeneratorFactory.register(factories);
PrefixFeatureGeneratorFactory.register(factories);
SuffixFeatureGeneratorFactory.register(factories);
WindowFeatureGeneratorFactory.register(factories);
CustomFeatureGeneratorFactory.register(factories);
}
/**
 * Creates a {@link AdaptiveFeatureGenerator} for the provided element.
 * To accomplish this it looks up the corresponding factory by the
 * element tag name. The factory is then responsible for the creation
 * of the generator from the element.
 *
 * @param generatorElement the XML element describing the generator to create
 * @param resourceManager used by factories to resolve resources referenced in the descriptor
 *
 * @return the feature generator created by the matching factory
 *
 * @throws InvalidFormatException if no factory is registered for the element's tag name
 */
static AdaptiveFeatureGenerator createGenerator(Element generatorElement,
FeatureGeneratorResourceProvider resourceManager) throws InvalidFormatException {
String elementName = generatorElement.getTagName();
// Dispatch on the element name via the static registration table.
XmlFeatureGeneratorFactory generatorFactory = factories.get(elementName);
if (generatorFactory == null) {
throw new InvalidFormatException("Unexpected element: " + elementName);
}
return generatorFactory.create(generatorElement, resourceManager);
}
/**
 * Creates an {@link AdaptiveFeatureGenerator} from a provided XML descriptor.
 *
 * Usually this XML descriptor contains a set of nested feature generators
 * which are then used to generate the features by one of the opennlp
 * components.
 *
 * @param xmlDescriptorIn the {@link InputStream} from which the descriptor
 *     is read, the stream remains open and must be closed by the caller.
 * @param resourceManager the resource manager which is used to resolve resources
 *     referenced by a key in the descriptor
 *
 * @return created feature generators
 *
 * @throws IOException if an error occurs during reading from the descriptor
 *     {@link InputStream}
 * @throws InvalidFormatException if the descriptor is not well-formed XML or
 *     contains an unknown generator element
 */
public static AdaptiveFeatureGenerator create(InputStream xmlDescriptorIn,
    FeatureGeneratorResourceProvider resourceManager) throws IOException, InvalidFormatException {
  DocumentBuilder documentBuilder;
  try {
    documentBuilder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
  } catch (ParserConfigurationException e) {
    // A default-configured JAXP builder should always be constructible;
    // failure here indicates a broken runtime, not bad user input.
    throw new IllegalStateException(e);
  }
  org.w3c.dom.Document xmlDescriptorDOM;
  try {
    xmlDescriptorDOM = documentBuilder.parse(xmlDescriptorIn);
  } catch (SAXException e) {
    throw new InvalidFormatException("Descriptor is not valid XML!", e);
  }
  // The document element is the root generator; build it (and, recursively,
  // any nested generators) via the factory table.
  return createGenerator(xmlDescriptorDOM.getDocumentElement(), resourceManager);
}
}
| apache-2.0 |
jpaw/jpaw | jpaw-cmdline/src/main/java/de/jpaw/cmdline/CmdlineCallback.java | 114 | package de.jpaw.cmdline;
/**
 * Callback invoked by the command line parser so implementors can pull their
 * parameter values out of the parsing context.
 */
public interface CmdlineCallback {
/**
 * Reads this component's parameters from the given parser context.
 *
 * @param ctx the command line parser context to read parameters from
 */
void readParameters(CmdlineParserContext ctx);
}
| apache-2.0 |
mozilla-japan/gitfab2 | config/initializers/01_keychain.rb | 106 | path = Rails.root.join 'config', 'keychain.yml'
# Parse config/keychain.yml (after ERB interpolation, so the file may embed
# Ruby expressions such as ENV lookups) and expose the section for the
# current Rails environment as the global KEYS constant.
KEYS = YAML.load(ERB.new(IO.read path).result)[Rails.env]
| apache-2.0 |
alban/scope | report/node.go | 6401 | package report
import (
"time"
"github.com/weaveworks/common/mtime"
)
// Node describes a superset of the metadata that probes can collect
// about a given node in a given topology, along with the edges (aka
// adjacency) emanating from the node.
//
// Node is used as a value type throughout this file: the With* builder
// methods take a value receiver and return the modified copy.
type Node struct {
ID string `json:"id,omitempty"`
Topology string `json:"topology,omitempty"`
Counters Counters `json:"counters,omitempty"`
Sets Sets `json:"sets,omitempty"`
// Adjacency deliberately has no omitempty tag, so it is always serialized.
Adjacency IDList `json:"adjacency"`
Controls NodeControls `json:"controls,omitempty"`
LatestControls NodeControlDataLatestMap `json:"latestControls,omitempty"`
Latest StringLatestMap `json:"latest,omitempty"`
Metrics Metrics `json:"metrics,omitempty"`
Parents Sets `json:"parents,omitempty"`
Children NodeSet `json:"children,omitempty"`
}
// MakeNode creates a new Node with no initial metadata.
// Children is left as its zero value; all other collection-valued fields are
// initialized via their Make* constructors (Metrics as an empty map).
func MakeNode(id string) Node {
return Node{
ID: id,
Counters: MakeCounters(),
Sets: MakeSets(),
Adjacency: MakeIDList(),
Controls: MakeNodeControls(),
LatestControls: MakeNodeControlDataLatestMap(),
Latest: MakeStringLatestMap(),
Metrics: Metrics{},
Parents: MakeSets(),
}
}
// MakeNodeWith creates a new Node with the supplied map.
// The map entries become Latest values, all timestamped with the current time.
func MakeNodeWith(id string, m map[string]string) Node {
return MakeNode(id).WithLatests(m)
}
// WithID returns a fresh copy of n, with ID changed.
func (n Node) WithID(id string) Node {
n.ID = id
return n
}
// WithTopology returns a fresh copy of n, with Topology changed.
func (n Node) WithTopology(topology string) Node {
n.Topology = topology
return n
}
// Before is used for sorting nodes by topology and id
func (n Node) Before(other Node) bool {
return n.Topology < other.Topology || (n.Topology == other.Topology && n.ID < other.ID)
}
// Equal is used for comparing nodes by topology and id
func (n Node) Equal(other Node) bool {
return n.Topology == other.Topology && n.ID == other.ID
}
// After is used for sorting nodes by topology and id
func (n Node) After(other Node) bool {
return other.Topology < n.Topology || (other.Topology == n.Topology && other.ID < n.ID)
}
// WithLatests returns a fresh copy of n, with Metadata m merged in.
// All entries are recorded with the same timestamp (mtime.Now()).
func (n Node) WithLatests(m map[string]string) Node {
ts := mtime.Now()
for k, v := range m {
n.Latest = n.Latest.Set(k, ts, v)
}
return n
}
// WithLatest produces a new Node with k mapped to v in the Latest metadata.
func (n Node) WithLatest(k string, ts time.Time, v string) Node {
n.Latest = n.Latest.Set(k, ts, v)
return n
}
// WithCounters returns a fresh copy of n, with Counters c merged in.
func (n Node) WithCounters(c map[string]int) Node {
n.Counters = n.Counters.Merge(Counters{}.fromIntermediate(c))
return n
}
// WithSet returns a fresh copy of n, with set merged in at key.
func (n Node) WithSet(key string, set StringSet) Node {
n.Sets = n.Sets.Add(key, set)
return n
}
// WithSets returns a fresh copy of n, with sets merged in.
func (n Node) WithSets(sets Sets) Node {
n.Sets = n.Sets.Merge(sets)
return n
}
// WithMetric returns a fresh copy of n, with metric merged in at key.
// Metrics is copied before the write so the (possibly shared) map held by
// the receiver is not mutated.
func (n Node) WithMetric(key string, metric Metric) Node {
n.Metrics = n.Metrics.Copy()
n.Metrics[key] = n.Metrics[key].Merge(metric)
return n
}
// WithMetrics returns a fresh copy of n, with metrics merged in.
func (n Node) WithMetrics(metrics Metrics) Node {
n.Metrics = n.Metrics.Merge(metrics)
return n
}
// WithAdjacent returns a fresh copy of n, with 'a' added to Adjacency
func (n Node) WithAdjacent(a ...string) Node {
n.Adjacency = n.Adjacency.Add(a...)
return n
}
// WithControls returns a fresh copy of n, with cs added to Controls.
func (n Node) WithControls(cs ...string) Node {
n.Controls = n.Controls.Add(cs...)
return n
}
// WithLatestActiveControls returns a fresh copy of n, with active controls cs added to LatestControls.
// Each control is stored with a zero-valued NodeControlData.
func (n Node) WithLatestActiveControls(cs ...string) Node {
lcs := map[string]NodeControlData{}
for _, control := range cs {
lcs[control] = NodeControlData{}
}
return n.WithLatestControls(lcs)
}
// WithLatestControls returns a fresh copy of n, with lcs added to LatestControls.
// All entries share a single timestamp taken from mtime.Now().
func (n Node) WithLatestControls(lcs map[string]NodeControlData) Node {
ts := mtime.Now()
for k, v := range lcs {
n.LatestControls = n.LatestControls.Set(k, ts, v)
}
return n
}
// WithLatestControl produces a new Node with control added to it
func (n Node) WithLatestControl(control string, ts time.Time, data NodeControlData) Node {
n.LatestControls = n.LatestControls.Set(control, ts, data)
return n
}
// WithParents returns a fresh copy of n, with sets merged in.
func (n Node) WithParents(parents Sets) Node {
n.Parents = n.Parents.Merge(parents)
return n
}
// PruneParents returns a fresh copy of n, without any parents.
func (n Node) PruneParents() Node {
n.Parents = MakeSets()
return n
}
// WithChildren returns a fresh copy of n, with children merged in.
func (n Node) WithChildren(children NodeSet) Node {
n.Children = n.Children.Merge(children)
return n
}
// WithChild returns a fresh copy of n, with one child merged in.
func (n Node) WithChild(child Node) Node {
n.Children = n.Children.Merge(MakeNodeSet(child))
return n
}
// Merge merges the individual components of a node and returns a
// fresh node.
//
// An empty ID or Topology on n is filled in from other. Merge panics when
// both nodes carry non-empty but different Topology values, since nodes from
// different topologies must never be combined.
func (n Node) Merge(other Node) Node {
id := n.ID
if id == "" {
id = other.ID
}
topology := n.Topology
if topology == "" {
topology = other.Topology
} else if other.Topology != "" && topology != other.Topology {
panic("Cannot merge nodes with different topology types: " + topology + " != " + other.Topology)
}
// Every collection-valued field is merged via its own Merge operation.
return Node{
ID: id,
Topology: topology,
Counters: n.Counters.Merge(other.Counters),
Sets: n.Sets.Merge(other.Sets),
Adjacency: n.Adjacency.Merge(other.Adjacency),
Controls: n.Controls.Merge(other.Controls),
LatestControls: n.LatestControls.Merge(other.LatestControls),
Latest: n.Latest.Merge(other.Latest),
Metrics: n.Metrics.Merge(other.Metrics),
Parents: n.Parents.Merge(other.Parents),
Children: n.Children.Merge(other.Children),
}
}
| apache-2.0 |
zalando/freshly | web/server/Array_distinct.js | 263 | Array.prototype.distinct = function () {
var known = {};
var result = [];
this.forEach(item => {
if (!known[item]) {
known[item] = true;
result.push(item);
}
});
return result;
};
| apache-2.0 |
0x6e6562/diffa | client-support/src/test/scala/net/lshift/diffa/client/ScanParticipantRestClientTest.scala | 5835 | /**
* Copyright (C) 2010-2012 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.client
import org.easymock.EasyMock._
import net.lshift.diffa.kernel.config._
import org.junit.{Ignore, Test}
import java.io.ByteArrayInputStream
import org.junit.Assert._
import org.hamcrest.CoreMatchers._
import net.lshift.diffa.adapter.scanning.{ScanConstraint, StringPrefixConstraint, ScanResultEntry}
import org.joda.time.DateTime
import java.net.{SocketTimeoutException, SocketException, ConnectException, URI}
import net.lshift.diffa.kernel.config.PairRef
import net.lshift.diffa.kernel.config.QueryParameterCredentials
import net.lshift.diffa.kernel.config.BasicAuthCredentials
// NOTE(review): this whole suite is disabled via @Ignore, and the actual
// scan(...) invocations are commented out below — several tests currently
// only exercise the EasyMock setup without driving the client. Left as-is;
// comments below mark the dormant spots.
@Ignore
class ScanParticipantRestClientTest {
// Fixture constants and mocks shared by all tests.
final val JSON = "application/json"
final val pair = PairRef("key", 702L)
final val scanUrl = "http://dummy/url"
lazy val httpClient = createMock(classOf[DiffaHttpClient])
lazy val credentialsLookup = createMock(classOf[DomainCredentialsLookup])
lazy val parser = createMock(classOf[JsonScanResultParser])
lazy val nullQuery = Map[String, Seq[String]]()
lazy val scanQuery = DiffaHttpQuery(scanUrl).accepting(JSON)
//val nullAggregations: scala.Seq[CategoryFunction] = Seq()
val nullConstraints: scala.Seq[ScanConstraint] = Seq()
lazy val scanningParticipant = new ScanParticipantRestClient(pair, scanUrl, credentialsLookup, httpClient, parser)
val emptyResponseContent = "[]" + " " * 40
lazy val emptyResponse = new ByteArrayInputStream(emptyResponseContent.getBytes("UTF8"))
val parserResult = Seq[ScanResultEntry]()
lazy val sampleConstraints: Seq[ScanConstraint] = Seq(new StringPrefixConstraint("property", "thePrefix"))
//lazy val sampleAggregations: Seq[CategoryFunction] = Seq(new StringPrefixCategoryFunction("property", 1, 2, 3))
@Test
def participantShouldMakeGetRequestOnScan {
expect(httpClient.get(scanQuery, parser)).andReturn(parserResult)
replay(httpClient)
expectingNullCredentials()
// Dormant: the scan call that would satisfy the expectation is commented out.
//scanningParticipant.scan(nullConstraints, nullAggregations)
verify(httpClient)
}
@Test
def participantShouldMakeGetRequestWithAggregationsOnScan {
// Dormant: both the mock setup and the scan call are commented out.
/*
val query = scanQuery.withAggregations(sampleAggregations).withConstraints(sampleConstraints)
expect(httpClient.get(query, parser)).andReturn(parserResult)
replay(httpClient)
expectingNullCredentials()
*/
//scanningParticipant.scan(sampleConstraints, sampleAggregations)
verify(httpClient)
}
@Test
def participantReturnsParsedResponse {
val entities = Seq(ScanResultEntry.forEntity("id", "version", DateTime.now()))
expect(httpClient.get(scanQuery, parser)).andStubReturn(entities)
replay(httpClient)
expectingNullCredentials()
// Dormant: the assertion on the scan result is commented out.
/*
assertThat(scanningParticipant.scan(nullConstraints, nullAggregations),
equalTo(entities))
*/
}
/*
@Test(expected= classOf[ScanFailedException])
def shouldHandleConnectExceptionsAndRethrow {
expectHttpError(new ConnectException())
expectingNullCredentials()
scanningParticipant.scan(nullConstraints, nullAggregations)
}
@Test(expected=classOf[ScanFailedException])
def shouldHandleSocketExceptionsAndRethrow {
expectHttpError(new SocketException())
expectingNullCredentials()
scanningParticipant.scan(nullConstraints, nullAggregations)
}
@Test(expected=classOf[ScanFailedException])
def shouldHandleSocketTimeoutExceptionsAndRethrow {
expectHttpError(new SocketTimeoutException())
expectingNullCredentials()
scanningParticipant.scan(nullConstraints, nullAggregations)
}
*/
@Test
def shouldQueryForAuthMechanism {
expect(credentialsLookup.credentialsForUri(pair.space, new URI(scanUrl))).andReturn(None)
expect(httpClient.get(anyObject(), anyObject())).andStubReturn(parserResult)
replay(credentialsLookup, httpClient)
//scanningParticipant.scan(nullConstraints, nullAggregations)
verify(credentialsLookup, httpClient)
}
@Test
def itAddsQueryParameterCredentialsToTheRequest {
val credentials = QueryParameterCredentials("fred", "foobar")
expect(credentialsLookup.credentialsForUri(pair.space, new URI(scanUrl))) andReturn(Some(credentials))
val expectedQuery = scanQuery.withQuery(Map(credentials.name -> Seq(credentials.value)))
expect(httpClient.get(expectedQuery, parser)) andReturn(parserResult)
replay(credentialsLookup, httpClient)
//scanningParticipant.scan(nullConstraints, nullAggregations)
}
@Test
def itAddsBasicAuthToTheRequest {
val credentials = BasicAuthCredentials("fred", "foobar")
expect(credentialsLookup.credentialsForUri(pair.space, new URI(scanUrl))) andReturn(Some(credentials))
val expectedQuery = scanQuery.withBasicAuth(credentials.username, credentials.password)
expect(httpClient.get(expectedQuery, parser)) andReturn(parserResult)
replay(credentialsLookup, httpClient)
//scanningParticipant.scan(nullConstraints, nullAggregations)
}
// Stubs the http client to throw the given exception on any scan request.
private def expectHttpError(ex: Throwable) {
expect(httpClient.get(scanQuery, parser)).andStubThrow(ex)
replay(httpClient)
}
// Stubs the credentials lookup to report no credentials for the scan URL.
private def expectingNullCredentials() {
expect(credentialsLookup.credentialsForUri(pair.space, new URI(scanUrl))).andReturn(None)
replay(credentialsLookup)
}
} | apache-2.0 |
mandarjog/mixr | pkg/config/listChecker/bindingEvaluator.go | 1123 | // Copyright 2016 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package listChecker
// BindingEvaluator calculates the input binding values for invoking an adapter function.
type BindingEvaluator interface {
// EvaluateSymbolBinding evaluates the value of the symbol binding for the CheckList call.
// The values map supplies the symbols available for evaluation.
EvaluateSymbolBinding(values map[string]string) (string, error)
}
// NewBindingEvaluator instantiates and returns a new BindingEvaluator, based on the given configuration.
//
// NOTE: not yet implemented — it currently always returns nil, so callers
// must nil-check the result until the TODO below is resolved.
func NewBindingEvaluator(config BindingConfig) BindingEvaluator {
// TODO: Construct a new BindingEvaluator
return nil
}
| apache-2.0 |
d80harri/wr3 | wr.db/src/test/java/net/d80harri/wr/db/MyTest.java | 1184 | package net.d80harri.wr.db;
import net.d80harri.wr.db.model.Item;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.annotation.Transactional;
/**
 * Spring-backed integration test exercising {@code EntityFactory} and
 * {@code ProviderFactory} round-trips for {@code Item} entities, including
 * persisting a parent/child link and re-loading it by id.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = { "/net/d80harri/wr/db/test-application-context.xml"})
public class MyTest {
@Autowired
private EntityFactory entityFactory;
@Autowired
private ProviderFactory providerFactory;
// Runs inside a transaction, so test data is rolled back afterwards.
@Test
@Transactional
public void myTest() throws ValueProviderException {
entityFactory.selectAll(Item.class);
ValueProvider<Item> provider = providerFactory.get(Item.class);
System.out.println(provider);
// Create two provided items and link one as the parent of the other.
Item item = provider.provide();
Item parent = provider.provide();
item.setParentItem(parent);
System.out.println(item);
entityFactory.saveOrUpdate(parent);
entityFactory.saveOrUpdate(item);
// Re-load by id and touch the parent's title to force the association to resolve.
item = entityFactory.selectById(item.getId(), Item.class);
item.getParentItem().getTitle();
}
}
| apache-2.0 |
sevenler/JFinal_Authority_Johnny | jfinal-authority/src/main/java/com/jayqqaa12/system/controller/LogCtrl.java | 1285 | package com.jayqqaa12.system.controller;
import java.io.File;
import java.io.IOException;
import com.jayqqaa12.jbase.jfinal.ext.ctrl.JsonController;
import com.jayqqaa12.jbase.jfinal.ext.spring.Inject;
import com.jayqqaa12.jbase.util.Fs;
import com.jayqqaa12.model.json.SendJson;
import com.jayqqaa12.system.model.Log;
import com.jfinal.aop.Before;
import com.jfinal.ext.route.ControllerBind;
import com.jfinal.plugin.ehcache.CacheName;
import com.jfinal.plugin.ehcache.EvictInterceptor;
/**
 * JFinal controller exposing the system log under /system/log: visit counts,
 * browser statistics, paged listings, raw error log output, deletion and
 * chart data.
 */
@CacheName(value = "/system/log")
@ControllerBind(controllerKey = "/system/log")
public class LogCtrl extends JsonController<Log> {
/** Returns visit-count data as JSON. */
public void data() {
sendJson(Log.dao.getVisitCount());
}
/** Returns browser statistics as JSON. */
public void browser() {
sendJson(Log.dao.browser());
}
/** Returns a filtered page of log rows plus the matching total count. */
public void list() {
setJsonData("list", Log.dao.log(getFrom(Log.dao.tableName)));
setJsonData("total", Log.dao.getCountByWhere(getFrom(Log.dao.tableName).getWhere()));
sendJson();
}
/** Renders the raw jfinal.log file from the LOGDIR system property as plain text. */
public void error() throws IOException {
String log = Fs.readFile(new File(System.getProperty("LOGDIR") + "/jfinal.log"));
renderText(log);
}
/** Deletes the log row given by the "id" parameter; evicts this controller's cache. */
@Before(value = { EvictInterceptor.class })
public void delete() {
renderJsonResult(Log.dao.deleteById(getPara("id")));
}
/** Returns chart data serialized with Gson. */
public void chart() {
renderGson(Log.dao.chart());
}
}
| apache-2.0 |
stripe/stripe-dotnet | src/StripeTests/Infrastructure/Public/SystemNetHttpClientTest.cs | 5503 | namespace StripeTests
{
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading;
using System.Threading.Tasks;
using Moq;
using Moq.Protected;
using Newtonsoft.Json.Linq;
using Stripe;
using Xunit;
/// <summary>
/// Tests for <c>SystemNetHttpClient</c> using a mocked
/// <c>HttpMessageHandler</c> so no real network traffic occurs.
/// </summary>
public class SystemNetHttpClientTest : BaseStripeTest
{
public SystemNetHttpClientTest(MockHttpClientFixture mockHttpClientFixture)
: base(mockHttpClientFixture)
{
}
// Verifies a buffered request returns the mocked status code and body.
[Fact]
public async Task MakeRequestAsync()
{
var responseMessage = new HttpResponseMessage(HttpStatusCode.OK);
responseMessage.Content = new StringContent("Hello world!");
this.MockHttpClientFixture.MockHandler.Protected()
.Setup<Task<HttpResponseMessage>>(
"SendAsync",
ItExpr.IsAny<HttpRequestMessage>(),
ItExpr.IsAny<CancellationToken>())
.Returns(Task.FromResult(responseMessage));
var client = new SystemNetHttpClient(
new HttpClient(this.MockHttpClientFixture.MockHandler.Object));
var request = new StripeRequest(
this.StripeClient,
HttpMethod.Post,
"/foo",
null,
null);
var response = await client.MakeRequestAsync(request);
Assert.Equal(HttpStatusCode.OK, response.StatusCode);
Assert.Equal("Hello world!", response.Content);
}
// Verifies a streaming request can be converted back into a buffered response.
[Fact]
public async Task MakeStreamingRequestAsync()
{
var responseMessage = new HttpResponseMessage(HttpStatusCode.OK);
responseMessage.Content = new StringContent("Hello world!");
this.MockHttpClientFixture.MockHandler.Protected()
.Setup<Task<HttpResponseMessage>>(
"SendAsync",
ItExpr.IsAny<HttpRequestMessage>(),
ItExpr.IsAny<CancellationToken>())
.Returns(Task.FromResult(responseMessage));
var client = new SystemNetHttpClient(
new HttpClient(this.MockHttpClientFixture.MockHandler.Object));
var request = new StripeRequest(
this.StripeClient,
HttpMethod.Post,
"/foo",
null,
null);
var streamedResponse = await client.MakeStreamingRequestAsync(request);
Assert.Equal(HttpStatusCode.OK, streamedResponse.StatusCode);
var response = await streamedResponse.ToStripeResponseAsync();
Assert.Equal("Hello world!", response.Content);
}
// Verifies AppInfo shows up both in the User-Agent header and in the
// X-Stripe-Client-User-Agent JSON payload (checked by VerifyHeaders below).
[Fact]
public async Task UserAgentIncludesAppInfo()
{
var appInfo = new AppInfo
{
Name = "MyAwesomeApp",
PartnerId = "pp_123",
Version = "1.2.34",
Url = "https://myawesomeapp.info",
};
var responseMessage = new HttpResponseMessage(HttpStatusCode.OK);
responseMessage.Content = new StringContent("Hello world!");
this.MockHttpClientFixture.MockHandler.Protected()
.Setup<Task<HttpResponseMessage>>(
"SendAsync",
ItExpr.IsAny<HttpRequestMessage>(),
ItExpr.IsAny<CancellationToken>())
.Returns(Task.FromResult(responseMessage));
var client = new SystemNetHttpClient(
httpClient: new HttpClient(this.MockHttpClientFixture.MockHandler.Object),
appInfo: appInfo);
var request = new StripeRequest(
this.StripeClient,
HttpMethod.Post,
"/foo",
null,
null);
await client.MakeRequestAsync(request);
this.MockHttpClientFixture.MockHandler.Protected()
.Verify(
"SendAsync",
Times.Once(),
ItExpr.Is<HttpRequestMessage>(m => this.VerifyHeaders(m.Headers)),
ItExpr.IsAny<CancellationToken>());
}
[Fact]
public void CanInspectMaxNetworkRetries()
{
var client = new SystemNetHttpClient(
httpClient: new HttpClient(this.MockHttpClientFixture.MockHandler.Object),
maxNetworkRetries: 2);
Assert.Equal(2, client.MaxNetworkRetries);
}
[Fact]
public void CanInspectEnableTelemetry()
{
var client = new SystemNetHttpClient(
httpClient: new HttpClient(this.MockHttpClientFixture.MockHandler.Object),
enableTelemetry: true);
Assert.True(client.EnableTelemetry);
}
// Checks that the outgoing headers carry the expected app info, both in the
// plain User-Agent string and in the JSON "application" section of the
// X-Stripe-Client-User-Agent header.
private bool VerifyHeaders(HttpRequestHeaders headers)
{
var userAgent = headers.UserAgent.ToString();
var appInfo = JObject.Parse(headers.GetValues("X-Stripe-Client-User-Agent").First())["application"];
return userAgent.Contains("MyAwesomeApp/1.2.34 (https://myawesomeapp.info)") &&
appInfo.Value<string>("name") == "MyAwesomeApp" &&
appInfo.Value<string>("partner_id") == "pp_123" &&
appInfo.Value<string>("version") == "1.2.34" &&
appInfo.Value<string>("url") == "https://myawesomeapp.info";
}
}
}
| apache-2.0 |
benjchristensen/RxJava | src/main/java/io/reactivex/internal/operators/flowable/FlowableTakeUntil.java | 3970 | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.flowable;
import java.util.concurrent.atomic.*;
import org.reactivestreams.*;
import io.reactivex.internal.subscriptions.SubscriptionHelper;
import io.reactivex.internal.util.*;
/**
 * Relays items from the main source until the {@code other} Publisher signals
 * an item or completes, at which point the downstream is completed and the
 * main subscription is cancelled.
 *
 * @param <T> the value type of the main source
 * @param <U> the value type of the other (signal) source
 */
public final class FlowableTakeUntil<T, U> extends AbstractFlowableWithUpstream<T, T> {
final Publisher<? extends U> other;
public FlowableTakeUntil(Publisher<T> source, Publisher<? extends U> other) {
super(source);
this.other = other;
}
@Override
protected void subscribeActual(Subscriber<? super T> child) {
TakeUntilMainSubscriber<T> parent = new TakeUntilMainSubscriber<T>(child);
child.onSubscribe(parent);
// The other source is subscribed first so it can cut the main stream off
// even before the main source starts emitting.
other.subscribe(parent.other);
source.subscribe(parent);
}
/**
 * Subscriber for the main source. Extends AtomicInteger to provide the
 * work-in-progress counter used by HalfSerializer, which serializes signals
 * that may arrive concurrently from the main and other sources.
 */
static final class TakeUntilMainSubscriber<T> extends AtomicInteger implements Subscriber<T>, Subscription {
private static final long serialVersionUID = -4945480365982832967L;
final Subscriber<? super T> actual;
// Deferred-request state: requests may arrive before the upstream Subscription.
final AtomicLong requested;
final AtomicReference<Subscription> s;
// Collects a possible error from either source for serialized emission.
final AtomicThrowable error;
final OtherSubscriber other;
TakeUntilMainSubscriber(Subscriber<? super T> actual) {
this.actual = actual;
this.requested = new AtomicLong();
this.s = new AtomicReference<Subscription>();
this.other = new OtherSubscriber();
this.error = new AtomicThrowable();
}
@Override
public void onSubscribe(Subscription s) {
SubscriptionHelper.deferredSetOnce(this.s, requested, s);
}
@Override
public void onNext(T t) {
HalfSerializer.onNext(actual, t, this, error);
}
@Override
public void onError(Throwable t) {
// A main-source error also terminates the other subscription.
SubscriptionHelper.cancel(other);
HalfSerializer.onError(actual, t, this, error);
}
@Override
public void onComplete() {
SubscriptionHelper.cancel(other);
HalfSerializer.onComplete(actual, this, error);
}
@Override
public void request(long n) {
SubscriptionHelper.deferredRequest(s, requested, n);
}
@Override
public void cancel() {
// Downstream cancellation tears down both subscriptions.
SubscriptionHelper.cancel(s);
SubscriptionHelper.cancel(other);
}
/**
 * Subscriber for the "until" source: its first item or completion cancels
 * the main stream and completes the downstream.
 */
final class OtherSubscriber extends AtomicReference<Subscription> implements Subscriber<Object> {
private static final long serialVersionUID = -3592821756711087922L;
@Override
public void onSubscribe(Subscription s) {
if (SubscriptionHelper.setOnce(this, s)) {
// Values are only used as a signal, so request everything.
s.request(Long.MAX_VALUE);
}
}
@Override
public void onNext(Object t) {
// First signal: stop listening to the other source and complete downstream.
SubscriptionHelper.cancel(this);
onComplete();
}
@Override
public void onError(Throwable t) {
SubscriptionHelper.cancel(s);
HalfSerializer.onError(actual, t, TakeUntilMainSubscriber.this, error);
}
@Override
public void onComplete() {
SubscriptionHelper.cancel(s);
HalfSerializer.onComplete(actual, TakeUntilMainSubscriber.this, error);
}
}
}
}
| apache-2.0 |
DEVSENSE/PTVS | Common/Tests/Utilities.UI/UI/SolutionExplorerTree.cs | 7642 | // Visual Studio Shared Project
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.IO;
using System.Linq;
using System.Windows.Automation;
using Microsoft.VisualStudio.TestTools.UnitTesting;
namespace TestUtilities.UI {
public class SolutionExplorerTree : TreeView {
public SolutionExplorerTree(AutomationElement element)
: base(element) {
}
public void AssertFileExists(string projectLocation, params string[] path) {
AssertItemExistsInTree(path);
var basePath = projectLocation;
for (int i = 1; i < path.Length; i++) {
basePath = Path.Combine(basePath, path[i]);
}
Assert.IsTrue(File.Exists(basePath), "File doesn't exist: " + basePath);
}
public void AssertFileExistsWithContent(string projectLocation, string content, params string[] path) {
AssertItemExistsInTree(path);
var basePath = projectLocation;
for (int i = 1; i < path.Length; i++) {
basePath = Path.Combine(basePath, path[i]);
}
Assert.IsTrue(File.Exists(basePath), "File doesn't exist: " + basePath);
Assert.AreEqual(File.ReadAllText(basePath), content);
}
public void AssertFileDoesntExist(string projectLocation, params string[] path) {
Assert.IsNull(FindItem(path), "Item exists in solution explorer: " + String.Join("\\", path));
var basePath = projectLocation;
for (int i = 1; i < path.Length; i++) {
basePath = Path.Combine(basePath, path[i]);
}
Assert.IsFalse(File.Exists(basePath), "File exists: " + basePath);
}
public void AssertFolderExists(string projectLocation, params string[] path) {
AssertItemExistsInTree(path);
var basePath = projectLocation;
for (int i = 1; i < path.Length; i++) {
basePath = Path.Combine(basePath, path[i]);
}
Assert.IsTrue(Directory.Exists(basePath), "File doesn't exist: " + basePath);
}
public void AssertFolderDoesntExist(string projectLocation, params string[] path) {
Assert.IsNull(WaitForItemRemoved(path), "Item exists in solution explorer: " + String.Join("\\", path));
var basePath = projectLocation;
for (int i = 1; i < path.Length; i++) {
basePath = Path.Combine(basePath, path[i]);
}
Assert.IsFalse(Directory.Exists(basePath), "File exists: " + basePath);
}
private void AssertItemExistsInTree(string[] path) {
var item = WaitForItem(path);
if (item == null) {
string msg = "Item not found in solution explorer " + String.Join("\\", path);
for (int i = 1; i < path.Length; i++) {
item = FindItem(path.Take(i).ToArray());
if (item == null) {
msg += Environment.NewLine + "Item missing at: " + String.Join("\\", path.Take(i));
break;
}
}
Assert.IsNotNull(item, msg);
}
}
public void SelectProject(EnvDTE.Project project) {
var slnName = string.Format("Solution '{0}' ({1} project{2})",
Path.GetFileNameWithoutExtension(project.DTE.Solution.FullName),
project.DTE.Solution.Projects.Count,
project.DTE.Solution.Projects.Count == 1 ? "" : "s"
);
var item = WaitForItem(slnName, project.Name).AsWrapper();
Assert.IsNotNull(item);
item.Select();
}
public TreeNode WaitForChildOfProject(EnvDTE.Project project, params string[] path) {
var item = WaitForItemHelper(p => FindChildOfProjectHelper(project, p, false), path);
// Check one more time, but now let the assertions be raised.
return new TreeNode(FindChildOfProjectHelper(project, path, true));
}
public AutomationElement WaitForChildOfProjectRemoved(EnvDTE.Project project, params string[] path) {
return WaitForItemRemovedHelper(p => FindChildOfProjectHelper(project, p, false), path);
}
public TreeNode FindChildOfProject(EnvDTE.Project project, params string[] path) {
return new TreeNode(FindChildOfProjectHelper(project, path, true));
}
public TreeNode TryFindChildOfProject(EnvDTE.Project project, params string[] path) {
return new TreeNode(FindChildOfProjectHelper(project, path, false));
}
// Locates an automation element under solution -> project -> path.
// When assertOnFailure is true, any lookup failure raises an assertion (and
// dumps the UI tree for diagnosis); otherwise null is returned.
private AutomationElement FindChildOfProjectHelper(EnvDTE.Project project, string[] path, bool assertOnFailure) {
    var sln = project.DTE.Solution;
    // Count only fully loaded projects: FullName can throw for unloaded ones,
    // and the solution node label reflects the loaded count.
    int count = sln.Projects.OfType<EnvDTE.Project>().Count(p => {
        try {
            return !string.IsNullOrEmpty(p.FullName);
        } catch (Exception) {
            return false;
        }
    });
    var slnLabel = string.Format(
        "Solution '{0}' ({1} project{2})",
        Path.GetFileNameWithoutExtension(sln.FullName),
        count,
        count == 1 ? "" : "s"
    );
    var slnElements = Element.FindAll(TreeScope.Children, new PropertyCondition(
        AutomationElement.NameProperty, slnLabel
    ));
    int slnCount = slnElements.OfType<AutomationElement>().Count();
    if (assertOnFailure) {
        Assert.AreEqual(1, slnCount, string.Format("Did not find single node <{0}>", slnLabel));
    } else if (slnCount != 1) {
        return null;
    }
    var slnElement = slnElements.Cast<AutomationElement>().Single();
    var projLabel = project.Name;
    var projElements = slnElement.FindAll(TreeScope.Children, new PropertyCondition(
        AutomationElement.NameProperty, projLabel
    ));
    int projCount = projElements.OfType<AutomationElement>().Count();
    if (assertOnFailure) {
        Assert.AreEqual(1, projCount, string.Format("Did not find single node <{0}>", projLabel));
    } else if (projCount != 1) {
        return null;
    }
    var projElement = projElements.Cast<AutomationElement>().Single();
    // An empty path means the caller wants the project node itself.
    var itemElement = path.Any() ? FindNode(
        projElement.FindAll(TreeScope.Children, Condition.TrueCondition),
        path,
        0
    ) : projElement;
    if (assertOnFailure) {
        // Fix: only dump the UI tree when the lookup actually failed. The old
        // code dumped it on every asserting call, flooding the test log even
        // on success.
        if (itemElement == null) {
            AutomationWrapper.DumpElement(Element);
        }
        Assert.IsNotNull(itemElement, string.Format("Did not find element <{0}\\{1}\\{2}>", slnLabel, projLabel, string.Join("\\", path)));
    }
    return itemElement;
}
}
}
| apache-2.0 |
Flauschbaellchen/gaming-base | engines/game_florensia/test/models/florensia/item/ship_c_weapon_test.rb | 162 | require 'test_helper'
module Florensia
  # Unit-test scaffold for the Item::ShipCWeapon model. The generated example
  # test is left commented out until real assertions are added.
  class Item::ShipCWeaponTest < ActiveSupport::TestCase
    # test "the truth" do
    #   assert true
    # end
  end
end
| apache-2.0 |
benjchristensen/RxJava | src/test/java/io/reactivex/internal/schedulers/SingleSchedulerTest.java | 2318 | /**
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.schedulers;
import static org.junit.Assert.*;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
import io.reactivex.*;
import io.reactivex.Scheduler.Worker;
import io.reactivex.disposables.Disposables;
import io.reactivex.internal.schedulers.SingleScheduler.ScheduledWorker;
/** Unit tests for {@link SingleScheduler} shutdown and restart behavior. */
public class SingleSchedulerTest {

    /**
     * After {@code shutdown()}, every scheduling attempt - direct or through a
     * worker whose backing executor has been shut down - must be rejected with
     * the shared disposed disposable, and the rejected task must never run.
     */
    @Test
    public void shutdownRejects() {
        final int[] calls = { 0 };
        Runnable r = new Runnable() {
            @Override
            public void run() {
                calls[0]++;
            }
        };
        Scheduler s = new SingleScheduler();
        s.shutdown();
        assertEquals(Disposables.disposed(), s.scheduleDirect(r));
        assertEquals(Disposables.disposed(), s.scheduleDirect(r, 1, TimeUnit.SECONDS));
        assertEquals(Disposables.disposed(), s.schedulePeriodicallyDirect(r, 1, 1, TimeUnit.SECONDS));
        Worker w = s.createWorker();
        // Shut down the worker's executor directly to force the rejection path.
        ((ScheduledWorker)w).executor.shutdownNow();
        assertEquals(Disposables.disposed(), w.schedule(r));
        assertEquals(Disposables.disposed(), w.schedule(r, 1, TimeUnit.SECONDS));
        assertEquals(Disposables.disposed(), w.schedulePeriodically(r, 1, 1, TimeUnit.SECONDS));
        // The runnable must never have executed on any of the rejected paths.
        assertEquals(0, calls[0]);
        w.dispose();
        assertTrue(w.isDisposed());
    }

    /**
     * Races two concurrent {@code start()} calls after each {@code shutdown()}
     * to flush out restart race conditions; repeated to improve the odds of
     * hitting an actual interleaving.
     */
    @Test
    public void startRace() {
        final Scheduler s = new SingleScheduler();
        for (int i = 0; i < 500; i++) {
            s.shutdown();
            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    s.start();
                }
            };
            TestHelper.race(r1, r1);
        }
    }
}
| apache-2.0 |
LiberatorUSA/GUCEF | plugins/CORE/dstorepluginPARSIFALXML/premake5.lua | 1961 | --------------------------------------------------------------------
-- This file was automatically generated by ProjectGenerator
-- which is tooling part the build system designed for GUCEF
-- (Galaxy Unlimited Framework)
-- For the latest info, see http://www.VanvelzenSoftware.com/
--
-- The contents of this file are placed in the public domain. Feel
-- free to make use of it in any way you like.
--------------------------------------------------------------------
--
-- Configuration for module: dstorepluginPARSIFALXML
-- Shared-library plugin module written in C; generated premake5 definition.
project( "dstorepluginPARSIFALXML" )

configuration( {} )
location( os.getenv( "PM5OUTPUTDIR" ) )

configuration( {} )
targetdir( os.getenv( "PM5TARGETDIR" ) )

configuration( {} )
language( "C" )

configuration( {} )
kind( "SharedLib" )

configuration( {} )
links( { "gucefCORE", "gucefMT", "libparsifal" } )
-- NOTE(review): "libparsifal" is already listed in the line above; premake
-- de-duplicates link entries, so this repeat is harmless but redundant
-- (artifact of the generator).
links( { "libparsifal" } )

configuration( {} )
defines( { "DSTOREPLUGINPARSIFALXML_BUILD_MODULE" } )

configuration( {} )
vpaths { ["Headers"] = { "**.h", "**.hpp", "**.hxx" } }
files( {
  "include/DLLMainDSTOREpluginPARSIFALXML.h"
 } )

configuration( {} )
vpaths { ["Source"] = { "**.c", "**.cpp", "**.cs", "**.asm" } }
files( {
  "src/DLLMainDSTOREpluginPARSIFALXML.c"
 } )

-- Platform-independent include paths, then per-platform OS header locations.
configuration( {} )
includedirs( { "../../../common/include", "../../../dependencies/libparsifal/include", "../../../dependencies/libparsifal/include/libparsifal", "../../../platform/gucefCORE/include", "../../../platform/gucefMT/include", "include" } )

configuration( { "ANDROID" } )
includedirs( { "../../../platform/gucefCORE/include/android" } )

configuration( { "LINUX32" } )
includedirs( { "../../../platform/gucefCORE/include/linux" } )

configuration( { "LINUX64" } )
includedirs( { "../../../platform/gucefCORE/include/linux" } )

configuration( { "WIN32" } )
includedirs( { "../../../platform/gucefCORE/include/mswin" } )

configuration( { "WIN64" } )
includedirs( { "../../../platform/gucefCORE/include/mswin" } )
| apache-2.0 |
covito/legend-shop | src/main/webapp/plugins/fckeditor/editor/dialog/fck_flash/fck_flash.js | 8276 | /*
* FCKeditor - The text editor for Internet - http://www.fckeditor.net
* Copyright (C) 2003-2009 Frederico Caldeira Knabben
*
* == BEGIN LICENSE ==
*
* Licensed under the terms of any of the following licenses at your
* choice:
*
* - GNU General Public License Version 2 or later (the "GPL")
* http://www.gnu.org/licenses/gpl.html
*
* - GNU Lesser General Public License Version 2.1 or later (the "LGPL")
* http://www.gnu.org/licenses/lgpl.html
*
* - Mozilla Public License Version 1.1 or later (the "MPL")
* http://www.mozilla.org/MPL/MPL-1.1.html
*
* == END LICENSE ==
*
* Scripts related to the Flash dialog window (see fck_flash.html).
*/
// Shortcuts into the hosting editor frame: the dialog manager and the FCK
// editor singletons it exposes.
var dialog = window.parent ;
var oEditor = dialog.InnerDialogLoaded() ;
var FCK = oEditor.FCK ;
var FCKLang = oEditor.FCKLang ;
var FCKConfig = oEditor.FCKConfig ;
var FCKTools = oEditor.FCKTools ;

//#### Dialog Tabs

// Set the dialog tabs. "Upload" and "Advanced" are added only when enabled by
// the editor configuration.
dialog.AddTab( 'Info', oEditor.FCKLang.DlgInfoTab ) ;

if ( FCKConfig.FlashUpload )
	dialog.AddTab( 'Upload', FCKLang.DlgLnkUpload ) ;

if ( !FCKConfig.FlashDlgHideAdvanced )
	dialog.AddTab( 'Advanced', oEditor.FCKLang.DlgAdvancedTag ) ;
// Function called when a dialog tag is selected.
// Called when a dialog tab is selected: shows the pane that matches the tab
// code and hides the other two.
function OnDialogTabChange( tabCode )
{
	var panes = { 'divInfo' : 'Info', 'divUpload' : 'Upload', 'divAdvanced' : 'Advanced' } ;
	for ( var paneId in panes )
		ShowE( paneId, ( tabCode == panes[ paneId ] ) ) ;
}
// Get the selected flash embed (if available).
var oFakeImage = dialog.Selection.GetSelectedElement() ;
var oEmbed ;
if ( oFakeImage )
{
if ( oFakeImage.tagName == 'IMG' && oFakeImage.getAttribute('_fckflash') )
oEmbed = FCK.GetRealElement( oFakeImage ) ;
else
oFakeImage = null ;
}
// Dialog entry point: localizes the UI, loads the current selection into the
// form, wires the optional browse/upload features and activates the OK button.
window.onload = function()
{
	// Translate the dialog box texts.
	oEditor.FCKLanguageManager.TranslatePage(document) ;

	// Load the selected element information (if any).
	LoadSelection() ;

	// Show/Hide the "Browse Server" button.
	GetE('tdBrowse').style.display = FCKConfig.FlashBrowser ? '' : 'none' ;

	// Set the actual uploader URL.
	if ( FCKConfig.FlashUpload )
		GetE('frmUpload').action = FCKConfig.FlashUploadURL ;

	dialog.SetAutoSize( true ) ;

	// Activate the "OK" button.
	dialog.SetOkButton( true ) ;

	SelectField( 'txtUrl' ) ;
}
// Copies the attributes of the currently selected EMBED (if any) into the
// dialog form fields. No-op when the dialog was opened without a selection.
function LoadSelection()
{
	if ( ! oEmbed ) return ;

	GetE('txtUrl').value    = GetAttribute( oEmbed, 'src', '' ) ;
	GetE('txtWidth').value  = GetAttribute( oEmbed, 'width', '' ) ;
	GetE('txtHeight').value = GetAttribute( oEmbed, 'height', '' ) ;

	// Get Advanced Attributes; play/loop/menu default to 'true' when absent.
	GetE('txtAttId').value = oEmbed.id ;
	GetE('chkAutoPlay').checked = GetAttribute( oEmbed, 'play', 'true' ) == 'true' ;
	GetE('chkLoop').checked     = GetAttribute( oEmbed, 'loop', 'true' ) == 'true' ;
	GetE('chkMenu').checked     = GetAttribute( oEmbed, 'menu', 'true' ) == 'true' ;
	GetE('cmbScale').value      = GetAttribute( oEmbed, 'scale', '' ).toLowerCase() ;

	GetE('txtAttTitle').value   = oEmbed.title ;

	// IE exposes class/style through different properties than other browsers.
	if ( oEditor.FCKBrowserInfo.IsIE )
	{
		GetE('txtAttClasses').value = oEmbed.getAttribute('className') || '' ;
		GetE('txtAttStyle').value   = oEmbed.style.cssText ;
	}
	else
	{
		GetE('txtAttClasses').value = oEmbed.getAttribute('class',2) || '' ;
		GetE('txtAttStyle').value   = oEmbed.getAttribute('style',2) || '' ;
	}

	UpdatePreview() ;
}
//#### The OK button was hit.
// Validates the URL, then creates/updates the EMBED element and its fake
// placeholder image in the editor document. Returns false to keep the dialog
// open when validation fails.
function Ok()
{
	if ( GetE('txtUrl').value.length == 0 )
	{
		dialog.SetSelectedTab( 'Info' ) ;
		GetE('txtUrl').focus() ;

		alert( oEditor.FCKLang.DlgAlertUrl ) ;

		return false ;
	}

	// Record an undo snapshot before mutating the document.
	oEditor.FCKUndo.SaveUndoStep() ;

	if ( !oEmbed )
	{
		oEmbed = FCK.EditorDocument.createElement( 'EMBED' ) ;
		oFakeImage = null ;
	}
	UpdateEmbed( oEmbed ) ;

	if ( !oFakeImage )
	{
		// New insertions go through a placeholder IMG tagged "_fckflash".
		oFakeImage = oEditor.FCKDocumentProcessor_CreateFakeImage( 'FCK__Flash', oEmbed ) ;
		oFakeImage.setAttribute( '_fckflash', 'true', 0 ) ;
		oFakeImage = FCK.InsertElement( oFakeImage ) ;
	}

	oEditor.FCKEmbedAndObjectProcessor.RefreshView( oFakeImage, oEmbed ) ;

	return true ;
}
// Writes the form values onto the EMBED element 'e': flash MIME type and
// plugin page, source URL, size, playback flags and the advanced
// id/scale/title/class/style attributes.
function UpdateEmbed( e )
{
	SetAttribute( e, 'type'        , 'application/x-shockwave-flash' ) ;
	SetAttribute( e, 'pluginspage' , 'http://www.macromedia.com/go/getflashplayer' ) ;

	SetAttribute( e, 'src', GetE('txtUrl').value ) ;
	SetAttribute( e, "width" , GetE('txtWidth').value ) ;
	SetAttribute( e, "height", GetE('txtHeight').value ) ;

	// Advanced Attributes

	SetAttribute( e, 'id'   , GetE('txtAttId').value ) ;
	SetAttribute( e, 'scale', GetE('cmbScale').value ) ;

	SetAttribute( e, 'play', GetE('chkAutoPlay').checked ? 'true' : 'false' ) ;
	SetAttribute( e, 'loop', GetE('chkLoop').checked ? 'true' : 'false' ) ;
	SetAttribute( e, 'menu', GetE('chkMenu').checked ? 'true' : 'false' ) ;

	SetAttribute( e, 'title' , GetE('txtAttTitle').value ) ;

	// IE stores class/style through different properties than other browsers.
	if ( oEditor.FCKBrowserInfo.IsIE )
	{
		SetAttribute( e, 'className', GetE('txtAttClasses').value ) ;
		e.style.cssText = GetE('txtAttStyle').value ;
	}
	else
	{
		SetAttribute( e, 'class', GetE('txtAttClasses').value ) ;
		SetAttribute( e, 'style', GetE('txtAttStyle').value ) ;
	}
}
// DOM element hosting the flash preview; set once by SetPreviewElement.
var ePreview ;

// Registers the preview container and immediately renders the current URL
// into it when the URL field is non-empty.
function SetPreviewElement( previewEl )
{
	ePreview = previewEl ;

	var hasUrl = GetE('txtUrl').value.length > 0 ;
	if ( hasUrl )
		UpdatePreview() ;
}
function UpdatePreview()
{
if ( !ePreview )
return ;
while ( ePreview.firstChild )
ePreview.removeChild( ePreview.firstChild ) ;
if ( GetE('txtUrl').value.length == 0 )
ePreview.innerHTML = ' ' ;
else
{
var oDoc = ePreview.ownerDocument || ePreview.document ;
var e = oDoc.createElement( 'EMBED' ) ;
SetAttribute( e, 'src', GetE('txtUrl').value ) ;
SetAttribute( e, 'type', 'application/x-shockwave-flash' ) ;
SetAttribute( e, 'width', '100%' ) ;
SetAttribute( e, 'height', '100%' ) ;
ePreview.appendChild( e ) ;
}
}
// <embed id="ePreview" src="fck_flash/claims.swf" width="100%" height="100%" style="visibility:hidden" type="application/x-shockwave-flash" pluginspage="http://www.macromedia.com/go/getflashplayer">
// Opens the server-side file browser configured for flash resources; the
// browser calls back into SetUrl with the chosen file.
function BrowseServer()
{
	OpenFileBrowser( FCKConfig.FlashBrowserURL, FCKConfig.FlashBrowserWindowWidth, FCKConfig.FlashBrowserWindowHeight ) ;
}
// Fills the URL (and, when provided, width/height) fields, refreshes the
// preview and switches back to the Info tab. Invoked by the file browser and
// the upload callback.
function SetUrl( url, width, height )
{
	var assignIfSet = function( fieldId, value )
	{
		if ( value )
			GetE( fieldId ).value = value ;
	} ;

	GetE( 'txtUrl' ).value = url ;
	assignIfSet( 'txtWidth', width ) ;
	assignIfSet( 'txtHeight', height ) ;

	UpdatePreview() ;

	dialog.SetSelectedTab( 'Info' ) ;
}
// Callback invoked by the hidden upload frame when the server-side connector
// finishes. Error numbers follow the FCKeditor connector protocol; fatal
// errors return early, warnings fall through so the URL is still applied.
function OnUploadCompleted( errorNumber, fileUrl, fileName, customMsg )
{
	// Remove animation
	window.parent.Throbber.Hide() ;
	GetE( 'divUpload' ).style.display  = '' ;

	switch ( errorNumber )
	{
		case 0 :	// No errors
			alert( 'Your file has been successfully uploaded' ) ;
			break ;
		case 1 :	// Custom error
			alert( customMsg ) ;
			return ;
		case 101 :	// Custom warning
			alert( customMsg ) ;
			break ;
		case 201 :
			alert( 'A file with the same name is already available. The uploaded file has been renamed to "' + fileName + '"' ) ;
			break ;
		case 202 :
			alert( 'Invalid file type' ) ;
			return ;
		case 203 :
			alert( "Security error. You probably don't have enough permissions to upload. Please check your server." ) ;
			return ;
		case 500 :
			alert( 'The connector is disabled' ) ;
			break ;
		default :
			alert( 'Error on file upload. Error number: ' + errorNumber ) ;
			return ;
	}

	SetUrl( fileUrl ) ;
	GetE('frmUpload').reset() ;
}
// Case-insensitive extension filters built from the editor configuration.
var oUploadAllowedExtRegex = new RegExp( FCKConfig.FlashUploadAllowedExtensions, 'i' ) ;
var oUploadDeniedExtRegex  = new RegExp( FCKConfig.FlashUploadDeniedExtensions, 'i' ) ;

// Validates the chosen file name against the allow/deny filters before the
// upload form is submitted; shows the progress throbber when it passes.
// Returning false cancels the form submission.
function CheckUpload()
{
	var sFile = GetE('txtUploadFile').value ;

	if ( sFile.length == 0 )
	{
		alert( 'Please select a file to upload' ) ;
		return false ;
	}

	if ( ( FCKConfig.FlashUploadAllowedExtensions.length > 0 && !oUploadAllowedExtRegex.test( sFile ) ) ||
		( FCKConfig.FlashUploadDeniedExtensions.length > 0 && oUploadDeniedExtRegex.test( sFile ) ) )
	{
		// Reuse the connector's "invalid file type" error path.
		OnUploadCompleted( 202 ) ;
		return false ;
	}

	// Show animation
	window.parent.Throbber.Show( 100 ) ;
	GetE( 'divUpload' ).style.display  = 'none' ;

	return true ;
}
| apache-2.0 |
atiq-shumon/DotNetProjects | Hospital_ERP_VS13-WCF_WF/AH.ModuleController/UI/DRS/Reports/Viewer/ViewerSelector.cs | 633 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AH.ModuleController.UI.DRS.Reports.Viewer
{
    // Identifies which report viewer/layout to open in the diagnostic
    // reporting system (DRS) module.
    public enum ViewerSelector
    {
        InvestigationReport, SpecimenReport, SpecimenDetailsReport, StainReport, CultureAndSensiReport,
        InvestBulkReportPrint, StainBulkReportPrint, CultureBulkReportPrint,MicroBioResultA, MicroBioResultB, MicroBioResultC,
        WorkSheet, CultureAndSensitivityWorkSheet, InvestigationReportPreview, multicolumn, printPatientLabel, MicroBioReportPreview, StainReportPreview, SpecimenDetailsReportPreView
    }
}
| apache-2.0 |
awsdocs/aws-doc-sdk-examples | python/example_code/pinpoint/pinpoint_send_sms_message_api.py | 2761 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
"""
Purpose
Shows how to use the AWS SDK for Python (Boto3) with Amazon Pinpoint to
send SMS messages.
"""
# snippet-start:[pinpoint.python.pinpoint_send_sms_message_api.complete]
import logging
import boto3
from botocore.exceptions import ClientError
logger = logging.getLogger(__name__)
def send_sms_message(
        pinpoint_client, app_id, origination_number, destination_number, message,
        message_type):
    """
    Sends an SMS message through Amazon Pinpoint.

    :param pinpoint_client: A Boto3 Pinpoint client.
    :param app_id: The Amazon Pinpoint project/application ID to use when
                   sending this message. The SMS channel must be enabled for
                   the project or application.
    :param origination_number: The phone number to send the message from, in
                               E.164 format. It must be associated with your
                               Amazon Pinpoint account.
    :param destination_number: The recipient's phone number in E.164 format.
    :param message: The content of the SMS message.
    :param message_type: 'TRANSACTIONAL' for time-sensitive content or
                         'PROMOTIONAL' for marketing-related content.
    :return: The ID of the sent message.
    """
    addresses = {destination_number: {'ChannelType': 'SMS'}}
    sms_config = {
        'Body': message,
        'MessageType': message_type,
        'OriginationNumber': origination_number}
    try:
        response = pinpoint_client.send_messages(
            ApplicationId=app_id,
            MessageRequest={
                'Addresses': addresses,
                'MessageConfiguration': {'SMSMessage': sms_config}})
    except ClientError:
        logger.exception("Couldn't send message.")
        raise
    return response['MessageResponse']['Result'][destination_number]['MessageId']
def main():
    """Demo entry point: sends one SMS using hard-coded example values."""
    # Placeholder IDs/numbers; replace with real values before running.
    app_id = "ce796be37f32f178af652b26eexample"
    origination_number = "+12065550199"
    destination_number = "+14255550142"
    message = (
        "This is a sample message sent from Amazon Pinpoint by using the AWS SDK for "
        "Python (Boto 3).")
    message_type = "TRANSACTIONAL"

    print("Sending SMS message.")
    message_id = send_sms_message(
        boto3.client('pinpoint'), app_id, origination_number, destination_number,
        message, message_type)
    print(f"Message sent! Message ID: {message_id}.")


if __name__ == '__main__':
    main()
# snippet-end:[pinpoint.python.pinpoint_send_sms_message_api.complete]
| apache-2.0 |
googleapis/sphinx-docfx-yaml | tests/example/format/google/__init__.py | 649 | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# coding: utf-8
# This package is used to test google style docstring.
| apache-2.0 |
mnunberg/couchbase-python-client | couchbase/iops/base.py | 4972 | #
# Copyright 2013, Couchbase, Inc.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This file is here for example purposes only. It demonstrates the basic
IOPS API.
This is not yet considered stable interface, although this is currently
the only means by which an external event loop can be integrated with
Couchbase through Python
"""
from couchbase._libcouchbase import (
PYCBC_EVACTION_WATCH,
PYCBC_EVACTION_UNWATCH,
PYCBC_EVACTION_CLEANUP,
LCB_READ_EVENT,
LCB_WRITE_EVENT,
LCB_RW_EVENT,
IOEvent,
TimerEvent,
Event
)
class IOPS(object):
    # Interface-only class: methods document the contract and have no bodies.
    def __init__(self):
        """
        The IOPS class is intended as an efficient and multiplexing
        manager of one or more :class:`Event` objects.

        As this represents an interface with methods only,
        there is no required behavior in the constructor of this object
        """

    def update_event(self, event, action, flags):
        """
        This method shall perform an action modifying an event.

        :param event: An :class:`IOEvent` object which shall have its
          watcher settings modified. The ``IOEvent`` object is an object
          which provides a ``fileno()`` method.

        :param int action: one of:

          * ``PYCBC_EVACTION_WATCH``: Watch this file for events
          * ``PYCBC_EVACTION_UNWATCH``: Remove this file from all watches
          * ``PYCBC_EVACTION_CLEANUP``: Destroy any references to this object

        :param int flags: Event details, this indicates which events this
          file should be watched for. This is only applicable if ``action``
          was ``PYCBC_EVACTION_WATCH``. It can a bitmask of the following:

          * ``LCB_READ_EVENT``: Watch this file until it becomes readable
          * ``LCB_WRITE_EVENT``: Watch this file until it becomes writeable

        If the action is to watch the event for readability or writeability,
        the ``IOPS`` implementation shall schedule the underlying event system
        to call one of the ``ready_r``, ``ready_w`` or ``ready_rw`` methods
        (for readability, writeability or both readability and writability
        respectively) at such a time when the underlying reactor/event loop
        implementation has signalled it being so.

        Event watchers are non-repeatable. This means that once the event
        has been delivered, the ``IOEvent`` object shall be removed from a
        watching state. The extension shall call this method again for each
        time an event is requested.

        This method must be implemented
        """

    def update_timer(self, timer, action, usecs):
        """
        This method shall schedule or unschedule a timer.

        :param timer: A :class:`TimerEvent` object.
        :param action: See :meth:`update_event` for meaning
        :param usecs: A relative offset in microseconds when this timer
          shall be fired.

        This method follows the same semantics as :meth:`update_event`,
        except that there is no file.

        When the underlying event system shall invoke the timer, the
        ``TimerEvent`` ``ready`` method shall be called with ``0`` as its
        argument.

        Like ``IOEvents``, ``TimerEvents`` are non-repeatable.

        This method must be implemented
        """

    def io_event_factory(self):
        """
        Returns a new instance of :class:`IOEvent`.

        This method is optional, and is useful in case an implementation
        wishes to utilize its own subclass of ``IOEvent``.

        As with most Python subclasses, the user should ensure that the
        base implementation's ``__init__`` is called.
        """

    def timer_event_factory(self):
        """
        Returns a new instance of :class:`TimerEvent`. Like the
        :meth:`io_event_factory`, this is optional
        """

    def start_watching(self):
        """
        Called by the extension when all scheduled IO events have been
        submitted. Depending on the I/O model, this method can either
        drive the event loop until :meth:`stop_watching` is called, or
        do nothing.

        This method must be implemented
        """

    def stop_watching(self):
        """
        Called by the extension when it no longer needs to wait for events.
        Its function is to undo anything which was done in the
        :meth:`start_watching` method

        This method must be implemented
        """
1170197998/Objective_C-DEMO | JSPatchDemo/JSPatchDemo/test.js | 1548 | defineClass('ViewController', {
rightButtonItemClick() {
var tableViewCtrl = TableViewController.alloc().init()
self.navigationController().pushViewController_animated(tableViewCtrl, YES)
tableViewCtrl.setTitle("JS创建的控制器");
}
})
// JSPatch-defined table controller: a single section of 30 static rows; tapping
// a row shows an alert whose dismissal is logged via the alert-view delegate.
defineClass('TableViewController : UITableViewController <UIAlertViewDelegate>', ['data'], {
  // Lazily builds and caches the 30-row data source in the 'data' property.
  dataSource: function() {
    var data = self.data();
    if (data) return data;
    var data = [];
    for (var i = 0; i < 30; i ++) {
      data.push("第" + (i + 1) + "行");
    }
    self.setData(data)
    return data;
  },
  numberOfSectionsInTableView: function(tableView) {
    return 1;
  },
  tableView_numberOfRowsInSection: function(tableView, section) {
    return self.dataSource().length;
  },
  tableView_cellForRowAtIndexPath: function(tableView, indexPath) {
    // Standard cell-reuse pattern; creates a default-style cell on cache miss.
    var cell = tableView.dequeueReusableCellWithIdentifier("cell")
    if (!cell) {
      cell = require('UITableViewCell').alloc().initWithStyle_reuseIdentifier(0, "cell")
    }
    cell.textLabel().setText(self.dataSource()[indexPath.row()])
    return cell
  },
  tableView_heightForRowAtIndexPath: function(tableView, indexPath) {
    return 60
  },
  tableView_didSelectRowAtIndexPath: function(tableView, indexPath) {
    var alertView = require('UIAlertView').alloc().initWithTitle_message_delegate_cancelButtonTitle_otherButtonTitles("点击了",self.dataSource()[indexPath.row()], self, "OK", null);
    alertView.show()
  },
  // UIAlertViewDelegate: logs which button dismissed the alert.
  alertView_willDismissWithButtonIndex: function(alertView, idx) {
    console.log('click btn ' + alertView.buttonTitleAtIndex(idx).toJS())
  }
})
| apache-2.0 |
googleads/googleads-php-lib | src/Google/AdsApi/AdManager/Util/v202108/StatementBuilder.php | 8453 | <?php
/**
* Copyright 2017 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Google\AdsApi\AdManager\Util\v202108;
use Google\AdsApi\Common\Util\MapEntries;
use Google\AdsApi\AdManager\v202108\Statement;
use InvalidArgumentException;
/**
* Builds statements that can be used in PQL queries.
*
* Typical usage:
*
* ```
* $statementBuilder = (new StatementBuilder())
* ->where('lastModifiedDateTime > :yesterday AND status = :status')
* ->orderBy('id ASC')
* ->limit(200)
* ->withBindVariableValue(
* 'yesterday',
* AdManagerDateTimes::fromDateTime(new DateTime(
* '-1 day',
* new DateTimeZone('America/New_York')
* )))
* ->withBindVariableValue('status', SomeStatus::STATUS);
*
* $statement = $statementBuilder->toStatement();
* //...
* $statementBuilder->increaseOffsetBy(200);
* $statement = $statementBuilder->toStatement();
* ```
*/
class StatementBuilder
{

    const SUGGESTED_PAGE_LIMIT = 500;

    const SELECT = 'SELECT';
    const FROM = 'FROM';
    const WHERE = 'WHERE';
    const LIMIT = 'LIMIT';
    const OFFSET = 'OFFSET';
    const ORDER_BY = 'ORDER BY';

    // Clause fragments; null means the clause is omitted from the built query.
    private $select;
    private $from;
    private $where;
    private $limit;
    private $offset;
    private $orderBy;
    // Bind-variable name => PQL Value object map.
    private $valueMap;

    /**
     * Creates an empty statement builder ready to have statement parts added to
     * it.
     */
    public function __construct()
    {
        $this->valueMap = [];
    }

    /**
     * Adds a bind variable value to the statement.
     *
     * @param string $key the value key
     * @param mixed $value the value either as a primitive, which will be
     *     converted to a PQL `Value` object, or a PQL `Value` object
     * @return StatementBuilder this builder
     */
    public function withBindVariableValue($key, $value)
    {
        $this->valueMap[$key] = Pql::createValue($value);
        return $this;
    }

    /**
     * Gets the statement representing the state of this statement builder.
     *
     * @return Statement
     */
    public function toStatement()
    {
        $statement = new Statement();
        $statement->setQuery($this->buildQuery());
        $statement->setValues(
            MapEntries::fromAssociativeArray(
                $this->getBindVariableMap(),
                'Google\AdsApi\AdManager\v202108\String_ValueMapEntry'
            )
        );
        return $statement;
    }

    /**
     * Removes the specified keyword from the clause, if present. Will remove
     * `keyword + ' '`.
     *
     * @param string $clause
     * @param string $keyword
     * @return string a new string with the `keyword + ' '` removed
     */
    private static function removeKeyword($clause, $keyword)
    {
        $keyword .= ' ';
        // Case-insensitive check limited to the clause's leading characters.
        if (stristr(substr($clause, 0, strlen($keyword)), $keyword) !== false) {
            return substr($clause, strlen($keyword));
        }
        return $clause;
    }

    /**
     * Sets the statement `SELECT` clause in the form of 'a, b, ...'.
     *
     * Only necessary for statements being sent to the
     * `PublisherQueryLanguageService`. The 'SELECT ' keyword will be ignored.
     *
     * @param string $columns the statement select clause without `SELECT`
     * @return StatementBuilder this builder
     */
    public function select($columns)
    {
        $columns = self::removeKeyword($columns, self::SELECT);
        $this->select = $columns;
        return $this;
    }

    /**
     * Sets the statement `FROM` clause in the form of 'table'.
     *
     * Only necessary for statements being sent to the
     * `PublisherQueryLanguageService`. The 'FROM ' keyword will be ignored.
     *
     * @param string $table the statement from clause without `FROM`
     * @return StatementBuilder this builder
     */
    public function from($table)
    {
        $table = self::removeKeyword($table, self::FROM);
        $this->from = $table;
        return $this;
    }

    /**
     * Sets the statement `WHERE` clause in the form of `'WHERE <condition> {[AND
     * | OR] <condition> ...}'`. E.g., 'a = b OR b = c'. The 'WHERE ' keyword will
     * be ignored.
     *
     * @param string $conditions the statement query without `WHERE`
     * @return StatementBuilder this builder
     */
    public function where($conditions)
    {
        $conditions = self::removeKeyword($conditions, self::WHERE);
        $this->where = $conditions;
        return $this;
    }

    /**
     * Sets the statement `LIMIT` clause in the form of `'LIMIT <count>'`. E.g.,
     * 1000.
     *
     * @param int $count the statement limit
     * @return StatementBuilder this builder
     */
    public function limit($count)
    {
        $this->limit = $count;
        return $this;
    }

    /**
     * Sets the statement `OFFSET` clause in the form of `'OFFSET <count>'`. E.g.,
     * 200.
     *
     * @param int $count the statement offset
     * @return StatementBuilder this builder
     */
    public function offset($count)
    {
        $this->offset = $count;
        return $this;
    }

    /**
     * Increases the offset by the specified amount.
     *
     * @param int $amount
     * @return StatementBuilder this builder
     */
    public function increaseOffsetBy($amount)
    {
        // A previously unset offset is treated as 0 before incrementing.
        if ($this->offset === null) {
            $this->offset = 0;
        }
        $this->offset += $amount;
        return $this;
    }

    /**
     * Gets the current offset.
     *
     * @return int
     */
    public function getOffset()
    {
        return $this->offset;
    }

    /**
     * Removes the limit and offset from the query.
     *
     * @return StatementBuilder this builder
     */
    public function removeLimitAndOffset()
    {
        $this->offset = null;
        $this->limit = null;
        return $this;
    }

    /**
     * Sets the statement `ORDER BY` clause in the form of `'ORDER BY <property>
     * [ASC | DESC]'`. E.g., 'type ASC, lastModifiedDateTime DESC'. The 'ORDER BY'
     * keyword will be ignored.
     *
     * @param string $orderBy the statement order by without `ORDER BY`
     * @return StatementBuilder this builder
     */
    public function orderBy($orderBy)
    {
        $orderBy = self::removeKeyword($orderBy, self::ORDER_BY);
        $this->orderBy = $orderBy;
        return $this;
    }

    /**
     * Returns the key to value bind variable map.
     *
     * @return array
     */
    public function getBindVariableMap()
    {
        return $this->valueMap;
    }

    /**
     * Checks that the query is valid.
     *
     * @throws InvalidArgumentException if the query is invalid
     */
    private function validateQuery()
    {
        // PQL requires a LIMIT whenever an OFFSET is supplied.
        if ($this->offset !== null && $this->limit === null) {
            throw new InvalidArgumentException(
                'OFFSET cannot be set if LIMIT is not set.'
            );
        }
    }

    /**
     * Builds the query from the clauses.
     *
     * @return string the query
     */
    private function buildQuery()
    {
        $this->validateQuery();
        $statement = "";
        // Clauses are emitted in fixed PQL order; unset ones are skipped.
        if ($this->select !== null) {
            $statement .= sprintf("%s %s ", self::SELECT, $this->select);
        }
        if ($this->from !== null) {
            $statement .= sprintf("%s %s ", self::FROM, $this->from);
        }
        if ($this->where !== null) {
            $statement .= sprintf("%s %s ", self::WHERE, $this->where);
        }
        if ($this->orderBy !== null) {
            $statement .= sprintf("%s %s ", self::ORDER_BY, $this->orderBy);
        }
        if ($this->limit !== null) {
            $statement .= sprintf("%s %s ", self::LIMIT, $this->limit);
        }
        if ($this->offset !== null) {
            $statement .= sprintf("%s %s ", self::OFFSET, $this->offset);
        }
        return trim($statement);
    }
}
| apache-2.0 |
Yannic/closure-compiler | externs/browser/whatwg_encoding.js | 1496 | /*
* Copyright 2015 The Closure Compiler Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileoverview Definitions for WHATWG's Encoding specification
* https://encoding.spec.whatwg.org
* @externs
*/
/**
 * @constructor
 * @param {string=} encoding
 * @param {Object=} options
 * @see https://encoding.spec.whatwg.org/#textdecoder
 */
function TextDecoder(encoding, options) {}

/** @type {string} **/ TextDecoder.prototype.encoding;
/** @type {boolean} **/ TextDecoder.prototype.fatal;
/** @type {boolean} **/ TextDecoder.prototype.ignoreBOM;

/**
 * @param {!BufferSource=} input
 * @param {?Object=} options
 * @return {string}
 * @see https://encoding.spec.whatwg.org/#textdecoder
 */
TextDecoder.prototype.decode = function decode(input, options) {};
/**
 * @constructor
 * @param {string=} utfLabel
 * @see https://encoding.spec.whatwg.org/#textencoder
 */
function TextEncoder(utfLabel) {}

/** @type {string} **/ TextEncoder.prototype.encoding;

/**
 * @param {string=} input
 * @return {!Uint8Array}
 * @see https://encoding.spec.whatwg.org/#textencoder
 */
TextEncoder.prototype.encode = function(input) {};
| apache-2.0 |
consulo/consulo-android | android/android/src/com/android/tools/idea/editors/theme/attributes/TableLabel.java | 1166 | /*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.tools.idea.editors.theme.attributes;
import org.jetbrains.annotations.NotNull;
/**
 * Immutable value object pairing a section label with the table row
 * position at which it appears.
 */
public class TableLabel {
    /** Text of the label; also used as the {@code toString()} value. */
    public final @NotNull String myLabelName;
    /** Row position associated with this label. */
    public final int myRowPosition;

    /**
     * @param labelName   text of the label
     * @param rowPosition row position associated with the label
     */
    public TableLabel(@NotNull String labelName, int rowPosition) {
        myLabelName = labelName;
        myRowPosition = rowPosition;
    }

    /** @return the label text */
    @NotNull
    public String getLabelName() {
        return myLabelName;
    }

    /** @return the row position associated with this label */
    public int getRowPosition() {
        return myRowPosition;
    }

    @Override
    public String toString() {
        return myLabelName;
    }
}
| apache-2.0 |
bmckinney/dataverse-canonical | src/main/java/edu/harvard/iq/dataverse/DatasetVersion.java | 37123 | package edu.harvard.iq.dataverse;
import edu.harvard.iq.dataverse.util.MarkupChecker;
import edu.harvard.iq.dataverse.DatasetFieldType.FieldType;
import edu.harvard.iq.dataverse.util.StringUtil;
import java.io.Serializable;
import java.sql.Timestamp;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.OrderBy;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
import javax.persistence.UniqueConstraint;
import javax.persistence.Version;
import javax.validation.ConstraintViolation;
import javax.validation.Validation;
import javax.validation.Validator;
import javax.validation.ValidatorFactory;
import org.apache.commons.lang.StringEscapeUtils;
/**
*
* @author skraffmiller
*/
@Entity
@Table(indexes = {@Index(columnList="dataset_id")},
uniqueConstraints = @UniqueConstraint(columnNames = {"dataset_id,versionnumber,minorversionnumber"}))
public class DatasetVersion implements Serializable {
/**
* Convenience comparator to compare dataset versions by their version number.
* The draft version is considered the latest.
*/
public static final Comparator<DatasetVersion> compareByVersion = new Comparator<DatasetVersion>() {
@Override
public int compare(DatasetVersion o1, DatasetVersion o2) {
if ( o1.isDraft() ) {
return o2.isDraft() ? 0 : 1;
} else {
return (int)Math.signum( (o1.getVersionNumber().equals(o2.getVersionNumber())) ?
o1.getMinorVersionNumber() - o2.getMinorVersionNumber()
: o1.getVersionNumber() - o2.getVersionNumber() );
}
}
};
// TODO: Determine the UI implications of various version states
//IMPORTANT: If you add a new value to this enum, you will also have to modify the
// StudyVersionsFragment.xhtml in order to display the correct value from a Resource Bundle
public enum VersionState {
DRAFT, RELEASED, ARCHIVED, DEACCESSIONED
};
public enum License {
NONE, CC0
}
public DatasetVersion() {
}
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
private Long id;
public Long getId() {
return this.id;
}
public void setId(Long id) {
this.id = id;
}
public String getUNF() {
return UNF;
}
public void setUNF(String UNF) {
this.UNF = UNF;
}
/**
* This is JPA's optimistic locking mechanism, and has no semantic meaning in the DV object model.
* @return the object db version
*/
public Long getVersion() {
return this.version;
}
public void setVersion(Long version) {
    // Intentionally a no-op: the field is annotated @Version (see below) and
    // the getter documents it as JPA's optimistic-locking counter, so it must
    // never be assigned by application code.
}
private String UNF;
@Version
private Long version;
private Long versionNumber;
private Long minorVersionNumber;
public static final int VERSION_NOTE_MAX_LENGTH = 1000;
@Column(length = VERSION_NOTE_MAX_LENGTH)
private String versionNote;
@Enumerated(EnumType.STRING)
private VersionState versionState;
@ManyToOne
private Dataset dataset;
@OneToMany(mappedBy = "datasetVersion", cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
@OrderBy("label") // this is not our preferred ordering, which is with the AlphaNumericComparator, but does allow the files to be grouped by category
private List<FileMetadata> fileMetadatas = new ArrayList();
public List<FileMetadata> getFileMetadatas() {
return fileMetadatas;
}
public List<FileMetadata> getFileMetadatasSorted() {
Collections.sort(fileMetadatas, FileMetadata.compareByLabel);
return fileMetadatas;
}
public void setFileMetadatas(List<FileMetadata> fileMetadatas) {
this.fileMetadatas = fileMetadatas;
}
@OneToOne(cascade = {CascadeType.MERGE, CascadeType.PERSIST, CascadeType.REMOVE}, orphanRemoval=true)
@JoinColumn(name = "termsOfUseAndAccess_id")
private TermsOfUseAndAccess termsOfUseAndAccess;
public TermsOfUseAndAccess getTermsOfUseAndAccess() {
return termsOfUseAndAccess;
}
public void setTermsOfUseAndAccess(TermsOfUseAndAccess termsOfUseAndAccess) {
this.termsOfUseAndAccess = termsOfUseAndAccess;
}
@OneToMany(mappedBy = "datasetVersion", orphanRemoval = true, cascade = {CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
//@OrderBy("datasetField.displayOrder")
private List<DatasetField> datasetFields = new ArrayList();
public List<DatasetField> getDatasetFields() {
return datasetFields;
}
/**
* Sets the dataset fields for this version. Also updates the fields to
* have @{code this} as their dataset version.
* @param datasetFields
*/
public void setDatasetFields(List<DatasetField> datasetFields) {
for ( DatasetField dsf : datasetFields ) {
dsf.setDatasetVersion(this);
}
this.datasetFields = datasetFields;
}
/*
@OneToMany(mappedBy="studyVersion", cascade={CascadeType.REMOVE, CascadeType.PERSIST})
private List<VersionContributor> versionContributors;
*/
@Temporal(value = TemporalType.TIMESTAMP)
@Column( nullable=false )
private Date createTime;
@Temporal(value = TemporalType.TIMESTAMP)
@Column( nullable=false )
private Date lastUpdateTime;
@Temporal(value = TemporalType.TIMESTAMP)
private Date releaseTime;
@Temporal(value = TemporalType.TIMESTAMP)
private Date archiveTime;
public static final int ARCHIVE_NOTE_MAX_LENGTH = 1000;
@Column(length = ARCHIVE_NOTE_MAX_LENGTH)
private String archiveNote;
private String deaccessionLink;
private boolean inReview;
public void setInReview(boolean inReview){
this.inReview = inReview;
}
/**
* The only time a dataset can be in review is when it is in draft.
*/
public boolean isInReview() {
if (versionState != null && versionState.equals(VersionState.DRAFT)) {
return inReview;
} else {
return false;
}
}
/**
* Quick hack to disable <script> tags
* for Terms of Use and Terms of Access.
*
* Need to add jsoup or something similar.
*
* @param str
* @return
*/
private String stripScriptTags(String str){
    if (str == null){
        return null;
    }
    // First remove complete <script>...</script> elements, case-insensitively.
    str = str.replaceAll("(?i)<script\\b[^<]*(?:(?!<\\/script>)<[^<]*)*<\\/script>", "");
    // Then remove any orphaned closing or opening script tags left behind,
    // so an unbalanced tag cannot survive the first pass.
    str = str.replaceAll("(?i)<\\/script>", "");
    str = str.replaceAll("(?i)<script\\b", "");
    return str;
}
public Date getArchiveTime() {
return archiveTime;
}
public void setArchiveTime(Date archiveTime) {
this.archiveTime = archiveTime;
}
public String getArchiveNote() {
return archiveNote;
}
public void setArchiveNote(String note) {
// @todo should this be using bean validation for trsting note length?
if (note != null && note.length() > ARCHIVE_NOTE_MAX_LENGTH) {
throw new IllegalArgumentException("Error setting archiveNote: String length is greater than maximum (" + ARCHIVE_NOTE_MAX_LENGTH + ")."
+ " StudyVersion id=" + id + ", archiveNote=" + note);
}
this.archiveNote = note;
}
public String getDeaccessionLink() {
return deaccessionLink;
}
public void setDeaccessionLink(String deaccessionLink) {
this.deaccessionLink = deaccessionLink;
}
public GlobalId getDeaccessionLinkAsGlobalId() {
return new GlobalId(deaccessionLink);
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public Date getLastUpdateTime() {
return lastUpdateTime;
}
public void setLastUpdateTime(Date lastUpdateTime) {
if (createTime == null) {
createTime = lastUpdateTime;
}
this.lastUpdateTime = lastUpdateTime;
}
public String getVersionDate() {
if (this.lastUpdateTime == null){
return null;
}
return new SimpleDateFormat("MMMM d, yyyy").format(lastUpdateTime);
}
public String getVersionYear() {
return new SimpleDateFormat("yyyy").format(lastUpdateTime);
}
public Date getReleaseTime() {
return releaseTime;
}
public void setReleaseTime(Date releaseTime) {
this.releaseTime = releaseTime;
}
@OneToMany(mappedBy="datasetVersion", cascade={CascadeType.REMOVE, CascadeType.MERGE, CascadeType.PERSIST})
private List<DatasetVersionUser> datasetVersionUsers;
public List<DatasetVersionUser> getDatasetVersionUsers() {
return datasetVersionUsers;
}
public void setUserDatasets(List<DatasetVersionUser> datasetVersionUsers) {
this.datasetVersionUsers = datasetVersionUsers;
}
public List<String> getVersionContributorIdentifiers() {
if (this.getDatasetVersionUsers() == null) {
return Collections.emptyList();
}
List<String> ret = new LinkedList<>();
for (DatasetVersionUser contributor : this.getDatasetVersionUsers()) {
ret.add(contributor.getAuthenticatedUser().getIdentifier());
}
return ret;
}
@Transient
private String contributorNames;
public String getContributorNames() {
return contributorNames;
}
public void setContributorNames(String contributorNames) {
this.contributorNames = contributorNames;
}
public String getVersionNote() {
return versionNote;
}
public DatasetVersionDifference getDefaultVersionDifference() {
// if version is deaccessioned ignore it for differences purposes
int index = 0;
int size = this.getDataset().getVersions().size();
if (this.isDeaccessioned()) {
return null;
}
for (DatasetVersion dsv : this.getDataset().getVersions()) {
if (this.equals(dsv)) {
if ((index + 1) <= (size - 1)) {
for (DatasetVersion dvTest : this.getDataset().getVersions().subList(index + 1, size)) {
if (!dvTest.isDeaccessioned()) {
DatasetVersionDifference dvd = new DatasetVersionDifference(this, dvTest);
return dvd;
}
}
}
}
index++;
}
return null;
}
public VersionState getPriorVersionState() {
int index = 0;
int size = this.getDataset().getVersions().size();
if (this.isDeaccessioned()) {
return null;
}
for (DatasetVersion dsv : this.getDataset().getVersions()) {
if (this.equals(dsv)) {
if ((index + 1) <= (size - 1)) {
for (DatasetVersion dvTest : this.getDataset().getVersions().subList(index + 1, size)) {
return dvTest.getVersionState();
}
}
}
index++;
}
return null;
}
public void setVersionNote(String note) {
if (note != null && note.length() > VERSION_NOTE_MAX_LENGTH) {
throw new IllegalArgumentException("Error setting versionNote: String length is greater than maximum (" + VERSION_NOTE_MAX_LENGTH + ")."
+ " StudyVersion id=" + id + ", versionNote=" + note);
}
this.versionNote = note;
}
public Long getVersionNumber() {
return versionNumber;
}
public void setVersionNumber(Long versionNumber) {
this.versionNumber = versionNumber;
}
public Long getMinorVersionNumber() {
return minorVersionNumber;
}
public void setMinorVersionNumber(Long minorVersionNumber) {
this.minorVersionNumber = minorVersionNumber;
}
public String getFriendlyVersionNumber(){
if (this.isDraft()) {
return "DRAFT";
} else {
return versionNumber.toString() + "." + minorVersionNumber.toString();
}
}
public VersionState getVersionState() {
return versionState;
}
public void setVersionState(VersionState versionState) {
this.versionState = versionState;
}
public boolean isReleased() {
return versionState.equals(VersionState.RELEASED);
}
public boolean isDraft() {
return versionState.equals(VersionState.DRAFT);
}
public boolean isWorkingCopy() {
return versionState.equals(VersionState.DRAFT);
}
public boolean isArchived() {
return versionState.equals(VersionState.ARCHIVED);
}
public boolean isDeaccessioned() {
return versionState.equals(VersionState.DEACCESSIONED);
}
public boolean isRetiredCopy() {
return (versionState.equals(VersionState.ARCHIVED) || versionState.equals(VersionState.DEACCESSIONED));
}
/**
 * Whether this version qualifies as a minor (x.y+1) update relative to the
 * released version: no released version exists yet, or both versions
 * reference exactly the same set of data files. A draft that directly
 * follows a deaccessioned version never qualifies.
 *
 * @return {@code true} if a minor version bump is permitted
 */
public boolean isMinorUpdate() {
    // A draft sitting on top of a deaccessioned version cannot be minor.
    if (this.dataset.getLatestVersion().isWorkingCopy()) {
        if (this.dataset.getVersions().size() > 1 && this.dataset.getVersions().get(1) != null) {
            if (this.dataset.getVersions().get(1).isDeaccessioned()) {
                return false;
            }
        }
    }
    if (this.getDataset().getReleasedVersion() != null) {
        if (this.getFileMetadatas().size() != this.getDataset().getReleasedVersion().getFileMetadatas().size()) {
            // File count changed, so files were added or removed.
            return false;
        } else {
            List<DataFile> current = new ArrayList<>();
            List<DataFile> previous = new ArrayList<>();
            for (FileMetadata fmdc : this.getFileMetadatas()) {
                current.add(fmdc.getDataFile());
            }
            for (FileMetadata fmdc : this.getDataset().getReleasedVersion().getFileMetadatas()) {
                previous.add(fmdc.getDataFile());
            }
            // Minor only if every previously released file is still present.
            for (DataFile fmd : current) {
                previous.remove(fmd);
            }
            return previous.isEmpty();
        }
    }
    return true;
}
public void updateDefaultValuesFromTemplate(Template template) {
if (!template.getDatasetFields().isEmpty()) {
this.setDatasetFields(this.copyDatasetFields(template.getDatasetFields()));
}
if (template.getTermsOfUseAndAccess() != null) {
TermsOfUseAndAccess terms = template.getTermsOfUseAndAccess().copyTermsOfUseAndAccess();
terms.setDatasetVersion(this);
this.setTermsOfUseAndAccess(terms);
} else {
TermsOfUseAndAccess terms = new TermsOfUseAndAccess();
terms.setDatasetVersion(this);
terms.setLicense(TermsOfUseAndAccess.License.CC0);
terms.setDatasetVersion(this);
this.setTermsOfUseAndAccess(terms);
}
}
public void initDefaultValues() {
//first clear then initialize - in case values were present
// from template or user entry
this.setDatasetFields(new ArrayList());
this.setDatasetFields(this.initDatasetFields());
TermsOfUseAndAccess terms = new TermsOfUseAndAccess();
terms.setDatasetVersion(this);
terms.setLicense(TermsOfUseAndAccess.License.CC0);
this.setTermsOfUseAndAccess(terms);
}
public DatasetVersion getMostRecentlyReleasedVersion() {
if (this.isReleased()) {
return this;
} else {
if (this.getDataset().isReleased()) {
for (DatasetVersion testVersion : this.dataset.getVersions()) {
if (testVersion.isReleased()) {
return testVersion;
}
}
}
}
return null;
}
public DatasetVersion getLargestMinorRelease() {
if (this.getDataset().isReleased()) {
for (DatasetVersion testVersion : this.dataset.getVersions()) {
if (testVersion.getVersionNumber() != null && testVersion.getVersionNumber().equals(this.getVersionNumber())) {
return testVersion;
}
}
}
return this;
}
public Dataset getDataset() {
return dataset;
}
public void setDataset(Dataset dataset) {
this.dataset = dataset;
}
@Override
public int hashCode() {
int hash = 0;
hash += (id != null ? id.hashCode() : 0);
return hash;
}
@Override
public boolean equals(Object object) {
// TODO: Warning - this method won't work in the case the id fields are not set
if (!(object instanceof DatasetVersion)) {
return false;
}
DatasetVersion other = (DatasetVersion) object;
if ((this.id == null && other.id != null) || (this.id != null && !this.id.equals(other.id))) {
return false;
}
return true;
}
@Override
public String toString() {
return "[DatasetVersion id:" + getId() + "]";
}
public boolean isLatestVersion() {
return this.equals(this.getDataset().getLatestVersion());
}
public String getTitle() {
String retVal = "";
for (DatasetField dsfv : this.getDatasetFields()) {
if (dsfv.getDatasetFieldType().getName().equals(DatasetFieldConstant.title)) {
retVal = dsfv.getDisplayValue();
}
}
return retVal;
}
public String getProductionDate() {
//todo get "Production Date" from datasetfieldvalue table
return "Production Date";
}
public String getDescription() {
for (DatasetField dsf : this.getDatasetFields()) {
if (dsf.getDatasetFieldType().getName().equals(DatasetFieldConstant.description)) {
String descriptionString = "";
if (dsf.getDatasetFieldCompoundValues() != null && dsf.getDatasetFieldCompoundValues().get(0) != null) {
DatasetFieldCompoundValue descriptionValue = dsf.getDatasetFieldCompoundValues().get(0);
for (DatasetField subField : descriptionValue.getChildDatasetFields()) {
if (subField.getDatasetFieldType().getName().equals(DatasetFieldConstant.descriptionText) && !subField.isEmptyForDisplay()) {
descriptionString = subField.getValue();
}
}
}
return MarkupChecker.sanitizeBasicHTML(descriptionString);
}
}
return "";
}
public List<String[]> getDatasetContacts(){
List <String[]> retList = new ArrayList<>();
for (DatasetField dsf : this.getDatasetFields()) {
Boolean addContributor = true;
String contributorName = "";
String contributorAffiliation = "";
if (dsf.getDatasetFieldType().getName().equals(DatasetFieldConstant.datasetContact)) {
for (DatasetFieldCompoundValue authorValue : dsf.getDatasetFieldCompoundValues()) {
for (DatasetField subField : authorValue.getChildDatasetFields()) {
if (subField.getDatasetFieldType().getName().equals(DatasetFieldConstant.datasetContactName)) {
if (subField.isEmptyForDisplay()) {
addContributor = false;
}
contributorName = subField.getDisplayValue();
}
if (subField.getDatasetFieldType().getName().equals(DatasetFieldConstant.datasetContactAffiliation)) {
contributorAffiliation = subField.getDisplayValue();
}
}
if (addContributor) {
String[] datasetContributor = new String[] {contributorName, contributorAffiliation};
retList.add(datasetContributor);
}
}
}
}
return retList;
}
public List<String[]> getDatasetProducers(){
List <String[]> retList = new ArrayList<>();
for (DatasetField dsf : this.getDatasetFields()) {
Boolean addContributor = true;
String contributorName = "";
String contributorAffiliation = "";
if (dsf.getDatasetFieldType().getName().equals(DatasetFieldConstant.producer)) {
for (DatasetFieldCompoundValue authorValue : dsf.getDatasetFieldCompoundValues()) {
for (DatasetField subField : authorValue.getChildDatasetFields()) {
if (subField.getDatasetFieldType().getName().equals(DatasetFieldConstant.producerName)) {
if (subField.isEmptyForDisplay()) {
addContributor = false;
}
contributorName = subField.getDisplayValue();
}
if (subField.getDatasetFieldType().getName().equals(DatasetFieldConstant.producerAffiliation)) {
contributorAffiliation = subField.getDisplayValue();
}
}
if (addContributor) {
String[] datasetContributor = new String[] {contributorName, contributorAffiliation};
retList.add(datasetContributor);
}
}
}
}
return retList;
}
public List<DatasetAuthor> getDatasetAuthors() {
//todo get "List of Authors" from datasetfieldvalue table
List <DatasetAuthor> retList = new ArrayList<>();
for (DatasetField dsf : this.getDatasetFields()) {
Boolean addAuthor = true;
if (dsf.getDatasetFieldType().getName().equals(DatasetFieldConstant.author)) {
for (DatasetFieldCompoundValue authorValue : dsf.getDatasetFieldCompoundValues()) {
DatasetAuthor datasetAuthor = new DatasetAuthor();
for (DatasetField subField : authorValue.getChildDatasetFields()) {
if (subField.getDatasetFieldType().getName().equals(DatasetFieldConstant.authorName)) {
if (subField.isEmptyForDisplay()) {
addAuthor = false;
}
datasetAuthor.setName(subField);
}
if (subField.getDatasetFieldType().getName().equals(DatasetFieldConstant.authorAffiliation)) {
datasetAuthor.setAffiliation(subField);
}
if (subField.getDatasetFieldType().getName().equals(DatasetFieldConstant.authorIdType)){
datasetAuthor.setIdType(subField.getDisplayValue());
}
if (subField.getDatasetFieldType().getName().equals(DatasetFieldConstant.authorIdValue)){
datasetAuthor.setIdValue(subField.getDisplayValue());
}
}
if (addAuthor) {
retList.add(datasetAuthor);
}
}
}
}
return retList;
}
public String getDatasetProducersString(){
String retVal = "";
for (DatasetField dsf : this.getDatasetFields()) {
if (dsf.getDatasetFieldType().getName().equals(DatasetFieldConstant.producer)) {
for (DatasetFieldCompoundValue authorValue : dsf.getDatasetFieldCompoundValues()) {
for (DatasetField subField : authorValue.getChildDatasetFields()) {
if (subField.getDatasetFieldType().getName().equals(DatasetFieldConstant.producerName)) {
if (retVal.isEmpty()){
retVal = subField.getDisplayValue();
} else {
retVal += ", " + subField.getDisplayValue();
}
}
}
}
}
}
return retVal;
}
public void setDatasetAuthors(List<DatasetAuthor> authors) {
// FIXME add the authores to the relevant fields
}
public String getCitation() {
return getCitation(false);
}
public String getCitation(boolean html) {
return new DataCitation(this).toString(html);
}
public Date getCitationDate() {
DatasetField citationDate = getDatasetField(this.getDataset().getCitationDateDatasetFieldType());
if (citationDate != null && citationDate.getDatasetFieldType().getFieldType().equals(FieldType.DATE)){
try {
return new SimpleDateFormat("yyyy").parse( citationDate.getValue() );
} catch (ParseException ex) {
Logger.getLogger(DatasetVersion.class.getName()).log(Level.SEVERE, null, ex);
}
}
return null;
}
public DatasetField getDatasetField(DatasetFieldType dsfType) {
if (dsfType != null) {
for (DatasetField dsf : this.getFlatDatasetFields()) {
if (dsf.getDatasetFieldType().equals(dsfType)) {
return dsf;
}
}
}
return null;
}
public String getDistributionDate() {
//todo get dist date from datasetfieldvalue table
for (DatasetField dsf : this.getDatasetFields()) {
if (DatasetFieldConstant.distributionDate.equals(dsf.getDatasetFieldType().getName())) {
String date = dsf.getValue();
return date;
}
}
return null;
}
public String getDistributorName() {
for (DatasetField dsf : this.getFlatDatasetFields()) {
if (DatasetFieldConstant.distributorName.equals(dsf.getDatasetFieldType().getName())) {
return dsf.getValue();
}
}
return null;
}
public String getRootDataverseNameforCitation(){
//Get root dataverse name for Citation
Dataverse root = this.getDataset().getOwner();
while (root.getOwner() != null) {
root = root.getOwner();
}
String rootDataverseName = root.getName();
if (!StringUtil.isEmpty(rootDataverseName)) {
return rootDataverseName + " Dataverse";
} else {
return "";
}
}
public List<DatasetDistributor> getDatasetDistributors() {
//todo get distributors from DatasetfieldValues
return new ArrayList();
}
public void setDatasetDistributors(List<DatasetDistributor> distributors) {
//todo implement
}
/**
 * Joins the names of all dataset distributors into a single
 * semicolon-separated string.
 *
 * @return the joined distributor names, or an empty string if there are none
 */
public String getDistributorNames() {
    StringBuilder names = new StringBuilder();
    for (DatasetDistributor distributor : this.getDatasetDistributors()) {
        // Same guard as before: only insert a separator once the
        // accumulated (trimmed) text is longer than one character.
        if (names.toString().trim().length() > 1) {
            names.append(";");
        }
        names.append(distributor.getName());
    }
    return names.toString();
}
public String getAuthorsStr() {
return getAuthorsStr(true);
}
public String getAuthorsStr(boolean affiliation) {
String str = "";
for (DatasetAuthor sa : getDatasetAuthors()) {
if (sa.getName() == null) {
break;
}
if (str.trim().length() > 1) {
str += "; ";
}
str += sa.getName().getValue();
if (affiliation) {
if (sa.getAffiliation() != null) {
if (!StringUtil.isEmpty(sa.getAffiliation().getValue())) {
str += " (" + sa.getAffiliation().getValue() + ")";
}
}
}
}
return str;
}
// TODO: clean up init methods and get them to work, cascading all the way down.
// right now, only work for one level of compound objects
private DatasetField initDatasetField(DatasetField dsf) {
if (dsf.getDatasetFieldType().isCompound()) {
for (DatasetFieldCompoundValue cv : dsf.getDatasetFieldCompoundValues()) {
// for each compound value; check the datasetfieldTypes associated with its type
for (DatasetFieldType dsfType : dsf.getDatasetFieldType().getChildDatasetFieldTypes()) {
boolean add = true;
for (DatasetField subfield : cv.getChildDatasetFields()) {
if (dsfType.equals(subfield.getDatasetFieldType())) {
add = false;
break;
}
}
if (add) {
cv.getChildDatasetFields().add(DatasetField.createNewEmptyChildDatasetField(dsfType, cv));
}
}
}
}
return dsf;
}
public List<DatasetField> initDatasetFields() {
//retList - Return List of values
List<DatasetField> retList = new ArrayList();
//Running into null on create new dataset
if (this.getDatasetFields() != null) {
for (DatasetField dsf : this.getDatasetFields()) {
retList.add(initDatasetField(dsf));
}
}
//Test to see that there are values for
// all fields in this dataset via metadata blocks
//only add if not added above
for (MetadataBlock mdb : this.getDataset().getOwner().getMetadataBlocks()) {
for (DatasetFieldType dsfType : mdb.getDatasetFieldTypes()) {
if (!dsfType.isSubField()) {
boolean add = true;
//don't add if already added as a val
for (DatasetField dsf : retList) {
if (dsfType.equals(dsf.getDatasetFieldType())) {
add = false;
break;
}
}
if (add) {
retList.add(DatasetField.createNewEmptyDatasetField(dsfType, this));
}
}
}
}
//sort via display order on dataset field
Collections.sort(retList, DatasetField.DisplayOrder);
return retList;
}
/**
* For the current server, create link back to this Dataset
*
* example:
* http://dvn-build.hmdc.harvard.edu/dataset.xhtml?id=72&versionId=25
*
* @param serverName
* @param dset
* @return
*/
public String getReturnToDatasetURL(String serverName, Dataset dset) {
    if (serverName == null) {
        return null;
    }
    if (dset == null) {
        dset = this.getDataset();
        if (dset == null) { // currently postgres allows this, see https://github.com/IQSS/dataverse/issues/828
            return null;
        }
    }
    // Bug fix: the versionId parameter was missing its "=", producing a
    // malformed query string like "...&versionId25" instead of "...&versionId=25".
    return serverName + "/dataset.xhtml?id=" + dset.getId() + "&versionId=" + this.getId();
}
public List<DatasetField> copyDatasetFields(List<DatasetField> copyFromList) {
List<DatasetField> retList = new ArrayList();
for (DatasetField sourceDsf : copyFromList) {
//the copy needs to have the current version
retList.add(sourceDsf.copy(this));
}
return retList;
}
public List<DatasetField> getFlatDatasetFields() {
return getFlatDatasetFields(getDatasetFields());
}
private List<DatasetField> getFlatDatasetFields(List<DatasetField> dsfList) {
List<DatasetField> retList = new LinkedList();
for (DatasetField dsf : dsfList) {
retList.add(dsf);
if (dsf.getDatasetFieldType().isCompound()) {
for (DatasetFieldCompoundValue compoundValue : dsf.getDatasetFieldCompoundValues()) {
retList.addAll(getFlatDatasetFields(compoundValue.getChildDatasetFields()));
}
}
}
return retList;
}
/**
 * Human-readable version label: "DRAFT" for a draft, otherwise
 * "major.minor" (e.g. "1.2") for every other state, including
 * deaccessioned versions.
 */
public String getSemanticVersion() {
    /**
     * Not prepending a "v" like "v1.1" or "v2.0" because while SemVerTag
     * was in http://semver.org/spec/v1.0.0.html but later removed in
     * http://semver.org/spec/v2.0.0.html
     *
     * See also to v or not to v · Issue #1 · mojombo/semver -
     * https://github.com/mojombo/semver/issues/1#issuecomment-2605236
     */
    if (this.isDraft()) {
        return VersionState.DRAFT.toString();
    }
    // The previous released / deaccessioned / fall-through branches all
    // returned the identical string, so they are collapsed into one.
    return versionNumber + "." + minorVersionNumber;
}
/**
 * Runs bean validation over every flattened dataset field, recording the
 * first violation message on each offending field.
 *
 * @return the list of all constraint violations found (empty when valid)
 */
public List<ConstraintViolation> validateRequired() {
    // Renamed from the garbled "returnListreturnList".
    List<ConstraintViolation> violations = new ArrayList<>();
    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
    Validator validator = factory.getValidator();
    for (DatasetField dsf : this.getFlatDatasetFields()) {
        dsf.setValidationMessage(null); // clear out any existing validation message
        Set<ConstraintViolation<DatasetField>> constraintViolations = validator.validate(dsf);
        for (ConstraintViolation<DatasetField> constraintViolation : constraintViolations) {
            dsf.setValidationMessage(constraintViolation.getMessage());
            violations.add(constraintViolation);
            break; // currently only support one message, so we can break out of the loop after the first constraint violation
        }
    }
    return violations;
}
/**
 * Runs bean validation over every flattened dataset field and each of its
 * values, recording the first violation message on each offending object.
 *
 * @return the set of all constraint violations found (empty when valid)
 */
public Set<ConstraintViolation> validate() {
    Set<ConstraintViolation> returnSet = new HashSet<>();
    ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
    Validator validator = factory.getValidator();
    for (DatasetField dsf : this.getFlatDatasetFields()) {
        dsf.setValidationMessage(null); // clear out any existing validation message
        Set<ConstraintViolation<DatasetField>> fieldViolations = validator.validate(dsf);
        for (ConstraintViolation<DatasetField> violation : fieldViolations) {
            dsf.setValidationMessage(violation.getMessage());
            returnSet.add(violation);
            break; // currently only support one message, so we can break out of the loop after the first constraint violation
        }
        for (DatasetFieldValue dsfv : dsf.getDatasetFieldValues()) {
            dsfv.setValidationMessage(null); // clear out any existing validation message
            Set<ConstraintViolation<DatasetFieldValue>> valueViolations = validator.validate(dsfv);
            for (ConstraintViolation<DatasetFieldValue> violation : valueViolations) {
                dsfv.setValidationMessage(violation.getMessage());
                returnSet.add(violation);
                break; // currently only support one message, so we can break out of the loop after the first constraint violation
            }
        }
    }
    return returnSet;
}
}
| apache-2.0 |
tumf/swift3 | swift3/test/unit/test_acl.py | 8333 | # Copyright (c) 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from cStringIO import StringIO
from swift.common.swob import Request, HTTPAccepted
from swift3.test.unit import Swift3TestCase
from swift3.etree import fromstring, tostring, Element, SubElement, XMLNS_XSI
from swift3.test.unit.test_s3_acl import s3acl
import mock
from swift3.response import InvalidArgument
from swift3.acl_utils import handle_acl_header
class TestSwift3Acl(Swift3TestCase):
def setUp(self):
    super(TestSwift3Acl, self).setUp()
    # Every ACL API call must target an existing bucket, so register a
    # stub PUT response for /bucket up front.
    self.swift.register('PUT', '/v1/AUTH_test/bucket',
                        HTTPAccepted, {}, None)
def _check_acl(self, owner, body):
    """Assert that *body* is an AccessControlPolicy XML document whose
    single grant gives FULL_CONTROL to the grantee with ID *owner*."""
    elem = fromstring(body, 'AccessControlPolicy')
    permission = elem.find('./AccessControlList/Grant/Permission').text
    self.assertEquals(permission, 'FULL_CONTROL')
    name = elem.find('./AccessControlList/Grant/Grantee/ID').text
    self.assertEquals(name, owner)
def test_bucket_acl_GET(self):
req = Request.blank('/bucket?acl',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac'})
status, headers, body = self.call_swift3(req)
self._check_acl('test:tester', body)
def test_bucket_acl_PUT(self):
elem = Element('AccessControlPolicy')
owner = SubElement(elem, 'Owner')
SubElement(owner, 'ID').text = 'id'
acl = SubElement(elem, 'AccessControlList')
grant = SubElement(acl, 'Grant')
grantee = SubElement(grant, 'Grantee', nsmap={'xsi': XMLNS_XSI})
grantee.set('{%s}type' % XMLNS_XSI, 'Group')
SubElement(grantee, 'URI').text = \
'http://acs.amazonaws.com/groups/global/AllUsers'
SubElement(grant, 'Permission').text = 'READ'
xml = tostring(elem)
req = Request.blank('/bucket?acl',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac'},
body=xml)
status, headers, body = self.call_swift3(req)
self.assertEquals(status.split()[0], '200')
req = Request.blank('/bucket?acl',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input': StringIO(xml)},
headers={'Authorization': 'AWS test:tester:hmac',
'Transfer-Encoding': 'chunked'})
self.assertIsNone(req.content_length)
self.assertIsNone(req.message_length())
status, headers, body = self.call_swift3(req)
self.assertEquals(status.split()[0], '200')
def test_bucket_canned_acl_PUT(self):
req = Request.blank('/bucket?acl',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'X-AMZ-ACL': 'public-read'})
status, headers, body = self.call_swift3(req)
self.assertEquals(status.split()[0], '200')
@s3acl(s3acl_only=True)
def test_bucket_canned_acl_PUT_with_s3acl(self):
req = Request.blank('/bucket?acl',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'X-AMZ-ACL': 'public-read'})
with mock.patch('swift3.request.handle_acl_header') as mock_handler:
status, headers, body = self.call_swift3(req)
self.assertEquals(status.split()[0], '200')
self.assertEquals(mock_handler.call_count, 0)
def test_bucket_fails_with_both_acl_header_and_xml_PUT(self):
elem = Element('AccessControlPolicy')
owner = SubElement(elem, 'Owner')
SubElement(owner, 'ID').text = 'id'
acl = SubElement(elem, 'AccessControlList')
grant = SubElement(acl, 'Grant')
grantee = SubElement(grant, 'Grantee', nsmap={'xsi': XMLNS_XSI})
grantee.set('{%s}type' % XMLNS_XSI, 'Group')
SubElement(grantee, 'URI').text = \
'http://acs.amazonaws.com/groups/global/AllUsers'
SubElement(grant, 'Permission').text = 'READ'
xml = tostring(elem)
req = Request.blank('/bucket?acl',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac',
'X-AMZ-ACL': 'public-read'},
body=xml)
status, headers, body = self.call_swift3(req)
self.assertEquals(self._get_error_code(body),
'UnexpectedContent')
def test_object_acl_GET(self):
req = Request.blank('/bucket/object?acl',
environ={'REQUEST_METHOD': 'GET'},
headers={'Authorization': 'AWS test:tester:hmac'})
status, headers, body = self.call_swift3(req)
self._check_acl('test:tester', body)
def test_invalid_xml(self):
req = Request.blank('/bucket?acl',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Authorization': 'AWS test:tester:hmac'},
body='invalid')
status, headers, body = self.call_swift3(req)
self.assertEquals(self._get_error_code(body), 'MalformedACLError')
def test_handle_acl_header(self):
def check_generated_acl_header(acl, targets):
req = Request.blank('/bucket',
headers={'X-Amz-Acl': acl})
handle_acl_header(req)
for target in targets:
self.assertTrue(target[0] in req.headers)
self.assertEquals(req.headers[target[0]], target[1])
check_generated_acl_header('public-read',
[('X-Container-Read', '.r:*,.rlistings')])
check_generated_acl_header('public-read-write',
[('X-Container-Read', '.r:*,.rlistings'),
('X-Container-Write', '.r:*')])
check_generated_acl_header('private',
[('X-Container-Read', '.'),
('X-Container-Write', '.')])
@s3acl(s3acl_only=True)
def test_handle_acl_header_with_s3acl(self):
def check_generated_acl_header(acl, targets):
req = Request.blank('/bucket',
headers={'X-Amz-Acl': acl})
for target in targets:
self.assertTrue(target not in req.headers)
self.assertTrue('HTTP_X_AMZ_ACL' in req.environ)
# TODO: add transration and assertion for s3acl
check_generated_acl_header('public-read',
['X-Container-Read'])
check_generated_acl_header('public-read-write',
['X-Container-Read', 'X-Container-Write'])
check_generated_acl_header('private',
['X-Container-Read', 'X-Container-Write'])
def test_handle_acl_with_invalid_header_string(self):
req = Request.blank('/bucket', headers={'X-Amz-Acl': 'invalid'})
with self.assertRaises(InvalidArgument) as cm:
handle_acl_header(req)
self.assertTrue('argument_name' in cm.exception.info)
self.assertEquals(cm.exception.info['argument_name'], 'x-amz-acl')
self.assertTrue('argument_value' in cm.exception.info)
self.assertEquals(cm.exception.info['argument_value'], 'invalid')
if __name__ == '__main__':
    # Allow running this test module directly as a script.
    unittest.main()
| apache-2.0 |
Epi-Info/Epi-Info-Community-Edition | Epi.Core/Project.cs | 61131 | using System;
using System.Data;
using System.Globalization;
using System.IO;
using System.Text;
using System.Windows.Forms;
using System.Xml;
using System.Security.Cryptography;
using Epi;
using System.Collections.Generic;
using Epi.Collections;
using Epi.Data;
using Epi.Fields;
using Epi.Resources;
using Epi.Data.Services;
namespace Epi
{
/// <summary>
/// Class Project
/// </summary>
public class Project : INamedObject, IDisposable // Project
{
#region Public Events
/// <summary>
/// Event Handler for TableCopyStatusEvent
/// </summary>
public event TableCopyStatusEventHandler TableCopyStatusEvent;
/// <summary>
/// Raise the TableCopyStatus Event
/// </summary>
/// <param name="tableName">Table name</param>
/// <param name="recordCount">Record Count</param>
public void RaiseEventTableCopyStatus(string tableName, int recordCount)
{
if (this.TableCopyStatusEvent != null)
{
this.TableCopyStatusEvent(this, new TableCopyStatusEventArgs(tableName, recordCount));
}
}
#endregion Public Events
#region Private class members
/// <summary>
/// Collection of project views.
/// </summary>
public ViewCollection views = null;
/// <summary>
/// List of project pages.
/// </summary>
///
public List<Page> pages = null;
private Guid id;
private XmlDocument xmlDoc = null;
private const int currentSchemaVersion = 102;
private XmlElement currentViewElement;
private string collectedDataConnectionString;
bool _useMetaDataSet = false;
#endregion Private class members
#region Protected Class Members
/// <summary>
/// Project metadata accessor.
/// </summary>
protected IMetadataProvider metadata = null;
/// <summary>
/// Project collected data accessor.
/// </summary>
protected CollectedDataProvider collectedData = null;
#endregion Protected Class Members
#region Constructors
/// <summary>
/// Default Constructors
/// </summary>
public Project()
{
//isNew = true;
PreConstruct();
// Add root element and attributes
XmlElement root = xmlDoc.CreateElement("Project");
xmlDoc.AppendChild(root);
// Add attributes of the root node
XmlAttribute attr = xmlDoc.CreateAttribute("id");
root.Attributes.Append(attr);
attr = xmlDoc.CreateAttribute("name");
root.Attributes.Append(attr);
attr = xmlDoc.CreateAttribute("location");
root.Attributes.Append(attr);
attr = xmlDoc.CreateAttribute("description");
root.Attributes.Append(attr);
attr = xmlDoc.CreateAttribute("schemaVersion");
attr.Value = currentSchemaVersion.ToString();
root.Attributes.Append(attr);
attr = xmlDoc.CreateAttribute("epiVersion");
root.Attributes.Append(attr = xmlDoc.CreateAttribute("epiVersion"));
attr = xmlDoc.CreateAttribute("createDate");
attr.Value = DateTime.Now.ToString(CultureInfo.InvariantCulture.DateTimeFormat);
root.Attributes.Append(attr);
attr = xmlDoc.CreateAttribute("useMetadataDbForCollectedData");
attr.Value = string.Empty;
root.Attributes.Append(attr);
attr = xmlDoc.CreateAttribute("useBackgroundOnAllPages");
attr.Value = Epi.Defaults.UseBackgroundOnAllPages.ToString();
root.Attributes.Append(attr);
// Add Collected data node
XmlElement xCollectedData = xmlDoc.CreateElement("CollectedData");
root.AppendChild(xCollectedData);
XmlElement xDb = xmlDoc.CreateElement("Database");
xCollectedData.AppendChild(xDb);
xDb.Attributes.Append(xmlDoc.CreateAttribute("connectionString"));
xDb.Attributes.Append(xmlDoc.CreateAttribute("dataDriver"));
// Add Metadata node.
XmlElement xMetadata = xmlDoc.CreateElement("Metadata");
root.AppendChild(xMetadata);
attr = xmlDoc.CreateAttribute("source");
attr.Value = ((int)MetadataSource.Unknown).ToString();
xMetadata.Attributes.Append(attr);
// Add Enter and MakeView Interpreter node.
XmlElement xEnter_MakeViewInterpreter = xmlDoc.CreateElement("EnterMakeviewInterpreter");
root.AppendChild(xEnter_MakeViewInterpreter);
attr = xmlDoc.CreateAttribute("source");
attr.Value = "Epi.Core.EnterInterpreter";
xEnter_MakeViewInterpreter.Attributes.Append(attr);
}
        /// <summary>
        /// Opens an existing project from a .prj file.
        /// </summary>
        /// <param name="filePath">Path of the project (.prj) file.</param>
        public Project(string filePath)
        {
            Construct(filePath);
        }
        /// <summary>
        /// Opens an existing project from a .prj file, optionally using the
        /// DataSet-backed metadata provider instead of the database provider.
        /// </summary>
        /// <param name="filePath">Path of the project (.prj) file.</param>
        /// <param name="useMetaDataTable">True to use the DataSet-based metadata provider.</param>
        public Project(string filePath, bool useMetaDataTable)
        {
            _useMetaDataSet = useMetaDataTable;
            Construct(filePath);
        }
        // Shared initialization for all constructors: creates the empty XML
        // document, the driver-info holders and the collected-data provider.
        private void PreConstruct()
        {
            xmlDoc = new XmlDocument();
            metaDbInfo = new DbDriverInfo();
            collectedDataDbInfo = new DbDriverInfo();
            collectedData = new CollectedDataProvider(this);
        }
        /// <summary>
        /// Loads the project definition from a .prj file and initializes the
        /// collected-data and metadata providers accordingly.
        /// </summary>
        /// <param name="filePath">Path of the project file; environment variables are expanded.</param>
        private void Construct(string filePath)
        {
            PreConstruct();
            try
            {
                filePath = Environment.ExpandEnvironmentVariables(filePath);
                xmlDoc.Load(filePath);
                ValidateXmlDoc();
                FileInfo fileInfo = new FileInfo(filePath);
                // Keep the stored project location in sync with where the
                // file was actually opened from (projects may be moved);
                // re-save the .prj when the location changed.
                if (string.IsNullOrEmpty(Location))
                {
                    Location = fileInfo.DirectoryName;
                    Save();
                }
                else
                {
                    if (string.Compare(fileInfo.DirectoryName, Location, true) != 0)
                    {
                        Location = fileInfo.DirectoryName;
                        Save();
                    }
                }
                // The driver string has the form "FactoryType, AssemblyName".
                string[] Driver = this.CollectedDataDriver.Split(',');
                // If the assembly part names the web driver, switch to web
                // mode and map the factory type to the matching local driver.
                if (Driver[1].Trim().ToLowerInvariant() == Configuration.WebDriver.ToLowerInvariant())
                {
                    this.collectedData.IsWebMode = true;
                    switch (Driver[0].Trim())
                    {
                        case "Epi.Data.MySQL.MySQLDBFactory":
                            this.CollectedDataDriver = Configuration.MySQLDriver;
                            break;
                        case "Epi.Data.Office.AccessDBFactory":
                            this.CollectedDataDriver = Configuration.AccessDriver;
                            break;
                        case "Epi.Data.SqlServer.SqlDBFactory":
                        default:
                            this.CollectedDataDriver = Configuration.SqlDriver;
                            break;
                    }
                }
                this.collectedDataDbInfo.DBCnnStringBuilder.ConnectionString = this.CollectedDataConnectionString;
                collectedData.Initialize(this.collectedDataDbInfo, this.CollectedDataDriver, false);
                // Choose the metadata provider based on where the metadata lives.
                if (MetadataSource == MetadataSource.Xml)
                {
                    metadata = new MetadataXmlProvider(this);
                }
                else
                {
                    if (_useMetaDataSet)
                    {
                        metadata = new MetadataDataSet(this);
                    }
                    else
                    {
                        metadata = new MetadataDbProvider(this);
                        if (MetadataSource == MetadataSource.SameDb)
                        {
                            // Metadata shares the collected-data connection.
                            metadata.AttachDbDriver(CollectedData.GetDbDriver());
                        }
                        else
                        {
                            this.metaDbInfo.DBCnnStringBuilder.ConnectionString = this.MetadataConnectionString;
                            metadata.Initialize(this.metaDbInfo, this.MetadataDriver, false);
                        }
                    }
                }
            }
            finally
            {
                // NOTE(review): intentionally empty — exceptions propagate to
                // the caller unchanged.
            }
        }
#endregion Constructors
#region Public Properties
public bool UseMetaDataSet
{
set
{
_useMetaDataSet = value;
}
}
/// <summary>
/// Gets/sets the path name of project file.
/// </summary>
public string Location
{
get
{
return xmlDoc.DocumentElement.Attributes["location"].Value;
}
set
{
xmlDoc.DocumentElement.Attributes["location"].Value = value;
}
}
public string EnterMakeviewIntepreter
{
get
{
return xmlDoc.DocumentElement["EnterMakeviewInterpreter"].Attributes["source"].Value;
}
set
{
xmlDoc.DocumentElement["EnterMakeviewInterpreter"].Attributes["source"].Value = value;
}
}
/// <summary>
/// Gets project display name.
/// </summary>
public string DisplayName
{
get
{
return Name;
}
}
/// <summary>
/// Project name.
/// </summary>
public string Name
{
get
{
return xmlDoc.DocumentElement.Attributes["name"].Value;
}
set
{
xmlDoc.DocumentElement.Attributes["name"].Value = value;
}
}
/// <summary>
/// The width of the panel that contains the controls.
/// </summary>
public string PageWidth
{
get
{
return xmlDoc.DocumentElement.Attributes["pageWidth"].Value;
}
set
{
xmlDoc.DocumentElement.Attributes["pageWidth"].Value = value;
}
}
/// <summary>
/// The height of the panel that contains the controls.
/// </summary>
public string PageHeight
{
get
{
return xmlDoc.DocumentElement.Attributes["pageHeight"].Value;
}
set
{
xmlDoc.DocumentElement.Attributes["pageHeight"].Value = value;
}
}
/// <summary>
/// Returns the file name of the project.
/// </summary>
public string FileName
{
get
{
if (!string.IsNullOrEmpty(Name))
{
return Name.Replace(FileExtension, string.Empty) + FileExtension;
}
else
{
return string.Empty;
}
}
}
/// <summary>
/// Returns the full name of the data source.
/// </summary>
public string FilePath
{
get
{
if (string.IsNullOrEmpty(Location) || string.IsNullOrEmpty(FileName))
{
return string.Empty;
}
else
{
return Path.Combine(Location, FileName);
}
}
}
/// <summary>
/// Returns the full name of the data source.
/// </summary>
public string FullName
{
get
{
return FilePath;
}
}
/// <summary>
/// Returns use metadata for collected data flag.
/// </summary>
public virtual bool UseMetadataDbForCollectedData
{
get
{
return bool.Parse(xmlDoc.DocumentElement.Attributes["useMetadataDbForCollectedData"].Value);
}
set
{
xmlDoc.DocumentElement.Attributes["useMetadataDbForCollectedData"].Value = value.ToString();
}
}
/// <summary>
/// Determines if the project is empty.
/// </summary>
public bool IsEmpty
{
get
{
return (string.IsNullOrEmpty(FullName));
}
}
/// <summary>
/// Project metadata.
/// </summary>
public IMetadataProvider Metadata
{
get
{
return metadata;
}
}
/// <summary>
/// Project collected data.
/// </summary>
public CollectedDataProvider CollectedData
{
get
{
return collectedData;
}
}
/// <summary>
/// Project metadata.
/// </summary>
public IMetadataProvider CodeData
{
get
{
return Metadata;
}
}
/// <summary>
/// Determines if this data source is actually an Epi (2000 or 7)collected data.
/// </summary>
public virtual bool IsEpiCollectedData
{
get
{
return false;
}
}
/// <summary>
/// Returns a globally unique identifier for the project.
/// </summary>
public System.Guid Id
{
get
{
if (id.Equals(Guid.Empty))
{
if (string.IsNullOrEmpty(FilePath))
{
//return Guid.Empty;
id = Guid.NewGuid();
}
else
{
id = Util.GetFileGuid(FilePath);
}
}
return id;
}
set
{
xmlDoc.DocumentElement.Attributes["id"].Value = value.ToString();
}
}
/// <summary>
/// Views of the project.
/// </summary>
public ViewCollection Views
{
get
{
if (views == null)
{
LoadViews();
}
return views;
}
}
/// <summary>
/// Returns the original Epi Info version of the project.
/// </summary>
public string EpiVersion
{
get
{
return xmlDoc.DocumentElement.Attributes["epiVersion"].Value;
}
}
/// <summary>
/// Returns date the project was created.
/// </summary>
public DateTime CreateDate
{
get
{
return DateTime.Parse(xmlDoc.DocumentElement.Attributes["createDate"].Value, CultureInfo.InvariantCulture.DateTimeFormat);
}
}
/// <summary>
/// Project description.
/// </summary>
public string Description
{
get
{
return xmlDoc.DocumentElement.Attributes["description"].Value;
}
set
{
xmlDoc.DocumentElement.Attributes["description"].Value = value;
}
}
        // Returns the XML node that holds the metadata database settings
        // (connectionString and dataDriver attributes).
        private XmlNode GetMetadataDbNode()
        {
            return xmlDoc.DocumentElement.SelectSingleNode("/Project/Metadata/Database");
        }
        // Returns the XML node that holds the collected-data database settings.
        private XmlNode GetCollectedDataDbNode()
        {
            return xmlDoc.DocumentElement.SelectSingleNode("/Project/CollectedData/Database");
        }
/// <summary>
/// Connection string for the Metadata database.
/// </summary>
public string MetadataConnectionString
{
get
{
return GetMetadataDbNode().Attributes["connectionString"].Value;
}
set
{
GetMetadataDbNode().Attributes["connectionString"].Value = value;
}
}
/// <summary>
/// Driver name for the Metadata database.
/// </summary>
public string MetadataDriver
{
get
{
return GetMetadataDbNode().Attributes["dataDriver"].Value;
}
set
{
GetMetadataDbNode().Attributes["dataDriver"].Value = value.ToString();
}
}
private DbDriverInfo metaDbInfo;
/// <summary>
/// Information for the Metadata database.
/// </summary>
public DbDriverInfo MetaDbInfo
{
get
{
return metaDbInfo;
}
set
{
metaDbInfo = value;
}
}
private DbDriverInfo collectedDataDbInfo;
/// <summary>
/// Information for the Collected database.
/// </summary>
public DbDriverInfo CollectedDataDbInfo
{
get
{
return collectedDataDbInfo;
}
set
{
collectedDataDbInfo = value;
}
}
/// <summary>
/// Gets/sets the metadata source. Possible values are Database and Xml.
/// </summary>
public Epi.MetadataSource MetadataSource
{
get
{
XmlNode metadataNode = GetMetadataNode();
XmlAttribute sourceAttribute = metadataNode.Attributes.GetNamedItem("source") as XmlAttribute;
if (sourceAttribute == null)
{
return MetadataSource.Unknown;
}
else
{
return (MetadataSource)int.Parse(sourceAttribute.Value);
}
}
set
{
XmlNode metadataNode = GetMetadataNode();
metadataNode.Attributes["source"].Value = ((int)value).ToString();
switch (value)
{
case MetadataSource.Xml:
metadata = new MetadataXmlProvider(this);
break;
case MetadataSource.SameDb:
metadata = new MetadataDbProvider(this);
break;
case MetadataSource.DifferentDb:
metadata = new MetadataDbProvider(this);
XmlElement xDb = xmlDoc.CreateElement("Database");
GetMetadataNode().AppendChild(xDb);
xDb.Attributes.Append(xmlDoc.CreateAttribute("connectionString"));
xDb.Attributes.Append(xmlDoc.CreateAttribute("dataDriver"));
break;
default:
break;
}
}
}
/// <summary>
/// Connection string for the Collected database.
/// </summary>
public string CollectedDataConnectionString
{
get
{
if (string.IsNullOrEmpty(collectedDataConnectionString))
{
collectedDataConnectionString = Configuration.Decrypt(GetCollectedDataDbNode().Attributes["connectionString"].Value);
}
if (this.CollectedDataDriver == "Epi.Data.Office.AccessDBFactory, Epi.Data.Office")
return this.SetOleDbDatabaseFilePath(collectedDataConnectionString);
else
return collectedDataConnectionString;
}
set
{
GetCollectedDataDbNode().Attributes["connectionString"].Value = Configuration.Encrypt(value);
collectedDataConnectionString = value;
}
}
/// <summary>
/// Data Driver for the Collected database.
/// </summary>
public string CollectedDataDriver
{
get
{
return GetCollectedDataDbNode().Attributes["dataDriver"].Value;
}
set
{
GetCollectedDataDbNode().Attributes["dataDriver"].Value = value.ToString();
}
}
#endregion Public Properties
#region Static Methods
        /// <summary>
        /// Checks the name of a project to make sure the syntax is valid.
        /// </summary>
        /// <param name="projectName">The name of the project to validate</param>
        /// <param name="validationStatus">The message that is passed back to the calling method regarding the status of the validation attempt</param>
        /// <returns>Whether or not the name passed validation; true for a valid name, false for an invalid name</returns>
        public static bool IsValidProjectName(string projectName, ref string validationStatus)
        {
            // assume valid by default
            bool valid = true;
            if (string.IsNullOrEmpty(projectName.Trim()))
            {
                // if the project name is empty, or just a series of spaces, invalidate it
                validationStatus = SharedStrings.MISSING_PROJECT_NAME;
                valid = false;
            }
            else if (projectName.Contains("'"))
            {
                // if the project name contains an apostrophe.
                validationStatus = SharedStrings.INVALID_PROJECT_NAME;
                valid = false;
            }
            else if (AppData.Instance.IsReservedWord(projectName))
            {
                // if the project name is a reserved word, invalidate it
                validationStatus = SharedStrings.INVALID_PROJECT_NAME_RESERVED_WORD;
                valid = false;
            }
            else if (projectName.Length > 64)
            {
                // names longer than 64 characters are rejected outright
                validationStatus = SharedStrings.INVALID_PROJECT_NAME_TOO_LONG;
                valid = false;
            }
            else
            {
                // if the project name is not empty or in the list of reserved words...
                System.Text.RegularExpressions.Match numMatch = System.Text.RegularExpressions.Regex.Match(projectName.Substring(0, 1), "[0-9]");
                if (numMatch.Success)
                {
                    // if the project name has numbers for the first character, invalidate it
                    // (a leading letter or underscore is acceptable here)
                    validationStatus = SharedStrings.PROJECT_NAME_BEGIN_NUMERIC;
                    valid = false;
                }
                // if the project name doesn't have a number as the first character...
                else
                {
                    // iterate over all of the characters in the project name
                    for (int i = 0; i < projectName.Length; i++)
                    {
                        string viewChar = projectName.Substring(i, 1);
                        System.Text.RegularExpressions.Match m = System.Text.RegularExpressions.Regex.Match(viewChar, "[A-Za-z0-9_]");
                        // if the project name does not consist of only letters, numbers and underscores...
                        if (!m.Success)
                        {
                            // we found an invalid character; invalidate the project name
                            validationStatus = SharedStrings.INVALID_PROJECT_NAME;
                            valid = false;
                            break; // stop the for loop here, no point in continuing
                        }
                    }
                }
            }
            return valid;
        }
#endregion // Static Methods
#region Public Methods
/// <summary>
/// Returns the Project Id
/// </summary>
/// <returns>The GUID representation of the project Id</returns>
public Guid GetProjectId()
{
return this.Id;
}
        /// <summary>
        /// Disposes the object.
        /// </summary>
        /// <remarks>
        /// Only drops references so the providers and view collection can be
        /// garbage collected; the Dispose calls on the members were
        /// deliberately commented out, so no deeper cleanup happens here.
        /// </remarks>
        public virtual void Dispose()
        {
            if (metadata != null)
            {
                //metadata.Dispose();
                metadata = null;
            }
            if (collectedData != null)
            {
                //collectedData.Dispose();
                collectedData = null;
            }
            if (views != null)
            {
                //views.Dispose();
                views = null;
            }
        }
/// <summary>
/// Saves the XML document for the project using the specified <see cref="System.Xml.XmlWriter"/>.
/// </summary>
public virtual void Save()
{
try
{
xmlDoc.Save(FilePath);
}
catch (UnauthorizedAccessException ex)
{
throw ex;
}
catch (XmlException xmlEx)
{
throw xmlEx;
}
}
/// <summary>
/// Returns the Xml document for the project.
/// </summary>
/// <returns>Xml Document object.</returns>
public XmlDocument GetXmlDocument()
{
return xmlDoc;
}
/// <summary>
/// Returns all views in the current project as a DataTable
/// </summary>
/// <returns>Contents of view's data table.</returns>
public virtual DataTable GetViewsAsDataTable()
{
return (Metadata.GetViewsAsDataTable());
}
/// <summary>
/// Returns list of tables that are <see cref="Epi.View"/>s.
/// </summary>
/// <returns>Listof view names</returns>
public virtual List<string> GetViewNames()
{
DataTable dt = Metadata.GetViewsAsDataTable();
List<String> list = new List<String>();
foreach (DataRow row in dt.Rows)
{
list.Add(row[ColumnNames.NAME].ToString());
}
return list;
}
        /// <summary>
        /// Returns the names of all top-level (non-related) views.
        /// </summary>
        /// <returns>List of parent view names; empty when there are none or when the metadata query returns nothing.</returns>
        public virtual List<string> GetParentViewNames()
        {
            DataTable dt = Metadata.GetViewsAsDataTable();
            List<String> list = new List<String>();
            //If SQL permissions denied, returns dt with no rows--checked here. den4 11/23/2010
            if (dt == null || dt.Rows.Count == 0 )
            {
                return list;
            }
            // Parent views are those not flagged as related (child) views.
            DataRow[] rows = dt.Select(ColumnNames.IS_RELATED_VIEW + "=false");
            foreach (DataRow row in rows)
            {
                list.Add(row[ColumnNames.NAME].ToString());
            }
            return list;
        }
/// <summary>
/// Is View (by name) flag.
/// </summary>
/// <remarks>
/// returns true if the named object is (or has) a view
/// </remarks>
/// <param name="name">Name of view to check.</param>
/// <returns>True/False</returns>
public bool IsView(string name)
{
// dcs0 If it's not in MetaViews - it's not a view - period!
//if (name.ToLowerInvariant().StartsWith("view"))
//{
// return true;
//}
//else
//{
//List<string> list = GetViewNames();
// dcs0 was case sensitive
foreach (string s in GetViewNames())
{
if (string.Compare(s, name, true) == 0)
{
return true;
}
//return (list.Contains(name));
}
return false;
// return GetViewNames().Contains(name);
}
/// <summary>
/// Returns a view by it's name
/// </summary>
/// <param name="viewName"></param>
/// <returns>Project <see cref="Epi.View"/></returns>
public View GetViewByName(string viewName)
{
//foreach (View view in GetViews())
foreach (View view in Views)
{
if (string.Compare(view.Name, viewName, true) == 0)
{
return view;
}
}
throw new System.ApplicationException(string.Format(SharedStrings.ERROR_LOADING_VIEW, viewName));
}
/// <summary>
/// Returns table column names.
/// </summary>
/// <param name="tableName">Name of table.</param>
/// <returns>Listof table column names.</returns>
public List<string> GetTableColumnNames(string tableName)
{
return CollectedData.GetTableColumnNames(tableName);
}
/// <summary>
/// Returns Primary key names.
/// </summary>
/// <param name="tableName">Name of table.</param>
/// <returns>List of primary key names.</returns>
public List<string> GetPrimaryKeyNames(string tableName)
{
DataTable dt = CollectedData.GetPrimaryKeysAsDataTable(tableName);
List<string> list = new List<string>();
foreach (DataRow row in dt.Rows)
{
list.Add(row[ColumnNames.COLUMN_NAME].ToString());
}
return list;
}
/// <summary>
/// Returns contents of all nonview (collected data) tables.
/// </summary>
/// <returns>Contents of nonview tables.</returns>
public virtual DataTable GetNonViewTablesAsDataTable()
{
return CollectedData.GetNonViewTablesAsDataTable();
}
/// <summary>
/// Returns names of all nonview (collected data) tables.
/// </summary>
/// <returns>List of nonview table names.</returns>
public List<string> GetNonViewTableNames()
{
DataTable dt = Metadata.GetNonViewTablesAsDataTable();
List<string> list = new List<string>();
foreach (DataRow row in dt.Rows)
{
list.Add(row[ColumnNames.NAME].ToString());
}
return list;
}
/// <summary>
/// Gets <see cref="Epi.View"/> by view Id.
/// </summary>
/// <param name="viewId">Id of <see cref="Epi.View"/> to get.</param>
/// <returns>Project <see cref="Epi.View"/></returns>
public View GetViewById(int viewId)
{
return (Views.GetViewById(viewId));
}
        /// <summary>
        /// Creates a new project: its folder, its collected-data database,
        /// its metadata tables, and the .prj file on disk. Prompts the user
        /// before overwriting an existing project file or existing Epi Info 7
        /// metadata tables.
        /// </summary>
        /// <param name="projectName">Name of the new project.</param>
        /// <param name="projectDescription">Free-text project description.</param>
        /// <param name="projectLocation">Directory under which a folder named after the project is created.</param>
        /// <param name="collectedDataDriver">Fully qualified data driver type string.</param>
        /// <param name="collectedDataDBInfo">Connection information for the collected-data database.</param>
        /// <returns>The created <see cref="Project"/>, or null when the user cancels an overwrite prompt.</returns>
        public virtual Project CreateProject(string projectName, string projectDescription, string projectLocation, string collectedDataDriver, DbDriverInfo collectedDataDBInfo)
        {
            Project newProject = new Project();
            newProject.Name = projectName;
            newProject.Location = Path.Combine(projectLocation, projectName);
            // For Jet/Access providers, place the .mdb next to the .prj file
            // with the same base name (".prj" is 4 characters, hence -4).
            if (collectedDataDBInfo.DBCnnStringBuilder.ContainsKey("Provider") && (collectedDataDBInfo.DBCnnStringBuilder["Provider"].ToString() == "Microsoft.Jet.OLEDB.4.0"))
            {
                collectedDataDBInfo.DBCnnStringBuilder["Data Source"] = newProject.FilePath.Substring(0, newProject.FilePath.Length - 4) + ".mdb";
            }
            if (!Directory.Exists(newProject.Location))
            {
                Directory.CreateDirectory(newProject.Location);
            }
            newProject.Id = newProject.GetProjectId();
            // Prompt before overwriting an existing .prj file.
            if (File.Exists(newProject.FilePath))
            {
                DialogResult dr = MessageBox.Show(string.Format(SharedStrings.PROJECT_ALREADY_EXISTS, newProject.FilePath), SharedStrings.PROJECT_ALREADY_EXISTS_TITLE, MessageBoxButtons.YesNo, MessageBoxIcon.Warning);
                switch (dr)
                {
                    case DialogResult.Yes:
                        break;
                    case DialogResult.No:
                        return null;
                }
            }
            newProject.Description = projectDescription;
            // Collected data ...
            newProject.CollectedDataDbInfo = collectedDataDBInfo;
            newProject.CollectedDataConnectionString = collectedDataDBInfo.DBCnnStringBuilder.ToString();
            newProject.CollectedDataDriver = collectedDataDriver;
            newProject.CollectedData.Initialize(collectedDataDBInfo, collectedDataDriver, true);
            // Check that there isn't an Epi 7 project already here.
            // NOTE(review): Access projects skip this check — presumably
            // because their database file was just created above; confirm.
            if (newProject.CollectedDataDriver != "Epi.Data.Office.AccessDBFactory, Epi.Data.Office")
            {
                // Presence of any of these metadata tables indicates an
                // existing Epi Info 7 project in the target database.
                List<string> tableNames = new List<string>();
                tableNames.Add("metaBackgrounds");
                tableNames.Add("metaDataTypes");
                tableNames.Add("metaDbInfo");
                tableNames.Add("metaFields");
                tableNames.Add("metaFieldTypes");
                tableNames.Add("metaGridColumns");
                tableNames.Add("metaImages");
                tableNames.Add("metaLayerRenderTypes");
                tableNames.Add("metaLayers");
                tableNames.Add("metaMapLayers");
                tableNames.Add("metaMapPoints");
                tableNames.Add("metaMaps");
                tableNames.Add("metaPages");
                tableNames.Add("metaPatterns");
                tableNames.Add("metaPrograms");
                tableNames.Add("metaViews");
                bool projectExists = false;
                foreach (string s in tableNames)
                {
                    if (newProject.CollectedData.TableExists(s))
                    {
                        projectExists = true;
                        break;
                    }
                }
                if (projectExists)
                {
                    DialogResult result = MessageBox.Show(SharedStrings.WARNING_PROJECT_MAY_ALREADY_EXIST, SharedStrings.WARNING_PROJECT_MAY_ALREADY_EXIST_SHORT, MessageBoxButtons.OKCancel, MessageBoxIcon.Warning);
                    if (result == DialogResult.Cancel)
                    {
                        Logger.Log(DateTime.Now + ": " + "Project creation aborted by user [" + System.Security.Principal.WindowsIdentity.GetCurrent().Name.ToString() + "] after being prompted to overwrite existing Epi Info 7 project metadata.");
                        return null;
                    }
                    else
                    {
                        Logger.Log(DateTime.Now + ": " + "Project creation proceeded by user [" + System.Security.Principal.WindowsIdentity.GetCurrent().Name.ToString() + "] after being prompted to overwrite existing Epi Info 7 project metadata.");
                    }
                }
            }
            Logger.Log(DateTime.Now + ": " + string.Format("Project [{0}] created in {1} by user [{2}].", newProject.Name, newProject.Location, System.Security.Principal.WindowsIdentity.GetCurrent().Name.ToString()));
            // Metadata ..
            newProject.MetadataSource = MetadataSource.SameDb;
            MetadataDbProvider typedMetadata = newProject.Metadata as MetadataDbProvider;
            typedMetadata.AttachDbDriver(newProject.CollectedData.GetDbDriver());
            typedMetadata.CreateMetadataTables();
            try
            {
                newProject.Save();
                return newProject;
            }
            catch (UnauthorizedAccessException ex)
            {
                // The project object is still usable in memory even when the
                // .prj file could not be written; inform the user and return.
                MessageBox.Show(ex.Message);
                return newProject;
            }
        }
        /// <summary>
        /// Creates a new top-level (parent) view with the given name.
        /// </summary>
        /// <param name="viewName">Name of the new view.</param>
        /// <returns>New project <see cref="Epi.View"/></returns>
        public View CreateView(string viewName)
        {
            return CreateView(viewName, false);
        }
        /// <summary>
        /// Creates a new view and registers it in the project metadata.
        /// </summary>
        /// <param name="viewName">New <see cref="Epi.View"/> name.</param>
        /// <param name="isChildView">Is Related (child) view flag.</param>
        /// <returns>New project <see cref="Epi.View"/></returns>
        public View CreateView(string viewName, bool isChildView)
        {
            View newView = new View(this);
            newView.Name = viewName;
            // The view's data table name mirrors the view name.
            newView.SetTableName(newView.Name);
            newView.IsRelatedView = isChildView;
            if (!Views.Contains(newView))
            {
                Views.Add(newView);
            }
            Metadata.InsertView(newView);
            currentViewElement = newView.ViewElement;
            // Reload the cached view collection so the new view is visible.
            LoadViews();
            return newView;
        }
/// <summary>
/// Returns a list of programs saved in the project.
/// </summary>
/// <returns>DataTable containing a list of programs</returns>
public virtual DataTable GetPgms()
{
return (Metadata.GetPgms());
}
/// <summary>
/// Returns a list of program names.
/// </summary>
/// <returns>List of program names.</returns>
public List<string> GetPgmNames()
{
DataTable dt = Metadata.GetPgms();
List<string> list = new List<String>();
foreach (DataRow row in dt.Rows)
{
list.Add(row[ColumnNames.NAME].ToString());
}
return list;
}
/// <summary>
/// Inserts a program into the database
/// </summary>
/// <param name="name">Name of the program</param>
/// <param name="content">Content of the program</param>
/// <param name="comment">Comment for the program</param>
/// <param name="author">Author of the program</param>
public virtual void InsertPgm(string name, string content, string comment, string author)
{
Metadata.InsertPgm(name, content, comment, author);
}
/// <summary>
/// Inserts a program into the database
/// </summary>
/// <param name="pgmRow">A DataRow with all of the parameters</param>
public virtual void InsertPgm(DataRow pgmRow)
{
// dcs TODO temporary; overload MetaData method
Metadata.InsertPgm(pgmRow[ColumnNames.PGM_NAME].ToString(), pgmRow[ColumnNames.PGM_CONTENT].ToString(),
pgmRow[ColumnNames.PGM_COMMENT].ToString(), pgmRow[ColumnNames.PGM_AUTHOR].ToString());
}
/// <summary>
/// Deletes a program from the database
/// </summary>
/// <param name="programName">Name of the program to be deleted</param>
/// <param name="programId">Id of the program to be deleted</param>
public virtual void DeletePgm(string programName, int programId)
{
Metadata.DeletePgm(programId);
}
/// <summary>
/// Updates a program saved in the database
/// </summary>
/// <param name="programId">Id of the program</param>
/// <param name="name">Name of the program</param>
/// <param name="content">Content of the program</param>
/// <param name="comment">Comment for the program</param>
/// <param name="author">Author of the program</param>
public virtual void UpdatePgm(int programId, string name, string content, string comment, string author)
{
Metadata.UpdatePgm(programId, name, content, comment, author);
}
/// <summary>
/// Returns a list of data table names.
/// </summary>
/// <remarks>Make same call as Project.GetDataTableNames().</remarks>
/// <returns>List of data table names.</returns>
public virtual List<string> GetDataTableList()
{
return (Metadata.GetDataTableList());
}
/// <summary>
/// Returns a list of data table names.
/// </summary>
/// <remarks>Alias for <see cref="GetDataTableList"/>; both delegate to the Metadata provider.</remarks>
/// <returns>List of data table names.</returns>
public List<string> GetDataTableNames()
{
    List<string> tableNames = Metadata.GetDataTableList();
    return tableNames;
}
/// <summary>
/// Create a new code table.
/// </summary>
/// <param name="tableName">Name of new code table.</param>
/// <param name="columnNames">List of new columns to create in new code table.</param>
public virtual void CreateCodeTable(string tableName, string[] columnNames)
{
    // Delegates table creation to the code-table data provider.
    CodeData.CreateCodeTable(tableName, columnNames);
}
/// <summary>
/// Create a new code table.
/// </summary>
/// <param name="tableName">Name of new code table.</param>
/// <param name="columnName">Name of column to create in new code table.</param>
public virtual void CreateCodeTable(string tableName, string columnName)
{
    // Single-column convenience overload; delegates to the code-table data provider.
    CodeData.CreateCodeTable(tableName, columnName);
}
/// <summary>
/// Save code table data.
/// </summary>
/// <param name="dataTable"><see cref="System.Data.DataTable"/> containing code table data.</param>
/// <param name="tableName">Name of code table.</param>
/// <param name="columnName">Name of code table column.</param>
public virtual void SaveCodeTableData(DataTable dataTable, string tableName, string columnName)
{
    // Single-column save; delegates to the code-table data provider.
    CodeData.SaveCodeTableData(dataTable, tableName, columnName);
}
/// <summary>
/// Save code table data
/// </summary>
/// <param name="dataTable"><see cref="System.Data.DataTable"/> containing code table data.</param>
/// <param name="tablename">Name of code table.</param>
/// <param name="columnNames">List of code table column names.</param>
public virtual void SaveCodeTableData(DataTable dataTable, string tablename, string[] columnNames)
{
    // Multi-column save; delegates to the code-table data provider.
    CodeData.SaveCodeTableData(dataTable, tablename, columnNames);
}
/// <summary>
/// Insert code table data
/// </summary>
/// <param name="dataTable"><see cref="System.Data.DataTable"/> containing code table data.</param>
/// <param name="tablename">Name of code table.</param>
/// <param name="columnNames">List of code table column names.</param>
public virtual void InsertCodeTableData(DataTable dataTable, string tablename, string[] columnNames)
{
    // Insert (as opposed to save/replace); delegates to the code-table data provider.
    CodeData.InsertCodeTableData(dataTable, tablename, columnNames);
}
/// <summary>
/// Obsolete calls to return code table data by code table name.
/// </summary>
/// <param name="codeTableName">Code table name</param>
/// <returns>Code table data.</returns>
[Obsolete("Use of DataTable in this context is no different than the use of a multidimensional System.Object array (not recommended).", false)]
public virtual DataTable GetCodeTableData(string codeTableName)
{
    // Marked obsolete (warning only, not error); kept for backward compatibility.
    return CodeData.GetCodeTableData(codeTableName);
}
/// <summary>
/// Returns table data, routing by table kind.
/// </summary>
/// <param name="tableName">Name of data table.</param>
/// <returns>Contents of data table.</returns>
public virtual DataTable GetTableData(string tableName)
{
    // Names beginning with "code" (case-insensitive) identify code tables;
    // everything else is served by the collected-data store.
    if (tableName.StartsWith("code", StringComparison.InvariantCultureIgnoreCase))
    {
        return CodeData.GetCodeTableData(tableName);
    }
    return CollectedData.GetTableData(tableName);
}
/// <summary>
/// Returns table data restricted to the given columns, routing by table kind.
/// </summary>
/// <param name="tableName">Name of data table.</param>
/// <param name="columnNames">List of column names in data table.</param>
/// <returns>Contents of data table.</returns>
public virtual DataTable GetTableData(string tableName, string columnNames)
{
    // Names beginning with "code" (case-insensitive) identify code tables;
    // everything else is served by the collected-data store.
    if (tableName.StartsWith("code", StringComparison.InvariantCultureIgnoreCase))
    {
        return CodeData.GetCodeTableData(tableName, columnNames);
    }
    return CollectedData.GetTableData(tableName, columnNames);
}
/// <summary>
/// Returns sorted table data restricted to the given columns, routing by table kind.
/// </summary>
/// <param name="tableName">Name of data table.</param>
/// <param name="columnNames">Name of column in data table.</param>
/// <param name="sortCriteria">Sort expression forwarded to the underlying provider.</param>
/// <returns>Contents of data table.</returns>
public virtual DataTable GetTableData(string tableName, string columnNames, string sortCriteria)
{
    // Names beginning with "code" (case-insensitive) identify code tables;
    // everything else is served by the collected-data store.
    if (tableName.StartsWith("code", StringComparison.InvariantCultureIgnoreCase))
    {
        return CodeData.GetCodeTableData(tableName, columnNames, sortCriteria);
    }
    return CollectedData.GetTableData(tableName, columnNames, sortCriteria);
}
/// <summary>
/// Creates a link to a table from this project
/// </summary>
/// <param name="linkName">Name of link to make.</param>
/// <param name="tableName">Name of table to link.</param>
/// <param name="connectionString">Remote table connection information.</param>
/// <remarks>NOTE(review): not implemented — intended to add the link to the
/// project's XML file; calling it currently has no effect.</remarks>
public virtual void CreateLinkTable(string linkName, string tableName, string connectionString)
{
    // ??? Add the link to the project's XML file
}
/// <summary>
/// Deletes link table from the project.
/// </summary>
/// <param name="linkName">Name of link to delete.</param>
/// <remarks>NOTE(review): not implemented — intended to remove the link from the
/// project's XML file; calling it currently has no effect.</remarks>
public void DeleteLinkTable(string linkName)
{
    // ??? Delete the link from the XML file
}
/// <summary>
/// Compares this project against the other and determines if they are same.
/// </summary>
/// <param name="other">Epi.Project to compare; may be null.</param>
/// <returns>True when <paramref name="other"/> is non-null and shares this project's Id.</returns>
public virtual bool Equals(Project other)
{
    // Guard against null to avoid a NullReferenceException; project identity is its Id.
    return (other != null) && (this.Id == other.Id);
}
/// <summary>
/// Retrieves a list of all code tables.
/// </summary>
/// <returns>List of all code tables</returns>
public DataSets.TableSchema.TablesDataTable GetCodeTableList()
{
    // Returns the typed schema table directly from the code-table provider.
    return CodeData.GetCodeTableList();
}
/// <summary>
/// Returns a list of CodeTableNames
/// </summary>
/// <returns>List of code table names.</returns>
public List<String> GetCodeTableNames()
{
    DataTable dt = CodeData.GetCodeTableList();
    List<string> list = new List<string>();
    // The column lookup is loop-invariant: resolve which column carries the
    // table name once, instead of re-checking Columns.Contains for every row.
    string nameColumn = dt.Columns.Contains(ColumnNames.TABLE_NAME)
        ? ColumnNames.TABLE_NAME
        : ColumnNames.NAME;
    foreach (DataRow row in dt.Rows)
    {
        list.Add(row[nameColumn].ToString());
    }
    return list;
}
/// <summary>
/// Load Views
/// </summary>
/// <remarks>
/// Populates the <c>views</c> field from the metadata store. Database-backed
/// metadata sources load directly; XML-backed sources read from the project
/// file's Views node. Throws when the metadata source has not been resolved.
/// </remarks>
public virtual void LoadViews()
{
    if (MetadataSource == MetadataSource.Unknown)
    {
        // Cannot load views without knowing where metadata lives.
        throw new GeneralException(SharedStrings.ERROR_LOADING_METADATA_UNKNOWN_SOURCE);
    }
    if (MetadataSource != MetadataSource.Xml)
    {
        views = Metadata.GetViews();
    }
    else
    {
        // XML-backed project: views are parsed out of /Project/Metadata/Views.
        XmlNode viewsNode = GetViewsNode();
        views = Metadata.GetViews(currentViewElement, viewsNode);
    }
}
#region OBSOLETE
// public void CopyCodeTablesTo(Project destination)
// {
// List<String> codeTableList = GetCodeTableNames();
//// DataTable codeTableList = GetCodeTableList();
//// foreach (DataRow codeTableRow in codeTableList.Rows)
// foreach (string codeTableName in codeTableList)
// {
//// string codeTableName = codeTableRow["TABLE_NAME"].ToString();
// // Raise event indicating the copy has begun.
// if (TableCopyBeginEvent != null)
// {
// TableCopyBeginEvent(this, new MessageEventArgs(codeTableName));
// }
// DataTable columns = CodeData.GetCodeTableColumnSchema(codeTableName);
// string[] columnNames = new string[columns.Rows.Count];
// for (int x = 0; x < columns.Rows.Count; x++)
// {
// columnNames[x] = columns.Rows[x]["COLUMN_NAME"].ToString();
// }
// destination.CreateCodeTable(codeTableName, columnNames);
// DataTable CodeTable = CodeData.GetCodeTableData(codeTableName);
// int rowIndex = 0;
// foreach (DataRow CodeRow in CodeTable.Rows)
// {
// rowIndex++;
// string[] columnData = new string[columnNames.Length];
// for (int x = 0; x < columnNames.Length; x++)
// {
// columnData[x] = CodeRow[columnNames[x]].ToString();
// }
// destination.Metadata.CreateCodeTableRecord(codeTableName, columnNames, columnData);
// RaiseEventTableCopyStatus(codeTableName, rowIndex);
// // RaiseEventImportStatus(codeTableName + " (" + rowIndex + " reocrds copied)");
// }
// if (this.TableCopyEndEvent != null)
// {
// TableCopyEndEvent(this, new MessageEventArgs(codeTableName));
// }
// }
// }
///// <summary>
///// Creates project's relevant databases
///// </summary>
//public void Initialize()
//{
// //this is a hack to ensure that relative file paths are read
// //correctly
// string oldCurrentDirectory = Directory.GetCurrentDirectory();
// string tempCurrentDirectory = this.Location;
// Directory.SetCurrentDirectory(tempCurrentDirectory);
// //if (!this.metadataSource.Equals(MetadataSource.Xml))
// //{
// if (metadata is MetadataDbProvider)
// {
// if (!string.IsNullOrEmpty(this.MetadataDriver))
// {
// ((MetadataDbProvider)metadata).Initialize(this.MetaDbInfo, this.MetadataDriver, true);
// }
// }
// //}
// //else
// //{
// //}
// if (!UseMetadataDbForCollectedData)
// {
// collectedData.Initialize(this.collectedDataDbInfo, this.CollectedDataDriver, true);
// }
// else
// {
// collectedData.Initialize(this.metaDbInfo, this.MetadataDriver, false);
// }
// Directory.SetCurrentDirectory(oldCurrentDirectory);
//}
// public void SetMetadataDbInfo(string ConnectionString, string driver);
#endregion OBSOLETE
#endregion Public Methods
#region Protected properties
/// <summary>
/// Project file extension.
/// </summary>
/// <remarks>Virtual so derived project types can supply their own extension.</remarks>
protected virtual string FileExtension
{
    get
    {
        return Epi.FileExtensions.EPI_PROJ;
    }
}
#endregion Protected properties
#region Protected Methods
#endregion Protected Methods
#region Private properties
// True while the project has not yet been assigned an identity:
// a Guid.Empty id marks a newly created, never-persisted project.
private bool IsNew
{
    get
    {
        return (id.Equals(Guid.Empty));
    }
}
#endregion Private properties
#region Private Methods
#region Deprecated
//private void FillXmlDoc()
//{
// XmlNode root = xmlDoc.DocumentElement;
// if (IsNew) // This is a newly created project.
// {
// ApplicationIdentity appId = new ApplicationIdentity(typeof(Configuration).Assembly);
// // id = Guid.NewGuid();
// id = Util.GetFileGuid(FilePath);
// EpiVersion = appId.Version;
// createDate = System.DateTime.Now;
// // If Metadata Db is used for Collected data, remove the collected data Db node.
// if (UseMetadataDbForCollectedData)
// {
// XmlNode collectedDataNode = root.SelectSingleNode("/Project/CollectedData");
// if (collectedDataNode != null)
// {
// root.RemoveChild(collectedDataNode);
// }
// }
// }
// root.Attributes["id"].Value = Id.ToString();
// root.Attributes["name"].Value = Name;
// root.Attributes["location"].Value = Location;
// root.Attributes["useMetadataDbForCollectedData"].Value = useMetadataDbForCollectedData.ToString();
// //root.Attributes["databaseFormat"].Value = ((short)DbFormatType).ToString();
// root.Attributes["description"].Value = Description ;
// root.Attributes["epiVersion"].Value = EpiVersion ;
// root.Attributes["createDate"].Value = CreateDate.ToString(CultureInfo.InvariantCulture.DateTimeFormat);
// XmlNode metadataDbNode = root.SelectSingleNode("/Project/Metadata/Database");
// Metadata.Db.FillXmlDoc(metadataDbNode);
// if (UseMetadataDbForCollectedData == false)
// {
// XmlNode collectedDataDbNode = root.SelectSingleNode("/Project/CollectedData/Database");
// CollectedData.FillXmlDoc(collectedDataDbNode);
// }
//}
//private void LoadFromXml(XmlNode rootNode)
//{
// //id = new Guid(rootNode.Attributes["id"].Value);
// Name = rootNode.Attributes["name"].Value;
// Location = rootNode.Attributes["location"].Value;
// epiVersion = rootNode.Attributes["epiVersion"].Value;
// string createDateString = rootNode.Attributes["createDate"].Value;
// createDate = DateTime.Parse(createDateString, CultureInfo.InvariantCulture.DateTimeFormat);
// //DbFormatType = (DbFormatType)(short.Parse(rootNode.Attributes["databaseFormat"].Value));
// UseMetadataDbForCollectedData = bool.Parse(rootNode.Attributes["useMetadataDbForCollectedData"].Value);
// Description = rootNode.Attributes["description"].Value;
// XmlNode dbNode = rootNode.SelectSingleNode("/Project/Metadata/Database");
// string dataDriver = dbNode.Attributes["dataDriver"].Value;
// string connString = dbNode.Attributes["connectionString"].Value;
// ConnectionStringInfo connInfo = new ConnectionStringInfo(connString);
// string fileName = connInfo.DataSource;
// // Metadata.Db = DbProvider.GetFileDatabase(fileName);
// Metadata.Db = DbProvider.GetDatabaseInstance(dataDriver);
// Metadata.Db.ConnectionString = connString;
// if (this.UseMetadataDbForCollectedData == false)
// {
// dbNode = rootNode.SelectSingleNode("/Project/CollectedData/Database");
// dataDriver = dbNode.Attributes["dataDriver"].Value;
// connString = dbNode.Attributes["connectionString"].Value;
// // connInfo = new ConnectionStringInfo(connString);
// // fileName = connInfo.DataSource;
// CollectedData = DbProvider.GetDatabaseInstance(dataDriver);
// CollectedData.ConnectionString = connString;
// }
//}
#endregion
/// <summary>
/// Get the metadata node
/// </summary>
/// <returns>Xml node for /Project/Metadata, or null if the project file lacks one.</returns>
public XmlNode GetMetadataNode()
{
    XmlNode metadataNode = xmlDoc.DocumentElement.SelectSingleNode("/Project/Metadata");
    return metadataNode;
}
// Returns the /Project/Metadata/Views node (views moved under Metadata;
// the old /Project/Views path is kept commented for reference).
private XmlNode GetViewsNode()
{
    // return xmlDoc.DocumentElement.SelectSingleNode("/Project/Views");
    return xmlDoc.DocumentElement.SelectSingleNode("/Project/Metadata/Views");
}
// Returns the Fields node of the first View in the project file, or null if absent.
private XmlNode GetFieldsNode()
{
    return xmlDoc.DocumentElement.SelectSingleNode("/Project/Metadata/Views/View/Fields");
}
/// <summary>
/// Validates the XML doc read. Looks for schema differences and rejects if the schema is out of date.
/// </summary>
/// <remarks>
/// A missing schemaVersion attribute is treated the same as an out-of-date schema.
/// NOTE(review): versions NEWER than currentSchemaVersion are accepted silently — confirm intended.
/// </remarks>
private void ValidateXmlDoc()
{
    // Check schema version. If the schema is old, can't read the project.
    if (xmlDoc.DocumentElement.HasAttribute("schemaVersion"))
    {
        // Attribute Value is already a string; the previous .ToString() was redundant.
        int schemaVersion = int.Parse(xmlDoc.DocumentElement.Attributes["schemaVersion"].Value);
        if (schemaVersion < currentSchemaVersion)
        {
            throw new GeneralException(SharedStrings.PROJECT_SCHEMA_OUT_OF_DATE);
        }
    }
    else
    {
        throw new GeneralException(SharedStrings.PROJECT_SCHEMA_OUT_OF_DATE);
    }
}
/// <summary>
/// Gets the Pages Node of the Project file
/// </summary>
/// <returns>The /Project/Views/View/Pages node, or null if absent.</returns>
private XmlNode GetPagesNode()
{
    return xmlDoc.DocumentElement.SelectSingleNode("/Project/Views/View/Pages");
}
/// <summary>
/// Builds an OLE DB connection string whose data source points at this
/// project's companion Access database (same path as the project file,
/// with an .mdb extension).
/// </summary>
/// <param name="pConnectionString">Template OLE DB connection string.</param>
/// <returns>Connection string with the DataSource rewritten to the .mdb path.</returns>
private String SetOleDbDatabaseFilePath(string pConnectionString)
{
    System.Data.OleDb.OleDbConnectionStringBuilder connectionBuilder = new System.Data.OleDb.OleDbConnectionStringBuilder(pConnectionString);
    // Swap only the file extension. The previous String.Replace(".prj", ".mdb")
    // would also mangle any directory name that happened to contain ".prj".
    connectionBuilder.DataSource = System.IO.Path.ChangeExtension(this.FilePath, ".mdb");
    return connectionBuilder.ToString();
}
#endregion Private Methods
}
}
| apache-2.0 |
xtwxy/actor-editor | plugins/com.wincom.actor.editor.flow/src/com/wincom/actor/editor/flow/FlowImages.java | 1111 | /*******************************************************************************
* Copyright (c) 2003, 2010 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package com.wincom.actor.editor.flow;
import java.io.IOException;
import java.io.InputStream;
import org.eclipse.swt.graphics.Image;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author hudsonr
*/
/**
 * Holds shared images for the flow editor plugin, loaded once at class
 * initialization time.
 */
public class FlowImages {

    private static Logger log = LoggerFactory.getLogger(FlowImages.class);

    /** Shared gear icon loaded from the plugin's images/gear.gif resource. */
    public static final Image GEAR;

    static {
        log.info("check");
        InputStream stream = FlowPlugin.class
                .getResourceAsStream("images/gear.gif");
        GEAR = new Image(null, stream);
        try {
            stream.close();
        } catch (IOException ioe) {
            // Previously swallowed silently; at minimum record the failure so a
            // leaked stream/bad resource does not go unnoticed.
            log.warn("Failed to close stream for images/gear.gif", ioe);
        }
    }
}
| apache-2.0 |
xiaonanln/myleetcode-python | src/Minimum Path Sum.py | 1360 | class Solution:
# @param grid, a list of lists of integers
# @return an integer
def minPathSum(self, grid):
if not grid: return 0
if not grid[0]: return 0
ROWS = len(grid)
COLS = len(grid[0])
DP = []
for row in grid:
DP.append( [None] * len(row) )
DP[ROWS-1][COLS-1] = grid[ROWS-1][COLS-1]
def compute(row, col):
v = grid[row][col]
down, right = None, None
if row < ROWS-1:
down = DP[row + 1][col] + v
if col < COLS-1:
right = DP[row][col+1] + v
ms = None
if down is None: ms = right
elif right is None: ms = down
else: ms = min(down, right)
DP[row][col] = ms
for col in xrange(COLS-2, -1, -1):
compute(ROWS-1, col)
for row in xrange(ROWS-2, -1, -1):
for col in xrange(COLS-1, -1, -1):
compute(row, col)
return DP[0][0]
# Ad-hoc smoke tests (Python 2 print statements).
# Expected outputs: 0 (empty grid), 1 (single cell), 4 (3x3 grid below).
print Solution().minPathSum([
])
print Solution().minPathSum([
[1],
])
print Solution().minPathSum([
[1, 2, 3],
[3, 2, 1],
[0, 0, 0],
])
ConsecroMUD/ConsecroMUD | com/suscipio_solutions/consecro_mud/Abilities/Thief/Thief_SneakAttack.java | 2660 | package com.suscipio_solutions.consecro_mud.Abilities.Thief;
import com.suscipio_solutions.consecro_mud.Abilities.interfaces.Ability;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CMMsg;
import com.suscipio_solutions.consecro_mud.Common.interfaces.PhyStats;
import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB;
import com.suscipio_solutions.consecro_mud.core.CMLib;
import com.suscipio_solutions.consecro_mud.core.interfaces.Environmental;
import com.suscipio_solutions.consecro_mud.core.interfaces.Physical;
import com.suscipio_solutions.consecro_mud.core.interfaces.Tickable;
/**
 * Auto-invoked, non-uninvokable thief skill: while the owner is hidden the
 * skill "activates" and boosts damage and attack; the proficiency-improvement
 * hook fires at most once per round of damage dealt.
 */
public class Thief_SneakAttack extends ThiefSkill
{
	@Override public String ID() { return "Thief_SneakAttack"; }
	private final static String localizedName = CMLib.lang().L("Sneak Attack");
	@Override public String name() { return localizedName; }
	@Override public String displayText(){return "";}
	@Override public int abstractQuality(){return Ability.QUALITY_MALICIOUS;}
	@Override public int classificationCode(){return Ability.ACODE_THIEF_SKILL|Ability.DOMAIN_DIRTYFIGHTING;}
	@Override protected int canAffectCode(){return Ability.CAN_MOBS;}
	@Override protected int canTargetCode(){return 0;}
	@Override public boolean isAutoInvoked(){return true;}
	@Override public boolean canBeUninvoked(){return false;}
	// True while the owner is hidden and the stat bonuses are being applied.
	protected boolean activated=false;
	// Latch ensuring helpProficiency fires at most once per tick/round.
	protected boolean oncePerRound=false;
	@Override
	public void affectPhyStats(Physical affected, PhyStats affectableStats)
	{
		super.affectPhyStats(affected,affectableStats);
		if(activated)
		{
			// Bonuses scale with proficiency (0..1) and expertise level:
			// damage += (damage/4 + xlvl) * prof; attack += (50 + 10*xlvl) * prof.
			final double prof=(proficiency())/100.0;
			final double xlvl=super.getXLEVELLevel(invoker());
			affectableStats.setDamage(affectableStats.damage()+(int)Math.round((((affectableStats.damage())/4.0)+xlvl)*prof));
			affectableStats.setAttackAdjustment(affectableStats.attackAdjustment()+(int)Math.round((50.0+(10.0*xlvl))*prof));
		}
	}
	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		if(!super.okMessage(myHost,msg)) return false;
		if((affected==null)||((!(affected instanceof MOB)))) return true;
		// When the hidden owner deals damage, credit a proficiency-improvement
		// chance — but only once per round (reset in tick()).
		if(activated
		&&(!oncePerRound)
		&&msg.amISource((MOB)affected)
		&&(msg.targetMinor()==CMMsg.TYP_DAMAGE))
		{
			oncePerRound=true;
			helpProficiency((MOB)affected, 0);
		}
		return true;
	}
	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		// Track the owner's hidden state; recoverPhyStats() re-runs
		// affectPhyStats so the bonuses appear/disappear with hiding.
		if(CMLib.flags().isHidden(affected))
		{
			if(!activated)
			{
				activated=true;
				affected.recoverPhyStats();
			}
		}
		else
		if(activated)
		{
			activated=false;
			affected.recoverPhyStats();
		}
		if(oncePerRound) oncePerRound=false;
		return super.tick(ticking,tickID);
	}
}
| apache-2.0 |
nextsmsversion/macchina.io | platform/JS/V8/v8-3.28.4/src/arm64/decoder-arm64.cc | 2556 | // Copyright 2013 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "src/v8.h"
#if V8_TARGET_ARCH_ARM64
#include "src/arm64/decoder-arm64.h"
#include "src/globals.h"
#include "src/utils.h"
namespace v8 {
namespace internal {
// Registers new_visitor at the END of the dispatch list (removing any prior
// registration first so a visitor is never notified twice).
// BUG FIX: this previously called push_front(), which contradicts the method
// name and swaps semantics with PrependVisitor; upstream V8 appends here.
void DispatchingDecoderVisitor::AppendVisitor(DecoderVisitor* new_visitor) {
  visitors_.remove(new_visitor);
  visitors_.push_back(new_visitor);
}
// Registers new_visitor at the FRONT of the dispatch list (removing any prior
// registration first so a visitor is never notified twice).
// BUG FIX: this previously called push_back(), which contradicts the method
// name and swaps semantics with AppendVisitor; upstream V8 prepends here.
void DispatchingDecoderVisitor::PrependVisitor(DecoderVisitor* new_visitor) {
  visitors_.remove(new_visitor);
  visitors_.push_front(new_visitor);
}
// Inserts new_visitor immediately before registered_visitor in the dispatch
// list, removing any prior registration of new_visitor first.
// NOTE(review): if registered_visitor is not in the list, the ASSERT below
// dereferences the end() iterator (*it with it == visitors_.end()), which is
// undefined behavior; in release builds the visitor is silently appended.
void DispatchingDecoderVisitor::InsertVisitorBefore(
    DecoderVisitor* new_visitor, DecoderVisitor* registered_visitor) {
  visitors_.remove(new_visitor);
  std::list<DecoderVisitor*>::iterator it;
  for (it = visitors_.begin(); it != visitors_.end(); it++) {
    if (*it == registered_visitor) {
      visitors_.insert(it, new_visitor);
      return;
    }
  }
  // We reached the end of the list. The last element must be
  // registered_visitor.
  ASSERT(*it == registered_visitor);
  visitors_.insert(it, new_visitor);
}
// Inserts new_visitor immediately after registered_visitor in the dispatch
// list, removing any prior registration of new_visitor first.
// NOTE(review): same end()-dereference concern as InsertVisitorBefore — if
// registered_visitor is absent, ASSERT(*it == ...) reads through end(); in
// release builds the visitor is silently appended via push_back.
void DispatchingDecoderVisitor::InsertVisitorAfter(
    DecoderVisitor* new_visitor, DecoderVisitor* registered_visitor) {
  visitors_.remove(new_visitor);
  std::list<DecoderVisitor*>::iterator it;
  for (it = visitors_.begin(); it != visitors_.end(); it++) {
    if (*it == registered_visitor) {
      it++;
      visitors_.insert(it, new_visitor);
      return;
    }
  }
  // We reached the end of the list. The last element must be
  // registered_visitor.
  ASSERT(*it == registered_visitor);
  visitors_.push_back(new_visitor);
}
// Unregisters visitor from the dispatch list; a no-op if it is not registered.
void DispatchingDecoderVisitor::RemoveVisitor(DecoderVisitor* visitor) {
  visitors_.remove(visitor);
}
// Generates one Visit##A dispatcher per instruction class in VISITOR_LIST:
// each checks the instruction matches the class's fixed bit pattern, then
// forwards the instruction to every registered visitor in list order.
// (Comments cannot be placed inside the macro body — the line continuations
// would be broken.)
#define DEFINE_VISITOR_CALLERS(A) \
void DispatchingDecoderVisitor::Visit##A(Instruction* instr) { \
if (!(instr->Mask(A##FMask) == A##Fixed)) { \
ASSERT(instr->Mask(A##FMask) == A##Fixed); \
} \
std::list<DecoderVisitor*>::iterator it; \
for (it = visitors_.begin(); it != visitors_.end(); it++) { \
(*it)->Visit##A(instr); \
} \
}
VISITOR_LIST(DEFINE_VISITOR_CALLERS)
#undef DEFINE_VISITOR_CALLERS
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_ARM64
| apache-2.0 |
lunisolar/magma | magma-asserts/src/test/java/eu/lunisolar/magma/asserts/func/supplier/LCharSupplierAssertTest.java | 4366 | /*
* This file is part of "lunisolar-magma".
*
* (C) Copyright 2014-2019 Lunisolar (http://lunisolar.eu/).
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.lunisolar.magma.asserts.func.supplier;
import eu.lunisolar.magma.func.*; // NOSONAR
import javax.annotation.Nonnull; // NOSONAR
import javax.annotation.Nullable; // NOSONAR
import java.util.Objects;// NOSONAR
import eu.lunisolar.magma.basics.meta.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.type.*; // NOSONAR
import eu.lunisolar.magma.basics.meta.functional.domain.*; // NOSONAR
import eu.lunisolar.magma.asserts.DefaultAttests;
import eu.lunisolar.magma.func.supplier.LCharSupplier;
import org.assertj.core.api.Assertions; //NOSONAR
import org.assertj.core.api.ObjectAssert;//NOSONAR
import org.testng.annotations.*; //NOSONAR
import java.util.regex.Pattern; //NOSONAR
import java.text.ParseException; //NOSONAR
import eu.lunisolar.magma.basics.exceptions.*; //NOSONAR
import java.util.concurrent.atomic.*; //NOSONAR
import static org.assertj.core.api.Assertions.*; //NOSONAR
import java.util.function.*; //NOSONAR
@SuppressWarnings("ALL")
/**
 * Tests the generated assertion DSL for {@link LCharSupplier}: positive and
 * negative value assertions, expected/unexpected exception handling, and the
 * inAllFollowingCases recurring-assertion hook.
 */
public class LCharSupplierAssertTest {

    // Value returned by the well-behaved supplier under test.
    private char testValue = '\u0100';

    @SuppressWarnings("unchecked") public static final DefaultAttests<ObjectAssert> A = new DefaultAttests() {
    };

    // Supplier that always succeeds, returning testValue.
    private LCharSupplier function = () ->
        testValue;

    // Supplier that always throws, for the exception-path tests.
    private LCharSupplier functionThrowing = () -> {
        throw new UnsupportedOperationException();
    };

    @Test
    public void testAssertPositive() throws ParseException {
        A.attestCharSup(function)
            .doesGetAsChar()
            .to(a -> a.isEqualTo(testValue));
    }

    // Wrong expected value must surface as an AssertionError.
    @Test(expectedExceptions = AssertionError.class)
    public void testAssertNegative() throws ParseException {
        A.attestCharSup(function)
            .doesGetAsChar()
            .to( a -> a.isEqualTo(2));
    }

    // A throwing supplier where a value was expected must fail with the
    // "should evaluate without problem" message.
    @Test(expectedExceptions = AssertionError.class, expectedExceptionsMessageRegExp = "Case .* should evaluate without problem.")
    public void testAssertThrowsUnexpected() throws ParseException {
        A.attestCharSup(functionThrowing)
            .doesGetAsChar()
            .to( a -> a.isEqualTo(1));
    }

    // Declaring the expected exception via withException must pass.
    @Test
    public void testAssertThrowsExpected() throws ParseException {
        A.attestCharSup(functionThrowing)
            .doesGetAsChar().withException(a -> a
                .isExactlyInstanceOf(UnsupportedOperationException.class)
                .hasMessage(null));
    }

    // The recurring assertion must run once per doesGetAsChar() case.
    @Test
    public void testRecurringAssertsPositive() throws ParseException {
        final AtomicInteger recurringAssertsCalls = new AtomicInteger(0);
        A.attestCharSup(function)
            .inAllFollowingCases(a-> {
                recurringAssertsCalls.incrementAndGet();
                a.isEqualTo(testValue);
            })
            .doesGetAsChar()
            .to(a -> a.isEqualTo(testValue))
            .doesGetAsChar()
            .to(a -> a.isEqualTo(testValue));
        assertThat(recurringAssertsCalls.get()).isEqualTo(2);
    }

    // A failing recurring assertion (on the second case) must be reported
    // with the "Recurring assertion failed" message.
    @Test(expectedExceptions = AssertionError.class, expectedExceptionsMessageRegExp = "(?s).*Recurring assertion failed.*")
    public void testRecurringAssertsNegative() throws ParseException {
        final AtomicInteger recurringAssertsCalls = new AtomicInteger(0);
        A.attestCharSup(function)
            .inAllFollowingCases(a-> {
                int i = recurringAssertsCalls.incrementAndGet();
                if (i>1) {
                    a.isEqualTo(0);
                }
            })
            .doesGetAsChar()
            .to(a -> a.isEqualTo(testValue))
            .doesGetAsChar()
            .to(a -> a.isEqualTo(testValue));
        assertThat(recurringAssertsCalls.get()).isEqualTo(2);
    }
}
| apache-2.0 |
fungku/netsuite-php | src/Classes/SupportCaseStatusStage.php | 927 | <?php
/**
* This file is part of the SevenShores/NetSuite library
* AND originally from the NetSuite PHP Toolkit.
*
* New content:
* @package ryanwinchester/netsuite-php
* @copyright Copyright (c) Ryan Winchester
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache-2.0
* @link https://github.com/ryanwinchester/netsuite-php
*
* Original content:
* @copyright Copyright (c) NetSuite Inc.
* @license https://raw.githubusercontent.com/ryanwinchester/netsuite-php/master/original/NetSuite%20Application%20Developer%20License%20Agreement.txt
* @link http://www.netsuite.com/portal/developers/resources/suitetalk-sample-applications.shtml
*
* generated: 2019-06-12 10:27:00 AM PDT
*/
namespace NetSuite\Classes;
/**
 * Enum-like class listing the stage values of a NetSuite support-case status.
 * Generated code — regenerate rather than editing by hand.
 */
class SupportCaseStatusStage {
    // No complex SOAP parameter types for this enumeration.
    static $paramtypesmap = array(
    );
    const _closed = "_closed";
    const _escalated = "_escalated";
    const _open = "_open";
}
| apache-2.0 |
fiskinator/WAB2.0_JBox_MutualAid | widgets/Search/nls/zh-cn/strings.js | 219 | define(
({
_widgetLabel: "搜索",
searchResult: "搜索结果",
showAllResults: "显示搜索结果 ",
showAll: "显示搜索结果",
more: "更多",
untitled: "无标题"
})
); | apache-2.0 |
amitdhiman000/MyOffers | backup/CRUDModel.py | 1951 | from django.db import models
import logging
class CRUDQueryset(models.query.QuerySet):
    """QuerySet that can eagerly materialize each item's URL."""

    def withurl(self):
        """Replace each item's bound ``url()`` method with its computed string
        so templates can read ``item.url`` as a plain attribute.

        Returns the queryset itself for chaining. (Leftover debug ``print``
        statements from the original implementation have been removed.)
        """
        for item in self:
            # Shadow the bound url() method with its computed value.
            item.url = item.url()
        return self
class CRUDManager(models.Manager):
    # Manager whose querysets are CRUDQueryset instances.
    def get_queryset(self):
        return CRUDQueryset(self.model, using=self._db)

    def withurl(self):
        # Convenience proxy: CRUDModel.objects.withurl() without an explicit filter.
        return self.get_queryset().withurl()
class CRUDModel(models.Model):
    """Abstract base model adding generic create/update/remove/fetch helpers
    and URL construction derived from module and class names.

    All helpers log and swallow exceptions, returning None/False on failure.
    """
    id = models.BigAutoField(primary_key=True)
    objects = CRUDManager()

    class Meta:
        abstract = True

    def url(self):
        """Build /<module>/<class>/<id>/ (or /<class>/<id>/ when the module
        and class names coincide, e.g. locus.models.Locus)."""
        ## class name to lower
        class_name = self.__class__.__name__.lower()
        ## remove ".models" from module name
        module_name = self.__class__.__module__[:-7]
        if module_name == class_name:
            return "/{0}/{1}/".format(class_name, self.id)
        else:
            return "/{0}/{1}/{2}/".format(module_name, class_name, self.id)

    @classmethod
    def create(klass, values):
        """Get or create an instance from ``values``; None on error."""
        try:
            obj = klass.objects.get_or_create(**values)[0]
            return obj
        except Exception as ex:
            logging.error(ex)
            return None

    @classmethod
    def update(klass, values):
        """Update the row whose id is ``values['id']``; None on error."""
        try:
            id_ = values['id']
            obj = klass.objects.filter(id=id_).update(**values)
            return obj
        except Exception as ex:
            logging.error(ex)
            return None

    @classmethod
    def remove(klass, values):
        """Delete rows matching ``values``; True unless an error occurred."""
        try:
            obj = klass.objects.filter(**values)
            if obj.exists():
                obj.delete()
            return True
        except Exception as ex:
            logging.error(ex)
            return False

    @classmethod
    def fetch(klass, filters, start=0, count=0):
        """Return a queryset filtered by ``filters``. When count > 0, return
        the window [start, start + count); otherwise all matches. None on error.
        """
        try:
            if count > 0:
                # BUG FIX: the original slice [:start:(start+count)] used
                # `start` as the stop index and the window size as the STEP,
                # returning the wrong rows. The intended window is below.
                return klass.objects.filter(**filters)[start:start + count]
            else:
                return klass.objects.filter(**filters)
        except Exception as ex:
            logging.error(ex)
            return None
| apache-2.0 |
socrata-platform/spandex | spandex-integration-tests/src/test/scala/com/socrata/spandex/secondary/CopyDropHandlerSpec.scala | 2085 | package com.socrata.spandex.secondary
import org.scalatest.prop.PropertyChecks
import org.scalatest.{FunSuiteLike, Matchers}
import com.socrata.datacoordinator.secondary.LifecycleStage
import com.socrata.spandex.common.SpandexIntegrationTest
import com.socrata.spandex.common.client._
// Integration spec for CopyDropHandler: verifies that dropping unpublished
// copies removes every Snapshotted/Unpublished/Discarded copy while leaving
// the latest Published copy intact.
class CopyDropHandlerSpec extends FunSuiteLike
  with Matchers
  with PropertyChecks
  with SpandexIntegrationTest {

  // Fixture: four copies per dataset spanning the lifecycle stages, sorted by
  // copy number (snapshot, published, working copy, re-published).
  override def copies(dataset: String): Seq[DatasetCopy] = {
    val snapshot = DatasetCopy(dataset, 1, 5, LifecycleStage.Snapshotted) // scalastyle:ignore magic.number
    val published = DatasetCopy(dataset, 2, 10, LifecycleStage.Published) // scalastyle:ignore magic.number
    val workingCopy = DatasetCopy(dataset, 3, 15, LifecycleStage.Unpublished) // scalastyle:ignore magic.number
    val published2 = DatasetCopy(dataset, 4, 20, LifecycleStage.Published) // scalastyle:ignore magic.number
    Seq(snapshot, published, workingCopy, published2).sortBy(_.copyNumber)
  }

  // Make batches teensy weensy to expose any batching issues
  val handler = new VersionEventsHandler(client, 2)

  test("drop unpublished copies") {
    val datasetName = datasets(0)
    val copiesBefore = List(Snapshotted, Unpublished, Discarded).flatMap { stage =>
      client.datasetCopiesByStage(datasetName, stage)
    }
    copiesBefore should not be empty
    // set last published copy to snapshotted, simulating behavior of PublishHandler
    val lastPublished = client.datasetCopy(datasetName, 2).map(_.copy(stage=LifecycleStage.Snapshotted)).get
    client.updateDatasetCopyVersion(lastPublished, refresh = Immediately)
    client.refresh()
    new CopyDropHandler(client).dropUnpublishedCopies(datasetName)
    client.refresh()
    // After the drop, no non-published copies may remain...
    val copiesAfter = List(Snapshotted, Unpublished, Discarded).flatMap { stage =>
      client.datasetCopiesByStage(datasetName, stage)
    }
    copiesAfter shouldBe empty
    // ...and the latest published copy (number 4) must be untouched.
    val publishedCopy = client.datasetCopyLatest(datasetName, Some(Published))
    publishedCopy should be(Some(DatasetCopy(datasetName, 4, 20, LifecycleStage.Published)))
  }
}
| apache-2.0 |
degauhta/dgagarsky | chapter_009/lesson_3/src/test/java/ru/dega/package-info.java | 88 | /**
* package-info class.
*
* @author Denis
* @since 09.08.2017
*/
package ru.dega; | apache-2.0 |
andresvie/rootlang | parser/parser_test.go | 22560 | package parser
import (
"testing"
"rootlang/lexer"
"rootlang/ast"
"strconv"
)
// TestLetStatementLiteralValue parses three let statements with integer
// literal values and checks each against an expected LetStatement built by
// the createLiteralLetStatement test helper.
func TestLetStatementLiteralValue(t *testing.T) {
	input := `
	let x = 5;
	let y = 10;
	let foobar = 838383;
	`
	l := lexer.New(input)
	p := New(l)
	statementsExpected := []ast.LetStatement{createLiteralLetStatement("x", "5"), createLiteralLetStatement("y", "10"), createLiteralLetStatement("foobar", "838383")}
	program := p.ParseProgram()
	if len(program.Statements) != 3 {
		t.Fatal("program should has 3 statements")
		return
	}
	for i := 0; i < len(program.Statements); i++ {
		// Each statement must be a *ast.LetStatement matching the expectation.
		var let *ast.LetStatement = program.Statements[i].(*ast.LetStatement)
		if !assertLiteralLetStatement(let, &statementsExpected[i]) {
			t.Errorf("literatel let statement %s should be equal to %s", statementsExpected[i].TokenLiteral(), program.Statements[i].TokenLiteral())
		}
	}
}
// TestStringExpression parses a bare string literal and verifies the program
// contains a single ExpressionStatement wrapping a StringExpression whose
// value has the quotes stripped.
func TestStringExpression(t *testing.T) {
	input := `"carlos viera"`
	l := lexer.New(input)
	p := New(l)
	program := p.ParseProgram()
	if len(program.Statements) != 1 {
		t.Error("should have 1 statement")
		// Dump collected parser errors to aid debugging before bailing out.
		showParserErrors(p, t)
		showPrefixParserError(p, t)
		return
	}
	expression, ok := program.Statements[0].(*ast.ExpressionStatement)
	if !ok {
		t.Error("Expression Statements is expected")
		return
	}
	stringExpression, okStringExpression := expression.Exp.(*ast.StringExpression)
	if !okStringExpression {
		t.Error("String Expression is expected")
		return
	}
	if stringExpression.Value != "carlos viera" {
		t.Error(`"carlos viera" is expected`)
		return
	}
}
func TestIntegerExpression(t *testing.T) {
input := `5`
l := lexer.New(input)
p := New(l)
program := p.ParseProgram()
if len(program.Statements) != 1 {
t.Error("should statements 1")
return
}
expression, ok := program.Statements[0].(*ast.ExpressionStatement)
if !ok {
t.Error("Expression Statements is expected")
return
}
integerExpression, okIntegerExpression := expression.Exp.(*ast.IntegerLiteral)
if !okIntegerExpression {
t.Error("Integer expression is expected")
return
}
if integerExpression.Value != 5 {
t.Error("5 values is expected")
return
}
}
func TestBooleanExpression(t *testing.T) {
input := `false`
l := lexer.New(input)
p := New(l)
program := p.ParseProgram()
if len(program.Statements) != 1 {
t.Error("should statements 1")
return
}
expression, ok := program.Statements[0].(*ast.ExpressionStatement)
if !ok {
t.Error("Expression Statements is expected")
return
}
booleanExpression, okBooleanExpression := expression.Exp.(*ast.BoolExpression)
if !okBooleanExpression {
t.Error("Boolean expression is expected")
return
}
if booleanExpression.Value != "false" {
t.Error("false values is expected")
return
}
}
// TestGroupedExpression verifies that parenthesised sub-expressions override
// the default operator precedence, by comparing the canonical String() form
// of each parsed statement.
func TestGroupedExpression(t *testing.T) {
    input := `
let x = (a + b) * c;
let y = a + b + (a * b);
`
    expectedStatements := []string{
        "let x = ((a + b) * c);", "let y = ((a + b) + (a * b));"}
    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    if len(program.Statements) != len(expectedStatements) {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("number of statement expected is %d and got %d", len(expectedStatements), len(program.Statements))
        return
    }
    for index, statement := range program.Statements {
        if expectedStatements[index] != statement.String() {
            showParserErrors(p, t)
            showPrefixParserError(p, t)
            t.Errorf("statement expected is %s and got %s", expectedStatements[index], statement.String())
        }
    }
}

// TestBoolExpression exercises boolean literals in let, return and infix
// positions, comparing the canonical String() form of every statement.
func TestBoolExpression(t *testing.T) {
    input := `
let x = false;
let y = true;
return false == true;
return false;
return true;
`
    expectedStatements := []string{
        "let x = false;", "let y = true;", "return (false == true);", "return false;", "return true;"}
    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    if len(program.Statements) != len(expectedStatements) {
        t.Errorf("number of statement expected is %d and got %d", len(expectedStatements), len(program.Statements))
        return
    }
    for index, statement := range program.Statements {
        if expectedStatements[index] != statement.String() {
            showParserErrors(p, t)
            showPrefixParserError(p, t)
            t.Errorf("statement expected is %s and got %s", expectedStatements[index], statement.String())
        }
    }
}

// TestInfixExpression exercises arithmetic, comparison and namespace (::)
// infix operators and checks precedence via the parenthesised String() form.
func TestInfixExpression(t *testing.T) {
    input := `
let x = a + b;
let y = a * b + c;
let foobar = a + c * b;
return a + b / c;
return -a + b - c;
return a > b;
return a < b;
return a == b;
return a != b;
return net::listen();
`
    expectedStatements := []string{"let x = (a + b);", "let y = ((a * b) + c);", "let foobar = (a + (c * b));",
        "return (a + (b / c));",
        "return ((-(a) + b) - c);", "return (a > b);", "return (a < b);",
        "return (a == b);", "return (a != b);",
        "return (net :: listen());"}
    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    if len(program.Statements) != len(expectedStatements) {
        t.Errorf("number of statement expected is %d and got %d", len(expectedStatements), len(program.Statements))
        return
    }
    for index, statement := range program.Statements {
        if expectedStatements[index] != statement.String() {
            showParserErrors(p, t)
            showPrefixParserError(p, t)
            t.Errorf("statement expected is %s and got %s", expectedStatements[index], statement.String())
        }
    }
}
// TestIfExpression parses an if-without-else and checks that no alternative
// (else) block is attached and that the String() form round-trips.
func TestIfExpression(t *testing.T) {
    input := `
if (x > y)
{
return y;
}
`
    expectedStatements := []string{"if((x > y)){return y;}"}
    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    if len(program.Statements) != len(expectedStatements) {
        t.Errorf("number of statement expected is %d and got %d", len(expectedStatements), len(program.Statements))
        return
    }
    expressionStatement, ok := program.Statements[0].(*ast.ExpressionStatement)
    if !ok {
        t.Error("Expression Statement is expected")
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        return
    }
    ifExpression, ok := expressionStatement.Exp.(*ast.IfExpression)
    if !ok {
        t.Error("if Expression is expected")
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        return
    }
    // Without an else clause the alternative block must be nil.
    if (ifExpression.AlternativeBlock != nil) {
        t.Error("else expression is not expected")
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        return
    }
    if (ifExpression.String() != expectedStatements[0]) {
        t.Errorf("if expression expected %s and got %s", expectedStatements[0], ifExpression.String())
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        return
    }
}

// TestIfExpressionWithElse parses an if/else and checks that the alternative
// (else) block is present and the String() form round-trips.
func TestIfExpressionWithElse(t *testing.T) {
    input := `
if (x > y)
{
return y;
}else
{
return x;
}
`
    expectedStatements := []string{"if((x > y)){return y;}else{return x;}"}
    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    if len(program.Statements) != len(expectedStatements) {
        t.Errorf("number of statement expected is %d and got %d", len(expectedStatements), len(program.Statements))
        return
    }
    expressionStatement, ok := program.Statements[0].(*ast.ExpressionStatement)
    if !ok {
        t.Error("Expression Statement is expected")
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        return
    }
    ifExpression, ok := expressionStatement.Exp.(*ast.IfExpression)
    if !ok {
        t.Error("if Expression is expected")
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        return
    }
    if (ifExpression.AlternativeBlock == nil) {
        t.Error("else expression is expected")
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        return
    }
    if (ifExpression.String() != expectedStatements[0]) {
        t.Errorf("if expression expected %s and got %s", expectedStatements[0], ifExpression.String())
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        return
    }
}
// TestCallFunctionExpressionWithoutArguments parses a call with an empty
// argument list and checks both the argument count and the String() form.
func TestCallFunctionExpressionWithoutArguments(t *testing.T) {
    input := `add();`
    expectedStatements := []string{"add()"}
    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    if len(program.Statements) != len(expectedStatements) {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("number of statement expected is %d and got %d", len(expectedStatements), len(program.Statements))
        return
    }
    expressionStatement, okExpression := program.Statements[0].(*ast.ExpressionStatement)
    if !okExpression {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("expression statement is expected")
        return
    }
    callFunctionExpression, okCallFunctionExpression := expressionStatement.Exp.(*ast.CallFunctionExpression)
    if !okCallFunctionExpression {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("call function expression is expected")
        return
    }
    args := callFunctionExpression.Arguments
    if len(args) != 0 {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("number of argument expected is 0 and got %d", len(args))
        return
    }
    if callFunctionExpression.String() != expectedStatements[0] {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("call function expression should %s and got %s", expectedStatements[0], callFunctionExpression.String())
        return
    }
}

// TestCallFunctionExpression parses a call with two arguments — one of them a
// nested call — and verifies argument count and the String() form. This test
// also asserts the parser reported no errors at all.
func TestCallFunctionExpression(t *testing.T) {
    input := `add(2, x(2,3))`
    expectedStatements := []string{"add(2,x(2,3))"}
    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    if len(p.GetErrors()) != 0 {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Error("not error was expected\n")
        return
    }
    if len(program.Statements) != len(expectedStatements) {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("number of statement expected is %d and got %d", len(expectedStatements), len(program.Statements))
        return
    }
    expressionStatement, okExpression := program.Statements[0].(*ast.ExpressionStatement)
    if !okExpression {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("expression statement is expected")
        return
    }
    callFunctionExpression, okCallFunctionExpression := expressionStatement.Exp.(*ast.CallFunctionExpression)
    if !okCallFunctionExpression {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("call function expression is expected")
        return
    }
    args := callFunctionExpression.Arguments
    if len(args) != 2 {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("number of argument expected is 2 and got %d", len(args))
        return
    }
    if callFunctionExpression.String() != expectedStatements[0] {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("call function expression should %s and got %s", expectedStatements[0], callFunctionExpression.String())
        return
    }
}
// TestFunctionExpressionWithoutParams parses a zero-parameter lambda with a
// block body and checks its canonical String() form.
func TestFunctionExpressionWithoutParams(t *testing.T) {
    input := ` () =>
{
return 5;
}
`
    expectedStatements := []string{"()=>{return 5;}"}
    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    if len(program.Statements) != len(expectedStatements) {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("number of statement expected is %d and got %d", len(expectedStatements), len(program.Statements))
        return
    }
    expressionStatement, okExpression := program.Statements[0].(*ast.ExpressionStatement)
    if !okExpression {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("expression statement is expected")
        return
    }
    functionExpression, okFunctionExpression := expressionStatement.Exp.(*ast.FunctionExpression)
    if !okFunctionExpression {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("function expression is expected")
        return
    }
    if functionExpression.String() != expectedStatements[0] {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("function expression should %s and got %s", expectedStatements[0], functionExpression.String())
        return
    }
}

// TestFunctionLambdaShorcut parses the expression-body shorthand `() => 5;`
// and checks it is desugared to a block with an implicit return.
func TestFunctionLambdaShorcut(t *testing.T) {
    input := ` () => 5;
`
    expectedStatements := []string{"()=>{return 5;}"}
    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    if len(program.Statements) != len(expectedStatements) {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("number of statement expected is %d and got %d", len(expectedStatements), len(program.Statements))
        return
    }
    expressionStatement, okExpression := program.Statements[0].(*ast.ExpressionStatement)
    if !okExpression {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("expression statement is expected")
        return
    }
    functionExpression, okFunctionExpression := expressionStatement.Exp.(*ast.FunctionExpression)
    if !okFunctionExpression {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("function expression is expected")
        return
    }
    if functionExpression.String() != expectedStatements[0] {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("function expression should %s and got %s", expectedStatements[0], functionExpression.String())
        return
    }
}

// TestFunctionExpression parses a two-parameter lambda with a block body and
// checks its canonical String() form.
func TestFunctionExpression(t *testing.T) {
    input := ` (x,y) =>
{
return x;
}
`
    expectedStatements := []string{"(x,y)=>{return x;}"}
    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    if len(program.Statements) != len(expectedStatements) {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("number of statement expected is %d and got %d", len(expectedStatements), len(program.Statements))
        return
    }
    expressionStatement, okExpression := program.Statements[0].(*ast.ExpressionStatement)
    if !okExpression {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("expression statement is expected")
        return
    }
    functionExpression, okFunctionExpression := expressionStatement.Exp.(*ast.FunctionExpression)
    if !okFunctionExpression {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("function expression is expected")
        return
    }
    if (functionExpression.String() != expectedStatements[0]) {
        showParserErrors(p, t)
        showPrefixParserError(p, t)
        t.Errorf("function expression should %s and got %s", expectedStatements[0], functionExpression.String())
        return
    }
}
// TestPrefixExpression exercises the prefix operators `-` and `!` in let and
// return positions, comparing the canonical String() form of each statement.
func TestPrefixExpression(t *testing.T) {
    input := `
let x = -5;
let y = !a;
return !a;
return -4;
return -w;
`
    expectedStatements := []string{"let x = -(5);", "let y = !(a);", "return !(a);", "return -(4);", "return -(w);"}
    l := lexer.New(input)
    p := New(l)
    program := p.ParseProgram()
    if len(program.Statements) != len(expectedStatements) {
        t.Errorf("number of statement expected is %d and got %d", len(expectedStatements), len(program.Statements))
        return
    }
    for index, statement := range program.Statements {
        if expectedStatements[index] != statement.String() {
            showParserErrors(p, t)
            showPrefixParserError(p, t)
            t.Errorf("statement expected is %s and got %s", expectedStatements[index], statement.String())
        }
    }
}

// showParserErrors logs every accumulated parser error to the test log.
func showParserErrors(p *Parser, t *testing.T) {
    for _, errorText := range p.errors {
        t.Logf("%s\n", errorText)
    }
}

// showPrefixParserError logs every accumulated prefix-parser error to the
// test log.
func showPrefixParserError(p *Parser, t *testing.T) {
    for _, errorText := range p.prefixErrors {
        t.Logf("%s\n", errorText)
    }
}
// TestBlockStatement parses a brace-delimited block containing an expression,
// a let and a return, and checks each inner statement's concrete type as well
// as the block's String() form.
func TestBlockStatement(t *testing.T) {
    input := `{
5 + x;
let x = y != true;
return x;
}
`
    l := lexer.New(input)
    p := New(l)
    statementsExpected := []string{"{(5 + x);let x = (y != true);return x;}"}
    program := p.ParseProgram()
    if len(program.Statements) != len(statementsExpected) {
        t.Error("program should has 1 statements")
        showPrefixParserError(p, t)
        showParserErrors(p, t)
        return
    }
    statement, ok := program.Statements[0].(*ast.BlockStatement)
    if !ok {
        t.Error("Block Statement is Expected")
        showPrefixParserError(p, t)
        showParserErrors(p, t)
        return
    }
    if len(statement.Statements) != 3 {
        t.Error("Three statement is expected in this block statement")
        showPrefixParserError(p, t)
        showParserErrors(p, t)
        return
    }
    // The three inner statements must be, in order: expression, let, return.
    _, okExpression := statement.Statements[0].(*ast.ExpressionStatement)
    if !okExpression {
        t.Error("waiting at index 0 Expression Statement")
        showPrefixParserError(p, t)
        showParserErrors(p, t)
        return
    }
    _, okLet := statement.Statements[1].(*ast.LetStatement)
    if !okLet {
        t.Error("waiting at index 1 Let Statement")
        showPrefixParserError(p, t)
        showParserErrors(p, t)
        return
    }
    _, okRet := statement.Statements[2].(*ast.ReturnStatement)
    if !okRet {
        t.Error("waiting at index 2 Ret Statement")
        showPrefixParserError(p, t)
        showParserErrors(p, t)
        return
    }
    if statementsExpected[0] != statement.String() {
        t.Errorf("Expression expected is %s and got %s", statementsExpected[0], statement.String())
        showPrefixParserError(p, t)
        showParserErrors(p, t)
        return
    }
}

// TestExpressionStatement parses two bare expressions (the second without a
// trailing semicolon) and checks their canonical String() forms.
func TestExpressionStatement(t *testing.T) {
    input := `
5 + x;
x != true
`
    l := lexer.New(input)
    p := New(l)
    statementsExpected := []string{"(5 + x);", "(x != true);"}
    program := p.ParseProgram()
    if len(program.Statements) != len(statementsExpected) {
        t.Error("program should has 2 statements")
        showPrefixParserError(p, t)
        showParserErrors(p, t)
        return
    }
    for i := 0; i < len(program.Statements); i++ {
        statement, ok := program.Statements[i].(*ast.ExpressionStatement)
        if !ok {
            t.Error("Expression Statement is Expected")
            showPrefixParserError(p, t)
            showParserErrors(p, t)
            return
        }
        if statementsExpected[i] != statement.String() {
            t.Errorf("Expression expected is %s and got %s", statementsExpected[i], statement.String())
            showPrefixParserError(p, t)
            showParserErrors(p, t)
            return
        }
    }
}
// TestImportStatement parses import statements and checks both the resolved
// path and the bound name (the last path segment, unless overridden with
// `as <name>`).
//
// Fixes: both t.Errorf calls passed the actual value where the message said
// "expected" and vice versa — the arguments are now in the advertised order.
// The unreachable `return`s after t.Fatal were removed and the count message
// grammar was corrected.
func TestImportStatement(t *testing.T) {
    tests := []struct {
        input           string
        importStatement *ast.ImportStatement
    }{
        {`import "net"`, &ast.ImportStatement{Path: "net", Token: createToken(lexer.IMPORT, "import"), Name: &ast.Identifier{Value: "net"}}},
        {`import "tmp/carlos" as test`, &ast.ImportStatement{Path: "tmp/carlos", Token: createToken(lexer.IMPORT, "import"), Name: &ast.Identifier{Value: "test"}}},
        {`import "multiprocessing/threads/green"`, &ast.ImportStatement{Path: "multiprocessing/threads/green", Token: createToken(lexer.IMPORT, "import"), Name: &ast.Identifier{Value: "green"}}},
    }
    for _, test := range tests {
        l := lexer.New(test.input)
        p := New(l)
        program := p.ParseProgram()
        if len(program.Statements) != 1 {
            t.Fatal("program should have 1 statement")
        }
        importStatement, ok := program.Statements[0].(*ast.ImportStatement)
        if !ok {
            t.Fatal("expected import statement")
        }
        if importStatement.Path != test.importStatement.Path {
            t.Errorf("import expected path %s and got %s", test.importStatement.Path, importStatement.Path)
            return
        }
        if importStatement.Name.Value != test.importStatement.Name.Value {
            t.Errorf("import expected name %s and got %s", test.importStatement.Name.Value, importStatement.Name.Value)
            return
        }
    }
}
// TestReturnStatementLiteralValue parses three return statements (integer
// literal, identifier, integer literal) and compares each against a
// hand-built expected AST node.
func TestReturnStatementLiteralValue(t *testing.T) {
    input := `
return 5;
return x;
return 838383;
`
    l := lexer.New(input)
    p := New(l)
    statementsExpected := []ast.ReturnStatement{createReturnValue("5"), createReturnIdentifier("x"), createReturnValue("838383")}
    program := p.ParseProgram()
    if len(program.Statements) != 3 {
        t.Fatal("program should has 3 statements")
        return
    }
    for i := 0; i < len(program.Statements); i++ {
        // NOTE(review): unchecked type assertion — a non-ReturnStatement here
        // panics the test binary instead of failing the test.
        var ret *ast.ReturnStatement = program.Statements[i].(*ast.ReturnStatement)
        if !assertReturnStatement(ret, &statementsExpected[i]) {
            t.Errorf("return literal let statement %s should be equal to %s", statementsExpected[i].TokenLiteral(), program.Statements[i].TokenLiteral())
        }
    }
}
// assertReturnStatement reports whether the parsed return statement matches
// the expected one, comparing both the return token and the returned value.
func assertReturnStatement(ret, retExpected *ast.ReturnStatement) bool {
    isEqual := isTokenEqual(ret.Token, retExpected.Token)
    return isEqual && assertExpression(ret.Value, retExpected.Value)
}

// assertExpression compares two expressions for equality. Only integer
// literals and identifiers are supported; any other expression kind, or a
// kind mismatch between the two, yields false.
func assertExpression(exp, expExpected ast.Expression) bool {
    if expectedIntExpression(exp, expExpected) {
        ex1 := exp.(*ast.IntegerLiteral)
        ex2 := expExpected.(*ast.IntegerLiteral)
        return ex1.Value == ex2.Value && isTokenEqual(ex1.Token, ex2.Token)
    }
    if expectedIdentifierExpression(exp, expExpected) {
        ex1 := exp.(*ast.Identifier)
        ex2 := expExpected.(*ast.Identifier)
        return ex1.Value == ex2.Value && isTokenEqual(ex1.Token, ex2.Token)
    }
    return false
}

// expectedIntExpression reports whether both expressions are integer literals.
func expectedIntExpression(exp, expExpected ast.Expression) bool {
    return isIntExpression(exp) && isIntExpression(expExpected)
}

// expectedIdentifierExpression reports whether both expressions are identifiers.
func expectedIdentifierExpression(exp, expExpected ast.Expression) bool {
    return isIdentifierExpression(exp) && isIdentifierExpression(expExpected)
}

// isIntExpression reports whether exp is an *ast.IntegerLiteral.
func isIntExpression(exp ast.Expression) bool {
    _, ok := exp.(*ast.IntegerLiteral)
    return ok
}

// isIdentifierExpression reports whether exp is an *ast.Identifier.
func isIdentifierExpression(exp ast.Expression) bool {
    _, ok := exp.(*ast.Identifier)
    return ok
}
// createReturnIdentifier builds the expected AST for `return <name>;`.
func createReturnIdentifier(name string) ast.ReturnStatement {
    id := &ast.Identifier{Token: createToken(lexer.IDENT, name), Value: name}
    return ast.ReturnStatement{Token: createToken(lexer.RETURN, "return"), Value: id}
}

// createReturnValue builds the expected AST for `return <value>;` where value
// is a decimal integer literal.
// NOTE(review): the ParseInt error is discarded — a non-numeric value
// silently becomes 0.
func createReturnValue(value string) ast.ReturnStatement {
    val, _ := strconv.ParseInt(value, 10, 0)
    id := &ast.IntegerLiteral{Token: createToken(lexer.INT, value), Value: val}
    return ast.ReturnStatement{Token: createToken(lexer.RETURN, "return"), Value: id}
}

// assertLiteralLetStatement reports whether a parsed let statement matches the
// expected one: the let token, the identifier token and value, and the
// integer literal value must all agree.
func assertLiteralLetStatement(l, lExpected *ast.LetStatement) bool {
    isEqual := isTokenEqual(l.Token, lExpected.Token)
    isEqual = isEqual && isTokenEqual(l.Name.Token, lExpected.Name.Token)
    isEqual = isEqual && l.Name.Value == lExpected.Name.Value
    isEqual = isEqual && assertIntegerExpression(l.Value, lExpected.Value)
    return isEqual
}

// assertIntegerExpression compares two integer-literal expressions by token
// and value.
// NOTE(review): both type assertions are unchecked and panic when either
// expression is not an *ast.IntegerLiteral.
func assertIntegerExpression(intExpression, intExpressionExpected ast.Expression) bool {
    var value *ast.IntegerLiteral = intExpression.(*ast.IntegerLiteral)
    var valueExpected *ast.IntegerLiteral = intExpressionExpected.(*ast.IntegerLiteral)
    isEqual := isTokenEqual(value.Token, valueExpected.Token)
    return isEqual && value.Value == valueExpected.Value
}
// isTokenEqual reports whether two tokens agree on both their literal text
// and their token type.
func isTokenEqual(token, tokenExpected lexer.Token) bool {
    return token.Literal == tokenExpected.Literal && token.Type == tokenExpected.Type
}
// createLiteralLetStatement builds the expected AST for `let <name> = <value>;`
// where value is a decimal integer literal.
// NOTE(review): the ParseInt error is discarded — a non-numeric value
// silently becomes 0.
func createLiteralLetStatement(name, value string) ast.LetStatement {
    id := &ast.Identifier{Token: createToken(lexer.IDENT, name), Value: name}
    val, _ := strconv.ParseInt(value, 10, 0)
    integerLiteral := ast.IntegerLiteral{Token: createToken(lexer.INT, value), Value: val}
    return ast.LetStatement{Token: createToken(lexer.LET, "let"), Name: id, Value: &integerLiteral}
}

// createToken builds a lexer token with the given type and literal text.
func createToken(typeToken lexer.TokenType, value string) lexer.Token {
    return lexer.Token{Type: typeToken, Literal: value}
}
| apache-2.0 |
osgi/bundles | osgi.logger/src/osgi/logger/provider/AbstractLogger.java | 17125 | package osgi.logger.provider;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.Writer;
import java.lang.reflect.Array;
import java.util.Formatter;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.osgi.framework.Bundle;
import org.osgi.framework.ServiceReference;
import org.osgi.service.log.LogService;
import org.slf4j.Marker;
import osgi.enroute.logger.api.Level;
import osgi.enroute.logger.api.LoggerAdmin.Control;
/**
 * This is the base class for the class that must log information. It fully
 * implements the slf4j Logger interface and maps every call to an
 * {@link Entry} record which is then queued via the static (yuck) dispatcher.
 * <p>
 * The per-level flags are optimistic: {@code reset()} turns them all on, and
 * the first log call at any level funnels through {@code init(Level)} to
 * fetch the real configuration.
 */
class AbstractLogger implements org.slf4j.Logger {

    /*
     * A pattern to discard classes in a stacktrace that we do not want to see
     */
    // final static Pattern CUSTOM_CLASSES = Pattern
    // .compile("(?!com\\.sun|sun|java\\.|osgi\\.enroute\\.logger\\.provider)(.+\\.)+(.*)");

    String name;    // logger name; null for the anonymous logger
    Bundle bundle;  // bundle on whose behalf we log; may be null

    // Per-level enable flags. Volatile so concurrent log calls observe
    // updates made by init()/reset() without locking.
    volatile boolean info;
    volatile boolean trace;
    volatile boolean debug;
    volatile boolean warn;
    volatile boolean error;
    volatile boolean exceptions;  // when set, stack traces are appended to the message text
    volatile boolean where;       // when set, messages are prefixed with the call site

    boolean init;        // true once init(Level) has run since the last reset()
    boolean registered;  // true once registered with the LoggerDispatcher
    Level level;         // effective level as configured by the admin

    // Named logger owned by a specific bundle.
    AbstractLogger(Bundle bundle, String name) {
        this.bundle = bundle;
        this.name = name;
        reset();
    }

    // Anonymous logger: no name, no owning bundle.
    AbstractLogger() {
        reset();
    }
/*
* The init method is checked when a flag is set to true (done in reset()).
* The init method will set all flags correctly based on defaults, or, if
* there is a admin, from the settings in the admin. The init method returns
* true if the given level must be logged. I think this is the fastest
* possible way when you're not using a level. I.e. initially trace is set
* to true. At the first trace, init(TRACE) is called but this in general
* will set the trace flag to false. So the second time only the trace flag
* is checked. In general, extra work is only done when we actually are
* going to log a message.
*/
private synchronized boolean init(Level level) {
if (init)
return true;
init = true;
//
// We need to register once with the LoggerDispatcher
// so the admin can reset us when there are new settings
//
if (!registered) {
LoggerDispatcher.dispatcher.register(this);
registered = true;
}
LoggerAdminImpl admin = LoggerDispatcher.dispatcher.admin;
if (admin != null) {
//
// We have an admin. So we actually get our settins
// from this admin.
//
Control control = admin.getControl(this.name);
debug = control.level.ordinal() <= Level.DEBUG.ordinal();
info = control.level.ordinal() <= Level.INFO.ordinal();
trace = control.level.ordinal() <= Level.TRACE.ordinal();
warn = control.level.ordinal() <= Level.WARN.ordinal();
error = control.level.ordinal() <= Level.ERROR.ordinal();
level = control.level;
} else {
//
// Default Defaults if no admin present. There is a bit of a race
// condition since at this admin could have become active and reset
// us
// However, this is pretty rare and should be corrected on the next
// call
//
debug = false;
info = false;
trace = false;
warn = true;
error = true;
}
return isLevel(level);
}
/*
* Next time this logger is used it will try to get its configuration again
*/
synchronized void reset() {
init = false;
debug = true;
info = true;
trace = true;
warn = true;
error = true;
}
/*
* Check if this level is set
*/
private boolean isLevel(Level level) {
switch (level) {
case DEBUG :
return debug;
case ERROR :
return error;
case INFO :
return info;
case TRACE :
return trace;
case WARN :
return warn;
case R1 :
case R2 :
case R3 :
case AUDIT :
default :
return true;
}
}
/*
* Return the name
*/
@Override
public String getName() {
return name;
}
    /*
     * The core routine: every log overload ends up here once its level check
     * has passed. Builds an Entry (level, message text, and the first
     * ServiceReference / Throwable found among the arguments) and offers it
     * to the dispatcher queue without blocking.
     */
    void message(int level, String format, Object... arguments) {
        try {
            //
            // SLF4J uses {} as its positional placeholder while we format
            // with java.util.Formatter, so rewrite every {} to %s.
            // TODO figure out markers
            //
            if (format.indexOf('{') >= 0) {
                format = format.replaceAll("\\{\\}", "%s");
            }
            //
            // We will log an entry to the queue
            //
            Entry entry = new Entry();
            entry.level = level;
            //
            // Scan the arguments: the first ServiceReference and the first
            // Throwable are lifted onto the Entry; every other non-String
            // argument is pre-rendered via toString() since arrays (and
            // some other objects) print badly otherwise.
            //
            for (int i = 0; i < arguments.length; i++)
                if (arguments[i] != null) {
                    if (entry.reference == null && arguments[i] instanceof ServiceReference< ? >) {
                        entry.reference = (ServiceReference< ? >) arguments[i];
                    } else if (entry.exception == null && arguments[i] instanceof Throwable) {
                        entry.exception = (Throwable) arguments[i];
                    } else if (!(arguments[i] instanceof String))
                        arguments[i] = toString(arguments[i]);
                }
            //
            // Add a few spare slots so a format string that refers to
            // non-existent arguments does not throw — logging should never
            // throw exceptions.
            //
            Object nargs[] = new Object[arguments.length + 10];
            System.arraycopy(arguments, 0, nargs, 0, arguments.length);
            final StringBuilder sb = new StringBuilder();
            try (Formatter formatter = new Formatter(sb)) {
                if (name != null) {
                    sb.append(name).append(" :: ");
                }
                if (where) {
                    // Prefix up to 4 caller stack frames when enabled.
                    where(sb, 4);
                }
                formatter.format(format, nargs);
                if (entry.exception != null && exceptions) {
                    sb.append("\n");
                    try (PrintWriter sw = getWriter(sb)) {
                        entry.exception.printStackTrace(sw);
                    }
                }
            }
            entry.message = sb.toString();
            entry.source = bundle;
            //
            // We will not block on the queue. So we attempt to put it in the
            // queue and if we do not succeed, print it to std err.
            //
            if (!LoggerDispatcher.dispatcher.queue.offer(entry)) {
                System.err.println("Overflowing log queue " + entry);
            }
        }
        catch (Exception e) {
            System.err.println("Shamefully have to admit the log service failed :-(" + e);
            e.printStackTrace();
        }
    }
private PrintWriter getWriter(final StringBuilder sb) {
return new PrintWriter(new Writer() {
@Override
public void write(char[] cbuf, int off, int len) throws IOException {
for (int i = 0; i < len; i++)
sb.append(cbuf[i + off]);
}
@Override
public void flush() throws IOException {}
@Override
public void close() throws IOException {}
});
}
/*
* Create a more suitable text presentation for array objects
* @param object
* @return
*/
private String toString(Object object) {
if (object == null)
return "null";
if (object.getClass().isArray()) {
StringBuilder sb = new StringBuilder();
String del = "[";
for (int i = 0; i < Array.getLength(object); i++) {
sb.append(del).append(toString(Array.get(object, i)));
del = ", ";
}
sb.append("]");
return sb.toString();
}
return object.toString();
}
/*
* Get the current location of where the error was reported.
*/
protected void where(StringBuilder sb, int max) {
try {
throw new Exception();
}
catch (Exception e) {
StackTraceElement[] stackTrace = e.getStackTrace();
int n = 0;
for (int i = 2; i < sb.length(); i++) {
Matcher matcher = Pattern.compile(".*").matcher(stackTrace[i].getClassName());
if (matcher.matches()) {
String logMethod = stackTrace[i].getMethodName();
String logClass = matcher.group(2);
int line = stackTrace[i].getLineNumber();
sb.append("[").append(logClass).append(".").append(logMethod);
if (line != 0)
sb.append(":").append(line);
sb.append("] ");
n++;
if (n >= max)
return;
}
}
}
}
    /**************************************************************************************************************/
    // Below is the SLF4J Logger surface. Every overload follows the same
    // pattern: check the volatile level flag, lazily init(), then funnel
    // into message(). NOTE(review): Marker arguments are not treated
    // specially — where kept at all, they are simply passed to message()
    // as an extra format argument.

    @Override
    public void info(String format, Object... arguments) {
        if (info && init(Level.INFO))
            message(LogService.LOG_INFO, format, arguments);
    }

    @Override
    public void debug(String format, Object... arguments) {
        if (debug && init(Level.DEBUG))
            message(LogService.LOG_DEBUG, format, arguments);
    }

    @Override
    public void error(String format, Object... arguments) {
        if (error && init(Level.ERROR))
            message(LogService.LOG_ERROR, format, arguments);
    }

    @Override
    public void trace(String format, Object... arguments) {
        // The OSGi LogService has no TRACE constant; LoggerAdminImpl
        // supplies its own.
        if (trace && init(Level.TRACE))
            message(LoggerAdminImpl.LOG_TRACE, format, arguments);
    }

    @Override
    public void warn(String format, Object... arguments) {
        if (warn && init(Level.WARN))
            message(LogService.LOG_WARNING, format, arguments);
    }

    @Override
    public boolean isInfoEnabled() {
        return info && init(Level.INFO);
    }

    @Override
    public boolean isDebugEnabled() {
        return debug && init(Level.DEBUG);
    }

    @Override
    public boolean isErrorEnabled() {
        return error && init(Level.ERROR);
    }

    @Override
    public boolean isTraceEnabled() {
        return trace && init(Level.TRACE);
    }

    @Override
    public boolean isWarnEnabled() {
        return warn && init(Level.WARN);
    }

    // Unregisters this logger from the dispatcher and turns every flag off.
    public void close() {
        LoggerDispatcher.dispatcher.unregister(this);
        init = registered = info = trace = error = warn = debug = false;
    }
    // debug(...) and error(...) overloads. NOTE(review): in the
    // (Marker, format, Object...), (Marker, format, Throwable) and
    // (Marker, format, Object, Object) variants the marker is silently
    // dropped; in the other Marker variants it is appended as a trailing
    // format argument.

    @Override
    public void debug(String string) {
        if (debug && init(Level.DEBUG))
            message(LogService.LOG_DEBUG, string);
    }

    @Override
    public void debug(String format, Object arguments) {
        if (debug && init(Level.DEBUG))
            message(LogService.LOG_DEBUG, format, arguments);
    }

    @Override
    public void debug(String string, Throwable t) {
        if (debug && init(Level.DEBUG))
            message(LogService.LOG_DEBUG, string, t);
    }

    @Override
    public void debug(Marker marker, String string) {
        if (debug && init(Level.DEBUG))
            message(LogService.LOG_DEBUG, string, marker);
    }

    @Override
    public void debug(String format, Object a, Object b) {
        if (debug && init(Level.DEBUG))
            message(LogService.LOG_DEBUG, format, a, b);
    }

    @Override
    public void debug(Marker marker, String format, Object a) {
        if (debug && init(Level.DEBUG))
            message(LogService.LOG_DEBUG, format, a, marker);
    }

    @Override
    public void debug(Marker marker, String format, Object... args) {
        if (debug && init(Level.DEBUG))
            message(LogService.LOG_DEBUG, format, args);
    }

    @Override
    public void debug(Marker marker, String format, Throwable t) {
        if (debug && init(Level.DEBUG))
            message(LogService.LOG_DEBUG, format, t);
    }

    @Override
    public void debug(Marker marker, String format, Object a, Object b) {
        if (debug && init(Level.DEBUG))
            message(LogService.LOG_DEBUG, format, a, b);
    }

    @Override
    public void error(String string) {
        if (error && init(Level.ERROR))
            message(LogService.LOG_ERROR, string);
    }

    @Override
    public void error(String format, Object arguments) {
        if (error && init(Level.ERROR))
            message(LogService.LOG_ERROR, format, arguments);
    }

    @Override
    public void error(String string, Throwable t) {
        if (error && init(Level.ERROR))
            message(LogService.LOG_ERROR, string, t);
    }

    @Override
    public void error(Marker marker, String string) {
        if (error && init(Level.ERROR))
            message(LogService.LOG_ERROR, string, marker);
    }

    @Override
    public void error(String format, Object a, Object b) {
        if (error && init(Level.ERROR))
            message(LogService.LOG_ERROR, format, a, b);
    }

    @Override
    public void error(Marker marker, String format, Object a) {
        if (error && init(Level.ERROR))
            message(LogService.LOG_ERROR, format, a, marker);
    }

    @Override
    public void error(Marker marker, String format, Object... args) {
        if (error && init(Level.ERROR))
            message(LogService.LOG_ERROR, format, args);
    }

    @Override
    public void error(Marker marker, String format, Throwable t) {
        if (error && init(Level.ERROR))
            message(LogService.LOG_ERROR, format, t);
    }

    @Override
    public void error(Marker marker, String format, Object a, Object b) {
        if (error && init(Level.ERROR))
            message(LogService.LOG_ERROR, format, a, b);
    }
@Override
public void info(String string) {
if (info && init(Level.INFO))
message(LogService.LOG_INFO, string);
}
@Override
public void info(String format, Object arguments) {
if (info && init(Level.INFO))
message(LogService.LOG_INFO, format, arguments);
}
@Override
public void info(String string, Throwable t) {
if (info && init(Level.INFO))
message(LogService.LOG_INFO, string, t);
}
@Override
public void info(Marker marker, String string) {
if (info && init(Level.INFO))
message(LogService.LOG_INFO, string, marker);
}
@Override
public void info(String format, Object a, Object b) {
if (info && init(Level.INFO))
message(LogService.LOG_INFO, format, a, b);
}
@Override
public void info(Marker marker, String format, Object a) {
if (info && init(Level.INFO))
message(LogService.LOG_INFO, format, a, marker);
}
/** Logs a formatted INFO message with varargs; note the marker itself is not forwarded here. */
@Override
public void info(Marker marker, String format, Object... args) {
	if (!info || !init(Level.INFO)) {
		return;
	}
	message(LogService.LOG_INFO, format, args);
}
/** Logs an INFO message with a throwable; the marker is not forwarded in this overload. */
@Override
public void info(Marker marker, String format, Throwable t) {
	if (!info || !init(Level.INFO)) {
		return;
	}
	message(LogService.LOG_INFO, format, t);
}
/** Logs a formatted INFO message with two arguments; the marker is not forwarded here. */
@Override
public void info(Marker marker, String format, Object a, Object b) {
	if (!info || !init(Level.INFO)) {
		return;
	}
	message(LogService.LOG_INFO, format, a, b);
}
/** Returns whether DEBUG output is currently enabled; the marker is ignored. */
@Override
public boolean isDebugEnabled(Marker arg0) {
	if (!debug) {
		return false;
	}
	return init(Level.DEBUG);
}
/** Returns whether ERROR output is currently enabled; the marker is ignored. */
@Override
public boolean isErrorEnabled(Marker arg0) {
	if (!error) {
		return false;
	}
	return init(Level.ERROR);
}
/** Returns whether INFO output is currently enabled; the marker is ignored. */
@Override
public boolean isInfoEnabled(Marker arg0) {
	if (!info) {
		return false;
	}
	return init(Level.INFO);
}
/** Returns whether TRACE output is currently enabled; the marker is ignored. */
@Override
public boolean isTraceEnabled(Marker arg0) {
	if (!trace) {
		return false;
	}
	return init(Level.TRACE);
}
/** Returns whether WARN output is currently enabled; the marker is ignored. */
@Override
public boolean isWarnEnabled(Marker arg0) {
	if (!warn) {
		return false;
	}
	return init(Level.WARN);
}
/** Logs a plain WARN message when WARN output is enabled for this logger. */
@Override
public void warn(String string) {
	if (!warn || !init(Level.WARN)) {
		return;
	}
	message(LogService.LOG_WARNING, string);
}
/** Logs a formatted WARN message when WARN output is enabled. */
@Override
public void warn(String format, Object arguments) {
	if (!warn || !init(Level.WARN)) {
		return;
	}
	message(LogService.LOG_WARNING, format, arguments);
}
/** Logs a WARN message together with a throwable, when WARN output is enabled. */
@Override
public void warn(String string, Throwable t) {
	if (!warn || !init(Level.WARN)) {
		return;
	}
	message(LogService.LOG_WARNING, string, t);
}
/** Logs a WARN message; the marker is forwarded as a trailing argument to the sink. */
@Override
public void warn(Marker marker, String string) {
	if (!warn || !init(Level.WARN)) {
		return;
	}
	message(LogService.LOG_WARNING, string, marker);
}
/** Logs a formatted WARN message with two arguments, when WARN output is enabled. */
@Override
public void warn(String format, Object a, Object b) {
	if (!warn || !init(Level.WARN)) {
		return;
	}
	message(LogService.LOG_WARNING, format, a, b);
}
/** Logs a formatted WARN message; the marker is forwarded as a trailing argument. */
@Override
public void warn(Marker marker, String format, Object a) {
	if (!warn || !init(Level.WARN)) {
		return;
	}
	message(LogService.LOG_WARNING, format, a, marker);
}
/** Logs a formatted WARN message with varargs; note the marker itself is not forwarded here. */
@Override
public void warn(Marker marker, String format, Object... args) {
	if (!warn || !init(Level.WARN)) {
		return;
	}
	message(LogService.LOG_WARNING, format, args);
}
/** Logs a WARN message with a throwable; the marker is not forwarded in this overload. */
@Override
public void warn(Marker marker, String format, Throwable t) {
	if (!warn || !init(Level.WARN)) {
		return;
	}
	message(LogService.LOG_WARNING, format, t);
}
/** Logs a formatted WARN message with two arguments; the marker is not forwarded here. */
@Override
public void warn(Marker marker, String format, Object a, Object b) {
	if (!warn || !init(Level.WARN)) {
		return;
	}
	message(LogService.LOG_WARNING, format, a, b);
}
/** Logs a plain TRACE message when TRACE output is enabled for this logger. */
@Override
public void trace(String string) {
	if (!trace || !init(Level.TRACE)) {
		return;
	}
	message(LoggerAdminImpl.LOG_TRACE, string);
}
/** Logs a formatted TRACE message when TRACE output is enabled. */
@Override
public void trace(String format, Object arguments) {
	if (!trace || !init(Level.TRACE)) {
		return;
	}
	message(LoggerAdminImpl.LOG_TRACE, format, arguments);
}
/** Logs a TRACE message together with a throwable, when TRACE output is enabled. */
@Override
public void trace(String string, Throwable t) {
	if (!trace || !init(Level.TRACE)) {
		return;
	}
	message(LoggerAdminImpl.LOG_TRACE, string, t);
}
/** Logs a TRACE message; the marker is forwarded as a trailing argument to the sink. */
@Override
public void trace(Marker marker, String string) {
	if (!trace || !init(Level.TRACE)) {
		return;
	}
	message(LoggerAdminImpl.LOG_TRACE, string, marker);
}
/** Logs a formatted TRACE message with two arguments, when TRACE output is enabled. */
@Override
public void trace(String format, Object a, Object b) {
	if (!trace || !init(Level.TRACE)) {
		return;
	}
	message(LoggerAdminImpl.LOG_TRACE, format, a, b);
}
/** Logs a formatted TRACE message; the marker is forwarded as a trailing argument. */
@Override
public void trace(Marker marker, String format, Object a) {
	if (!trace || !init(Level.TRACE)) {
		return;
	}
	message(LoggerAdminImpl.LOG_TRACE, format, a, marker);
}
/** Logs a formatted TRACE message with varargs; note the marker itself is not forwarded here. */
@Override
public void trace(Marker marker, String format, Object... args) {
	if (!trace || !init(Level.TRACE)) {
		return;
	}
	message(LoggerAdminImpl.LOG_TRACE, format, args);
}
/** Logs a TRACE message with a throwable; the marker is not forwarded in this overload. */
@Override
public void trace(Marker marker, String format, Throwable t) {
	if (!trace || !init(Level.TRACE)) {
		return;
	}
	message(LoggerAdminImpl.LOG_TRACE, format, t);
}
/** Logs a formatted TRACE message with two arguments; the marker is not forwarded here. */
@Override
public void trace(Marker marker, String format, Object a, Object b) {
	if (!trace || !init(Level.TRACE)) {
		return;
	}
	message(LoggerAdminImpl.LOG_TRACE, format, a, b);
}
/**
 * Associates this logger with an OSGi bundle. If no name has been set yet,
 * one is derived from the bundle's symbolic name (falling back to the bundle
 * id) with the bundle version appended after a ';' when available.
 */
public void setBundle(Bundle bundle) {
	this.bundle = bundle;
	if (this.name != null) {
		return;
	}
	String derived = bundle.getSymbolicName();
	if (derived == null) {
		derived = String.valueOf(bundle.getBundleId());
	}
	if (bundle.getVersion() != null) {
		derived = derived + ";" + bundle.getVersion();
	}
	setName(derived);
}
/** Sets the name used to identify this logger. */
public void setName(String name) {
	this.name = name;
}
}
| apache-2.0 |
MReichenbach/visitmeta | visualization/src/main/java/de/hshannover/f4/trust/visitmeta/network/GraphPool.java | 4531 | /*
* #%L
* =====================================================
* _____ _ ____ _ _ _ _
* |_ _|_ __ _ _ ___| |_ / __ \| | | | ___ | | | |
* | | | '__| | | / __| __|/ / _` | |_| |/ __|| |_| |
* | | | | | |_| \__ \ |_| | (_| | _ |\__ \| _ |
* |_| |_| \__,_|___/\__|\ \__,_|_| |_||___/|_| |_|
* \____/
*
* =====================================================
*
* Hochschule Hannover
* (University of Applied Sciences and Arts, Hannover)
* Faculty IV, Dept. of Computer Science
* Ricklinger Stadtweg 118, 30459 Hannover, Germany
*
* Email: trust@f4-i.fh-hannover.de
* Website: http://trust.f4.hs-hannover.de/
*
* This file is part of visitmeta-visualization, version 0.5.0,
* implemented by the Trust@HsH research group at the Hochschule Hannover.
* %%
* Copyright (C) 2012 - 2015 Trust@HsH
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package de.hshannover.f4.trust.visitmeta.network;
import de.hshannover.f4.trust.visitmeta.datawrapper.ExpandedLink;
import de.hshannover.f4.trust.visitmeta.datawrapper.NodeIdentifier;
import de.hshannover.f4.trust.visitmeta.datawrapper.NodeMetadata;
import de.hshannover.f4.trust.visitmeta.datawrapper.RichMetadata;
import de.hshannover.f4.trust.visitmeta.interfaces.Identifier;
import de.hshannover.f4.trust.visitmeta.interfaces.Link;
/**
* This is a class containing all pools which are necessary for a connection.
*/
public class GraphPool {

	// Pools are created once in the constructor and never replaced, so they
	// can be final.
	private final PoolNodeIdentifier mIdentifierPool;
	private final PoolExpandedLink mLinkPool;
	private final PoolNodeMetadata mMetadataPool;

	/**
	 * Initializes the pool classes and connects them correctly: the
	 * identifier pool shares the metadata pool, and the link pool shares
	 * both.
	 */
	public GraphPool() {
		mMetadataPool = new PoolNodeMetadata();
		mIdentifierPool = new PoolNodeIdentifier(mMetadataPool);
		mLinkPool = new PoolExpandedLink(mIdentifierPool, mMetadataPool);
	}

	/** @return the pool managing {@link NodeIdentifier} instances. */
	public PoolNodeIdentifier getIdentifierPool() {
		return mIdentifierPool;
	}

	/** @return the pool managing {@link ExpandedLink} instances. */
	public PoolExpandedLink getLinkPool() {
		return mLinkPool;
	}

	/** @return the pool managing {@link NodeMetadata} instances. */
	public PoolNodeMetadata getMetadataPool() {
		return mMetadataPool;
	}

	/**
	 * Clears each pool.
	 */
	public void clearGraph() {
		mIdentifierPool.clear();
		mMetadataPool.clear();
		mLinkPool.clear();
	}

	/**
	 * @see PoolNodeIdentifier#create(Identifier)
	 * @param identifier
	 * @return NodeIdentifier or null.
	 */
	public NodeIdentifier createIdentifier(Identifier identifier) {
		return mIdentifierPool.create(identifier);
	}

	/**
	 * @see PoolNodeIdentifier#getIdentifier(Identifier)
	 * @param identifier
	 * @return NodeIdentifier or null.
	 */
	public NodeIdentifier getIdentifier(Identifier identifier) {
		return mIdentifierPool.getIdentifier(identifier);
	}

	/**
	 * @see PoolNodeIdentifier#release(Identifier)
	 * @param identifier
	 */
	public void releaseIdentifier(Identifier identifier) {
		mIdentifierPool.release(identifier);
	}

	/**
	 * @see PoolExpandedLink#create(Link)
	 * @param link
	 * @return ExpandedLink or null.
	 */
	public ExpandedLink createLink(Link link) {
		return mLinkPool.create(link);
	}

	/**
	 * @see PoolExpandedLink#getLink(Link)
	 * @param link
	 * @return ExpandedLink or null.
	 */
	public ExpandedLink getLink(Link link) {
		return mLinkPool.getLink(link);
	}

	/**
	 * @see PoolExpandedLink#release(Link)
	 * @param link
	 */
	public void releaseLink(Link link) {
		mLinkPool.release(link);
	}

	/**
	 * @see PoolNodeMetadata#create(RichMetadata)
	 * @param metadata
	 * @return NodeMetadata or null.
	 */
	public NodeMetadata createMetadata(RichMetadata metadata) {
		return mMetadataPool.create(metadata);
	}

	/**
	 * @see PoolNodeMetadata#getMetadata(RichMetadata)
	 * @param metadata
	 * @return NodeMetadata or null.
	 */
	public NodeMetadata getMetadata(RichMetadata metadata) {
		return mMetadataPool.getMetadata(metadata);
	}

	/**
	 * @see PoolNodeMetadata#release(RichMetadata)
	 * @param metadata
	 */
	public void releaseMetadata(RichMetadata metadata) {
		mMetadataPool.release(metadata);
	}
}
| apache-2.0 |
bruce-dunwiddie/tsql-parser | TSQL_Parser/TSQL_Parser/Tokens/TSQLIncompleteIdentifierToken.cs | 415 | using System;
namespace TSQL.Tokens
{
/// <summary>
/// Token for an identifier that was cut off by the end of the parsed text
/// before it could be completed (e.g. an unterminated bracketed/quoted name).
/// </summary>
public class TSQLIncompleteIdentifierToken : TSQLIncompleteToken
{
	// Internal: tokens are only produced by the tokenizer in this assembly.
	internal TSQLIncompleteIdentifierToken(
		int beginPosition,
		string text) :
		base(
			beginPosition,
			text)
	{
	}
#pragma warning disable 1591
	// Identifies this token as an incomplete identifier; the pragma suppresses
	// the missing-XML-doc warning for this override.
	public override TSQLTokenType Type
	{
		get
		{
			return TSQLTokenType.IncompleteIdentifier;
		}
	}
#pragma warning restore 1591
}
}
| apache-2.0 |