code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
// Barrel file for the app's service layer: re-exports every service module so
// consumers can `import { ... } from '.../app/service'` instead of deep paths.
// NOTE: export order matters if two modules ever export the same symbol — keep
// additions at the end unless a shadowing override is intended.
export * from './admin.service';
export * from './database.service';
export * from './route.service';
export * from './server.service';
export * from './file.service';
export * from './controller.service';
export * from './service.service';
export * from './serve.auth.service';
export * from './interceptor.service';
| CaoYouXin/serveV2 | html/deploy/src/app/service/index.ts | TypeScript | apache-2.0 | 318 |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.iot.model;
import java.io.Serializable;
import java.util.Objects;

import javax.annotation.Generated;

import com.amazonaws.protocol.ProtocolMarshaller;
import com.amazonaws.protocol.StructuredPojo;
/**
 * <p>
 * Describes an action that captures a CloudWatch metric.
 * </p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class CloudwatchMetricAction implements Serializable, Cloneable, StructuredPojo {

    /** The IAM role that allows access to the CloudWatch metric. */
    private String roleArn;

    /** The CloudWatch metric namespace name. */
    private String metricNamespace;

    /** The CloudWatch metric name. */
    private String metricName;

    /** The CloudWatch metric value. */
    private String metricValue;

    /**
     * The <a href="http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/cloudwatch_concepts.html#Unit">metric
     * unit</a> supported by CloudWatch.
     */
    private String metricUnit;

    /**
     * An optional <a href=
     * "http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/cloudwatch_concepts.html#about_timestamp">Unix
     * timestamp</a>.
     */
    private String metricTimestamp;

    /**
     * Sets the IAM role that allows access to the CloudWatch metric.
     *
     * @param roleArn
     *        The IAM role that allows access to the CloudWatch metric.
     */
    public void setRoleArn(String roleArn) {
        this.roleArn = roleArn;
    }

    /**
     * @return The IAM role that allows access to the CloudWatch metric.
     */
    public String getRoleArn() {
        return this.roleArn;
    }

    /**
     * Fluent setter for the IAM role that allows access to the CloudWatch metric.
     *
     * @param roleArn
     *        The IAM role that allows access to the CloudWatch metric.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CloudwatchMetricAction withRoleArn(String roleArn) {
        setRoleArn(roleArn);
        return this;
    }

    /**
     * Sets the CloudWatch metric namespace name.
     *
     * @param metricNamespace
     *        The CloudWatch metric namespace name.
     */
    public void setMetricNamespace(String metricNamespace) {
        this.metricNamespace = metricNamespace;
    }

    /**
     * @return The CloudWatch metric namespace name.
     */
    public String getMetricNamespace() {
        return this.metricNamespace;
    }

    /**
     * Fluent setter for the CloudWatch metric namespace name.
     *
     * @param metricNamespace
     *        The CloudWatch metric namespace name.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CloudwatchMetricAction withMetricNamespace(String metricNamespace) {
        setMetricNamespace(metricNamespace);
        return this;
    }

    /**
     * Sets the CloudWatch metric name.
     *
     * @param metricName
     *        The CloudWatch metric name.
     */
    public void setMetricName(String metricName) {
        this.metricName = metricName;
    }

    /**
     * @return The CloudWatch metric name.
     */
    public String getMetricName() {
        return this.metricName;
    }

    /**
     * Fluent setter for the CloudWatch metric name.
     *
     * @param metricName
     *        The CloudWatch metric name.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CloudwatchMetricAction withMetricName(String metricName) {
        setMetricName(metricName);
        return this;
    }

    /**
     * Sets the CloudWatch metric value.
     *
     * @param metricValue
     *        The CloudWatch metric value.
     */
    public void setMetricValue(String metricValue) {
        this.metricValue = metricValue;
    }

    /**
     * @return The CloudWatch metric value.
     */
    public String getMetricValue() {
        return this.metricValue;
    }

    /**
     * Fluent setter for the CloudWatch metric value.
     *
     * @param metricValue
     *        The CloudWatch metric value.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CloudwatchMetricAction withMetricValue(String metricValue) {
        setMetricValue(metricValue);
        return this;
    }

    /**
     * Sets the <a
     * href="http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/cloudwatch_concepts.html#Unit">metric
     * unit</a> supported by CloudWatch.
     *
     * @param metricUnit
     *        The metric unit supported by CloudWatch.
     */
    public void setMetricUnit(String metricUnit) {
        this.metricUnit = metricUnit;
    }

    /**
     * @return The <a
     *         href="http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/cloudwatch_concepts.html#Unit"
     *         >metric unit</a> supported by CloudWatch.
     */
    public String getMetricUnit() {
        return this.metricUnit;
    }

    /**
     * Fluent setter for the <a
     * href="http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/cloudwatch_concepts.html#Unit">metric
     * unit</a> supported by CloudWatch.
     *
     * @param metricUnit
     *        The metric unit supported by CloudWatch.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CloudwatchMetricAction withMetricUnit(String metricUnit) {
        setMetricUnit(metricUnit);
        return this;
    }

    /**
     * Sets an optional <a href=
     * "http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/cloudwatch_concepts.html#about_timestamp">Unix
     * timestamp</a>.
     *
     * @param metricTimestamp
     *        An optional Unix timestamp.
     */
    public void setMetricTimestamp(String metricTimestamp) {
        this.metricTimestamp = metricTimestamp;
    }

    /**
     * @return An optional <a href=
     *         "http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/cloudwatch_concepts.html#about_timestamp"
     *         >Unix timestamp</a>.
     */
    public String getMetricTimestamp() {
        return this.metricTimestamp;
    }

    /**
     * Fluent setter for the optional <a href=
     * "http://docs.aws.amazon.com/AmazonCloudWatch/latest/DeveloperGuide/cloudwatch_concepts.html#about_timestamp">Unix
     * timestamp</a>.
     *
     * @param metricTimestamp
     *        An optional Unix timestamp.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public CloudwatchMetricAction withMetricTimestamp(String metricTimestamp) {
        setMetricTimestamp(metricTimestamp);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getRoleArn() != null)
            sb.append("RoleArn: ").append(getRoleArn()).append(",");
        if (getMetricNamespace() != null)
            sb.append("MetricNamespace: ").append(getMetricNamespace()).append(",");
        if (getMetricName() != null)
            sb.append("MetricName: ").append(getMetricName()).append(",");
        if (getMetricValue() != null)
            sb.append("MetricValue: ").append(getMetricValue()).append(",");
        if (getMetricUnit() != null)
            sb.append("MetricUnit: ").append(getMetricUnit()).append(",");
        if (getMetricTimestamp() != null)
            sb.append("MetricTimestamp: ").append(getMetricTimestamp());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is null-safe, so no separate null check is needed; using
        // instanceof (not getClass()) preserves the generated SDK's contract that
        // subclasses remain comparable to this type.
        if (!(obj instanceof CloudwatchMetricAction))
            return false;
        CloudwatchMetricAction other = (CloudwatchMetricAction) obj;
        // Objects.equals replaces the original null-xor + "equals(...) == false"
        // pattern with identical semantics (both-null == equal).
        return Objects.equals(getRoleArn(), other.getRoleArn())
                && Objects.equals(getMetricNamespace(), other.getMetricNamespace())
                && Objects.equals(getMetricName(), other.getMetricName())
                && Objects.equals(getMetricValue(), other.getMetricValue())
                && Objects.equals(getMetricUnit(), other.getMetricUnit())
                && Objects.equals(getMetricTimestamp(), other.getMetricTimestamp());
    }

    @Override
    public int hashCode() {
        // Objects.hash performs the same 31-based accumulation (initial value 1,
        // null -> 0) as the original hand-rolled loop, so hash values are unchanged.
        return Objects.hash(getRoleArn(), getMetricNamespace(), getMetricName(), getMetricValue(), getMetricUnit(), getMetricTimestamp());
    }

    @Override
    public CloudwatchMetricAction clone() {
        try {
            return (CloudwatchMetricAction) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    /**
     * Marshals this structure via the generated transform class. Internal SDK use only.
     */
    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.iot.model.transform.CloudwatchMetricActionMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| dagnir/aws-sdk-java | aws-java-sdk-iot/src/main/java/com/amazonaws/services/iot/model/CloudwatchMetricAction.java | Java | apache-2.0 | 13,191 |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// Assembly metadata for the Visual Recognition v4 integration-test project.
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("IBM")]
[assembly: AssemblyProduct("IBM.Watson.VisualRecognition.v4.IntegrationTests")]
[assembly: AssemblyTrademark("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("be1b0edd-6427-4b58-8916-25c7510bf227")]
/*
* Copyright 2012 The Clustermeister Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.nethad.clustermeister.provisioning.ec2.commands;
import com.github.nethad.clustermeister.provisioning.AbstractExecutableCommand;
import com.github.nethad.clustermeister.provisioning.CommandLineHandle;
import com.github.nethad.clustermeister.provisioning.ec2.AmazonCommandLineEvaluation;
import com.github.nethad.clustermeister.provisioning.ec2.AmazonNodeManager;
import com.github.nethad.clustermeister.provisioning.jppf.JPPFManagementByJobsClient;
import com.google.common.collect.Iterables;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.List;
import java.util.concurrent.ExecutionException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Super-class of all Amazon provisioning provider CLI commands.
*
* <p>
* Offers access to the Amazon provisioning infrastructure and some commonly
* used utilities.
* </p>
*
* @author daniel, thomas
*/
public abstract class AbstractAmazonExecutableCommand extends AbstractExecutableCommand {

    /**
     * Shared logger for subclasses.
     */
    protected final static Logger logger =
            LoggerFactory.getLogger(AbstractAmazonExecutableCommand.class);

    /**
     * Visual line separator for command line output layouting.
     */
    protected static final String SEPARATOR_LINE = "-------------------------------------------------";

    // Assigned once in the constructor; final documents that intent.
    private final AmazonCommandLineEvaluation commandLineEvaluation;

    /**
     * Creates a new command with a command line evaluation reference for access
     * to the Clustermeister provisioning infrastructure.
     *
     * @param commandName the name of the command.
     * @param arguments the arguments of the command, may be null.
     * @param helpText the help text of the command.
     * @param commandLineEvaluation the command line evaluation instance reference.
     */
    public AbstractAmazonExecutableCommand(String commandName, String[] arguments,
            String helpText, AmazonCommandLineEvaluation commandLineEvaluation) {
        super(commandName, arguments, helpText);
        this.commandLineEvaluation = commandLineEvaluation;
    }

    @Override
    protected CommandLineHandle getCommandLineHandle() {
        return commandLineEvaluation.getCommandLineHandle();
    }

    /**
     * The node manager allows to interact with the provisioning infrastructure.
     *
     * @return the Amazon node manager.
     */
    protected AmazonNodeManager getNodeManager() {
        return commandLineEvaluation.getNodeManager();
    }

    /**
     * The management client allows performing management tasks
     * (such as shutdown or restart) on running JPPF nodes.
     *
     * @return the Amazon management client.
     */
    protected JPPFManagementByJobsClient getManagementClient() {
        return commandLineEvaluation.getManagementClient();
    }

    /**
     * Wait for a list of futures to complete.
     *
     * <p>
     * The futures are considered as failed when they return null or fail to return.
     * </p>
     * @param futures the futures to wait for.
     * @param interruptedMessage
     *      Log this message when the thread waiting for the futures to return
     *      is interrupted. The exception's stack trace is appended to this message.
     * @param executionExceptionMessage
     *      Log this message when the thread waiting for the futures throws an
     *      exception while waiting. The exception's stack trace is appended to
     *      this message.
     * @param unsuccessfulFuturesMessage
     *      Log this message when at least one future failed (or returned null).
     *      Can be a formatted string where '{}' is replaced with the number of
     *      failed futures.
     */
    protected void waitForFuturesToComplete(List<ListenableFuture<? extends Object>> futures,
            String interruptedMessage, String executionExceptionMessage,
            String unsuccessfulFuturesMessage) {
        try {
            // successfulAsList substitutes null for each failed future instead of failing fast.
            List<Object> startedNodes = Futures.successfulAsList(futures).get();
            int failedNodes = Iterables.frequency(startedNodes, null);
            if (failedNodes > 0) {
                logger.warn(unsuccessfulFuturesMessage, failedNodes);
            }
        } catch (InterruptedException ex) {
            // Restore the interrupt flag so callers up the stack can still observe
            // the interruption (it was previously swallowed here).
            Thread.currentThread().interrupt();
            logger.warn(interruptedMessage, ex);
        } catch (ExecutionException ex) {
            logger.warn(executionExceptionMessage, ex);
        }
    }
}
| nethad/clustermeister | provisioning/src/main/java/com/github/nethad/clustermeister/provisioning/ec2/commands/AbstractAmazonExecutableCommand.java | Java | apache-2.0 | 5,156 |
package com.psychowood.yahapp;
import android.app.Activity;
import android.content.DialogInterface;
import android.os.Bundle;
import android.support.v7.app.AlertDialog;
import android.util.Log;
import android.view.WindowManager;
import com.psychowood.henkaku.HenkakuWebServer;
import com.psychowood.yahapp.storage.AssetsProxy;
import java.io.IOException;
import java.io.InputStream;
import java.net.InetAddress;
import java.net.SocketException;
import java.util.ArrayList;
import java.util.List;
import fi.iki.elonen.NanoHTTPD;
public class HenkakuWebServerActivity extends TextStatusActivityBase {
private static final String TAG = "HNKWebServerActivity";
private HenkakuWebServer server;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
@Override
protected void onResume() {
super.onResume();
List<InetAddress> addresses = null;
try {
addresses = HenkakuWebServer.getLocalIpV4Addresses();
} catch (SocketException e) {
Log.e(TAG,"Error getting addresses",e);
addresses = new ArrayList<>();
}
StringBuffer buf = new StringBuffer();
if (addresses == null || addresses.size() == 0) {
buf.append(getString(R.string.noNetworkAccess));
} else if (addresses.size() > 1){
buf.append(getString(R.string.connectAtTheseAddresses));
} else {
buf.append(getString(R.string.connectAtThisAddress));
}
buf.append("\n");
buf.append("\n");
try {
server = new HenkakuWebServer(new HenkakuWebServer.WebServerHandler() {
AssetsProxy proxy = new AssetsProxy(me);
@Override
public InputStream openResource(String resourceName) throws IOException {
return proxy.openAsset(AssetsProxy.HENKAKU_ASSETS_PREFIX,resourceName);
}
@Override
public void receivedRequest(final NanoHTTPD.IHTTPSession session) {
handler.post(new Runnable() {
@Override
public void run() {
textView.append(session.getUri());
textView.append("\n");
}
});
}
@Override
public void log(String tag, String s) {
Log.d(tag,s);
}
@Override
public void log(String tag, String s, Exception ex) {
Log.e(tag,s,ex);
}
@Override
public void done() {
handler.post(new Runnable() {
@Override
public void run() {
textView.append("\n");
textView.append(getString(R.string.install_completed));
textView.append("\n");
new AlertDialog.Builder(me)
.setTitle(getString(R.string.enjoy))
.setMessage(getString(R.string.install_completed))
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
onPause();
finish();
}
})
.show();
}
});
}
});
server.start();
for (InetAddress inetAddress : addresses) {
buf.append("http://"
+ inetAddress.getHostAddress()
+ ":" + server.getCurrentPort() + "\n");
}
buf.append("\n");
buf.append(getString(R.string.serving));
buf.append("\n");
buf.append("\n");
textView.setText(buf.toString());
} catch (IOException e) {
textView.append(getString(R.string.errorStartingServer) + "\n");
textView.append(Log.getStackTraceString(e));
}
}
@Override
protected void onPause() {
super.onPause();
if (server != null)
server.stop();
}
} | psychowood/YAHapp | android/app/src/main/java/com/psychowood/yahapp/HenkakuWebServerActivity.java | Java | apache-2.0 | 4,651 |
from functools import wraps
def authorized_method(o):
    """Mark a handler as requiring both authentication and authorization.

    Both flags are keyed by the handler's own ``slug``. Returns ``o`` so the
    function is usable as a decorator.
    """
    slug = o.slug
    o.authentication_required = slug
    o.authorization_required = slug
    return o
def authenticated_method(o):
    """Mark a handler as requiring authentication only, keyed by its slug."""
    setattr(o, 'authentication_required', o.slug)
    return o
def anonymous_method(o):
    """Mark a handler as freely accessible: no authentication, no authorization."""
    for flag in ('authentication_required', 'authorization_required'):
        setattr(o, flag, False)
    return o
class authorization_required(object):
    """
    Class decorator for documents, collections, and applications that need
    authorization before access.

    At minimum the decorated class gains ``authentication_required`` and
    ``authorization_required`` attributes (set to the protected method names).
    Optionally, a *filter_function* enables document-level authorization: it
    receives a user's decoded auth info and an access method, and must return a
    rethinkdb-compatible predicate that decides per-document access, e.g.::

        def example_filter_function(auth_info, method):
            username = auth_info.username
            permission = 'can_' + method
            return lambda(doc): \
                doc[permission].contains(username)

    Args:
        *protected (str): Items should be 'read', 'write', or a method name.
        filter_function (function): Takes a decoded auth token and an access
            method, and returns a function mapping a document instance to
            True/False for access.
    """

    def __init__(self, *protected, filter_function=None):
        self.protected = protected
        self.filter_function = filter_function

    def __call__(self, cls):
        cls.authentication_required = self.protected
        cls.authorization_required = self.protected
        filt = self.filter_function
        if filt:
            # Per-document filtering is active only when a predicate factory is given.
            cls.document_level_authorization = True
            cls.authorization_filter = filt
        else:
            cls.document_level_authorization = False
        return cls
class authentication_required(object):
    """Class decorator marking the named methods as requiring authentication."""

    def __init__(self, *protected):
        self.protected = protected

    def __call__(self, cls):
        setattr(cls, 'authentication_required', self.protected)
        return cls
| JeffHeard/sondra | sondra/auth/decorators.py | Python | apache-2.0 | 2,213 |
// TypeInfoGeneration.cpp
// Copyright (c) 2014 - 2015, zhiayang@gmail.com
// Licensed under the Apache License Version 2.0.
#include "codegen.h"
#include "typeinfo.h"
#include "pts.h"
using namespace Ast;
using namespace Codegen;
// Runtime-type-info registry for the Flax compiler. NOTE: nearly all logic in
// this namespace is currently disabled behind `#if 0` — the functions are kept
// as stubs so callers still link. Do not delete the disabled code: it documents
// the intended design of the type registry (typeList on the root node plus the
// generated `Type` enum and `Any` struct).
namespace TypeInfo
{
	// Registers a struct/class type in the root node's type list, skipping
	// duplicates. Currently a no-op (body disabled).
	void addNewType(CodegenInstance* cgi, fir::Type* stype, StructBase* str, TypeKind etype)
	{
		#if 0
		if(stype == 0) return;
		for(auto k : cgi->rootNode->typeList)
		{
			if(stype->isStructType())
			{
				fir::StructType* strt = dynamic_cast<fir::StructType*>(stype);
				iceAssert(strt);
				if(std::get<0>(k) == strt->getStructName().str())
					return;
			}
			else if(stype->isClassType())
			{
				fir::ClassType* clst = dynamic_cast<fir::ClassType*>(stype);
				iceAssert(clst);
				if(std::get<0>(k) == clst->getClassName().str())
					return;
			}
		}
		std::string id;
		if(stype->isStructType()) id = stype->toStructType()->getStructName().str();
		else if(stype->isClassType()) id = stype->toClassType()->getClassName().str();
		cgi->rootNode->typeList.push_back(std::make_tuple(id, stype, etype));
		#endif
	}

	// Returns the 1-based index of `type` in the type list, registering it as a
	// builtin if missing. Currently always returns 0 (body disabled).
	size_t getIndexForType(Codegen::CodegenInstance* cgi, fir::Type* type)
	{
		#if 0
		size_t i = 1;
		for(auto k : cgi->rootNode->typeList)
		{
			if(std::get<1>(k) == type)
			{
				return i;
			}
			i++;
		}
		std::string name = type->str();
		cgi->rootNode->typeList.push_back(std::make_tuple(name, type, TypeKind::BuiltinType));
		return getIndexForType(cgi, type);
		#endif
		return 0;
	}

	// Creates the strong `Type` enum and the `Any` struct (type tag + opaque
	// value pointer) if they do not yet exist. Currently a no-op (body disabled).
	void initialiseTypeInfo(CodegenInstance* cgi)
	{
		#if 0
		EnumDef* enr = 0;
		if(cgi->getTypeByString("Type") == nullptr)
		{
			enr = new EnumDef(Parser::Pin(), "Type");
			enr->isStrong = true;
			Number* num = new Number(Parser::Pin(), (int64_t) 1);
			enr->cases.push_back(std::make_pair("Type", num));
			// codegen() calls createType()
			enr->codegen(cgi);
		}
		else
		{
			auto pair = cgi->getTypeByString("Type");
			if(!pair) return;
			iceAssert(pair);
			iceAssert(pair->second.second == TypeKind::Enum);
			enr = dynamic_cast<EnumDef*>(pair->second.first);
			iceAssert(enr);
		}
		// create the Any type.
		#if 1
		if(cgi->getTypeByString("Any") == 0)
		{
			StructDef* any = new StructDef(Parser::Pin(), "Any");
			{
				VarDecl* type = new VarDecl(Parser::Pin(), "type", false);
				type->ptype = pts::NamedType::create("Type");
				VarDecl* data = new VarDecl(Parser::Pin(), "value", false);
				data->ptype = new pts::PointerType(pts::NamedType::create(INT8_TYPE_STRING));
				any->members.push_back(type);
				any->members.push_back(data);
			}
			any->codegen(cgi);
		}
		#endif
		#endif
	}

	// Seeds the type list with every builtin type (once per module) and appends
	// a `Type` enum case for each registered type, numbering from 2 ("Type"
	// itself is case 1). Currently a no-op (body disabled).
	void generateTypeInfo(CodegenInstance* cgi)
	{
		#if 0
		EnumDef* enr = dynamic_cast<EnumDef*>(cgi->getTypeByString("Type")->second.first);
		iceAssert(enr);
		// start at 2, we already have 1
		Number* num = new Number(Parser::Pin(), (int64_t) 2);
		bool done = false;
		for(auto t : cgi->rootNode->typeList)
		{
			if(std::get<0>(t) == INT8_TYPE_STRING)
			{
				done = true;
				break;
			}
		}
		if(!done)
		{
			auto kind = TypeKind::BuiltinType;
			cgi->rootNode->typeList.push_back(std::make_tuple(INT8_TYPE_STRING, cgi->getExprTypeOfBuiltin(INT8_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(INT16_TYPE_STRING, cgi->getExprTypeOfBuiltin(INT16_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(INT32_TYPE_STRING, cgi->getExprTypeOfBuiltin(INT32_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(INT64_TYPE_STRING, cgi->getExprTypeOfBuiltin(INT64_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(INT128_TYPE_STRING, cgi->getExprTypeOfBuiltin(INT128_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(UINT8_TYPE_STRING, cgi->getExprTypeOfBuiltin(UINT8_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(UINT16_TYPE_STRING, cgi->getExprTypeOfBuiltin(UINT16_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(UINT32_TYPE_STRING, cgi->getExprTypeOfBuiltin(UINT32_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(UINT64_TYPE_STRING, cgi->getExprTypeOfBuiltin(UINT64_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(UINT128_TYPE_STRING, cgi->getExprTypeOfBuiltin(UINT128_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(FLOAT32_TYPE_STRING, cgi->getExprTypeOfBuiltin(FLOAT32_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(FLOAT64_TYPE_STRING, cgi->getExprTypeOfBuiltin(FLOAT64_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(FLOAT80_TYPE_STRING, cgi->getExprTypeOfBuiltin(FLOAT80_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(FLOAT128_TYPE_STRING, cgi->getExprTypeOfBuiltin(FLOAT128_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(BOOL_TYPE_STRING, cgi->getExprTypeOfBuiltin(BOOL_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(CHARACTER_TYPE_STRING, cgi->getExprTypeOfBuiltin(CHARACTER_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(STRING_TYPE_STRING, cgi->getExprTypeOfBuiltin(STRING_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(UNICODE_CHARACTER_TYPE_STRING,
				cgi->getExprTypeOfBuiltin(UNICODE_CHARACTER_TYPE_STRING), kind));
			cgi->rootNode->typeList.push_back(std::make_tuple(UNICODE_STRING_TYPE_STRING,
				cgi->getExprTypeOfBuiltin(UNICODE_STRING_TYPE_STRING), kind));
		}
		for(auto tup : cgi->rootNode->typeList)
		{
			bool skip = false;
			// check for duplicates.
			// slightly inefficient.
			// todo: hashmap or something
			for(auto c : enr->cases)
			{
				if(c.first == std::get<0>(tup))
				{
					skip = true;
					break;
				}
			}
			if(skip) continue;
			enr->cases.push_back(std::make_pair(std::get<0>(tup), num));
			num = new Number(Parser::Pin(), num->ival + 1);
		}
		#if 0
		printf("Final type list for module %s\n{\n", cgi->module->getModuleName().c_str());
		int i = 1;
		for(auto c : enr->cases)
		{
			printf("\t%d: %s\n", i, c.first.c_str());
			i++;
		}
		printf("}\n\n");
		#endif
		#endif
	}
}
| adrian17/flax | source/TypeInfo/TypeInfo.cpp | C++ | apache-2.0 | 6,227 |
/*
Copyright 2014 Joukou Ltd
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Compiled CoffeeScript output (see sourceMappingURL below): persistence model
// for a graph's editor view state (pan x/y, zoom scale, free-form metadata),
// stored in the "graph_state" bucket. Do not hand-edit the generated style;
// behavioral changes belong in the CoffeeScript source.
var GraphStateModel, Model, Q, schema;

schema = require('./schema');

Model = require('../../../../lib/Model');

Q = require('q');

GraphStateModel = Model.define({
  schema: schema,
  type: 'graph_state',
  bucket: 'graph_state'
});

// Looks up the single state record for (agentKey, graphKey).
// NOTE(review): presumably backed by Riak secondary-index search (see
// afterRetrieve below) — confirm against lib/Model before relying on it.
GraphStateModel.retrieveForGraph = function(agentKey, graphKey) {
  return GraphStateModel.search("agent_key:" + agentKey + " graph_key:" + graphKey, {
    firstOnly: true
  });
};

// Upsert: updates the existing state record for the graph, or creates one if
// the lookup fails. Missing/non-numeric x/y/scale fall back to 0/0/1.
// Returns a Q promise resolving to the saved model.
GraphStateModel.put = function(agentKey, graphKey, state) {
  var data, deferred, numberOr, save;
  if (state == null) {
    state = {};
  }
  deferred = Q.defer();
  // Shared tail for both the update and the create path.
  save = function(model) {
    return model.save().then(function() {
      return deferred.resolve(model);
    }).fail(deferred.reject);
  };
  // Returns `number` only if it is a real (non-NaN) number, else `other`.
  numberOr = function(number, other) {
    if (typeof number !== 'number') {
      return other;
    }
    if (isNaN(number)) {
      return other;
    }
    return number;
  };
  data = {
    agent_key: agentKey,
    graph_key: graphKey,
    x: numberOr(state.x, 0),
    y: numberOr(state.y, 0),
    scale: numberOr(state.scale, 1),
    metadata: state.metadata || {}
  };
  // Any retrieval failure is treated as "not found" and triggers a create.
  GraphStateModel.retrieveForGraph(agentKey, graphKey).then(function(model) {
    model.setValue(data);
    return save(model);
  }).fail(function() {
    return GraphStateModel.create(data).then(save).fail(deferred.reject);
  });
  return deferred.promise;
};

// Newly created records get the same secondary indexes as retrieved ones.
GraphStateModel.afterCreate = function(model) {
  model.afterRetrieve();
  return Q.resolve(model);
};

// Intentionally empty: no pre-save processing is required for this model.
GraphStateModel.prototype.beforeSave = function() {};

// Registers the secondary indexes that retrieveForGraph's search relies on.
GraphStateModel.prototype.afterRetrieve = function() {
  this.addSecondaryIndex('agent_key_bin');
  return this.addSecondaryIndex('graph_key_bin');
};

module.exports = GraphStateModel;
/*
//# sourceMappingURL=index.js.map
*/
| joukou/joukou-data | dist/models/agent/graph/state/index.js | JavaScript | apache-2.0 | 2,304 |
/*
* Copyright (c) "Neo4j"
* Neo4j Sweden AB [http://neo4j.com]
*
* This file is part of Neo4j.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.neo4j.driver.internal.messaging.encode;
import org.junit.jupiter.api.Test;
import org.mockito.InOrder;
import java.util.HashMap;
import java.util.Map;
import org.neo4j.driver.internal.messaging.ValuePacker;
import org.neo4j.driver.internal.messaging.request.DiscardAllMessage;
import org.neo4j.driver.internal.messaging.request.DiscardMessage;
import org.neo4j.driver.Value;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.neo4j.driver.Values.value;
import static org.neo4j.driver.internal.messaging.request.DiscardMessage.newDiscardAllMessage;
/**
 * Tests for {@link DiscardMessageEncoder}: the encoder must emit a struct
 * header carrying the DISCARD signature, followed by a metadata map with an
 * "n" entry and, when present, a "qid" entry.
 */
class DiscardMessageEncoderTest
{
    private final DiscardMessageEncoder encoder = new DiscardMessageEncoder();
    private final ValuePacker packer = mock( ValuePacker.class );

    @Test
    void shouldDiscardAllCorrectly() throws Throwable
    {
        encoder.encode( newDiscardAllMessage( -1 ), packer );

        Map<String,Value> expectedMetadata = new HashMap<>();
        expectedMetadata.put( "n", value( -1 ) );

        InOrder inOrderVerifier = inOrder( packer );
        inOrderVerifier.verify( packer ).packStructHeader( 1, DiscardMessage.SIGNATURE );
        inOrderVerifier.verify( packer ).pack( expectedMetadata );
    }

    @Test
    void shouldEncodeDiscardMessage() throws Exception
    {
        encoder.encode( new DiscardMessage( 100, 200 ), packer );

        Map<String,Value> expectedMetadata = new HashMap<>();
        expectedMetadata.put( "n", value( 100 ) );
        expectedMetadata.put( "qid", value( 200 ) );

        InOrder inOrderVerifier = inOrder( packer );
        inOrderVerifier.verify( packer ).packStructHeader( 1, DiscardMessage.SIGNATURE );
        inOrderVerifier.verify( packer ).pack( expectedMetadata );
    }

    @Test
    void shouldAvoidQueryId() throws Throwable
    {
        // A query id of -1 is expected to be omitted from the metadata map.
        encoder.encode( new DiscardMessage( 100, -1 ), packer );

        Map<String,Value> expectedMetadata = new HashMap<>();
        expectedMetadata.put( "n", value( 100 ) );

        InOrder inOrderVerifier = inOrder( packer );
        inOrderVerifier.verify( packer ).packStructHeader( 1, DiscardMessage.SIGNATURE );
        inOrderVerifier.verify( packer ).pack( expectedMetadata );
    }

    @Test
    void shouldFailToEncodeWrongMessage()
    {
        assertThrows( IllegalArgumentException.class, () -> encoder.encode( DiscardAllMessage.DISCARD_ALL, packer ) );
    }
}
| neo4j/neo4j-java-driver | driver/src/test/java/org/neo4j/driver/internal/messaging/encode/DiscardMessageEncoderTest.java | Java | apache-2.0 | 2,938 |
/*
* DISCLAIMER
*
* Copyright 2016 ArangoDB GmbH, Cologne, Germany
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright holder is ArangoDB GmbH, Cologne, Germany
*/
package com.arangodb.entity;
import com.arangodb.entity.DocumentField.Type;
/**
* @author Mark Vollmary
*/
public class DocumentEntity implements Entity {

    // Document key; populated through the KEY document-field mapping.
    @DocumentField(Type.KEY)
    private String key;

    // Document id (handle); populated through the ID document-field mapping.
    @DocumentField(Type.ID)
    private String id;

    // Document revision; populated through the REV document-field mapping.
    @DocumentField(Type.REV)
    private String rev;

    public DocumentEntity() {
        super();
    }

    /**
     * @return the document key, or {@code null} if not populated
     */
    public String getKey() {
        return key;
    }

    /**
     * @return the document id, or {@code null} if not populated
     */
    public String getId() {
        return id;
    }

    /**
     * @return the document revision, or {@code null} if not populated
     */
    public String getRev() {
        return rev;
    }
}
| arangodb/arangodb-java-driver | src/main/java/com/arangodb/entity/DocumentEntity.java | Java | apache-2.0 | 1,231 |
# Copyright 2015-2016 Yelp Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from pyramid import testing
from paasta_tools.api.views import pause_autoscaler
def test_get_service_autoscaler_pause():
    with mock.patch(
        'paasta_tools.utils.KazooClient',
        autospec=True,
    ) as mock_zk, mock.patch(
        'paasta_tools.utils.load_system_paasta_config',
        autospec=True,
    ):
        # Simulate ZooKeeper holding the value b'100' at the pause node.
        fake_get = mock.Mock(return_value=(b'100', None))
        mock_zk.return_value = mock.Mock(get=fake_get)

        result = pause_autoscaler.get_service_autoscaler_pause(testing.DummyRequest())

        fake_get.assert_called_once_with('/autoscaling/paused')
        assert result == '100'
def test_update_autoscaler_pause():
    with mock.patch(
        'paasta_tools.utils.KazooClient',
        autospec=True,
    ) as mock_zk, mock.patch(
        'paasta_tools.api.views.pause_autoscaler.time',
        autospec=True,
    ) as mock_time, mock.patch(
        'paasta_tools.utils.load_system_paasta_config',
        autospec=True,
    ):
        request = testing.DummyRequest()
        request.swagger_data = {'json_body': {'minutes': 100}}

        fake_set = mock.Mock()
        fake_ensure = mock.Mock()
        mock_zk.return_value = mock.Mock(set=fake_set, ensure_path=fake_ensure)
        mock_time.time = mock.Mock(return_value=0)

        # 100 minutes from t=0 should be stored as 6000 (seconds).
        assert pause_autoscaler.update_service_autoscaler_pause(request) is None
        fake_ensure.assert_called_once_with('/autoscaling/paused')
        fake_set.assert_called_once_with('/autoscaling/paused', b'6000')
def test_delete_autoscaler_pause():
    with mock.patch(
        'paasta_tools.utils.KazooClient',
        autospec=True,
    ) as mock_zk, mock.patch(
        'paasta_tools.api.views.pause_autoscaler.time',
        autospec=True,
    ) as mock_time, mock.patch(
        'paasta_tools.utils.load_system_paasta_config',
        autospec=True,
    ):
        fake_delete = mock.Mock()
        fake_ensure = mock.Mock()
        mock_zk.return_value = mock.Mock(delete=fake_delete, ensure_path=fake_ensure)
        mock_time.time = mock.Mock(return_value=0)

        # Deleting the pause should remove the ZK node and return nothing.
        assert pause_autoscaler.delete_service_autoscaler_pause(testing.DummyRequest()) is None
        fake_ensure.assert_called_once_with('/autoscaling/paused')
        fake_delete.assert_called_once_with('/autoscaling/paused')
| somic/paasta | tests/api/test_pause_autoscaler.py | Python | apache-2.0 | 3,045 |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.bigtableadmin.v2.model;
/**
* The response message for Operations.ListOperations.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Bigtable Admin API. For a detailed explanation
* see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class ListOperationsResponse extends com.google.api.client.json.GenericJson {

  /**
   * The standard List next-page token.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String nextPageToken;

  /**
   * A list of operations that matches the specified filter in the request.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<Operation> operations;

  /**
   * The standard List next-page token.
   * @return value or {@code null} for none
   */
  public java.lang.String getNextPageToken() {
    return nextPageToken;
  }

  /**
   * The standard List next-page token.
   * @param nextPageToken nextPageToken or {@code null} for none
   */
  public ListOperationsResponse setNextPageToken(java.lang.String nextPageToken) {
    this.nextPageToken = nextPageToken;
    return this;
  }

  /**
   * A list of operations that matches the specified filter in the request.
   * @return value or {@code null} for none
   */
  public java.util.List<Operation> getOperations() {
    return operations;
  }

  /**
   * A list of operations that matches the specified filter in the request.
   * @param operations operations or {@code null} for none
   */
  public ListOperationsResponse setOperations(java.util.List<Operation> operations) {
    this.operations = operations;
    return this;
  }

  // Overridden to narrow the return type so fluent call chains keep the
  // concrete response type.
  @Override
  public ListOperationsResponse set(String fieldName, Object value) {
    return (ListOperationsResponse) super.set(fieldName, value);
  }

  // Overridden to narrow the return type of the shallow copy.
  @Override
  public ListOperationsResponse clone() {
    return (ListOperationsResponse) super.clone();
  }
}
| googleapis/google-api-java-client-services | clients/google-api-services-bigtableadmin/v2/1.31.0/com/google/api/services/bigtableadmin/v2/model/ListOperationsResponse.java | Java | apache-2.0 | 2,926 |
# vim: set encoding=utf-8
# Copyright (c) 2016 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setup import tc, rm, get_sandbox_path
import logging
logger = logging.getLogger(__name__)
def test_svm(tc):
    logger.info("define schema")
    schema = [("data", float), ("label", str)]

    # Linearly separable toy data: negative values carry label 1,
    # non-negative values carry label 0.
    rows = [
        [-48, 1], [-75, 1], [-63, 1], [-57, 1], [73, 0], [-33, 1],
        [100, 0], [-54, 1], [78, 0], [48, 0], [-55, 1], [23, 0],
        [45, 0], [75, 0], [95, 0], [73, 0], [7, 0], [39, 0], [-60, 1],
    ]

    logger.info("creating the frame")
    frame = tc.frame.create(rows, schema=schema)
    logger.info(frame.inspect())

    logger.info("training the model on the frame")
    model = tc.models.classification.svm.train(frame, ['data'], 'label')

    logger.info("predicting the class using the model and the frame")
    predicted_frame = model.predict(frame)

    # Prediction adds exactly one column to the two input columns.
    assert set(predicted_frame.column_names) == set(['data', 'label', 'predicted_label'])
    assert len(predicted_frame.column_names) == 3
    assert len(frame.column_names) == 2

    # A separable data set must be classified perfectly.
    metrics = model.test(predicted_frame)
    assert metrics.accuracy == 1.0
    assert metrics.f_measure == 1.0
    assert metrics.precision == 1.0
    assert metrics.recall == 1.0
| skavulya/spark-tk | integration-tests/tests/test_svm.py | Python | apache-2.0 | 1,997 |
package proxy
import (
"context"
"fmt"
"net"
"sync"
"github.com/fatedier/frp/client/event"
"github.com/fatedier/frp/pkg/config"
"github.com/fatedier/frp/pkg/msg"
"github.com/fatedier/frp/pkg/util/xlog"
"github.com/fatedier/golib/errors"
)
// Manager owns the client-side proxy wrappers and forwards their control
// messages to the server connection.
type Manager struct {
	// sendCh carries control messages destined for the server.
	sendCh chan (msg.Message)
	// proxies maps proxy name to its wrapper; guarded by mu.
	proxies map[string]*Wrapper
	// closed is a shutdown flag (not mutated by the methods in this file —
	// TODO(review): confirm it is still used elsewhere).
	closed bool
	mu     sync.RWMutex

	clientCfg config.ClientCommonConf

	// The UDP port that the server is listening on
	serverUDPPort int

	ctx context.Context
}
// NewManager builds a Manager that reports proxy control messages through
// msgSendCh and starts with an empty proxy registry.
func NewManager(ctx context.Context, msgSendCh chan (msg.Message), clientCfg config.ClientCommonConf, serverUDPPort int) *Manager {
	m := &Manager{
		sendCh:        msgSendCh,
		proxies:       map[string]*Wrapper{},
		closed:        false,
		clientCfg:     clientCfg,
		serverUDPPort: serverUDPPort,
		ctx:           ctx,
	}
	return m
}
// StartProxy moves the named proxy into the running state using the remote
// address and error string reported by the server.
//
// It returns an error if no proxy with the given name is registered, or the
// error from the proxy's own SetRunningStatus otherwise.
func (pm *Manager) StartProxy(name string, remoteAddr string, serverRespErr string) error {
	pm.mu.RLock()
	pxy, ok := pm.proxies[name]
	pm.mu.RUnlock()
	if !ok {
		return fmt.Errorf("proxy [%s] not found", name)
	}

	// Propagate the proxy's result directly instead of re-wrapping the
	// nil/non-nil check in a temporary variable.
	return pxy.SetRunningStatus(remoteAddr, serverRespErr)
}
// Close stops every managed proxy and resets the registry to empty.
func (pm *Manager) Close() {
	pm.mu.Lock()
	defer pm.mu.Unlock()

	for _, wrapper := range pm.proxies {
		wrapper.Stop()
	}
	pm.proxies = map[string]*Wrapper{}
}
// HandleWorkConn routes an incoming work connection to the proxy it belongs
// to; connections for unknown proxies are closed immediately.
func (pm *Manager) HandleWorkConn(name string, workConn net.Conn, m *msg.StartWorkConn) {
	pm.mu.RLock()
	pw, ok := pm.proxies[name]
	pm.mu.RUnlock()

	if !ok {
		workConn.Close()
		return
	}
	pw.InWorkConn(workConn, m)
}
// HandleEvent translates a proxy lifecycle event into a control message and
// sends it to the server. Unknown payload types yield event.ErrPayloadType.
func (pm *Manager) HandleEvent(evType event.Type, payload interface{}) error {
	var m msg.Message
	switch e := payload.(type) {
	case *event.StartProxyPayload:
		m = e.NewProxyMsg
	case *event.CloseProxyPayload:
		m = e.CloseProxyMsg
	default:
		return event.ErrPayloadType
	}

	// Sending on sendCh may panic (e.g. if the channel has been closed);
	// PanicToError converts that panic into an ordinary error. Return it
	// directly rather than staging it in a temporary variable.
	return errors.PanicToError(func() {
		pm.sendCh <- m
	})
}
// GetAllProxyStatus returns a snapshot of the working status of every
// managed proxy.
func (pm *Manager) GetAllProxyStatus() []*WorkingStatus {
	pm.mu.RLock()
	defer pm.mu.RUnlock()

	statuses := make([]*WorkingStatus, 0, len(pm.proxies))
	for _, pxy := range pm.proxies {
		statuses = append(statuses, pxy.GetStatus())
	}
	return statuses
}
// Reload reconciles the running proxies with the desired configuration:
// proxies whose config disappeared or changed are stopped and removed, and
// any name present in pxyCfgs but not in the registry afterwards (new or
// just-removed-because-changed) is created and started.
func (pm *Manager) Reload(pxyCfgs map[string]config.ProxyConf) {
	xl := xlog.FromContextSafe(pm.ctx)
	pm.mu.Lock()
	defer pm.mu.Unlock()

	delPxyNames := make([]string, 0)
	for name, pxy := range pm.proxies {
		del := false
		cfg, ok := pxyCfgs[name]
		if !ok {
			// Proxy no longer exists in the new configuration.
			del = true
		} else {
			if !pxy.Cfg.Compare(cfg) {
				// Config changed: remove here so the add loop below
				// recreates it with the new config.
				del = true
			}
		}

		if del {
			delPxyNames = append(delPxyNames, name)
			delete(pm.proxies, name)
			pxy.Stop()
		}
	}
	if len(delPxyNames) > 0 {
		xl.Info("proxy removed: %v", delPxyNames)
	}

	addPxyNames := make([]string, 0)
	for name, cfg := range pxyCfgs {
		if _, ok := pm.proxies[name]; !ok {
			pxy := NewWrapper(pm.ctx, cfg, pm.clientCfg, pm.HandleEvent, pm.serverUDPPort)
			pm.proxies[name] = pxy
			addPxyNames = append(addPxyNames, name)

			pxy.Start()
		}
	}
	if len(addPxyNames) > 0 {
		xl.Info("proxy added: %v", addPxyNames)
	}
}
| fatedier/frp | client/proxy/proxy_manager.go | GO | apache-2.0 | 2,973 |
package es.upm.fi.dia.oeg.ogsadai.sparql.client;
import uk.org.ogsadai.activity.ActivityName;
import uk.org.ogsadai.client.toolkit.ActivityOutput;
import uk.org.ogsadai.client.toolkit.SingleActivityOutput;
import uk.org.ogsadai.client.toolkit.activity.ActivityInput;
import uk.org.ogsadai.client.toolkit.activity.BaseResourceActivity;
import uk.org.ogsadai.client.toolkit.activity.SimpleActivityInput;
import uk.org.ogsadai.client.toolkit.activity.SimpleActivityOutput;
import uk.org.ogsadai.client.toolkit.exception.ActivityIOIllegalStateException;
import uk.org.ogsadai.client.toolkit.exception.DataSourceUsageException;
import uk.org.ogsadai.client.toolkit.exception.DataStreamErrorException;
import uk.org.ogsadai.client.toolkit.exception.UnexpectedDataValueException;
import uk.org.ogsadai.data.StringData;
/**
 * OGSA-DAI client activity that submits a SPARQL query expression to the
 * "DQPResource" resource and exposes the result rows as an activity output.
 */
public class RDFActivity extends BaseResourceActivity {

    /**
     * Default activity name.
     * NOTE(review): this reuses the generic SQLQuery activity name even
     * though the expression is SPARQL — confirm this is intentional.
     */
    public final static ActivityName DEFAULT_ACTIVITY_NAME =
        new ActivityName("uk.org.ogsadai.SQLQuery");

    /** Activity output ("data"). */
    private ActivityOutput mOutput;

    /** Activity input carrying the query expression. */
    // Renamed from "query" for consistency with the mOutput field naming.
    private ActivityInput mQuery;

    /**
     * Creates the activity for the given SPARQL query, targeting the
     * "DQPResource" resource.
     *
     * @param SPARQLquery the query expression to execute
     */
    public RDFActivity(String SPARQLquery) {
        super(DEFAULT_ACTIVITY_NAME);
        mQuery = new SimpleActivityInput("expression");
        mQuery.add(new StringData(SPARQLquery));
        mOutput = new SimpleActivityOutput("data");
        this.setResourceID("DQPResource");
    }

    @Override
    protected ActivityInput[] getInputs() {
        return new ActivityInput[]{mQuery};
    }

    @Override
    protected ActivityOutput[] getOutputs() {
        return new ActivityOutput[]{mOutput};
    }

    @Override
    protected void validateIOState() throws ActivityIOIllegalStateException {
        // The single input and output are fixed at construction time, so no
        // additional validation is performed here.
    }

    /**
     * Gets the output so that it can be connected to the input of other
     * activities.
     *
     * @return the activity output.
     */
    public SingleActivityOutput getDataOutput()
    {
        return mOutput.getSingleActivityOutputs()[0];
    }

    /**
     * Gets if the activity has a next output value.
     *
     * @return true if there is another output value, false otherwise.
     *
     * @throws DataStreamErrorException
     *             if there is an error on the data stream.
     * @throws UnexpectedDataValueException
     *             if there is an unexpected data value on the data stream.
     * @throws DataSourceUsageException
     *             if there is an error reading from a data source.
     */
    public boolean hasNextOutput()
        throws DataStreamErrorException,
               UnexpectedDataValueException,
               DataSourceUsageException
    {
        return mOutput.getDataValueIterator().hasNext();
    }

    /**
     * Gets the next output value.
     *
     * @return the next output value.
     *
     * @throws DataStreamErrorException
     *             if there is an error on the data stream.
     * @throws UnexpectedDataValueException
     *             if there is an unexpected data value on the data stream.
     * @throws DataSourceUsageException
     *             if there is an error reading from a data source.
     */
    public StringData nextOutput()
        throws DataStreamErrorException,
               UnexpectedDataValueException,
               DataSourceUsageException
    {
        return new StringData((String) mOutput.getDataValueIterator().nextAsString());
    }

    /**
     * Connects the input to the given output.
     *
     * @param output
     *            Output to connect to.
     */
    public void connectInput(SingleActivityOutput output)
    {
        mQuery.connect(output);
    }

    /**
     * Adds a value to the input.
     *
     * @param name
     *            Name to add to the input.
     */
    public void addInput(String name)
    {
        mQuery.add(new StringData(name));
    }
}
| cbuil/sparql-dqp | src/main/java/es/upm/fi/dia/oeg/ogsadai/sparql/client/RDFActivity.java | Java | apache-2.0 | 3,973 |
/* Copyright 2020 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "mlir-hlo/Dialect/mhlo/IR/chlo_ops.h"
#include "mlir-hlo/Dialect/mhlo/IR/hlo_ops.h"
#include "mlir-hlo/Dialect/mhlo/IR/lhlo_ops.h"
#include "mlir-hlo/Dialect/mhlo/IR/register.h"
// Static initialization for *HLO dialects registration.
// Registers the *HLO dialects with MLIR's global dialect registry. The
// immediately-invoked lambda assigned to a function-local static runs
// exactly once per process (thread-safe under C++11 static-initialization
// rules), so repeated calls are harmless no-ops.
void mlir::mhlo::registerAllDialects() {
  static bool init_once = []() {
    registerDialect<mlir::chlo::HloClientDialect>();
    registerDialect<mlir::lmhlo::LmhloDialect>();
    registerDialect<mlir::mhlo::MhloDialect>();
    return true;
  }();
  (void)init_once;  // Referenced only to silence unused-variable warnings.
  // Dependent dialects
}
void mlir::mhlo::registerAllMhloDialects(mlir::DialectRegistry ®istry) {
// clang-format off
registry.insert<mlir::chlo::HloClientDialect,
mlir::lmhlo::LmhloDialect,
mlir::mhlo::MhloDialect>();
// clang-format on
}
| aldian/tensorflow | tensorflow/compiler/mlir/hlo/lib/Dialect/mhlo/IR/init.cc | C++ | apache-2.0 | 1,472 |
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
package wire
import (
"context"
"errors"
"fmt"
"reflect"
"sort"
"time"
"github.com/google/uuid"
"google.golang.org/grpc"
vkit "cloud.google.com/go/pubsublite/apiv1"
pb "google.golang.org/genproto/googleapis/cloud/pubsublite/v1"
)
// PartitionSet is a set of partition numbers.
type PartitionSet map[int]struct{}

// NewPartitionSet creates a partition set initialized from the given partition
// numbers. Duplicates collapse into a single entry.
func NewPartitionSet(partitions []int) PartitionSet {
	// Presize to the input length; the zero-size struct{}{} literal avoids
	// the throwaway "void" variable the previous version declared.
	partitionSet := make(PartitionSet, len(partitions))
	for _, p := range partitions {
		partitionSet[p] = struct{}{}
	}
	return partitionSet
}
// newPartitionSet converts a PartitionAssignment proto into a PartitionSet.
func newPartitionSet(assignmentpb *pb.PartitionAssignment) PartitionSet {
	protoPartitions := assignmentpb.GetPartitions()
	partitions := make([]int, 0, len(protoPartitions))
	for _, p := range protoPartitions {
		partitions = append(partitions, int(p))
	}
	return NewPartitionSet(partitions)
}
// Ints returns the partitions contained in this set as an unsorted slice.
func (ps PartitionSet) Ints() []int {
	var partitions []int
	for p := range ps {
		partitions = append(partitions, p)
	}
	return partitions
}

// SortedInts returns the partitions contained in this set as a sorted slice.
func (ps PartitionSet) SortedInts() []int {
	partitions := ps.Ints()
	sort.Ints(partitions)
	return partitions
}

// Contains reports whether this set contains the specified partition.
func (ps PartitionSet) Contains(partition int) bool {
	_, found := ps[partition]
	return found
}
// A function that generates a 16-byte UUID.
type generateUUIDFunc func() (uuid.UUID, error)

// partitionAssignmentReceiver must enact the received partition assignment from
// the server, or otherwise return an error, which will break the stream.
type partitionAssignmentReceiver func(PartitionSet) error

// assigner wraps the partition assignment stream and notifies a receiver when
// the server sends a new set of partition assignments for a subscriber.
type assigner struct {
	// Immutable after creation.
	assignmentClient  *vkit.PartitionAssignmentClient
	subscription      string
	initialReq        *pb.PartitionAssignmentRequest
	receiveAssignment partitionAssignmentReceiver
	metadata          pubsubMetadata

	// Fields below must be guarded with mu.
	stream *retryableStream

	// Embedded framework; supplies the mu and status members the methods
	// below rely on.
	abstractService
}
// newAssigner creates an assigner for the given subscription. A freshly
// generated client UUID identifies this client to the server in the initial
// handshake request.
func newAssigner(ctx context.Context, assignmentClient *vkit.PartitionAssignmentClient, genUUID generateUUIDFunc, settings ReceiveSettings, subscriptionPath string, receiver partitionAssignmentReceiver) (*assigner, error) {
	clientID, err := genUUID()
	if err != nil {
		return nil, fmt.Errorf("pubsublite: failed to generate client UUID: %v", err)
	}

	a := &assigner{
		assignmentClient: assignmentClient,
		subscription:     subscriptionPath,
		initialReq: &pb.PartitionAssignmentRequest{
			Request: &pb.PartitionAssignmentRequest_Initial{
				Initial: &pb.InitialPartitionAssignmentRequest{
					Subscription: subscriptionPath,
					ClientId:     clientID[:],
				},
			},
		},
		receiveAssignment: receiver,
		metadata:          newPubsubMetadata(),
	}
	// Retryable stream of pb.PartitionAssignment responses.
	// NOTE(review): the 10*time.Minute argument's exact meaning is defined by
	// newRetryableStream — confirm before relying on it.
	a.stream = newRetryableStream(ctx, a, settings.Timeout, 10*time.Minute, reflect.TypeOf(pb.PartitionAssignment{}))
	a.metadata.AddClientInfo(settings.Framework)
	return a, nil
}
// Start opens the partition assignment stream. The status transition acts as
// a guard, so only the first successful call actually starts the stream.
func (a *assigner) Start() {
	a.mu.Lock()
	defer a.mu.Unlock()
	if a.unsafeUpdateStatus(serviceStarting, nil) {
		a.stream.Start()
	}
}

// Stop initiates shutdown of the assignment stream.
func (a *assigner) Stop() {
	a.mu.Lock()
	defer a.mu.Unlock()
	a.unsafeInitiateShutdown(serviceTerminating, nil)
}
// newStream opens a gRPC AssignPartitions stream with the client metadata
// attached to the outgoing context.
func (a *assigner) newStream(ctx context.Context) (grpc.ClientStream, error) {
	return a.assignmentClient.AssignPartitions(a.metadata.AddToContext(ctx))
}

// initialRequest returns the handshake request; the server is not expected
// to send an initial response for this stream.
func (a *assigner) initialRequest() (interface{}, initialResponseRequired) {
	return a.initialReq, initialResponseRequired(false)
}

func (a *assigner) validateInitialResponse(_ interface{}) error {
	// Should not be called as initialResponseRequired=false above.
	return errors.New("pubsublite: unexpected initial response")
}
// onStreamStatusChange mirrors the stream state onto the service state:
// a connected stream makes the service active, a terminated stream shuts
// the service down with the stream's final error.
func (a *assigner) onStreamStatusChange(status streamStatus) {
	a.mu.Lock()
	defer a.mu.Unlock()

	if status == streamConnected {
		a.unsafeUpdateStatus(serviceActive, nil)
	} else if status == streamTerminated {
		a.unsafeInitiateShutdown(serviceTerminated, a.stream.Error())
	}
}
// onResponse delivers a new partition assignment to the receiver and, on
// success, sends an ack back to the server.
func (a *assigner) onResponse(response interface{}) {
	assignment, _ := response.(*pb.PartitionAssignment)
	// Note: the receiver is invoked before a.mu is acquired; the lock is
	// only taken afterwards to examine status and send the ack.
	err := a.receiveAssignment(newPartitionSet(assignment))

	a.mu.Lock()
	defer a.mu.Unlock()

	if a.status >= serviceTerminating {
		// Already shutting down; drop the response without acking.
		return
	}
	if err != nil {
		// The receiver rejected the assignment; terminate with its error.
		a.unsafeInitiateShutdown(serviceTerminated, err)
		return
	}
	// Acknowledge receipt of the assignment to the server.
	a.stream.Send(&pb.PartitionAssignmentRequest{
		Request: &pb.PartitionAssignmentRequest_Ack{
			Ack: &pb.PartitionAssignmentAck{},
		},
	})
}
// unsafeInitiateShutdown records the new service status and, if the status
// actually changed, stops the stream. Caller must hold a.mu.
func (a *assigner) unsafeInitiateShutdown(targetStatus serviceStatus, err error) {
	if a.unsafeUpdateStatus(targetStatus, wrapError("assigner", a.subscription, err)) {
		// No data to send. Immediately terminate the stream.
		a.stream.Stop()
	}
}
| googleapis/google-cloud-go | pubsublite/internal/wire/assigner.go | GO | apache-2.0 | 5,451 |
/*
* Copyright (c) 2013, 2015 Sacred Scripture Foundation.
* "All scripture is given by inspiration of God, and is profitable for
* doctrine, for reproof, for correction, for instruction in righteousness:
* That the man of God may be perfect, throughly furnished unto all good
* works." (2 Tim 3:16-17)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sacredscripture.platform.internal.bible;
import org.sacredscripture.platform.bible.Book;
import org.sacredscripture.platform.bible.Content;
import org.sacredscripture.platform.internal.DataModel.ContentTable;
import org.sacredscripturefoundation.commons.entity.EntityImpl;
import org.sacredscripturefoundation.commons.entity.PublicIdProvider;
import javax.persistence.Column;
import javax.persistence.DiscriminatorColumn;
import javax.persistence.DiscriminatorType;
import javax.persistence.Entity;
import javax.persistence.Inheritance;
import javax.persistence.InheritanceType;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
/**
* This abstract class is the stock implementation of {@link Content}.
*
* @author Paul Benedict
* @since Sacred Scripture Platform 1.0
*/
@Entity
@Table(name = ContentTable.TABLE_NAME)
@Inheritance(strategy = InheritanceType.SINGLE_TABLE)
@DiscriminatorColumn(name = ContentTable.COLUMN_DISCRIMINATOR, discriminatorType = DiscriminatorType.INTEGER)
public abstract class ContentImpl extends EntityImpl<Long> implements Content, PublicIdProvider<String> {

    /*
     * Implementing PublicIdProvider shouldn't be necessary here, but HHH cannot
     * proxy getPublicIdProvider() and lazy-load the value in the subclasses
     * without it (always null). Bug in Javassist? Need to check if this problem
     * still exists in HHH 5.x before reporting it.
     */

    // Public identifier exposed outside the database.
    @Column(name = ContentTable.COLUMN_PUBLIC_ID)
    private String publicId;

    // Owning book; required (optional = false).
    @ManyToOne(targetEntity = BookImpl.class, optional = false)
    @JoinColumn(name = ContentTable.COLUMN_BOOK_ID)
    private Book book;

    // Position value; must be >= 0 (enforced in setOrder).
    @Column(name = ContentTable.COLUMN_POSITION)
    private int order;

    // Content code, normalized to lower case in setCode.
    @Column(name = ContentTable.COLUMN_CODE)
    private String code;

    @Override
    public Book getBook() {
        return book;
    }

    @Override
    public String getCode() {
        return code;
    }

    @Override
    public int getOrder() {
        return order;
    }

    @Override
    public String getPublicId() {
        return publicId;
    }

    @Override
    public void setBook(Book book) {
        this.book = book;
    }

    @Override
    public void setCode(String code) {
        // Normalize to lower case; null clears the code.
        this.code = (code != null ? code.toLowerCase() : null);
    }

    @Override
    public void setOrder(int order) {
        if (order < 0) {
            throw new IllegalArgumentException("Order must be >= 0");
        }
        this.order = order;
    }

    public void setPublicId(String publicId) {
        this.publicId = publicId;
    }
}
| SacredScriptureFoundation/sacredscripture | impl/src/main/java/org/sacredscripture/platform/internal/bible/ContentImpl.java | Java | apache-2.0 | 3,602 |
'use strict';
var JpipStructureParser = function JpipStructureParserClosure(
databinsSaver, markersParser, messageHeaderParser, offsetsCalculator) {
this.parseCodestreamStructure = function parseCodestreamStructure() {
// A.5.1 (Image and Tile Size)
var bytes = [];
var mainHeaderDatabin = databinsSaver.getMainHeaderDatabin();
var sizMarkerOffset = offsetsCalculator.getImageAndTileSizeOffset();
var bytes = getBytes(
mainHeaderDatabin,
/*numBytes=*/38,
sizMarkerOffset + j2kOffsets.MARKER_SIZE + j2kOffsets.LENGTH_FIELD_SIZE);
var referenceGridSizeOffset =
j2kOffsets.REFERENCE_GRID_SIZE_OFFSET_AFTER_SIZ_MARKER -
(j2kOffsets.MARKER_SIZE + j2kOffsets.LENGTH_FIELD_SIZE);
var numComponentsOffset =
j2kOffsets.NUM_COMPONENTS_OFFSET_AFTER_SIZ_MARKER -
(j2kOffsets.MARKER_SIZE + j2kOffsets.LENGTH_FIELD_SIZE);
var referenceGridSizeX = messageHeaderParser.getInt32(
bytes, referenceGridSizeOffset); // XSiz
var referenceGridSizeY = messageHeaderParser.getInt32(
bytes, referenceGridSizeOffset + 4); // YSiz
var imageOffsetX = messageHeaderParser.getInt32(bytes, 10); // XOSiz
var imageOffsetY = messageHeaderParser.getInt32(bytes, 14); // YOSiz
var tileSizeX = messageHeaderParser.getInt32(bytes, 18); // XTSiz
var tileSizeY = messageHeaderParser.getInt32(bytes, 22); // YTSiz
var firstTileOffsetX = messageHeaderParser.getInt32(bytes, 26); // XTOSiz
var firstTileOffsetY = messageHeaderParser.getInt32(bytes, 30); // YTOSiz
var numComponents = messageHeaderParser.getInt16(bytes, numComponentsOffset); // CSiz
var componentsDataOffset =
sizMarkerOffset + j2kOffsets.NUM_COMPONENTS_OFFSET_AFTER_SIZ_MARKER + 2;
var componentsDataLength = numComponents * 3;
var componentsDataBytes = getBytes(
mainHeaderDatabin, componentsDataLength, componentsDataOffset);
var componentsScaleX = new Array(numComponents);
var componentsScaleY = new Array(numComponents);
for (var i = 0; i < numComponents; ++i) {
componentsScaleX[i] = componentsDataBytes[i * 3 + 1];
componentsScaleY[i] = componentsDataBytes[i * 3 + 2];
}
var result = {
numComponents: numComponents,
componentsScaleX: componentsScaleX,
componentsScaleY: componentsScaleY,
imageWidth: referenceGridSizeX - firstTileOffsetX,
imageHeight: referenceGridSizeY - firstTileOffsetY,
tileWidth: tileSizeX,
tileHeight: tileSizeY,
firstTileOffsetX: firstTileOffsetX,
firstTileOffsetY: firstTileOffsetY
};
return result;
};
this.parseDefaultTileParams = function() {
var mainHeaderDatabin = databinsSaver.getMainHeaderDatabin();
var tileParams = parseCodingStyle(mainHeaderDatabin, /*isMandatory=*/true);
return tileParams;
};
this.parseOverridenTileParams = function(tileIndex) {
var tileHeaderDatabin = databinsSaver.getTileHeaderDatabin(tileIndex);
// A.4.2 (Start Of Tile-part)
var tileParams = parseCodingStyle(tileHeaderDatabin, /*isMandatory=*/false);
return tileParams;
};
function parseCodingStyle(databin, isMandatory) {
// A.5.1 (Image and Tile Size)
var baseParams = offsetsCalculator.getCodingStyleBaseParams(
databin, isMandatory);
if (baseParams === null) {
return null;
}
var mainHeaderDatabin = databinsSaver.getMainHeaderDatabin();
var sizMarkerOffset = offsetsCalculator.getImageAndTileSizeOffset();
var numComponentsOffset =
sizMarkerOffset + j2kOffsets.NUM_COMPONENTS_OFFSET_AFTER_SIZ_MARKER;
var numComponentsBytes = getBytes(
mainHeaderDatabin,
/*numBytes=*/2,
/*startOffset=*/numComponentsOffset);
var numComponents = messageHeaderParser.getInt16(numComponentsBytes, 0);
var packedPacketHeadersMarkerInTileHeader =
markersParser.getMarkerOffsetInDatabin(
databin, j2kMarkers.PackedPacketHeadersInTileHeader);
var packedPacketHeadersMarkerInMainHeader =
markersParser.getMarkerOffsetInDatabin(
mainHeaderDatabin, j2kMarkers.PackedPacketHeadersInMainHeader);
var isPacketHeadersNearData =
packedPacketHeadersMarkerInTileHeader === null &&
packedPacketHeadersMarkerInMainHeader === null;
var codingStyleMoreDataOffset = baseParams.codingStyleDefaultOffset + 6;
var codingStyleMoreDataBytes = getBytes(
databin,
/*numBytes=*/6,
/*startOffset=*/codingStyleMoreDataOffset);
var numQualityLayers = messageHeaderParser.getInt16(
codingStyleMoreDataBytes, 0);
var codeblockWidth = parseCodeblockSize(
codingStyleMoreDataBytes, 4);
var codeblockHeight = parseCodeblockSize(
codingStyleMoreDataBytes, 5);
var precinctWidths = new Array(baseParams.numResolutionLevels);
var precinctHeights = new Array(baseParams.numResolutionLevels);
var precinctSizesBytes = null;
if (!baseParams.isDefaultPrecinctSize) {
var precinctSizesBytesNeeded = baseParams.numResolutionLevels;
precinctSizesBytes = getBytes(
databin,
precinctSizesBytesNeeded,
baseParams.precinctSizesOffset);
}
var defaultSize = 1 << 15;
for (var i = 0; i < baseParams.numResolutionLevels; ++i) {
if (baseParams.isDefaultPrecinctSize) {
precinctWidths[i] = defaultSize;
precinctHeights[i] = defaultSize;
continue;
}
var precinctSizeOffset = i;
var sizeExponents = precinctSizesBytes[precinctSizeOffset];
var ppx = sizeExponents & 0x0F;
var ppy = sizeExponents >>> 4;
precinctWidths[i] = 1 * Math.pow(2, ppx); // Avoid negative result due to signed calculation
precinctHeights[i] = 1 * Math.pow(2, ppy); // Avoid negative result due to signed calculation
}
var paramsPerComponent = new Array(numComponents);
for (var i = 0; i < numComponents; ++i) {
paramsPerComponent[i] = {
maxCodeblockWidth: codeblockWidth,
maxCodeblockHeight: codeblockHeight,
numResolutionLevels: baseParams.numResolutionLevels,
precinctWidthPerLevel: precinctWidths,
precinctHeightPerLevel: precinctHeights
};
}
var defaultComponentParams = {
maxCodeblockWidth: codeblockWidth,
maxCodeblockHeight: codeblockHeight,
numResolutionLevels: baseParams.numResolutionLevels,
precinctWidthPerLevel: precinctWidths,
precinctHeightPerLevel: precinctHeights
};
var tileParams = {
numQualityLayers: numQualityLayers,
isPacketHeadersNearData: isPacketHeadersNearData,
isStartOfPacketMarkerAllowed: baseParams.isStartOfPacketMarkerAllowed,
isEndPacketHeaderMarkerAllowed: baseParams.isEndPacketHeaderMarkerAllowed,
paramsPerComponent: paramsPerComponent,
defaultComponentParams: defaultComponentParams
};
return tileParams;
}
function parseCodeblockSize(bytes, offset) {
    // Decodes a codeblock size exponent byte (JPEG2000 A.6.1, Table A.18).
    // The low nibble stores (exponent - 2); the codeblock dimension is
    // 2^exponent and must not exceed 2^10 = 1024.
    var codeblockSizeExponentMinus2 = bytes[offset];
    var codeblockSizeExponent = 2 + (codeblockSizeExponentMinus2 & 0x0F);
    if (codeblockSizeExponent > 10) {
        // This helper is used for both the width and the height exponent,
        // so the message refers to the generic codeblock size (the old
        // message said "width" even when parsing the height byte).
        throw new j2kExceptions.IllegalDataException(
            'Illegal codeblock size exponent ' + codeblockSizeExponent,
            'A.6.1, Table A.18');
    }
    var size = 1 << codeblockSizeExponent;
    return size;
}
function getBytes(databin, numBytes, databinStartOffset, allowEndOfRange) {
    // Copies numBytes bytes out of the databin, starting at
    // databinStartOffset, and returns them as a plain array.
    // Throws when the databin has not yet received enough bytes.
    // NOTE(review): allowEndOfRange is declared but never used here —
    // confirm whether partial copies should be allowed when it is set.
    var bytes = [];
    var rangeOptions = {
        forceCopyAllRange: true,
        maxLengthToCopy: numBytes,
        databinStartOffset: databinStartOffset
    };
    var bytesCopied = databin.copyBytes(bytes, /*startOffset=*/0, rangeOptions);
    if (bytesCopied === null) {
        // copyBytes signals missing data by returning null rather than
        // throwing; surface it as an internal error here.
        // (Fixed typo: "recieved" -> "received" in the message.)
        throw new jpipExceptions.InternalErrorException(
            'Header data-bin has not yet received ' + numBytes +
            ' bytes starting from offset ' + databinStartOffset);
    }
    return bytes;
}
}; | MaMazav/MaMazav.github.io | webjpip.js/old/webjpip.js/jpipcore/parsers/jpipstructureparser.js | JavaScript | apache-2.0 | 9,141 |
import { IThriftClientFilter } from '../types'
/**
 * Creates a predicate that selects client filters applicable to a method.
 *
 * A filter applies when it declares no methods at all (matches every call)
 * or when its `methods` list explicitly contains the given method name.
 *
 * @param method The Thrift method name being invoked.
 * @returns A predicate over `IThriftClientFilter<Context>`.
 */
export function filterByMethod<Context>(
    method: string,
): (filter: IThriftClientFilter<Context>) => boolean {
    // Typed the filter parameter properly (was `any`), restoring the type
    // safety promised by the declared return type. No runtime change.
    return (filter: IThriftClientFilter<Context>): boolean => {
        return (
            filter.methods.length === 0 || filter.methods.indexOf(method) > -1
        )
    }
}
| creditkarma/thrift-server | packages/thrift-client/src/main/connections/utils.ts | TypeScript | apache-2.0 | 317 |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
/**
* LiveStreamEventCdnSettingsError.java
*
* This file was auto-generated from WSDL
* by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
*/
package com.google.api.ads.admanager.axis.v202105;
/**
* Lists all errors associated with {@link LiveStreamEvent} CDN configurations.
*/
public class LiveStreamEventCdnSettingsError extends com.google.api.ads.admanager.axis.v202105.ApiError implements java.io.Serializable {
    // The service-reported reason for this CDN settings error; null when unset.
    private com.google.api.ads.admanager.axis.v202105.LiveStreamEventCdnSettingsErrorReason reason;

    // No-arg constructor required by the Axis bean (de)serializers.
    public LiveStreamEventCdnSettingsError() {
    }

    public LiveStreamEventCdnSettingsError(
           java.lang.String fieldPath,
           com.google.api.ads.admanager.axis.v202105.FieldPathElement[] fieldPathElements,
           java.lang.String trigger,
           java.lang.String errorString,
           com.google.api.ads.admanager.axis.v202105.LiveStreamEventCdnSettingsErrorReason reason) {
        super(
            fieldPath,
            fieldPathElements,
            trigger,
            errorString);
        this.reason = reason;
    }

    @Override
    public String toString() {
        return com.google.common.base.MoreObjects.toStringHelper(this.getClass())
            .omitNullValues()
            .add("errorString", getErrorString())
            .add("fieldPath", getFieldPath())
            .add("fieldPathElements", getFieldPathElements())
            .add("reason", getReason())
            .add("trigger", getTrigger())
            .toString();
    }

    /**
     * Gets the reason value for this LiveStreamEventCdnSettingsError.
     *
     * @return reason
     */
    public com.google.api.ads.admanager.axis.v202105.LiveStreamEventCdnSettingsErrorReason getReason() {
        return reason;
    }

    /**
     * Sets the reason value for this LiveStreamEventCdnSettingsError.
     *
     * @param reason
     */
    public void setReason(com.google.api.ads.admanager.axis.v202105.LiveStreamEventCdnSettingsErrorReason reason) {
        this.reason = reason;
    }

    // Cycle guard: holds the object currently being compared so that
    // self-referential object graphs do not recurse forever in equals().
    private java.lang.Object __equalsCalc = null;

    public synchronized boolean equals(java.lang.Object obj) {
        if (!(obj instanceof LiveStreamEventCdnSettingsError)) return false;
        LiveStreamEventCdnSettingsError other = (LiveStreamEventCdnSettingsError) obj;
        if (obj == null) return false;
        if (this == obj) return true;
        if (__equalsCalc != null) {
            // Re-entrant call during a comparison involving this instance;
            // treat the pending object as equal only if it is the same reference.
            return (__equalsCalc == obj);
        }
        __equalsCalc = obj;
        boolean _equals;
        _equals = super.equals(obj) &&
            ((this.reason==null && other.getReason()==null) ||
             (this.reason!=null &&
              this.reason.equals(other.getReason())));
        __equalsCalc = null;
        return _equals;
    }

    // Cycle guard for hashCode(): re-entrant calls return 0 so that
    // self-referential object graphs terminate.
    private boolean __hashCodeCalc = false;

    public synchronized int hashCode() {
        if (__hashCodeCalc) {
            return 0;
        }
        __hashCodeCalc = true;
        int _hashCode = super.hashCode();
        if (getReason() != null) {
            _hashCode += getReason().hashCode();
        }
        __hashCodeCalc = false;
        return _hashCode;
    }

    // Type metadata
    // Axis type descriptor mapping this bean onto its WSDL-defined XML type.
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(LiveStreamEventCdnSettingsError.class, true);

    static {
        // Register the XML type name and the "reason" element descriptor so
        // Axis can (de)serialize this bean.
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "LiveStreamEventCdnSettingsError"));
        org.apache.axis.description.ElementDesc elemField = new org.apache.axis.description.ElementDesc();
        elemField.setFieldName("reason");
        elemField.setXmlName(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "reason"));
        elemField.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202105", "LiveStreamEventCdnSettingsError.Reason"));
        elemField.setMinOccurs(0);
        elemField.setNillable(false);
        typeDesc.addFieldDesc(elemField);
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

    /**
     * Get Custom Serializer
     */
    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanSerializer(
            _javaType, _xmlType, typeDesc);
    }

    /**
     * Get Custom Deserializer
     */
    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.BeanDeserializer(
            _javaType, _xmlType, typeDesc);
    }

}
| googleads/googleads-java-lib | modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202105/LiveStreamEventCdnSettingsError.java | Java | apache-2.0 | 5,570 |
/**
* Copyright (C) 2009-2013 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.aws;
import org.dasein.cloud.ProviderContext;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
@RunWith(JUnit4.class)
public class AWSSignatureV4Tests {
    // Cloud facade under test; getV4Authorization is the method being exercised.
    private AWSCloud awsCloud;

    // Connects with placeholder credentials/region; the signing tests below
    // pass their own access key and secret explicitly, so these values do not
    // influence the expected signatures.
    public AWSSignatureV4Tests() {
        this.awsCloud = new AWSCloud();
        awsCloud.connect(new ProviderContext("2123454", "us-east-1"));
    }

    /* ========================= GENERATED TESTS ==========================

       The below tests are generated from the AWS Signature V4 Test Suite:
       http://docs.aws.amazon.com/general/latest/gr/signature-v4-test-suite.html

       We currently skip four of these test cases which cover functionality we do
       not care about. Namely, duplicate HTTP headers and Unicode query strings.

       Tests were generated using this Python script, run from the extracted
       aws5_testsuite directory:

       #!/usr/bin/python
       import os
       from hashlib import sha256 as sha256

       ACCESS_KEY = "AKIDEXAMPLE"
       SECRET = "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY"

       # ignore some tests for functionality we don't care about:
       # unicode query strings and duplicate headers
       IGNORED = ["get-vanilla-ut8-query", "get-header-key-duplicate",
                  "get-header-value-order", "post-vanilla-query-nonunreserved"]

       files = set(os.listdir("."))
       basenames = set(os.path.splitext(f)[0] for f in files)
       basenames = set(f for f in basenames if f + ".req" in files and f + ".authz" in files)

       def out(s):
           print "    " + s

       for f in sorted(basenames):
           req = open(f + ".req").read()
           authz = open(f + ".authz").read().strip()
           header, body = req.split("\r\n\r\n", 1)
           req_lines = header.split("\r\n")
           request = req_lines[0].strip()
           headers = [(line.split(":", 1)) for line in req_lines[1:]]
           body_hash = sha256(body).hexdigest()
           method, _, request = request.partition(" ")
           request = request[:request.find(" ")].replace("\\", "\\\\").replace('"', '\\"')
           out("@Test")
           if f in IGNORED:
               out('@Ignore("AWS4 signature feature we don\'t care about")')
           out("public void testV4Signature__" + f.replace('-', '_') + "() throws Exception {")
           out("    Map<String, String> headers = new HashMap<String, String>();")
           for key, val in headers:
               out('    headers.put("' + key + '", "' + val + '");')
           out('    String authz = awsCloud.getV4Authorization("' + ACCESS_KEY + '", "' + SECRET + '",')
           out('"' + method + '", "https://host.foo.com' + request +
               '", "host", headers, "' + body_hash + '");')
           out('    assertEquals("'+ authz + '", authz);')
           out("}\n")

       NOTE: test bodies below are generated fixtures — the URLs, body hashes
       and expected Authorization strings must not be edited by hand.
     */

    @Test
    @Ignore("AWS4 signature feature we don't care about")
    public void testV4Signature__get_header_key_duplicate() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("DATE", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("host", "host.foo.com");
        headers.put("ZOO", "zoobar");
        headers.put("zoo", "foobar");
        headers.put("zoo", "zoobar");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host;zoo, Signature=54afcaaf45b331f81cd2edb974f7b824ff4dd594cbbaa945ed636b48477368ed", authz);
    }

    @Test
    @Ignore("AWS4 signature feature we don't care about")
    public void testV4Signature__get_header_value_order() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("DATE", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("host", "host.foo.com");
        headers.put("p", "z");
        headers.put("p", "a");
        headers.put("p", "p");
        headers.put("p", "a");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host;p, Signature=d2973954263943b11624a11d1c963ca81fb274169c7868b2858c04f083199e3d", authz);
    }

    @Test
    public void testV4Signature__get_header_value_trim() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("DATE", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("host", "host.foo.com");
        headers.put("p", " phfft ");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host;p, Signature=debf546796015d6f6ded8626f5ce98597c33b47b9164cf6b17b4642036fcb592", authz);
    }

    @Test
    public void testV4Signature__get_relative() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/foo/..", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=b27ccfbfa7df52a200ff74193ca6e32d4b48b8856fab7ebf1c595d0670a7e470", authz);
    }

    @Test
    public void testV4Signature__get_relative_relative() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/foo/bar/../..", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=b27ccfbfa7df52a200ff74193ca6e32d4b48b8856fab7ebf1c595d0670a7e470", authz);
    }

    @Test
    public void testV4Signature__get_slash() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com//", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=b27ccfbfa7df52a200ff74193ca6e32d4b48b8856fab7ebf1c595d0670a7e470", authz);
    }

    @Test
    public void testV4Signature__get_slash_dot_slash() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/./", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=b27ccfbfa7df52a200ff74193ca6e32d4b48b8856fab7ebf1c595d0670a7e470", authz);
    }

    @Test
    public void testV4Signature__get_slash_pointless_dot() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/./foo", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=910e4d6c9abafaf87898e1eb4c929135782ea25bb0279703146455745391e63a", authz);
    }

    @Test
    public void testV4Signature__get_slashes() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com//foo//", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=b00392262853cfe3201e47ccf945601079e9b8a7f51ee4c3d9ee4f187aa9bf19", authz);
    }

    @Test
    public void testV4Signature__get_space() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/%20/foo", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=f309cfbd10197a230c42dd17dbf5cca8a0722564cb40a872d25623cfa758e374", authz);
    }

    @Test
    public void testV4Signature__get_unreserved() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=830cc36d03f0f84e6ee4953fbe701c1c8b71a0372c63af9255aa364dd183281e", authz);
    }

    @Test
    public void testV4Signature__get_utf8() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/%E1%88%B4", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=8d6634c189aa8c75c2e51e106b6b5121bed103fdb351f7d7d4381c738823af74", authz);
    }

    @Test
    public void testV4Signature__get_vanilla() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=b27ccfbfa7df52a200ff74193ca6e32d4b48b8856fab7ebf1c595d0670a7e470", authz);
    }

    @Test
    public void testV4Signature__get_vanilla_empty_query_key() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/?foo=bar", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=56c054473fd260c13e4e7393eb203662195f5d4a1fada5314b8b52b23f985e9f", authz);
    }

    @Test
    public void testV4Signature__get_vanilla_query() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=b27ccfbfa7df52a200ff74193ca6e32d4b48b8856fab7ebf1c595d0670a7e470", authz);
    }

    @Test
    public void testV4Signature__get_vanilla_query_order_key() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/?a=foo&b=foo", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=0dc122f3b28b831ab48ba65cb47300de53fbe91b577fe113edac383730254a3b", authz);
    }

    @Test
    public void testV4Signature__get_vanilla_query_order_key_case() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/?foo=Zoo&foo=aha", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=be7148d34ebccdc6423b19085378aa0bee970bdc61d144bd1a8c48c33079ab09", authz);
    }

    @Test
    public void testV4Signature__get_vanilla_query_order_value() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/?foo=b&foo=a", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=feb926e49e382bec75c9d7dcb2a1b6dc8aa50ca43c25d2bc51143768c0875acc", authz);
    }

    @Test
    public void testV4Signature__get_vanilla_query_unreserved() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/?-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz=-._~0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=f1498ddb4d6dae767d97c466fb92f1b59a2c71ca29ac954692663f9db03426fb", authz);
    }

    @Test
    @Ignore("AWS4 signature feature we don't care about")
    public void testV4Signature__get_vanilla_ut8_query() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "GET", "https://host.foo.com/?foo=bar", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=6fb359e9a05394cc7074e0feb42573a2601abc0c869a953e8c5c12e4e01f1a8c", authz);
    }

    @Test
    public void testV4Signature__post_header_key_case() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("DATE", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=22902d79e148b64e7571c3565769328423fe276eae4b26f83afceda9e767f726", authz);
    }

    @Test
    public void testV4Signature__post_header_key_sort() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("DATE", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("host", "host.foo.com");
        headers.put("ZOO", "zoobar");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host;zoo, Signature=b7a95a52518abbca0964a999a880429ab734f35ebbf1235bd79a5de87756dc4a", authz);
    }

    @Test
    public void testV4Signature__post_header_value_case() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("DATE", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("host", "host.foo.com");
        headers.put("zoo", "ZOOBAR");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host;zoo, Signature=273313af9d0c265c531e11db70bbd653f3ba074c1009239e8559d3987039cad7", authz);
    }

    @Test
    public void testV4Signature__post_vanilla() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=22902d79e148b64e7571c3565769328423fe276eae4b26f83afceda9e767f726", authz);
    }

    @Test
    public void testV4Signature__post_vanilla_empty_query_value() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/?foo=bar", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=b6e3b79003ce0743a491606ba1035a804593b0efb1e20a11cba83f8c25a57a92", authz);
    }

    @Test
    public void testV4Signature__post_vanilla_query() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/?foo=bar", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=b6e3b79003ce0743a491606ba1035a804593b0efb1e20a11cba83f8c25a57a92", authz);
    }

    @Test
    @Ignore("AWS4 signature feature we don't care about")
    public void testV4Signature__post_vanilla_query_nonunreserved() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/?@#$%^&+=/,?><`\";:\\|][{}", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=28675d93ac1d686ab9988d6617661da4dffe7ba848a2285cb75eac6512e861f9", authz);
    }

    @Test
    public void testV4Signature__post_vanilla_query_space() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/?f", "host", headers, "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=date;host, Signature=b7eb653abe5f846e7eee4d1dba33b15419dc424aaf215d49b1240732b10cc4ca", authz);
    }

    @Test
    public void testV4Signature__post_x_www_form_urlencoded() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Content-Type", "application/x-www-form-urlencoded");
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/", "host", headers, "3ba8907e7a252327488df390ed517c45b96dead033600219bdca7107d1d3f88a");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=content-type;date;host, Signature=5a15b22cf462f047318703b92e6f4f38884e4a7ab7b1d6426ca46a8bd1c26cbc", authz);
    }

    @Test
    public void testV4Signature__post_x_www_form_urlencoded_parameters() throws Exception {
        Map<String, String> headers = new HashMap<String, String>();
        headers.put("Content-Type", "application/x-www-form-urlencoded; charset=utf8");
        headers.put("Date", "Mon, 09 Sep 2011 23:36:00 GMT");
        headers.put("Host", "host.foo.com");
        String authz = awsCloud.getV4Authorization("AKIDEXAMPLE", "wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY",
            "POST", "https://host.foo.com/", "host", headers, "3ba8907e7a252327488df390ed517c45b96dead033600219bdca7107d1d3f88a");
        assertEquals("AWS4-HMAC-SHA256 Credential=AKIDEXAMPLE/20110909/us-east-1/host/aws4_request, SignedHeaders=content-type;date;host, Signature=b105eb10c6d318d2294de9d49dd8b031b55e3c3fe139f2e637da70511e9e7b71", authz);
    }
}
| daniellemayne/dasein-cloud-aws_old | src/test/java/org/dasein/cloud/aws/AWSSignatureV4Tests.java | Java | apache-2.0 | 26,369 |
package reply
import (
"context"
"go-common/app/interface/main/reply/conf"
model "go-common/app/interface/main/reply/model/reply"
"testing"
"github.com/smartystreets/goconvey/convey"
)
// TestReplyNewMemcacheDao verifies that a memcache DAO can be constructed
// from the configured memcache settings.
func TestReplyNewMemcacheDao(t *testing.T) {
	convey.Convey("NewMemcacheDao", t, func(ctx convey.C) {
		c := conf.Conf.Memcache
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			p1 := NewMemcacheDao(c)
			ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
				ctx.So(p1, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestReplykeyCaptcha checks that the captcha cache-key builder returns a
// non-empty key for a zero mid.
func TestReplykeyCaptcha(t *testing.T) {
	convey.Convey("keyCaptcha", t, func(ctx convey.C) {
		mid := int64(0)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			p1 := keyCaptcha(mid)
			ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
				ctx.So(p1, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestReplykeyAdminTop checks that the admin-top cache-key builder returns a
// non-empty key for zero-valued inputs.
func TestReplykeyAdminTop(t *testing.T) {
	convey.Convey("keyAdminTop", t, func(ctx convey.C) {
		oid, tp := int64(0), int8(0)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			p1 := keyAdminTop(oid, tp)
			ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
				ctx.So(p1, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestReplykeyUpperTop verifies that the upper-top cache-key builder produces a non-empty key.
func TestReplykeyUpperTop(t *testing.T) {
	convey.Convey("keyUpperTop", t, func(ctx convey.C) {
		var (
			oid = int64(0)
			tp  = int8(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			p1 := keyUpperTop(oid, tp)
			ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
				ctx.So(p1, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestReplykeySub verifies that the subject cache-key builder produces a non-empty key.
func TestReplykeySub(t *testing.T) {
	convey.Convey("keySub", t, func(ctx convey.C) {
		var (
			oid = int64(0)
			tp  = int8(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			p1 := keySub(oid, tp)
			ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
				ctx.So(p1, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestReplykeyRp verifies that the reply cache-key builder produces a non-empty key.
func TestReplykeyRp(t *testing.T) {
	convey.Convey("keyRp", t, func(ctx convey.C) {
		var (
			rpID = int64(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			p1 := keyRp(rpID)
			ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
				ctx.So(p1, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestReplykeyConfig verifies that the reply-config cache-key builder produces a non-empty key.
func TestReplykeyConfig(t *testing.T) {
	convey.Convey("keyConfig", t, func(ctx convey.C) {
		var (
			oid      = int64(0)
			typ      = int8(0)
			category = int8(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			p1 := keyConfig(oid, typ, category)
			ctx.Convey("Then p1 should not be nil.", func(ctx convey.C) {
				ctx.So(p1, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestReplyMcPing verifies that pinging the memcache connection of the shared test dao `d` succeeds.
func TestReplyMcPing(t *testing.T) {
	convey.Convey("Ping", t, func(ctx convey.C) {
		var (
			c = context.Background()
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			err := d.Mc.Ping(c)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestReplyMcCaptchaToken verifies that reading a captcha token from memcache succeeds for mid 0.
func TestReplyMcCaptchaToken(t *testing.T) {
	convey.Convey("CaptchaToken", t, func(ctx convey.C) {
		var (
			c   = context.Background()
			mid = int64(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			p1, err := d.Mc.CaptchaToken(c, mid)
			ctx.Convey("Then err should be nil.p1 should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(p1, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestReplySetCaptchaToken verifies that writing a (empty) captcha token to memcache succeeds.
func TestReplySetCaptchaToken(t *testing.T) {
	convey.Convey("SetCaptchaToken", t, func(ctx convey.C) {
		var (
			c     = context.Background()
			mid   = int64(0)
			token = ""
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			err := d.Mc.SetCaptchaToken(c, mid, token)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestReplyGetSubject verifies that fetching a subject from memcache succeeds and yields a value.
func TestReplyGetSubject(t *testing.T) {
	convey.Convey("GetSubject", t, func(ctx convey.C) {
		var (
			c   = context.Background()
			oid = int64(0)
			tp  = int8(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			sub, err := d.Mc.GetSubject(c, oid, tp)
			ctx.Convey("Then err should be nil.sub should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(sub, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestReplyGetMultiSubject verifies that a bulk subject lookup for an unknown oid succeeds and
// reports that oid in the missed slice.
func TestReplyGetMultiSubject(t *testing.T) {
	convey.Convey("GetMultiSubject", t, func(ctx convey.C) {
		var (
			c    = context.Background()
			oids = []int64{1322313213123}
			tp   = int8(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			res, missed, err := d.Mc.GetMultiSubject(c, oids, tp)
			ctx.Convey("Then err should be nil.res,missed should not be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(missed, convey.ShouldNotBeNil)
				ctx.So(res, convey.ShouldNotBeNil)
			})
		})
	})
}
// TestReplyDeleteSubject verifies that deleting a subject cache entry succeeds.
func TestReplyDeleteSubject(t *testing.T) {
	convey.Convey("DeleteSubject", t, func(ctx convey.C) {
		var (
			c   = context.Background()
			oid = int64(0)
			tp  = int8(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			err := d.Mc.DeleteSubject(c, oid, tp)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestReplyAddSubject verifies that caching a zero-value subject succeeds.
func TestReplyAddSubject(t *testing.T) {
	convey.Convey("AddSubject", t, func(ctx convey.C) {
		var (
			c    = context.Background()
			subs = &model.Subject{}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			err := d.Mc.AddSubject(c, subs)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestReplyAddReply verifies that caching a zero-value reply succeeds.
func TestReplyAddReply(t *testing.T) {
	convey.Convey("AddReply", t, func(ctx convey.C) {
		var (
			c  = context.Background()
			rs = &model.Reply{}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			err := d.Mc.AddReply(c, rs)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestReplyAddTop verifies that caching a zero-value top reply succeeds.
func TestReplyAddTop(t *testing.T) {
	convey.Convey("AddTop", t, func(ctx convey.C) {
		var (
			c   = context.Background()
			oid = int64(0)
			tp  = int8(0)
			rp  = &model.Reply{}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			err := d.Mc.AddTop(c, oid, tp, rp)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestReplyDeleteReply verifies that deleting a reply cache entry succeeds.
func TestReplyDeleteReply(t *testing.T) {
	convey.Convey("DeleteReply", t, func(ctx convey.C) {
		var (
			c    = context.Background()
			rpID = int64(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			err := d.Mc.DeleteReply(c, rpID)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
// TestReplyGetTop verifies that fetching the top reply for an unknown oid succeeds (cache miss)
// and yields a nil reply.
func TestReplyGetTop(t *testing.T) {
	convey.Convey("GetTop", t, func(ctx convey.C) {
		var (
			c   = context.Background()
			oid = int64(0)
			tp  = int8(0)
			top = uint32(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			rp, err := d.Mc.GetTop(c, oid, tp, top)
			// The description previously claimed rp "should not be nil" while the
			// assertion checks ShouldBeNil; the text now matches the assertion.
			ctx.Convey("Then err should be nil and rp should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(rp, convey.ShouldBeNil)
			})
		})
	})
}
// TestReplyGetReply verifies that fetching an unknown reply succeeds (cache miss) and yields nil.
func TestReplyGetReply(t *testing.T) {
	convey.Convey("GetReply", t, func(ctx convey.C) {
		var (
			c    = context.Background()
			rpID = int64(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			rp, err := d.Mc.GetReply(c, rpID)
			// The description previously claimed rp "should not be nil" while the
			// assertion checks ShouldBeNil; the text now matches the assertion.
			ctx.Convey("Then err should be nil and rp should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(rp, convey.ShouldBeNil)
			})
		})
	})
}
// TestReplyGetMultiReply verifies that a bulk reply lookup with no ids succeeds and yields
// nil result and nil missed slices.
func TestReplyGetMultiReply(t *testing.T) {
	convey.Convey("GetMultiReply", t, func(ctx convey.C) {
		var (
			c     = context.Background()
			rpIDs = []int64{}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			rpMap, missed, err := d.Mc.GetMultiReply(c, rpIDs)
			// The description previously claimed the results "should not be nil" while the
			// assertions check ShouldBeNil; the text now matches the assertions.
			ctx.Convey("Then err should be nil and rpMap,missed should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(missed, convey.ShouldBeNil)
				ctx.So(rpMap, convey.ShouldBeNil)
			})
		})
	})
}
// TestReplyGetReplyConfig verifies that fetching an unknown reply config succeeds (cache miss)
// and yields nil.
func TestReplyGetReplyConfig(t *testing.T) {
	convey.Convey("GetReplyConfig", t, func(ctx convey.C) {
		var (
			c        = context.Background()
			oid      = int64(0)
			typ      = int8(0)
			category = int8(0)
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			config, err := d.Mc.GetReplyConfig(c, oid, typ, category)
			// The description previously claimed config "should not be nil" while the
			// assertion checks ShouldBeNil; the text now matches the assertion.
			ctx.Convey("Then err should be nil and config should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
				ctx.So(config, convey.ShouldBeNil)
			})
		})
	})
}
// TestReplyAddReplyConfigCache verifies that caching a zero-value reply config succeeds.
func TestReplyAddReplyConfigCache(t *testing.T) {
	convey.Convey("AddReplyConfigCache", t, func(ctx convey.C) {
		var (
			c = context.Background()
			m = &model.Config{}
		)
		ctx.Convey("When everything goes positive", func(ctx convey.C) {
			err := d.Mc.AddReplyConfigCache(c, m)
			ctx.Convey("Then err should be nil.", func(ctx convey.C) {
				ctx.So(err, convey.ShouldBeNil)
			})
		})
	})
}
| LQJJ/demo | 126-go-common-master/app/interface/main/reply/dao/reply/memcache_test.go | GO | apache-2.0 | 9,124 |
import { REST } from 'core/api/ApiService';
/** Thin REST client for the `/artifacts/account` endpoints. */
export class ArtifactService {
  /** Lists the artifact names of the given type available in the given account. */
  public static getArtifactNames(type: string, accountName: string): PromiseLike<string[]> {
    const request = REST('/artifacts/account').path(accountName, 'names');
    return request.query({ type }).get();
  }

  /** Lists the known versions of the named artifact of the given type in the given account. */
  public static getArtifactVersions(type: string, accountName: string, artifactName: string): PromiseLike<string[]> {
    const request = REST('/artifacts/account').path(accountName, 'versions');
    return request.query({ type, artifactName }).get();
  }
}
| ajordens/deck | app/scripts/modules/core/src/pipeline/config/triggers/artifacts/ArtifactService.ts | TypeScript | apache-2.0 | 538 |
/*
* RED5 Open Source Media Server - https://github.com/Red5/ Copyright 2006-2016 by respective authors (see below). All rights reserved. Licensed under the Apache License, Version
* 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless
* required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and limitations under the License.
*/
package org.red5.server.exception;
/**
 * Generic Scope exception.
 */
public class ScopeException extends RuntimeException {

    private static final long serialVersionUID = -8512088658139011L;

    /**
     * Creates a scope exception with a detail message.
     *
     * @param string the detail message
     */
    public ScopeException(String string) {
        super(string);
    }

    /**
     * Creates a scope exception with a detail message and root cause, so the
     * original stack trace is preserved when wrapping lower-level failures.
     *
     * @param string the detail message
     * @param cause the underlying cause
     */
    public ScopeException(String string, Throwable cause) {
        super(string, cause);
    }
}
| Red5/red5-server-common | src/main/java/org/red5/server/exception/ScopeException.java | Java | apache-2.0 | 944 |
<?php
use Zizaco\Confide\ConfideUser;
use Zizaco\Confide\ConfideUserInterface;
use Zizaco\Entrust\HasRole;
/**
 * Application user backed by the users table.
 *
 * Combines Confide (authentication, confirmation, password reset) with
 * Entrust (role/permission checks) on top of Eloquent.
 */
class User extends Eloquent implements ConfideUserInterface {

    use HasRole; // Add this trait to your user model
    use ConfideUser;

    // Attributes that may be mass-assigned.
    protected $fillable = array('username','email','password','password_confirmation','confirmation_code', 'remember_token','confirmed');

    // Attributes hidden from array/JSON serialization.
    protected $hidden = array('password');

    // Validation rules applied by Confide when creating a user.
    public static $rules = array(
        'email' => 'required',
        'password' => 'required',
        'password_confirmation' => 'required|same:password'
    );

    /**
     * One-to-one relation: the user's profile.
     */
    public function profile() {
        return $this->hasOne('Profile'); // this matches the Eloquent model
    }

    /**
     * Many-to-many relation: clubs the user belongs to (pivot keeps timestamps).
     */
    public function clubs() {
        return $this->belongsToMany('Club')->withTimestamps();
    }

    /**
     * One-to-many relation: players owned by the user.
     */
    public function players(){
        return $this->hasMany('Player');
    }

    /**
     * Many-to-many relation: teams the user is assigned to.
     */
    public function teams(){
        return $this->belongsToMany('Team');
    }
}
// <?php
// use Illuminate\Auth\UserTrait;
// use Illuminate\Auth\UserInterface;
// use Illuminate\Auth\Reminders\RemindableTrait;
// use Illuminate\Auth\Reminders\RemindableInterface;
// class User extends Eloquent implements UserInterface, RemindableInterface {
// use UserTrait, RemindableTrait;
// /**
// * The database table used by the model.
// *
// * @var string
// */
// protected $table = 'users';
// /**
// * The attributes excluded from the model's JSON form.
// *
// * @var array
// */
// protected $hidden = array('password', 'remember_token');
// }
| PlusTechnologies/league-production | app/models/User.php | PHP | apache-2.0 | 1,482 |
/*
* Copyright 2016-2019 David Karnok
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package hu.akarnokd.rxjava2.operators;
import static org.junit.Assert.assertFalse;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.junit.Test;
import hu.akarnokd.rxjava2.test.TestHelper;
import io.reactivex.Observable;
import io.reactivex.functions.Function;
import io.reactivex.observers.TestObserver;
import io.reactivex.plugins.RxJavaPlugins;
import io.reactivex.schedulers.Schedulers;
import io.reactivex.subjects.PublishSubject;
/**
 * Unit tests for the {@code valve} transformer: a stream whose emissions are
 * gated on/off by a second, boolean-emitting Observable.
 */
public class ObservableValveTest {

    // Valve defaults to open and is never toggled: every item passes through.
    @Test
    public void passthrough() {
        Observable.range(1, 10)
        .compose(ObservableTransformers.<Integer>valve(Observable.<Boolean>never()))
        .test()
        .assertResult(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);
    }

    // Valve starts closed and is never opened: nothing is emitted.
    @Test
    public void gatedoff() {
        Observable.range(1, 10)
        .compose(ObservableTransformers.<Integer>valve(Observable.<Boolean>never(), false))
        .test()
        .assertEmpty();
    }

    // Opening the valve releases the buffered items; the gate source is
    // unsubscribed once the main source terminates.
    @Test
    public void syncGating() {
        PublishSubject<Boolean> ps = PublishSubject.create();

        TestObserver<Integer> to = Observable.range(1, 10)
        .compose(ObservableTransformers.<Integer>valve(ps, false))
        .test();

        to.assertEmpty();

        ps.onNext(true);

        to.assertResult(1, 2, 3, 4, 5, 6, 7, 8, 9, 10);

        assertFalse(ps.hasObservers());
    }

    // Time-based toggling: the valve alternates open/closed every 50ms while the
    // source emits every 17ms; all ten items must still arrive, in order.
    @Test
    public void gating() {
        Observable.intervalRange(1, 10, 17, 17, TimeUnit.MILLISECONDS)
        .compose(ObservableTransformers.<Long>valve(
            Observable.interval(50, TimeUnit.MILLISECONDS).map(new Function<Long, Boolean>() {
                @Override
                public Boolean apply(Long v) throws Exception {
                    return (v & 1) == 0;
                }
            }), true, 16))
        .test()
        .awaitDone(5, TimeUnit.SECONDS)
        .assertResult(1L, 2L, 3L, 4L, 5L, 6L, 7L, 8L, 9L, 10L);
    }

    // An error on the main source is forwarded downstream.
    @Test
    public void mainError() {
        Observable.<Integer>error(new IOException())
        .compose(ObservableTransformers.<Integer>valve(Observable.<Boolean>never()))
        .test()
        .assertFailure(IOException.class);
    }

    // An error on the gate source is forwarded downstream.
    @Test
    public void otherError() {
        Observable.just(1)
        .compose(ObservableTransformers.<Integer>valve(Observable.<Boolean>error(new IOException())))
        .test()
        .assertFailure(IOException.class);
    }

    // The gate source completing before the main source is an illegal state.
    @Test
    public void otherCompletes() {
        Observable.just(1)
        .compose(ObservableTransformers.<Integer>valve(Observable.<Boolean>empty()))
        .test()
        .assertFailure(IllegalStateException.class);
    }

    // When both sources fail, one error goes downstream and the other is routed
    // to the RxJava plugin error handler.
    @Test
    public void bothError() {
        List<Throwable> errors = TestHelper.trackPluginErrors();
        try {
            Observable.<Integer>error(new IllegalArgumentException())
            .compose(ObservableTransformers.<Integer>valve(Observable.<Boolean>error(new IOException())))
            .test()
            .assertFailure(IOException.class);

            TestHelper.assertError(errors, 0, IllegalArgumentException.class);
        } finally {
            RxJavaPlugins.reset();
        }
    }

    // Downstream cancellation via take() is honored by the valve.
    @Test
    public void take() {
        Observable.range(1, 10)
        .compose(ObservableTransformers.<Integer>valve(Observable.<Boolean>never()))
        .take(5)
        .test()
        .assertResult(1, 2, 3, 4, 5);
    }

    // Race an emission on the main source against opening the valve; the single
    // item must be delivered exactly once regardless of interleaving.
    @Test
    public void openCloseRace() {
        for (int i = 0; i < 1000; i++) {
            final PublishSubject<Integer> ps1 = PublishSubject.create();
            final PublishSubject<Boolean> ps2 = PublishSubject.create();

            TestObserver<Integer> to = ps1.compose(ObservableTransformers.<Integer>valve(ps2, false))
            .test();

            Runnable r1 = new Runnable() {
                @Override
                public void run() {
                    ps1.onNext(1);
                }
            };
            Runnable r2 = new Runnable() {
                @Override
                public void run() {
                    ps2.onNext(true);
                }
            };

            TestHelper.race(r1, r2, Schedulers.single());

            to.assertValue(1).assertNoErrors().assertNotComplete();
        }
    }

    // Standard disposal conformance check.
    @Test
    public void disposed() {
        TestHelper.checkDisposed(PublishSubject.<Integer>create().compose(ObservableTransformers.<Integer>valve(Observable.<Boolean>never())));
    }
}
| akarnokd/RxJava2Extensions | src/test/java/hu/akarnokd/rxjava2/operators/ObservableValveTest.java | Java | apache-2.0 | 5,022 |
/*
* Copyright 2017 - 2019 EasyFXML project and contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package moe.tristan.easyfxml.samples.form.user.view.userform;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.verify;
import java.time.LocalDate;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.mock.mockito.MockBean;
import javafx.scene.control.DatePicker;
import javafx.scene.layout.Pane;
import moe.tristan.easyfxml.EasyFxml;
import moe.tristan.easyfxml.junit.SpringBootComponentTest;
import moe.tristan.easyfxml.samples.form.user.model.ImmutableUserForm;
import moe.tristan.easyfxml.samples.form.user.model.UserCreationService;
import moe.tristan.easyfxml.samples.form.user.model.UserForm;
@SpringBootTest
public class UserFormComponentTest extends SpringBootComponentTest {

    @Autowired
    private EasyFxml easyFxml;

    @Autowired
    private UserFormComponent userFormComponent;

    // Mocked so we can verify whether/with what the form was submitted.
    @MockBean
    private UserCreationService userCreationService;

    // Root pane of the loaded user-form FXML, rendered fresh before each test.
    private Pane userFormPane;

    @BeforeEach
    public void setUp() {
        userFormPane = easyFxml.load(userFormComponent).getNodeOrExceptionPane();
    }

    // Filling every field with valid data and clicking submit must forward
    // exactly the entered values to the creation service.
    @Test
    public void checkSubmitsOnAllValid() {
        final UserForm expectedUserForm = ImmutableUserForm
            .builder()
            .firstName("Firstname")
            .lastName("Lastname")
            .emailAddress("something@email.com")
            // 15 years in the past: a plausible, valid birthdate.
            .birthdate(LocalDate.now().minusYears(15))
            .build();

        withNodes(userFormPane)
            .willDo(
                () -> clickOn("#firstNameField").write(expectedUserForm.getFirstName()),
                () -> clickOn("#lastNameField").write(expectedUserForm.getLastName()),
                () -> lookup("#datePicker").queryAs(DatePicker.class).setValue(expectedUserForm.getBirthdate()),
                () -> clickOn("#emailField").write(expectedUserForm.getEmailAddress())
            ).run();

        clickOn("#submitButton");

        final ArgumentCaptor<UserForm> submittedFormCaptor = ArgumentCaptor.forClass(UserForm.class);
        verify(userCreationService).submitUserForm(submittedFormCaptor.capture());
        assertThat(submittedFormCaptor.getValue()).isEqualTo(expectedUserForm);
    }

    // With all fields left empty, submit must not reach the creation service.
    @Test
    public void checkDoesNotSubmitOnAllEmpty() {
        withNodes(userFormPane).run();

        clickOn("#submitButton");

        verify(userCreationService, never()).submitUserForm(any());
    }
}
| Tristan971/EasyFXML | easyfxml-samples/easyfxml-sample-form-user/src/test/java/moe/tristan/easyfxml/samples/form/user/view/userform/UserFormComponentTest.java | Java | apache-2.0 | 3,340 |
/*
* Copyright 2007 - 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sf.jailer.ui;
import java.awt.Color;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.Point;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseEvent;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.JComponent;
import javax.swing.JLabel;
import javax.swing.JTextField;
import javax.swing.SwingUtilities;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import net.sf.jailer.Configuration;
import net.sf.jailer.DDLCreator;
import net.sf.jailer.ScriptFormat;
import net.sf.jailer.database.Session;
import net.sf.jailer.database.TemporaryTableScope;
import net.sf.jailer.datamodel.Association;
import net.sf.jailer.datamodel.DataModel;
import net.sf.jailer.datamodel.Table;
import net.sf.jailer.extractionmodel.ExtractionModel.AdditionalSubject;
import net.sf.jailer.util.CancellationHandler;
import net.sf.jailer.util.CsvFile;
/**
* Data Export Dialog.
*
* @author Ralf Wisser
*/
public class ExportDialog extends javax.swing.JDialog {
    /**
     * true iff ok-button was clicked.
     */
    boolean isOk = false;

    /**
     * Xml/Sql switch.
     */
    private ScriptFormat scriptFormat;

    /**
     * Restricted data model.
     */
    private final DataModel dataModel;

    /**
     * Previous subject condition.
     */
    private static String previousSubjectCondition;

    /**
     * Previous initial subject condition.
     */
    private static String previousInitialSubjectCondition;

    /**
     * Display name for default schema.
     */
    private static String DEFAULT_SCHEMA = "<default>";

    /**
     * Schema mapping fields.
     */
    private Map<String, JTextField> schemaMappingFields = new HashMap<String, JTextField>();

    /**
     * Source-schema mapping fields.
     */
    private Map<String, JTextField> sourceSchemaMappingFields = new HashMap<String, JTextField>();

    /**
     * The form field setting.
     */
    private Settings theSettings;

    /**
     * The subject table.
     */
    private final Table subject;

    /**
     * Editor for extraction-model parameters shown in the dialog.
     */
    private ParameterEditor parameterEditor;

    /**
     * Initial command-line arguments used to build the CLI preview text.
     */
    private final List<String> initialArgs;

    /**
     * Database password, passed through to the CLI argument string builder.
     */
    private final String password;

    /**
     * The initial subject condition as given by the extraction model.
     */
    private final String subjectCondition;
    /**
     * Creates and shows the (modal) data export dialog.
     *
     * @param parent parent frame
     * @param dataModel the restricted data model
     * @param subject the subject table of the extraction model
     * @param subjectCondition the initial subject condition
     * @param additionalSubjects additional subject tables, if any
     * @param session database session, used to probe temporary-table support
     * @param initialArgs initial command-line arguments for the CLI preview
     * @param password database password (masked in the CLI preview)
     * @param showCmd if false, the command-line panel is hidden
     */
    public ExportDialog(java.awt.Frame parent, final DataModel dataModel, final Table subject, String subjectCondition, List<AdditionalSubject> additionalSubjects, Session session, List<String> initialArgs, String password, boolean showCmd) {
        super(parent, true);
        this.subjectCondition = subjectCondition;
        this.dataModel = dataModel;
        this.subject = subject;
        this.initialArgs = new ArrayList<String>(initialArgs);
        this.password = password;
        initComponents();

        if (!showCmd) {
            commandLinePanel.setVisible(false);
        }

        // Embed the parameter editor for all parameters referenced by the conditions.
        parameterEditor = new ParameterEditor(parent);
        GridBagConstraints gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.fill = GridBagConstraints.BOTH;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        parameterPanel.add(parameterEditor.createPane(dataModel.getParameters(subjectCondition, additionalSubjects)), gridBagConstraints);

        setModal(true);
        setLocation(100, 150);

        // Fields whose values are persisted/restored via Settings.
        Map<String, JComponent> fields = new HashMap<String, JComponent>();
        fields.put("insert", insert);
        fields.put("delete", delete);
        fields.put("threads", threads);
        fields.put("rowsPerThread", rowsPerThread);
        fields.put("unicode", unicode);
        for (Map.Entry<String, JTextField> e: parameterEditor.textfieldsPerParameter.entrySet()) {
            fields.put("$" + e.getKey(), e.getValue());
        }

        // Export modus of the data model decides the script format; fall back to SQL.
        scriptFormat = ScriptFormat.SQL;
        try {
            scriptFormat = ScriptFormat.valueOf(dataModel.getExportModus());
        } catch (Exception e) {
        }

        // Sorting/upsert/rows-per-thread only make sense for certain formats.
        sortedCheckBox.setEnabled(ScriptFormat.SQL.equals(scriptFormat) || ScriptFormat.DBUNIT_FLAT_XML.equals(scriptFormat) || ScriptFormat.LIQUIBASE_XML.equals(scriptFormat));
        sortedCheckBox.setSelected(true);
        upsertCheckbox.setEnabled(ScriptFormat.SQL.equals(scriptFormat));
        rowsPerThread.setEnabled(ScriptFormat.SQL.equals(scriptFormat));

        // Target-schema mapping is only offered for formats that write into schemas.
        Map<JTextField, String> defaults = new HashMap<JTextField, String>();
        if ((!ScriptFormat.SQL.equals(scriptFormat)) && (!ScriptFormat.DBUNIT_FLAT_XML.equals(scriptFormat)) && !ScriptFormat.LIQUIBASE_XML.equals(scriptFormat)) {
            schemaMappingPanel.setVisible(false);
        } else {
            schemaMappingPanel.setVisible(true);
            initSchemaMapping(dataModel, fields, defaults);
        }
        initSourceSchemaMapping(dataModel, fields, defaults);

        // Restore last-used form values; fill still-empty fields with defaults.
        theSettings = new Settings(".exportdata.ui", fields);
        theSettings.restore("default");
        for (JTextField field: defaults.keySet()) {
            if (field.getText().length() == 0) {
                field.setText(defaults.get(field));
            }
        }
        if (threads.getText().length() == 0) {
            threads.setText("1");
        }
        if (rowsPerThread.getText().length() == 0) {
            rowsPerThread.setText("50");
        }

        // Show the additional subjects (comma separated, truncated to 60 chars with tooltip).
        if (additionalSubjects.isEmpty()) {
            additSubsLabel.setVisible(false);
            additSubsLabelTitel.setVisible(false);
        } else {
            StringBuilder sb = new StringBuilder();
            for (AdditionalSubject as: additionalSubjects) {
                if (sb.length() > 0) {
                    sb.append(", ");
                }
                sb.append(as.subject.getName());
            }
            final int MAX = 60;
            if (sb.length() > MAX) {
                additSubsLabel.setToolTipText(sb.toString());
                additSubsLabel.setText(sb.toString().substring(0, MAX) + "...");
            } else {
                additSubsLabel.setText(sb.toString());
            }
        }

        subjectTable.setText(subject.getName());
        // Reuse the previously edited condition only if the model's condition is unchanged.
        if (subjectCondition.equals(previousInitialSubjectCondition)) {
            where.setText(ConditionEditor.toSingleLine(previousSubjectCondition));
        } else {
            where.setText(ConditionEditor.toSingleLine(subjectCondition));
        }

        initScopeButtons(session);

        // Temporarily disable the file-chooser buttons while hovered
        // (prevents the default button action on hover-click quirks).
        selectInsertFile.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseEntered(java.awt.event.MouseEvent evt) {
                selectInsertFile.setEnabled(false);
            }
            public void mouseExited(java.awt.event.MouseEvent evt) {
                selectInsertFile.setEnabled(true);
            }
        });
        selectDeleteFile.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseEntered(java.awt.event.MouseEvent evt) {
                selectDeleteFile.setEnabled(false);
            }
            public void mouseExited(java.awt.event.MouseEvent evt) {
                selectDeleteFile.setEnabled(true);
            }
        });

        selectInsertFile.setText("");
        selectInsertFile.setIcon(loadIcon);
        selectDeleteFile.setText("");
        selectDeleteFile.setIcon(loadIcon);

        if (parameterEditor.firstTextField != null) {
            parameterEditor.firstTextField.grabFocus();
        }

        // Any edit of a relevant field/checkbox refreshes the CLI preview.
        DocumentListener dl = new DocumentListener() {
            @Override
            public void removeUpdate(DocumentEvent e) {
                updateCLIArea();
            }
            @Override
            public void insertUpdate(DocumentEvent e) {
                updateCLIArea();
            }
            @Override
            public void changedUpdate(DocumentEvent e) {
                updateCLIArea();
            }
        };
        ActionListener al = new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent arg0) {
                updateCLIArea();
            }
        };
        where.getDocument().addDocumentListener(dl);
        insert.getDocument().addDocumentListener(dl);
        delete.getDocument().addDocumentListener(dl);
        threads.getDocument().addDocumentListener(dl);
        rowsPerThread.getDocument().addDocumentListener(dl);
        upsertCheckbox.addActionListener(al);
        explain.addActionListener(al);
        unicode.addActionListener(al);
        sortedCheckBox.addActionListener(al);
        scopeGlobal.addActionListener(al);
        scopeSession.addActionListener(al);
        scopeLocal.addActionListener(al);
        for (JTextField field: parameterEditor.textfieldsPerParameter.values()) {
            field.getDocument().addDocumentListener(dl);
        }

        // Keep the where-field narrow so the dialog does not grow with long conditions.
        Dimension preferredSize = where.getPreferredSize();
        preferredSize.width = 10;
        where.setPreferredSize(preferredSize);

        // Icon-button that opens the condition editor for the subject condition.
        final ConditionEditor subjectConditionEditor = new ConditionEditor(null, null);
        subjectConditionEditor.setTitle("Subject condition");
        openWhereEditor.setIcon(conditionEditorIcon);
        openWhereEditor.setText(null);
        openWhereEditor.addMouseListener(new java.awt.event.MouseAdapter() {
            @Override
            public void mouseReleased(MouseEvent e) {
                mouseClicked(e);
            }
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                String cond = subjectConditionEditor.edit(where.getText(), "Subject", "T", subject, null, null, null, false);
                if (cond != null) {
                    if (!where.getText().equals(ConditionEditor.toSingleLine(cond))) {
                        where.setText(ConditionEditor.toSingleLine(cond));
                    }
                    openWhereEditor.setIcon(conditionEditorSelectedIcon);
                }
            }
            public void mouseEntered(java.awt.event.MouseEvent evt) {
                openWhereEditor.setIcon(conditionEditorSelectedIcon);
            }
            public void mouseExited(java.awt.event.MouseEvent evt) {
                openWhereEditor.setIcon(conditionEditorIcon);
            }
        });

        updateCLIArea();

        pack();
        setSize(Math.max(Math.min(getSize().width, 900), 580), getSize().height);
        placeholder.setVisible(false);
        placeholder1.setVisible(false);
        UIUtil.initPeer();
        UIUtil.fit(this);
        // Modal: blocks here until the dialog is closed.
        setVisible(true);
        // Wait for the background scope-probing thread started by initScopeButtons.
        try {
            if (initScopeButtonThread != null) {
                initScopeButtonThread.join();
            }
        } catch (InterruptedException e1) {
        }
        initScopeButtonThread = null;
        // Remember the edited condition so it can be restored next time.
        if (isOk) {
            previousInitialSubjectCondition = subjectCondition;
            previousSubjectCondition = where.getText();
        }
    }
private void updateCLIArea() {
explain.setEnabled(!scopeLocal.isSelected());
if (scopeLocal.isSelected()) {
explain.setSelected(false);
}
List<String> args = new ArrayList<String>(initialArgs);
fillCLIArgs(args);
String cmd = "sh jailer.sh";
if (System.getProperty("os.name", "").toLowerCase().startsWith("windows")) {
cmd = "jailer.bat";
}
cliArea.setText(cmd + UIUtil.createCLIArgumentString(password, args));
cliArea.setCaretPosition(0);
jScrollPane1.getViewport().setViewPosition(new Point(0,0));
}
    // Background thread probing which temporary-table scopes the DBMS supports;
    // joined in the constructor after the dialog closes.
    private Thread initScopeButtonThread;

    /**
     * Initializes the scope radio buttons: starts with only "local" selected and
     * everything else disabled, then enables "session"/"global" from a background
     * thread once the corresponding DDL could be created on the given session.
     * A second watchdog thread re-enables the OK button after 4 seconds at the latest.
     */
    private void initScopeButtons(final Session session) {
        synchronized (this) {
            // Pessimistic initial state: only local scope, OK disabled while probing.
            scopeGlobal.setSelected(false);
            scopeGlobal.setEnabled(false);
            scopeSession.setSelected(false);
            scopeSession.setEnabled(false);
            scopeLocal.setSelected(true);
            jButton1.setEnabled(false);
            setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
            updateCLIArea();
        }

        CancellationHandler.reset(null);
        initScopeButtonThread = new Thread(new Runnable() {
            public void run() {
                Configuration configuration = Configuration.forDbms(session);
                boolean ok = false;
                // Prefer session-local temporary tables if the DBMS supports them.
                if (configuration.sessionTemporaryTableManager != null) {
                    try {
                        session.reconnect();
                        DDLCreator.createDDL(dataModel, session, TemporaryTableScope.SESSION_LOCAL);
                        // UI updates must happen on the EDT.
                        SwingUtilities.invokeLater(new Runnable() {
                            public void run() {
                                synchronized (ExportDialog.this) {
                                    scopeSession.setEnabled(true);
                                    scopeGlobal.setEnabled(true);
                                    scopeLocal.setSelected(false);
                                    scopeGlobal.setSelected(false);
                                    scopeSession.setSelected(true);
                                    updateCLIArea();
                                }
                            }
                        });
                        ok = true;
                    } catch (Exception e) {
                        // ignore
                    }
                }
                // Fall back to global temporary tables.
                if (!ok) {
                    try {
                        DDLCreator.createDDL(dataModel, session, TemporaryTableScope.GLOBAL);
                        ok = true;
                    } catch (Exception e) {
                        // ignore
                    }
                    if (ok) {
                        SwingUtilities.invokeLater(new Runnable() {
                            public void run() {
                                synchronized (ExportDialog.this) {
                                    scopeGlobal.setEnabled(true);
                                    scopeLocal.setSelected(false);
                                    scopeSession.setSelected(false);
                                    scopeGlobal.setSelected(true);
                                    updateCLIArea();
                                }
                            }
                        });
                    }
                }
                // Final pass: prefer global over session if both ended up enabled,
                // and re-enable the OK button.
                SwingUtilities.invokeLater(new Runnable() {
                    public void run() {
                        synchronized (ExportDialog.this) {
                            if (scopeSession.isSelected() && scopeGlobal.isEnabled()) {
                                scopeSession.setSelected(false);
                                scopeGlobal.setSelected(true);
                            }
                            jButton1.setEnabled(true);
                            setCursor(Cursor.getDefaultCursor());
                            updateCLIArea();
                        }
                    }
                });
            }
        });
        initScopeButtonThread.start();

        // Watchdog: never leave the OK button disabled for more than ~4 seconds.
        new Thread(new Runnable() {
            public void run() {
                try {
                    Thread.sleep(4000);
                } catch (InterruptedException e) {
                    // ignore
                }
                SwingUtilities.invokeLater(new Runnable() {
                    public void run() {
                        synchronized (ExportDialog.this) {
                            jButton1.setEnabled(true);
                            setCursor(Cursor.getDefaultCursor());
                            updateCLIArea();
                        }
                    }
                });
            }
        }).start();
    }
    /**
     * Initializes the schema mapping panel: one row per distinct source schema
     * (restricted to schemas relevant for the current extraction), each with a
     * text field mapping it to a target schema.
     *
     * @param dataModel the data model
     * @param fields to put newly created text fields into (for settings persistence)
     * @param defaults to put default values for newly created text fields into
     */
    private void initSchemaMapping(DataModel dataModel, Map<String, JComponent> fields, Map<JTextField, String> defaults) {
        // Collect the distinct schemas of all tables, sorted for stable row order.
        Set<String> distinctSchemas = new HashSet<String>();
        for (Table table: dataModel.getTables()) {
            String schema = table.getOriginalSchema(DEFAULT_SCHEMA);
            distinctSchemas.add(schema);
        }
        List<String> sortedSchemaList = new ArrayList<String>(distinctSchemas);
        Collections.sort(sortedSchemaList);
        Set<String> relevantSchemas = getRelevantSchemas(true);
        int y = 0;
        for (String schema: sortedSchemaList) {
            // Only add rows for schemas relevant to the extraction
            // (DEFAULT_SCHEMA is represented as "" in the relevant set).
            boolean add = relevantSchemas.contains(schema.equals(DEFAULT_SCHEMA)? "" : schema);

            // Column 1: source schema name.
            JLabel a = new JLabel(schema);
            a.setFont(new java.awt.Font("Dialog", 0, 12));
            java.awt.GridBagConstraints gridBagConstraints = new java.awt.GridBagConstraints();
            gridBagConstraints.gridx = 1;
            gridBagConstraints.gridy = y;
            gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
            if (add) {
                schemaMappingPanel.add(a, gridBagConstraints);
            }

            // Column 2: fixed label.
            JLabel b = new JLabel(" into schema ");
            gridBagConstraints = new java.awt.GridBagConstraints();
            gridBagConstraints.gridx = 2;
            gridBagConstraints.gridy = y;
            if (add) {
                schemaMappingPanel.add(b, gridBagConstraints);
            }

            // Column 3: editable target schema, defaulting to the source schema;
            // edits refresh the CLI preview.
            JTextField c = new JTextField(schema);
            c.getDocument().addDocumentListener(new DocumentListener() {
                @Override
                public void removeUpdate(DocumentEvent e) {
                    updateCLIArea();
                }
                @Override
                public void insertUpdate(DocumentEvent e) {
                    updateCLIArea();
                }
                @Override
                public void changedUpdate(DocumentEvent e) {
                    updateCLIArea();
                }
            });
            fields.put("schema-" + schema, c);
            defaults.put(c, schema);
            schemaMappingFields.put(schema, c);
            gridBagConstraints = new java.awt.GridBagConstraints();
            gridBagConstraints.gridx = 3;
            gridBagConstraints.gridy = y;
            gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
            if (add) {
                schemaMappingPanel.add(c, gridBagConstraints);
            }
            if (add) {
                y++;
            }
        }
    }
/**
* Initializes the source schema mapping panel.
*
* @param dataModel the data model
* @param fields to put newly created text fields into
* @param defaults to put default values for newly created text fields into
*/
private void initSourceSchemaMapping(DataModel dataModel, Map<String, JComponent> fields, Map<JTextField, String> defaults) {
Set<String> distinctSchemas = new HashSet<String>();
for (Table table: dataModel.getTables()) {
String schema = table.getOriginalSchema(DEFAULT_SCHEMA);
distinctSchemas.add(schema);
}
List<String> sortedSchemaList = new ArrayList<String>(distinctSchemas);
Collections.sort(sortedSchemaList);
Set<String> relevantSchemas = getRelevantSchemas(true);
int y = 0;
for (String schema: sortedSchemaList) {
boolean add = relevantSchemas.contains(schema.equals(DEFAULT_SCHEMA)? "" : schema);
JLabel b = new JLabel(" instead of ");
java.awt.GridBagConstraints gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 2;
gridBagConstraints.gridy = y;
if (add) {
sourceSchemaMappingPanel.add(b, gridBagConstraints);
}
JTextField c = new JTextField(schema);
c.getDocument().addDocumentListener(new DocumentListener() {
@Override
public void removeUpdate(DocumentEvent e) {
updateCLIArea();
}
@Override
public void insertUpdate(DocumentEvent e) {
updateCLIArea();
}
@Override
public void changedUpdate(DocumentEvent e) {
updateCLIArea();
}
});
fields.put("srcschema-" + schema, c);
defaults.put(c, schema);
sourceSchemaMappingFields.put(schema, c);
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 1;
gridBagConstraints.gridy = y;
gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
if (add) {
sourceSchemaMappingPanel.add(c, gridBagConstraints);
}
JLabel a = new JLabel(schema);
a.setFont(new java.awt.Font("Dialog", 0, 12));
gridBagConstraints = new java.awt.GridBagConstraints();
gridBagConstraints.gridx = 3;
gridBagConstraints.gridy = y;
gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
if (add) {
sourceSchemaMappingPanel.add(a, gridBagConstraints);
}
if (add) {
y++;
}
}
}
    /** This method is called from within the constructor to
     * initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is
     * always regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        java.awt.GridBagConstraints gridBagConstraints;
        buttonGroup1 = new javax.swing.ButtonGroup();
        jScrollPane2 = new javax.swing.JScrollPane();
        jPanel6 = new javax.swing.JPanel();
        jPanel1 = new javax.swing.JPanel();
        sourceSchemaMappingPanel = new javax.swing.JPanel();
        jLabel18 = new javax.swing.JLabel();
        jLabel19 = new javax.swing.JLabel();
        jLabel20 = new javax.swing.JLabel();
        schemaMappingPanel = new javax.swing.JPanel();
        jLabel13 = new javax.swing.JLabel();
        jLabel14 = new javax.swing.JLabel();
        jLabel15 = new javax.swing.JLabel();
        where = new javax.swing.JTextField();
        exportLabel = new javax.swing.JLabel();
        jLabel3 = new javax.swing.JLabel();
        jLabel5 = new javax.swing.JLabel();
        jLabel6 = new javax.swing.JLabel();
        insert = new javax.swing.JTextField();
        delete = new javax.swing.JTextField();
        threads = new javax.swing.JTextField();
        rowsPerThread = new javax.swing.JTextField();
        upsertCheckbox = new javax.swing.JCheckBox();
        explain = new javax.swing.JCheckBox();
        placeholder = new javax.swing.JLabel();
        jLabel4 = new javax.swing.JLabel();
        jLabel8 = new javax.swing.JLabel();
        jLabel7 = new javax.swing.JLabel();
        jLabel11 = new javax.swing.JLabel();
        jPanel4 = new javax.swing.JPanel();
        subjectTable = new javax.swing.JLabel();
        jLabel12 = new javax.swing.JLabel();
        jLabel16 = new javax.swing.JLabel();
        jPanel8 = new javax.swing.JPanel();
        scopeSession = new javax.swing.JRadioButton();
        scopeGlobal = new javax.swing.JRadioButton();
        scopeLocal = new javax.swing.JRadioButton();
        jLabel1 = new javax.swing.JLabel();
        jLabel26 = new javax.swing.JLabel();
        jLabel27 = new javax.swing.JLabel();
        jLabel9 = new javax.swing.JLabel();
        selectInsertFile = new javax.swing.JLabel();
        selectDeleteFile = new javax.swing.JLabel();
        jLabel21 = new javax.swing.JLabel();
        parameterPanel = new javax.swing.JPanel();
        commandLinePanel = new javax.swing.JPanel();
        jLabel22 = new javax.swing.JLabel();
        jScrollPane1 = new javax.swing.JScrollPane();
        cliArea = new javax.swing.JTextArea();
        jLabel23 = new javax.swing.JLabel();
        jLabel24 = new javax.swing.JLabel();
        jLabel25 = new javax.swing.JLabel();
        copyButton = new javax.swing.JButton();
        placeholder1 = new javax.swing.JLabel();
        sortedCheckBox = new javax.swing.JCheckBox();
        unicode = new javax.swing.JCheckBox();
        openWhereEditor = new javax.swing.JLabel();
        additSubsLabel = new javax.swing.JLabel();
        additSubsLabelTitel = new javax.swing.JLabel();
        jPanel7 = new javax.swing.JPanel();
        jPanel2 = new javax.swing.JPanel();
        jButton1 = new javax.swing.JButton();
        jLabel2 = new javax.swing.JLabel();
        cancelButton = new javax.swing.JButton();
        setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
        setTitle("Data Export"); // NOI18N
        getContentPane().setLayout(new java.awt.GridBagLayout());
        // scrollable main area containing all input fields
        jScrollPane2.setBorder(javax.swing.BorderFactory.createEmptyBorder(1, 1, 1, 1));
        jScrollPane2.setHorizontalScrollBarPolicy(javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
        jPanel6.setBorder(javax.swing.BorderFactory.createEmptyBorder(1, 1, 1, 1));
        jPanel6.setLayout(new java.awt.GridBagLayout());
        jPanel1.setBorder(javax.swing.BorderFactory.createEmptyBorder(1, 1, 1, 1));
        jPanel1.setLayout(new java.awt.GridBagLayout());
        // source schema mapping section (rows are added by initSourceSchemaMapping)
        sourceSchemaMappingPanel.setLayout(new java.awt.GridBagLayout());
        jLabel18.setText(" Read from schema "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        sourceSchemaMappingPanel.add(jLabel18, gridBagConstraints);
        jLabel19.setFont(new java.awt.Font("Dialog", 0, 12)); // NOI18N
        jLabel19.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 4;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.weightx = 1.0;
        sourceSchemaMappingPanel.add(jLabel19, gridBagConstraints);
        jLabel20.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 200;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        sourceSchemaMappingPanel.add(jLabel20, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 78;
        gridBagConstraints.gridwidth = 2;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        jPanel1.add(sourceSchemaMappingPanel, gridBagConstraints);
        // target schema mapping section (rows are added by initSchemaMapping)
        schemaMappingPanel.setLayout(new java.awt.GridBagLayout());
        jLabel13.setText(" Import rows from schema "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        schemaMappingPanel.add(jLabel13, gridBagConstraints);
        jLabel14.setFont(new java.awt.Font("Dialog", 0, 12)); // NOI18N
        jLabel14.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 4;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.weightx = 1.0;
        schemaMappingPanel.add(jLabel14, gridBagConstraints);
        jLabel15.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 3;
        gridBagConstraints.gridy = 200;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        schemaMappingPanel.add(jLabel15, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 80;
        gridBagConstraints.gridwidth = 2;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        jPanel1.add(schemaMappingPanel, gridBagConstraints);
        where.setMaximumSize(new java.awt.Dimension(300, 2147483647));
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 18;
        gridBagConstraints.gridwidth = 2;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(0, 0, 1, 0);
        jPanel1.add(where, gridBagConstraints);
        exportLabel.setText(" Into*"); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 30;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel1.add(exportLabel, gridBagConstraints);
        jLabel3.setText(" Generate delete-script* "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 40;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel1.add(jLabel3, gridBagConstraints);
        jLabel5.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 45;
        jPanel1.add(jLabel5, gridBagConstraints);
        jLabel6.setText(" Threads "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 50;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel1.add(jLabel6, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 30;
        gridBagConstraints.gridwidth = 2;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.insets = new java.awt.Insets(0, 0, 1, 0);
        jPanel1.add(insert, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 40;
        gridBagConstraints.gridwidth = 2;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.insets = new java.awt.Insets(0, 0, 1, 0);
        jPanel1.add(delete, gridBagConstraints);
        threads.setMinimumSize(new java.awt.Dimension(44, 19));
        threads.addFocusListener(new java.awt.event.FocusAdapter() {
            public void focusLost(java.awt.event.FocusEvent evt) {
                threadsFocusLost(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 50;
        gridBagConstraints.ipadx = 30;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel1.add(threads, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 51;
        gridBagConstraints.ipadx = 30;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel1.add(rowsPerThread, gridBagConstraints);
        upsertCheckbox.setText("upsert-statements (overwrite) for all rows"); // NOI18N
        upsertCheckbox.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0));
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 44;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 0, 2, 0);
        jPanel1.add(upsertCheckbox, gridBagConstraints);
        explain.setText("explain"); // NOI18N
        explain.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0));
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 45;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.insets = new java.awt.Insets(2, 0, 4, 0);
        jPanel1.add(explain, gridBagConstraints);
        placeholder.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 2;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.weightx = 1.0;
        jPanel1.add(placeholder, gridBagConstraints);
        jLabel4.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 10;
        gridBagConstraints.gridy = 30;
        jPanel1.add(jLabel4, gridBagConstraints);
        jLabel8.setText(" Working table scope"); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 55;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
        gridBagConstraints.insets = new java.awt.Insets(12, 0, 0, 0);
        jPanel1.add(jLabel8, gridBagConstraints);
        jLabel7.setText(" Export from"); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 15;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel1.add(jLabel7, gridBagConstraints);
        jLabel11.setText(" Where"); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 18;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel1.add(jLabel11, gridBagConstraints);
        jPanel4.setLayout(new java.awt.GridBagLayout());
        subjectTable.setFont(new java.awt.Font("Dialog", 0, 12)); // NOI18N
        subjectTable.setText("jLabel11"); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 1;
        jPanel4.add(subjectTable, gridBagConstraints);
        jLabel12.setText(" as T"); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 2;
        gridBagConstraints.gridy = 1;
        jPanel4.add(jLabel12, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 15;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        gridBagConstraints.insets = new java.awt.Insets(0, 0, 4, 0);
        jPanel1.add(jPanel4, gridBagConstraints);
        jLabel16.setText(" Rows per statement"); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 51;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel1.add(jLabel16, gridBagConstraints);
        // working-table scope radio buttons with explanatory hints
        jPanel8.setLayout(new java.awt.GridBagLayout());
        buttonGroup1.add(scopeSession);
        scopeSession.setText("temporary tables "); // NOI18N
        scopeSession.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                scopeSessionActionPerformed(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 58;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel8.add(scopeSession, gridBagConstraints);
        buttonGroup1.add(scopeGlobal);
        scopeGlobal.setText("global tables"); // NOI18N
        scopeGlobal.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                scopeGlobalActionPerformed(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 56;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel8.add(scopeGlobal, gridBagConstraints);
        buttonGroup1.add(scopeLocal);
        scopeLocal.setText("local database");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 55;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel8.add(scopeLocal, gridBagConstraints);
        jLabel1.setForeground(new java.awt.Color(128, 128, 128));
        jLabel1.setText(" (best for single-thread performance)");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 2;
        gridBagConstraints.gridy = 58;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        jPanel8.add(jLabel1, gridBagConstraints);
        jLabel26.setForeground(new java.awt.Color(128, 128, 128));
        jLabel26.setText(" (best for multi-thread performance)");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 2;
        gridBagConstraints.gridy = 56;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        jPanel8.add(jLabel26, gridBagConstraints);
        jLabel27.setForeground(new java.awt.Color(128, 128, 128));
        jLabel27.setText(" (no write-access needed)");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 2;
        gridBagConstraints.gridy = 55;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.weightx = 1.0;
        jPanel8.add(jLabel27, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 55;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.insets = new java.awt.Insets(12, 0, 0, 0);
        jPanel1.add(jPanel8, gridBagConstraints);
        jLabel9.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 58;
        jPanel1.add(jLabel9, gridBagConstraints);
        selectInsertFile.setText("jLabel21"); // NOI18N
        selectInsertFile.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                selectInsertFileMouseClicked(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 3;
        gridBagConstraints.gridy = 30;
        gridBagConstraints.insets = new java.awt.Insets(0, 2, 0, 0);
        jPanel1.add(selectInsertFile, gridBagConstraints);
        selectDeleteFile.setText("jLabel21"); // NOI18N
        selectDeleteFile.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                selectDeleteFileMouseClicked(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 3;
        gridBagConstraints.gridy = 40;
        gridBagConstraints.insets = new java.awt.Insets(0, 2, 0, 0);
        jPanel1.add(selectDeleteFile, gridBagConstraints);
        jLabel21.setText(" With"); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 24;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.NORTHWEST;
        gridBagConstraints.insets = new java.awt.Insets(4, 0, 0, 0);
        jPanel1.add(jLabel21, gridBagConstraints);
        parameterPanel.setLayout(new java.awt.GridBagLayout());
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 24;
        gridBagConstraints.gridwidth = 2;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        gridBagConstraints.weightx = 1.0;
        jPanel1.add(parameterPanel, gridBagConstraints);
        // read-only command-line preview with "Copy" button
        commandLinePanel.setLayout(new java.awt.GridBagLayout());
        jLabel22.setText(" Command line"); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.gridwidth = 2;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        commandLinePanel.add(jLabel22, gridBagConstraints);
        jScrollPane1.setHorizontalScrollBarPolicy(javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
        cliArea.setEditable(false);
        cliArea.setColumns(20);
        cliArea.setLineWrap(true);
        cliArea.setRows(5);
        cliArea.setWrapStyleWord(true);
        cliArea.setMaximumSize(new java.awt.Dimension(300, 2147483647));
        jScrollPane1.setViewportView(cliArea);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.gridheight = 3;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(0, 8, 0, 0);
        commandLinePanel.add(jScrollPane1, gridBagConstraints);
        jLabel23.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 1;
        commandLinePanel.add(jLabel23, gridBagConstraints);
        jLabel24.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 2;
        commandLinePanel.add(jLabel24, gridBagConstraints);
        jLabel25.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 3;
        commandLinePanel.add(jLabel25, gridBagConstraints);
        copyButton.setText("Copy"); // NOI18N
        copyButton.setToolTipText("Copy to Clipboard"); // NOI18N
        copyButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                copyButtonActionPerformed(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 2;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
        gridBagConstraints.insets = new java.awt.Insets(0, 4, 0, 0);
        commandLinePanel.add(copyButton, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 85;
        gridBagConstraints.gridwidth = 4;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        gridBagConstraints.weightx = 1.0;
        jPanel1.add(commandLinePanel, gridBagConstraints);
        placeholder1.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 2;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.weightx = 1.0;
        jPanel1.add(placeholder1, gridBagConstraints);
        sortedCheckBox.setText("sort topologically");
        sortedCheckBox.setToolTipText("sort exported rows according to dependencies");
        sortedCheckBox.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0));
        sortedCheckBox.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                sortedCheckBoxActionPerformed(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 42;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(2, 0, 2, 0);
        jPanel1.add(sortedCheckBox, gridBagConstraints);
        unicode.setText("UTF-8 encoding"); // NOI18N
        unicode.setBorder(javax.swing.BorderFactory.createEmptyBorder(0, 0, 0, 0));
        unicode.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                unicodeActionPerformed(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 46;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        gridBagConstraints.insets = new java.awt.Insets(2, 0, 4, 0);
        jPanel1.add(unicode, gridBagConstraints);
        openWhereEditor.setText("jLabel28");
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 18;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
        jPanel1.add(openWhereEditor, gridBagConstraints);
        additSubsLabel.setText(" "); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 20;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel1.add(additSubsLabel, gridBagConstraints);
        additSubsLabelTitel.setText(" Additional Subjects"); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 20;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.WEST;
        jPanel1.add(additSubsLabelTitel, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.weighty = 1.0;
        jPanel6.add(jPanel1, gridBagConstraints);
        jScrollPane2.setViewportView(jPanel6);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.weighty = 1.0;
        getContentPane().add(jScrollPane2, gridBagConstraints);
        // bottom bar: hint text, "Export Data" and "Cancel" buttons
        jPanel7.setBorder(javax.swing.BorderFactory.createEmptyBorder(1, 1, 1, 1));
        jPanel7.setLayout(new java.awt.GridBagLayout());
        jPanel2.setLayout(new java.awt.GridBagLayout());
        jButton1.setText("Export Data"); // NOI18N
        jButton1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                jButton1ActionPerformed(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 1;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(4, 4, 0, 6);
        jPanel2.add(jButton1, gridBagConstraints);
        jLabel2.setFont(new java.awt.Font("Dialog", 0, 12)); // NOI18N
        jLabel2.setText(" * add '.zip' or '.gz' extension for compressed files"); // NOI18N
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.fill = java.awt.GridBagConstraints.HORIZONTAL;
        jPanel2.add(jLabel2, gridBagConstraints);
        cancelButton.setText(" Cancel ");
        cancelButton.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                cancelButtonActionPerformed(evt);
            }
        });
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 2;
        gridBagConstraints.gridy = 0;
        gridBagConstraints.anchor = java.awt.GridBagConstraints.EAST;
        gridBagConstraints.weighty = 1.0;
        gridBagConstraints.insets = new java.awt.Insets(4, 0, 0, 6);
        jPanel2.add(cancelButton, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 100;
        gridBagConstraints.gridwidth = 4;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        gridBagConstraints.weightx = 1.0;
        gridBagConstraints.weighty = 1.0;
        jPanel7.add(jPanel2, gridBagConstraints);
        gridBagConstraints = new java.awt.GridBagConstraints();
        gridBagConstraints.gridx = 0;
        gridBagConstraints.gridy = 1;
        gridBagConstraints.fill = java.awt.GridBagConstraints.BOTH;
        getContentPane().add(jPanel7, gridBagConstraints);
        pack();
    }// </editor-fold>//GEN-END:initComponents
private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
for (JTextField f: schemaMappingFields.values()) {
if (f.getText().trim().length() == 0) {
f.setText(DEFAULT_SCHEMA);
}
}
for (JTextField f: sourceSchemaMappingFields.values()) {
if (f.getText().trim().length() == 0) {
f.setText(DEFAULT_SCHEMA);
}
}
theSettings.save("default");
if (insert.getText().trim().length() == 0 && delete.getText().trim().length() == 0) {
exportLabel.setForeground(Color.RED);
} else {
isOk = true;
setVisible(false);
}
}//GEN-LAST:event_jButton1ActionPerformed
    private void scopeGlobalActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_scopeGlobalActionPerformed
        // Intentionally empty: the selected scope is read via getTemporaryTableScope()
        // when the export is started.
        // NOTE(review): the "-scope" option shown in the command-line preview may go
        // stale here — consider calling updateCLIArea(); verify no other listener
        // (added outside this view) already handles this.
    }//GEN-LAST:event_scopeGlobalActionPerformed
private void selectInsertFileMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_selectInsertFileMouseClicked
String fn = UIUtil.choseFile(null, ".", scriptFormat.getFileChooserTitle(), scriptFormat.getFileExtension(), ExportDialog.this, true, false);
if (fn != null) {
insert.setText(fn);
}
}//GEN-LAST:event_selectInsertFileMouseClicked
private void selectDeleteFileMouseClicked(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_selectDeleteFileMouseClicked
String fn = UIUtil.choseFile(null, ".", "SQL Delete Script", ".sql", ExportDialog.this, true, true);
if (fn != null) {
delete.setText(fn);
}
}//GEN-LAST:event_selectDeleteFileMouseClicked
    private void copyButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_copyButtonActionPerformed
        // Copy the whole command line to the system clipboard using the text
        // component's own copy support.
        cliArea.selectAll();
        cliArea.copy();
        // Rebuild the preview afterwards (presumably this also drops the
        // temporary selection made above).
        updateCLIArea();
    }//GEN-LAST:event_copyButtonActionPerformed
    private void sortedCheckBoxActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_sortedCheckBoxActionPerformed
        // Intentionally empty: the check box state is read in fillCLIArgs().
        // NOTE(review): the "-no-sorting" option in the command-line preview may go
        // stale here — consider calling updateCLIArea(); verify no other listener
        // already handles this.
    }//GEN-LAST:event_sortedCheckBoxActionPerformed
    private void scopeSessionActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_scopeSessionActionPerformed
        // Intentionally empty: the selected scope is read via getTemporaryTableScope()
        // when the export is started.
        // NOTE(review): see scopeGlobalActionPerformed — the command-line preview may
        // go stale; verify whether updateCLIArea() should be called here.
    }//GEN-LAST:event_scopeSessionActionPerformed
    private void unicodeActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_unicodeActionPerformed
        // Intentionally empty: the check box state is read in fillCLIArgs().
        // NOTE(review): the "-UTF8" option in the command-line preview may go stale
        // here — consider calling updateCLIArea(); verify no other listener already
        // handles this.
    }//GEN-LAST:event_unicodeActionPerformed
    private void cancelButtonActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cancelButtonActionPerformed
        // Close and dispose the dialog without setting isOk; the caller will
        // see isOk() == false.
        dispose();
    }//GEN-LAST:event_cancelButtonActionPerformed
private void threadsFocusLost(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_threadsFocusLost
String text = threads.getText().trim();
if (text.length() > 0) {
try {
int n = Integer.parseInt(text);
if (n > 100) {
threads.setText("100");
}
} catch (NumberFormatException e) {
threads.setText("");
}
}
}//GEN-LAST:event_threadsFocusLost
    /**
     * Returns whether the dialog was closed via the "Export Data" button
     * with valid input.
     *
     * @return {@code true} if the user confirmed the export
     */
    public boolean isOk() {
        return isOk;
    }
    /**
     * Fills field content into cli-args.
     * Builds the command line (sub-command plus options) corresponding to the
     * current state of the dialog's input fields.
     *
     * @param args the argument-list to fill
     */
    public void fillCLIArgs(List<String> args) {
        boolean withDelete = false;
        // sub-command: "export" if an insert-script is given, otherwise "delete"
        if (insert.getText().trim().length() > 0) {
            args.add(0, "export");
            args.add("-e");
            args.add(insert.getText());
        } else {
            args.add(0, "delete");
        }
        if (delete.getText().trim().length() > 0) {
            withDelete = true;
            args.add("-d");
            args.add(delete.getText().trim());
        }
        // boolean options taken from the check boxes
        if (explain.isSelected()) {
            args.add("-explain");
        }
        if (unicode.isSelected()) {
            args.add("-UTF8");
        }
        if (upsertCheckbox.isSelected()) {
            args.add("-upsert-only");
        }
        if (!sortedCheckBox.isSelected()) {
            args.add("-no-sorting");
        }
        // thread count; silently omitted unless a positive number
        try {
            int nt = Integer.parseInt(threads.getText().trim());
            if (nt > 0) {
                args.add("-threads");
                args.add("" + nt);
            }
        } catch (Exception e) {
        }
        // rows (entities) per statement; silently omitted unless a positive number
        try {
            int nt = Integer.parseInt(rowsPerThread.getText().trim());
            if (nt > 0) {
                args.add("-entities");
                args.add("" + nt);
            }
        } catch (Exception e) {
        }
        // subject condition, only when the user changed it; flattened to one line
        if (!where.getText().equals(subjectCondition)) {
            args.add("-where");
            args.add(ConditionEditor.toMultiLine(where.getText()).replace('\n', ' ').replace('\r', ' '));
        }
        args.add("-format");
        args.add(scriptFormat.toString());
        // XML-specific settings taken from the data model
        if (ScriptFormat.XML.equals(scriptFormat)) {
            args.add("-xml");
            args.add("-xml-root");
            args.add(dataModel.getXmlSettings().rootTag);
            args.add("-xml-date");
            args.add(dataModel.getXmlSettings().datePattern);
            args.add("-xml-timestamp");
            args.add(dataModel.getXmlSettings().timestampPattern);
        }
        // target schema mapping as comma-separated "from=to" pairs;
        // "" stands for the default schema
        StringBuilder schemaMapping = new StringBuilder();
        for (String schema: schemaMappingFields.keySet()) {
            String to = schemaMappingFields.get(schema).getText().trim();
            if (to.equals(DEFAULT_SCHEMA)) {
                to = "";
            }
            if (schemaMapping.length() > 0) {
                schemaMapping.append(",");
            }
            schemaMapping.append((schema.equals(DEFAULT_SCHEMA)? "" : schema) + "=" + to);
        }
        if (schemaMapping.length() > 0) {
            args.add("-schemamapping");
            args.add(schemaMapping.toString());
        }
        // user-defined parameters as semicolon-separated "name=value" pairs,
        // values CSV-encoded
        StringBuilder parameter = new StringBuilder();
        for (String p: parameterEditor.textfieldsPerParameter.keySet()) {
            String v = parameterEditor.textfieldsPerParameter.get(p).getText().trim();
            if (parameter.length() > 0) {
                parameter.append(";");
            }
            parameter.append(p + "=" + CsvFile.encodeCell(v));
        }
        if (parameter.length() > 0) {
            args.add("-parameters");
            args.add(parameter.toString());
        }
        // source schema mapping; schemas outside the export's closure are
        // marked with the "I/" prefix
        Set<String> relevantSchemas = getRelevantSchemas(withDelete);
        StringBuilder sourceSchemaMapping = new StringBuilder();
        for (String schema: sourceSchemaMappingFields.keySet()) {
            String to = sourceSchemaMappingFields.get(schema).getText().trim();
            if (to.equals(DEFAULT_SCHEMA)) {
                to = "";
            }
            if (sourceSchemaMapping.length() > 0) {
                sourceSchemaMapping.append(",");
            }
            if (!relevantSchemas.contains(schema.equals(DEFAULT_SCHEMA)? "" : schema)) {
                to = "I/" + schema;
            }
            sourceSchemaMapping.append((schema.equals(DEFAULT_SCHEMA)? "" : schema) + "=" + to);
        }
        if (sourceSchemaMapping.length() > 0) {
            args.add("-source-schemamapping");
            args.add(sourceSchemaMapping.toString());
        }
        // exclude-from-deletion table list, if such a file exists
        File excludeFromDeletion = new File(DataModel.getExcludeFromDeletionFile());
        if (excludeFromDeletion.exists()) {
            args.add("-t");
            args.add(DataModel.getExcludeFromDeletionFile());
        }
        args.add("-scope");
        args.add(getTemporaryTableScope().toString());
    }
/**
 * Collects the names of all schemas touched by the export.
 *
 * @param withDelete if <code>true</code>, also include schemas of tables that are
 *        only reachable over non-ignored reversal associations (they may be
 *        touched by the delete phase)
 * @return set of original schema names; contains "" for the default schema
 */
private Set<String> getRelevantSchemas(boolean withDelete) {
    Set<Table> tables = subject.closure(true);
    if (withDelete) {
        // Widen the closure by one step: the delete phase can also touch the
        // destinations of non-ignored reversal associations.
        Set<Table> neighbors = new HashSet<Table>();
        for (Table table: tables) {
            for (Association association: table.associations) {
                if (!association.reversalAssociation.isIgnored()) {
                    neighbors.add(association.destination);
                }
            }
        }
        tables.addAll(neighbors);
    }
    Set<String> schemas = new HashSet<String>();
    for (Table table: tables) {
        schemas.add(table.getOriginalSchema(""));
    }
    return schemas;
}
/**
 * Determines the scope of temporary working tables from the radio-button
 * selection in the dialog.
 *
 * @return {@code LOCAL_DATABASE} or {@code SESSION_LOCAL} if the respective
 *         button is selected, otherwise {@code GLOBAL}
 */
public TemporaryTableScope getTemporaryTableScope() {
    if (scopeLocal.isSelected()) {
        return TemporaryTableScope.LOCAL_DATABASE;
    }
    if (scopeSession.isSelected()) {
        return TemporaryTableScope.SESSION_LOCAL;
    }
    // transaction-local scope is currently disabled in the UI
    // if (scopeTransaction.isSelected()) {
    // return TemporaryTableScope.TRANSACTION_LOCAL;
    // }
    return TemporaryTableScope.GLOBAL;
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel additSubsLabel;
private javax.swing.JLabel additSubsLabelTitel;
private javax.swing.ButtonGroup buttonGroup1;
private javax.swing.JButton cancelButton;
private javax.swing.JTextArea cliArea;
public javax.swing.JPanel commandLinePanel;
private javax.swing.JButton copyButton;
private javax.swing.JTextField delete;
public javax.swing.JCheckBox explain;
private javax.swing.JLabel exportLabel;
private javax.swing.JTextField insert;
private javax.swing.JButton jButton1;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel11;
private javax.swing.JLabel jLabel12;
private javax.swing.JLabel jLabel13;
private javax.swing.JLabel jLabel14;
private javax.swing.JLabel jLabel15;
private javax.swing.JLabel jLabel16;
private javax.swing.JLabel jLabel18;
private javax.swing.JLabel jLabel19;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel20;
private javax.swing.JLabel jLabel21;
private javax.swing.JLabel jLabel22;
private javax.swing.JLabel jLabel23;
private javax.swing.JLabel jLabel24;
private javax.swing.JLabel jLabel25;
private javax.swing.JLabel jLabel26;
private javax.swing.JLabel jLabel27;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel5;
private javax.swing.JLabel jLabel6;
private javax.swing.JLabel jLabel7;
private javax.swing.JLabel jLabel8;
private javax.swing.JLabel jLabel9;
private javax.swing.JPanel jPanel1;
private javax.swing.JPanel jPanel2;
private javax.swing.JPanel jPanel4;
private javax.swing.JPanel jPanel6;
private javax.swing.JPanel jPanel7;
private javax.swing.JPanel jPanel8;
private javax.swing.JScrollPane jScrollPane1;
private javax.swing.JScrollPane jScrollPane2;
private javax.swing.JLabel openWhereEditor;
private javax.swing.JPanel parameterPanel;
private javax.swing.JLabel placeholder;
private javax.swing.JLabel placeholder1;
private javax.swing.JTextField rowsPerThread;
private javax.swing.JPanel schemaMappingPanel;
private javax.swing.JRadioButton scopeGlobal;
private javax.swing.JRadioButton scopeLocal;
private javax.swing.JRadioButton scopeSession;
private javax.swing.JLabel selectDeleteFile;
private javax.swing.JLabel selectInsertFile;
private javax.swing.JCheckBox sortedCheckBox;
public javax.swing.JPanel sourceSchemaMappingPanel;
private javax.swing.JLabel subjectTable;
private javax.swing.JTextField threads;
public javax.swing.JCheckBox unicode;
private javax.swing.JCheckBox upsertCheckbox;
private javax.swing.JTextField where;
// End of variables declaration//GEN-END:variables
private Icon loadIcon;
private Icon conditionEditorIcon;
private Icon conditionEditorSelectedIcon;

// Instance initializer: loads the dialog's icons from the classpath.
// The original code repeated the same try/catch three times; the loading
// is now factored into readImage().
{
    String dir = "/net/sf/jailer/resource";
    loadIcon = readImage(dir + "/load.png");
    conditionEditorIcon = readImage(dir + "/edit.png");
    conditionEditorSelectedIcon = readImage(dir + "/edit_s.png");
}

/**
 * Loads an image resource from the classpath.
 *
 * @param resourcePath absolute classpath location of the image
 * @return the icon, or <code>null</code> if the resource cannot be loaded
 *         (the dialog stays usable without icons)
 */
private Icon readImage(String resourcePath) {
    try {
        return new ImageIcon(getClass().getResource(resourcePath));
    } catch (Exception e) {
        e.printStackTrace();
        return null;
    }
}
private static final long serialVersionUID = 952553009821662964L;
}
| pellcorp/jailer | src/main/net/sf/jailer/ui/ExportDialog.java | Java | apache-2.0 | 59,977 |
var path = require('path'),
sys = require('util'),
url = require('url'),
request,
fs = require('fs');
var less = {
    version: [1, 4, 0],
    Parser: require('./parser').Parser,
    importer: require('./parser').importer,
    tree: require('./tree'),

    // Renders `input` (LESS source) to CSS. With a callback, the result is
    // delivered as callback(err, css); without one, an EventEmitter is
    // returned that fires 'success' with the CSS or 'error' with the error.
    render: function (input, options, callback) {
        options = options || {};

        if (typeof(options) === 'function') {
            callback = options, options = {};
        }

        var parser = new(less.Parser)(options),
            ee;

        if (callback) {
            parser.parse(input, function (e, root) {
                callback(e, root && root.toCSS && root.toCSS(options));
            });
        } else {
            ee = new(require('events').EventEmitter);

            process.nextTick(function () {
                parser.parse(input, function (e, root) {
                    if (e) { ee.emit('error', e) }
                    else { ee.emit('success', root.toCSS(options)) }
                });
            });
            return ee;
        }
    },

    // Formats a parse/eval error context into a human-readable (optionally
    // colorized) message including a three-line source extract.
    formatError: function(ctx, options) {
        options = options || {};

        var message = "";
        var extract = ctx.extract;
        var error = [];
        var stylize = options.color ? require('./lessc_helper').stylize : function (str) { return str };

        // only output a stack if it isn't a less error
        if (ctx.stack && !ctx.type) { return stylize(ctx.stack, 'red') }

        if (!ctx.hasOwnProperty('index') || !extract) {
            return ctx.stack || ctx.message;
        }

        if (typeof(extract[0]) === 'string') {
            error.push(stylize((ctx.line - 1) + ' ' + extract[0], 'grey'));
        }

        if (extract[1]) {
            error.push(ctx.line + ' ' + extract[1].slice(0, ctx.column)
                + stylize(stylize(stylize(extract[1][ctx.column], 'bold')
                + extract[1].slice(ctx.column + 1), 'red'), 'inverse'));
        }

        if (typeof(extract[2]) === 'string') {
            error.push(stylize((ctx.line + 1) + ' ' + extract[2], 'grey'));
        }
        error = error.join('\n') + stylize('', 'reset') + '\n';

        message += stylize(ctx.type + 'Error: ' + ctx.message, 'red');
        ctx.filename && (message += stylize(' in ', 'red') + ctx.filename +
            stylize(':' + ctx.line + ':' + ctx.column, 'grey'));

        message += '\n' + error;

        if (ctx.callLine) {
            // BUG FIX: these two literals were '/n' (slash + n), which appended
            // a literal "/n" to the message instead of a line break.
            message += stylize('from ', 'red') + (ctx.filename || '') + '\n';
            message += stylize(ctx.callLine, 'grey') + ' ' + ctx.callExtract + '\n';
        }

        return message;
    },

    // Writes a formatted error to stderr unless options.silent is set.
    writeError: function (ctx, options) {
        options = options || {};
        if (options.silent) { return }
        sys.error(less.formatError(ctx, options));
    }
};
// Require each tree-node module purely for its side effect: requiring a
// module registers the corresponding node type on less.tree.
['color', 'directive', 'operation', 'dimension',
 'keyword', 'variable', 'ruleset', 'element',
 'selector', 'quoted', 'expression', 'rule',
 'call', 'url', 'alpha', 'import',
 'mixin', 'comment', 'anonymous', 'value',
 'javascript', 'assignment', 'condition', 'paren',
 'media', 'ratio', 'unicode-descriptor', 'extend'
].forEach(function (n) {
    require('./tree/' + n);
});
var isUrlRe = /^(?:https?:)?\/\//i;
// Resolves and parses an @import, either over http(s) (via the optional
// 'request' dependency) or from the local filesystem, then invokes
// callback(err, root, pathname).
less.Parser.importer = function (file, paths, callback, env) {
    var pathname, dirname, data;

    // Parses the fetched contents with a child parser and reports the
    // resulting root node (or error) to the caller.
    function parseFile(e, data) {
        if (e) { return callback(e); }

        env = new less.tree.parseEnv(env);

        var j = file.lastIndexOf('/');

        // Pass on an updated rootpath if path of imported file is relative and file
        // is in a (sub|sup) directory
        //
        // Examples:
        // - If path of imported file is 'module/nav/nav.less' and rootpath is 'less/',
        //   then rootpath should become 'less/module/nav/'
        // - If path of imported file is '../mixins.less' and rootpath is 'less/',
        //   then rootpath should become 'less/../'
        if(env.relativeUrls && !/^(?:[a-z-]+:|\/)/.test(file) && j != -1) {
            env.rootpath = env.rootpath + file.slice(0, j+1); // append (sub|sup) directory path of imported file
        }
        env.contents[pathname] = data;      // Updating top importing parser content cache.
        env.paths = [dirname].concat(paths);
        env.filename = pathname;

        new(less.Parser)(env).parse(data, function (e, root) {
            callback(e, root, pathname);
        });
    };

    var isUrl = isUrlRe.test( file );
    if (isUrl || isUrlRe.test(paths[0])) {
        // Remote import: lazily load the optional 'request' dependency.
        if (request === undefined) {
            try { request = require('request'); }
            catch(e) { request = null; }
        }
        if (!request) {
            callback({ type: 'File', message: "optional dependency 'request' required to import over http(s)\n" });
            return;
        }

        var urlStr = isUrl ? file : url.resolve(paths[0], file),
            urlObj = url.parse(urlStr);

        request.get(urlStr, function (error, res, body) {
            // BUG FIX: the transport error must be checked first and must
            // abort the import. The original code dereferenced res.statusCode
            // before the error check (res is undefined on transport errors)
            // and fell through to parseFile() after reporting the error.
            if (error) {
                callback({ type: 'File', message: "resource '" + urlStr + "' gave this Error:\n    "+ error +"\n" });
                return;
            }
            if (res.statusCode === 404) {
                callback({ type: 'File', message: "resource '" + urlStr + "' was not found\n" });
                return;
            }
            if (!body) {
                sys.error( 'Warning: Empty body (HTTP '+ res.statusCode + ') returned by "' + urlStr +'"' );
            }
            pathname = urlStr;
            dirname = urlObj.protocol +'//'+ urlObj.host + urlObj.pathname.replace(/[^\/]*$/, '');
            parseFile(null, body);
        });
    } else {
        // TODO: Undo this at some point,
        // or use different approach.
        var paths = [].concat(paths);
        paths.push('.');

        // Probe each search path until the file is found.
        for (var i = 0; i < paths.length; i++) {
            try {
                pathname = path.join(paths[i], file);
                fs.statSync(pathname);
                break;
            } catch (e) {
                pathname = null;
            }
        }
        paths = paths.slice(0, paths.length - 1);

        if (!pathname) {
            if (typeof(env.errback) === "function") {
                env.errback(file, paths, callback);
            } else {
                callback({ type: 'File', message: "'" + file + "' wasn't found.\n" });
            }
            return;
        }

        dirname = path.dirname(pathname);

        if (env.syncImport) {
            try {
                data = fs.readFileSync(pathname, 'utf-8');
                parseFile(null, data);
            } catch (e) {
                parseFile(e);
            }
        } else {
            fs.readFile(pathname, 'utf-8', parseFile);
        }
    }
}
require('./env');
require('./functions');
require('./colors');
for (var k in less) { exports[k] = less[k] }
| ricardobeat/less-less | lib/less/index.js | JavaScript | apache-2.0 | 7,200 |
# RestPack serializer for Workflow resources. Exposes workflow attributes
# plus versioning information that combines the model version with the
# version of the language-resolved content.
class WorkflowSerializer
  include RestPack::Serializer
  include FilterHasMany
  include MediaLinksSerializer

  attributes :id, :display_name, :tasks, :classifications_count, :subjects_count,
    :created_at, :updated_at, :first_task, :primary_language,
    :version, :content_language, :prioritized, :grouped, :pairwise,
    :retirement, :retired_set_member_subjects_count, :href, :active,
    :aggregation

  can_include :project, :subject_sets, :tutorial_subject, :expert_subject_sets
  can_filter_by :active

  media_include :attached_images

  # Renames the auto-generated plural link key 'workflows.expert_subject_sets'
  # to the singular 'workflows.expert_subject_set'.
  def self.links
    links = super
    ess = links.delete('workflows.expert_subject_sets')
    links['workflows.expert_subject_set'] = ess
    links
  end

  # Combined version string: "<model version>.<content version>".
  def version
    "#{ModelVersion.version_number(@model)}.#{ModelVersion.version_number(content)}"
  end

  # Language code of the resolved content, or nil when no content matches.
  def content_language
    content.language if content
  end

  # Returns the workflow's tasks with translated strings injected from the
  # resolved content; an empty hash when no content is available.
  def tasks
    if content
      tasks = @model.tasks.dup
      TasksVisitors::InjectStrings.new(content.strings).visit(tasks)
      tasks
    else
      {}
    end
  end

  # Memoizes the content matching the languages requested in the serializer
  # context.
  def content
    @content ||= @model.content_for(@context[:languages])
  end
end
| edpaget/Panoptes | app/serializers/workflow_serializer.rb | Ruby | apache-2.0 | 1,164 |
var db = require('../db');
var Schema = db.Schema;
// Mongoose schema for a study participant linked to a Fitbit account.
var ParticipantSchema = Schema({
    id:{ type: Schema.Types.ObjectId},            // explicit id field (mongoose also adds _id)
    fitbitid: {type: String, required: true},     // Fitbit user identifier
    dateofbirth: {type: String, required: true},  // NOTE(review): stored as a free-form string — format not enforced here
    name: {type: String, required: true},
    gender: {type: String},
    age: {type: Number},
    fitbittoken: [Schema.Types.Mixed]             // presumably OAuth token payload(s); shape not validated — confirm against writer
});

module.exports = db.model('Participant', ParticipantSchema);
| KHP-Informatics/RADAR-platform | producers/fitbit/models/ParticipantModel.js | JavaScript | apache-2.0 | 419 |
/**
* The jdbc module provides access to java database connectivity (JDBC).
* Typically you acquire access to the module by naming it as a dependency in a call to `define()`
* when defining your own module.
* The object passed to the `define()` callback to represent the dependency can be considered
* a singleton `jdbc` class, which exposes all its functionality as static methods.
*
* @module jdbc
*/
(function(exports){
/**
* This is not a standalone class. Rather it describes the properties required to establish a JDBC connection.
* As such it appears as type for arguments to various methods that use JDBC.
*
* @Class JDBCConnectionProperties
*/
/**
* The name for the connection. If the name is present when the JDBCConnectionProperties object is passed to a method
* that creates a connection, then this name will be used to store this connection in a connection cache. The name can
* then later be used to retrieve the connection.
* If passed to functions that just need a connection to work with, then an attempt will be made to retrieve the connection.
* If the connection cannot be retrieved, and the JDBCConnectionProperties also contains properties that can be used to create
* a connection, then a connection will be created and stored under this name.
*
* @property {name} name The name that will be used to store this connection in the connection cache.
* @optional
*/
/**
*
* If present, the driver property will be used to load the JDBC driver class.
* This should take care of registering the driver with the JDBC DriverManager.
*
* @property {string} driver The fully qualified (java) class name of the JDBC driver that manages the connection.
* @optional
*/
/**
* A class to create JDBC database connections.
* Can also load JDBC drivers dynamically from a jar file.
*
* Example:
*
* (function(){
* define("src/jdbc/jdbc.js", function(jdbc){
*
* var connection = jdbc.openConnection({
* //optional: specify a name so you can refer to this connection by name later on`
* name: "My Sakila Connection",
* //The fully qualified class name of the driver. Optional if you're sure the driver was already registered.
* driver: "com.mysql.jdbc.Driver",
* //A string that can be resolved as a path identifying a jar file that contains the driver.
* //This is required to load drivers dyntamically from jar files that are not on the class path.
* jar: "/usr/share/java/mysql-connection-java-5.1.38-bin.jar",
* //The driver specific JDBC url to connect to your database.
* url: "jdbc:mysql://localhost/sakila",
* //JDBC user.
* user: "sakila",
* //JDBC password.
* password: "sakila"
* });
*
* ...use the connection...
*
* //get an existing connection
* var connection = jdbc.getConnection("My Sakila Connection");
*
* //close existing connection
* jdbc.closeConnection("My Sakila Connection");
*
* //You can also explicitly close the connection itself:
* connection.close();
*
* });
* })();
*
* @class JDBC
* @static
*/
var Class = Java.type("java.lang.Class");
var DriverManager = Java.type("java.sql.DriverManager");
var connections = {};
// Loads a JDBC driver class from a jar file that is not on the class path,
// using a dedicated URLClassLoader.
// NOTE(review): currently unused — loadDriver() goes through DriverDelegate
// for the jar case instead.
function loadDriverClassFromJar(jar, driver){
    var File = Java.type("java.io.File");
    var jarFile = new File(jar);
    var jarUrl = jarFile.toURI().toURL();
    var URLClassLoader = Java.type("java.net.URLClassLoader");
    var classLoader = new URLClassLoader([jarUrl], jarFile.getClass().getClassLoader());
    return Class.forName(driver, true, classLoader);
}
/**
*
* Load a JDBC driver, and register it with the JDBC DriverManager.
*
* The `conf` argument can either be a string, or an object.
*
* If it is a string, it should be the fully qualified class name of the driver.
* This class will then be loaded and this should automatically register the driver with the driver manager.
* In order for this to work, the driver should already be on the system classpath.
* To dynamically load a JDBC driver from a jar that is not on the classpath, consider passing a `conf` object instead.
*
* If `conf` is an object, it should have a `driver` key, which should be the fully qualified class name of the driver.
*
* Optionally, the conf object can contain a `jar` key.
* If a `jar` key is specified, then an attempt will be made to load the driver class from the specified jarfile.
* To register the driver with the DriverManager, it is passed to an instance of the jjsutils DriverDelegate.
* The DriverDelegate is a utility class that wraps the dyncamically loaded Driver from the jar.
* Since the DriverDelegate is assumed to be in the classpath, this can be succesfully registred by the DriverManager.
*
* @method loadDriver
* @param conf {string | JDBCConnectionProperties}
* @static
*/
function loadDriver(conf){
    var driver, jar, driverClass;
    switch (typeof(conf)) {
        case "string":
            driver = conf;
            break;
        case "object":
            driver = conf.driver;
            jar = conf.jar;
            break;
        default:
            throw new Error("Configuration must either be a driverName or an object with a driver key, and optionally a jar key.");
    }
    if (jar) {
        // Instantiating DriverDelegate loads the driver from the jar and is
        // assumed to register it with the DriverManager (see module docs).
        // FIX: the original assigned the delegate to a shadowing `var driver`,
        // pointlessly overwriting the driver class name.
        var driverDelegateClassName = "org.jjsutils.jdbc.DriverDelegate";
        try {
            var DriverDelegate = Java.type(driverDelegateClassName);
            new DriverDelegate(driver, jar);
        }
        catch (ex){
            ex.printStackTrace();
            throw new Error(
                "Error getting Java Type " + driverDelegateClassName + ".\n\n" +
                "Maybe you forgot to pass -J-Djava.class.path=lib/jjsutils-yyyymmdd.jar to the jjs executable?"
            );
        }
    }
    else {
        driverClass = Class.forName(driver);
    }
    // NOTE: undefined in the jar case, as before — callers ignore the result.
    return driverClass;
}
// Opens a connection via the DriverManager and, when conf.name is present,
// caches it under that name for later getConnection() lookups.
// FIX: the cache write used `connections[name]`, but `name` is not defined
// in this scope — it must be `conf.name`. (The unused `driverManager`
// parameter was dropped; the only caller passes a single argument.)
function connect(conf){
    var connection = DriverManager.getConnection(conf.url, conf.user, conf.password);
    if (conf.name) {
        connections[conf.name] = connection;
    }
    return connection;
}
/**
* Opens a JDBC connection based on properties passed in the argument configuration object.
*
* The configuration object supports the following properties:
* * driver
* * jar
* * url
* * user
* * password
* * name
*
* The driver and jar properties maybe used by `loadDriver()` to load the driver.
* The url, user and password properties are used to obtain a connection from the JDBC DriverManager
* The name (if present) will be used to store this connection in a cache of connections.
* This way you can later refer to this connection by name using `getConnection()`.
*
* @method openConnection
* @static
* @param {JDBCConnectionProperties} conf An object that specifies data required to actually establish the JDBC connection.
* @return {java.sql.Connection} Retuns the JDBC connection.
*/
// Opens (and possibly caches) a JDBC connection described by `conf`.
// Registers the driver first when the configuration names one.
function openConnection(conf){
    if (conf.driver) {
        loadDriver(conf);
    }
    return connect(conf);
}
/**
* Obtain a connection created prior with `openConnection()`.
*
* @method getConnection
* @static
* @param {string} name The name of a connection created prior using `openConnection()`.
* @return {java.sql.Connection} The connection.
*/
function getConnection(name){
    // Plain lookup in the module-level connection cache; undefined if absent.
    return connections[name];
}
// Resolves the `connection` argument (a cache name or a configuration
// object) to an actual java.sql.Connection, opening one if necessary.
function obtainConnection(connection) {
    var type = typeof(connection);
    var conn;
    if (type === "string") {
        if (!(conn = getConnection(connection))) {
            throw new Error("No such connection: " + connection);
        }
    }
    else
    if (type === "object") {
        var name = connection.name;
        if (typeof(name) === "string") {
            // FIX: the original called getConnection(connection), passing the
            // whole config object as cache key — that never matches a cached
            // name, so a fresh connection was opened on every call.
            conn = getConnection(name);
        }
        if (!conn) {
            conn = openConnection(connection);
        }
    }
    if (!conn) {
        throw new Error("Could not open connection.");
    }
    return conn;
}
/**
*
* Execute a SQL-SELECT statement (a query) and obtain the results.
*
* @method query
* @static
* @param {string | object} connection Either a connection name, or an object such as one would pass to `openConnection()`.
* @param {string} sql A SQL SELECT statement.
* @return {java.sql.ResultSet} A resultset object that represents the result of the SQL query.
*/
// Executes a SQL SELECT on the resolved connection and returns the ResultSet.
// The original wrapped the work in a try/catch that only rethrew — that dead
// handler was removed; errors propagate unchanged.
function executeQuery(connection, sql){
    var conn = obtainConnection(connection);
    var stmt = conn.createStatement();
    return stmt.executeQuery(sql);
}
/**
 *
 * Execute a non-query SQL statement (a INSERT, UPDATE, DELETE or DDL statement) and obtain the results.
 *
 * @method execute
 * @static
 * @param {string | object} connection Either a connection name, or an object such as one would pass to `openConnection()`.
 * @param {string} sql A SQL DDL statement, or INSERT, UPDATE, DELETE statement.
 * @return {number} The update count (affected rows), or 0 for statements that return nothing.
 */
// DOC FIX: the @return tag wrongly claimed java.sql.ResultSet —
// Statement.executeUpdate returns an int update count.
// The try/catch that only rethrew was removed; errors propagate unchanged.
function executeUpdate(connection, sql){
    var conn = obtainConnection(connection);
    var stmt = conn.createStatement();
    return stmt.executeUpdate(sql);
}
/**
* Close a connection created prior with `openConnection()`, and remove it from the connection cache.
*
* @method closeConnection
* @static
* @param {string} name The name of a connection created prior using `openConnection()`.
*/
// Closes the cached connection registered under `name` and evicts it from
// the cache. A name that is not in the cache is silently ignored.
function closeConnection(name) {
    var connection = connections[name];
    if (connection === undefined) {
        return;
    }
    delete connections[name];
    connection.close();
}
/**
* Closes all connections stored in the connection cache.
*
* @method closeAllConnections
* @static
*/
// Closes every cached connection.
function closeAllConnections(){
    var name;
    for (name in connections) {
        try {
            closeConnection(name);
        }
        catch (e) {
            // best effort: ignore the failure and keep closing the rest
        }
    }
}
return define(function(){
return {
loadDriver: loadDriver,
openConnection: openConnection,
query: executeQuery,
execute: executeUpdate,
getConnection: getConnection,
closeConnection: closeConnection,
closeAll: closeAllConnections
};
});
})(this); | rpbouman/jjsutils | src/jdbc/jdbc.js | JavaScript | apache-2.0 | 10,572 |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.android;
import com.facebook.buck.android.toolchain.AdbToolchain;
import com.facebook.buck.android.toolchain.AndroidBuildToolsLocation;
import com.facebook.buck.android.toolchain.AndroidPlatformTarget;
import com.facebook.buck.android.toolchain.AndroidSdkLocation;
import com.facebook.buck.android.toolchain.impl.AndroidPlatformTargetProducer;
import com.facebook.buck.io.file.MorePathsForTests;
import com.facebook.buck.io.filesystem.impl.FakeProjectFilesystem;
import java.util.Optional;
/**
 * Test helper that builds a default {@link AndroidPlatformTarget} rooted at a fake
 * root-relative "AndroidSDK" directory.
 */
public class TestAndroidPlatformTargetFactory {

  /**
   * Creates the default platform target backed by a {@link FakeProjectFilesystem},
   * with fake build-tools and platform-tools/adb paths and no
   * aapt/aapt2/zipalign overrides.
   */
  public static AndroidPlatformTarget create() {
    return AndroidPlatformTargetProducer.getDefaultPlatformTarget(
        new FakeProjectFilesystem(),
        AndroidBuildToolsLocation.of(
            MorePathsForTests.rootRelativePath("AndroidSDK").resolve("build-tools")),
        AndroidSdkLocation.of(MorePathsForTests.rootRelativePath("AndroidSDK")),
        /* aaptOverride= */ Optional.empty(),
        /* aapt2Override= */ Optional.empty(),
        /* zipalignOverride= */ Optional.empty(),
        AdbToolchain.of(
            MorePathsForTests.rootRelativePath("AndroidSDK").resolve("platform-tools/adb")));
  }
}
| JoelMarcey/buck | test/com/facebook/buck/android/TestAndroidPlatformTargetFactory.java | Java | apache-2.0 | 1,811 |
/* eslint-env jest */
import eventize, {
PRIO_DEFAULT,
EVENT_CATCH_EM_ALL,
} from '../eventize';
describe('on()', () => {
describe('eventName is a string', () => {
describe('on( eventName, priority, listenerFunc, listenerObject )', () => {
  // Two subscriptions to 'foo': an inline function with priority 7 that
  // captures `this`, and a jest mock with priority 0. A single emit must
  // reach both, binding `this` to listenerObject, and the returned
  // unsubscribe handle must expose the listener metadata.
  const listenerObject = {};
  const listenerFunc = jest.fn();
  const obj = eventize({});

  let context;
  const unsubscribe = obj.on('foo', 7, function () { // eslint-disable-line
    context = this;
  }, listenerObject);
  obj.on('foo', 0, listenerFunc, listenerObject);

  obj.emit('foo', 'bar', 666);

  it('emit() calls the listener', () => {
    expect(listenerFunc).toHaveBeenCalledWith('bar', 666);
  });
  it('emit() calls the listener with correct context', () => {
    expect(context).toBe(listenerObject);
  });
  it('priority is correct', () => {
    expect(unsubscribe.listener.priority).toBe(7);
  });
  it('eventName is correct', () => {
    expect(unsubscribe.listener.eventName).toBe('foo');
  });
  it('isCatchEmAll is correct', () => {
    expect(unsubscribe.listener.isCatchEmAll).toBe(false);
  });
});
describe('on( eventName, priority, listenerFuncName, listenerObject )', () => {
const listenerObject = {
foo(...args) {
this.args = args;
},
};
const obj = eventize({});
const unsubscribe = obj.on('foo', 9, 'foo', listenerObject);
obj.emit('foo', 'bar', 666);
it('emit() calls the listener', () => {
expect(listenerObject.args).toEqual(['bar', 666]);
});
it('priority is correct', () => {
expect(unsubscribe.listener.priority).toBe(9);
});
it('eventName is correct', () => {
expect(unsubscribe.listener.eventName).toBe('foo');
});
it('isCatchEmAll is correct', () => {
expect(unsubscribe.listener.isCatchEmAll).toBe(false);
});
});
describe('on( eventName, priority, listenerFunc )', () => {
const listenerFunc = jest.fn();
const obj = eventize({});
const unsubscribe = obj.on('foo', 11, listenerFunc);
obj.emit('foo', 'plah', 669);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledWith('plah', 669);
});
it('priority is correct', () => {
expect(unsubscribe.listener.priority).toBe(11);
});
it('eventName is correct', () => {
expect(unsubscribe.listener.eventName).toBe('foo');
});
it('isCatchEmAll is correct', () => {
expect(unsubscribe.listener.isCatchEmAll).toBe(false);
});
});
describe('on( eventName, priority, object )', () => {
const listenerFunc = jest.fn();
let listenerContext;
const listener = {
foo(...args) {
listenerContext = this;
listenerFunc(...args);
},
};
const obj = eventize({});
const unsubscribe = obj.on('foo', 13, listener);
it('priority is correct', () => {
expect(unsubscribe.listener.priority).toBe(13);
});
it('eventName is correct', () => {
expect(unsubscribe.listener.eventName).toBe('foo');
});
it('isCatchEmAll is correct', () => {
expect(unsubscribe.listener.isCatchEmAll).toBe(false);
});
obj.emit('foo', 'plah', 667);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledWith('plah', 667);
});
it('emit() calls the listener with correct context', () => {
expect(listener).toBe(listenerContext);
});
});
describe('on( eventName, listenerFunc, listenerObject )', () => {
const listenerObject = {};
const listenerFunc = jest.fn();
const obj = eventize({});
let context;
const unsubscribe = obj.on('foo', function () { // eslint-disable-line
context = this;
}, listenerObject);
obj.on('foo', listenerFunc, listenerObject);
obj.emit('foo', 'bar', 666);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledWith('bar', 666);
});
it('emit() calls the listener with correct context', () => {
expect(context).toBe(listenerObject);
});
it('priority is correct', () => {
expect(unsubscribe.listener.priority).toBe(PRIO_DEFAULT);
});
it('eventName is correct', () => {
expect(unsubscribe.listener.eventName).toBe('foo');
});
it('isCatchEmAll is correct', () => {
expect(unsubscribe.listener.isCatchEmAll).toBe(false);
});
});
describe('on( eventName, listenerFunc )', () => {
const listenerFunc = jest.fn();
const obj = eventize({});
const unsubscribe = obj.on('foo', listenerFunc);
obj.emit('foo', 'plah', 669);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledWith('plah', 669);
});
it('priority is correct', () => {
expect(unsubscribe.listener.priority).toBe(PRIO_DEFAULT);
});
it('eventName is correct', () => {
expect(unsubscribe.listener.eventName).toBe('foo');
});
it('isCatchEmAll is correct', () => {
expect(unsubscribe.listener.isCatchEmAll).toBe(false);
});
});
}); // eventName is a string
describe('eventName is an array', () => {
describe('on( eventNameArray, priority, listenerFunc, listenerObject )', () => {
const listenerObject = {};
const listenerFunc = jest.fn();
const obj = eventize({});
const context = [];
const { listeners } = obj.on(['foo', 'fu'], 7, function () { // eslint-disable-line
context.push(this);
}, listenerObject);
obj.on(['foo', 'fu'], 0, listenerFunc, listenerObject);
obj.emit(['foo', 'fu'], 'bar', 666);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledTimes(2);
expect(listenerFunc).toHaveBeenCalledWith('bar', 666);
});
it('emit() calls the listener with correct context', () => {
expect(context).toEqual([listenerObject, listenerObject]);
});
it('priorites are correct', () => {
expect(listeners[0].priority).toBe(7);
expect(listeners[1].priority).toBe(7);
});
it('eventNames are correct', () => {
expect(listeners[0].eventName).toBe('foo');
expect(listeners[1].eventName).toBe('fu');
});
it('isCatchEmAll is correct', () => {
expect(listeners[0].isCatchEmAll).toBe(false);
expect(listeners[1].isCatchEmAll).toBe(false);
});
});
describe('on( eventName*, priority, listenerFuncName, listenerObject )', () => {
const mockFunc = jest.fn();
const listenerObject = {
foo(...args) {
this.context = this;
this.args = args;
mockFunc(...args);
},
};
const obj = eventize({});
const { listeners } = obj.on(['foo', 'fu'], 9, 'foo', listenerObject);
obj.emit(['foo', 'fu'], 'bar', 666);
it('emit() calls the listener', () => {
expect(mockFunc).toHaveBeenCalledTimes(2);
expect(listenerObject.args).toEqual(['bar', 666]);
expect(listenerObject.context).toBe(listenerObject);
});
it('priorities are correct', () => {
expect(listeners[0].priority).toBe(9);
expect(listeners[1].priority).toBe(9);
});
it('eventNames is correct', () => {
expect(listeners[0].eventName).toBe('foo');
expect(listeners[1].eventName).toBe('fu');
});
it('isCatchEmAll is correct', () => {
expect(listeners[0].isCatchEmAll).toBe(false);
expect(listeners[1].isCatchEmAll).toBe(false);
});
});
describe('on( eventName*, priority, listenerFunc )', () => {
const listenerFunc = jest.fn();
const obj = eventize({});
const { listeners } = obj.on(['foo', 'bar'], 11, listenerFunc);
obj.emit(['foo', 'bar'], 'plah', 669);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledTimes(2);
expect(listenerFunc).toHaveBeenCalledWith('plah', 669);
});
it('priorities are correct', () => {
expect(listeners[0].priority).toBe(11);
expect(listeners[1].priority).toBe(11);
});
it('eventNames are correct', () => {
expect(listeners[0].eventName).toBe('foo');
expect(listeners[1].eventName).toBe('bar');
});
it('isCatchEmAll is correct', () => {
expect(listeners[0].isCatchEmAll).toBe(false);
expect(listeners[1].isCatchEmAll).toBe(false);
});
});
describe('on( eventName*, priority, object )', () => {
const listenerFuncFoo = jest.fn();
const listenerFuncBar = jest.fn();
const obj = eventize({});
const { listeners } = obj.on(['foo', 'bar'], 13, {
foo: listenerFuncFoo,
bar: listenerFuncBar,
});
it('priorities are correct', () => {
expect(listeners[0].priority).toBe(13);
expect(listeners[1].priority).toBe(13);
});
it('eventNames are correct', () => {
expect(listeners[0].eventName).toBe('foo');
expect(listeners[1].eventName).toBe('bar');
});
it('isCatchEmAll is correct', () => {
expect(listeners[0].isCatchEmAll).toBe(false);
expect(listeners[1].isCatchEmAll).toBe(false);
});
obj.emit(['foo', 'bar'], 'plah', 667);
it('emit() calls the :foo listener', () => {
expect(listenerFuncFoo).toHaveBeenCalledWith('plah', 667);
});
it('emit() calls the :bar listener', () => {
expect(listenerFuncBar).toHaveBeenCalledWith('plah', 667);
});
});
describe('on( eventName*, listenerFunc, listenerObject )', () => {
const listenerObject = {};
const listenerFunc = jest.fn();
const obj = eventize({});
const contexts = [];
const { listeners } = obj.on(['foo', 'bar'], function fooBar(...args) {
contexts.push(this);
listenerFunc(...args);
}, listenerObject);
obj.emit(['foo', 'bar'], 'plah', 669);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledTimes(2);
expect(listenerFunc).toHaveBeenCalledWith('plah', 669);
});
it('priorities are correct', () => {
expect(listeners[0].priority).toBe(PRIO_DEFAULT);
expect(listeners[1].priority).toBe(PRIO_DEFAULT);
});
it('eventNames are correct', () => {
expect(listeners[0].eventName).toBe('foo');
expect(listeners[1].eventName).toBe('bar');
});
it('isCatchEmAll is correct', () => {
expect(listeners[0].isCatchEmAll).toBe(false);
expect(listeners[1].isCatchEmAll).toBe(false);
});
it('emit() calls the listener with correct context', () => {
expect(contexts[0]).toBe(listenerObject);
expect(contexts[1]).toBe(listenerObject);
});
});
describe('on( eventName*, listenerFunc )', () => {
const listenerFunc = jest.fn();
const obj = eventize({});
const { listeners } = obj.on(['foo', 'bar'], listenerFunc);
obj.emit(['foo', 'bar'], 'plah', 669);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledTimes(2);
expect(listenerFunc).toHaveBeenCalledWith('plah', 669);
});
it('priorities are correct', () => {
expect(listeners[0].priority).toBe(PRIO_DEFAULT);
expect(listeners[1].priority).toBe(PRIO_DEFAULT);
});
it('eventNames are correct', () => {
expect(listeners[0].eventName).toBe('foo');
expect(listeners[1].eventName).toBe('bar');
});
it('isCatchEmAll is correct', () => {
expect(listeners[0].isCatchEmAll).toBe(false);
expect(listeners[1].isCatchEmAll).toBe(false);
});
});
describe('on( eventName*, listenerFunc ) supports [ [eventName, PRIO], .. ]', () => {
const listenerFunc = jest.fn();
const obj = eventize({});
const { listeners } = obj.on([['foo', 500], ['bar', 1000]], listenerFunc);
obj.emit(['foo', 'bar'], 'plah', 669);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledTimes(2);
expect(listenerFunc).toHaveBeenCalledWith('plah', 669);
});
it('priorities are correct', () => {
expect(listeners[0].priority).toBe(500);
expect(listeners[1].priority).toBe(1000);
});
it('eventNames are correct', () => {
expect(listeners[0].eventName).toBe('foo');
expect(listeners[1].eventName).toBe('bar');
});
it('isCatchEmAll is correct', () => {
expect(listeners[0].isCatchEmAll).toBe(false);
expect(listeners[1].isCatchEmAll).toBe(false);
});
});
}); // eventName is an array
describe('on( priority, listenerFunc, listenerObject ) => object.on( "*", priority, listenerFunc, listenerObject )', () => {
const listenerObject = {};
const listenerFunc = jest.fn();
const obj = eventize({});
let context;
const unsubscribe = obj.on(7, function () { // eslint-disable-line
context = this;
}, listenerObject);
obj.on(listenerFunc, listenerObject);
obj.emit('foo', 'bar', 666);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledWith('bar', 666);
});
it('emit() calls the listener with correct context', () => {
expect(context).toBe(listenerObject);
});
it('priority is correct', () => {
expect(unsubscribe.listener.priority).toBe(7);
});
it('eventName is correct', () => {
expect(unsubscribe.listener.eventName).toBe(EVENT_CATCH_EM_ALL);
});
it('isCatchEmAll is correct', () => {
expect(unsubscribe.listener.isCatchEmAll).toBe(true);
});
});
describe('on( priority, listenerFunc ) => object.on( "*", priority, listenerFunc )', () => {
const listenerFunc = jest.fn();
const obj = eventize({});
const unsubscribe = obj.on(11, listenerFunc);
obj.emit('foo', 'plah', 669);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledWith('plah', 669);
});
it('priority is correct', () => {
expect(unsubscribe.listener.priority).toBe(11);
});
it('eventName is correct', () => {
expect(unsubscribe.listener.eventName).toBe(EVENT_CATCH_EM_ALL);
});
it('isCatchEmAll is correct', () => {
expect(unsubscribe.listener.isCatchEmAll).toBe(true);
});
});
describe('on( listenerFunc, listenerObject ) => object.on( "*", PRIO_DEFAULT, listenerFunc, listenerObject )', () => {
const listenerObject = {};
const listenerFunc = jest.fn();
const obj = eventize({});
let context;
const unsubscribe = obj.on(function () { // eslint-disable-line
context = this;
}, listenerObject);
obj.on(listenerFunc, listenerObject);
obj.emit('foo', 'bar', 666);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledWith('bar', 666);
});
it('emit() calls the listener with correct context', () => {
expect(context).toBe(listenerObject);
});
it('priority is correct', () => {
expect(unsubscribe.listener.priority).toBe(PRIO_DEFAULT);
});
it('eventName is correct', () => {
expect(unsubscribe.listener.eventName).toBe(EVENT_CATCH_EM_ALL);
});
it('isCatchEmAll is correct', () => {
expect(unsubscribe.listener.isCatchEmAll).toBe(true);
});
});
describe('on( listenerFunc ) => object.on( "*", PRIO_DEFAULT, listenerFunc )', () => {
const listenerFunc = jest.fn();
const obj = eventize({});
const unsubscribe = obj.on(listenerFunc);
obj.emit('foo', 'plah', 669);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledWith('plah', 669);
});
it('priority is correct', () => {
expect(unsubscribe.listener.priority).toBe(PRIO_DEFAULT);
});
it('eventName is correct', () => {
expect(unsubscribe.listener.eventName).toBe(EVENT_CATCH_EM_ALL);
});
it('isCatchEmAll is correct', () => {
expect(unsubscribe.listener.isCatchEmAll).toBe(true);
});
});
describe('on( priority, object ) => object.on( "*", priority, object )', () => {
const listenerFunc = jest.fn();
const obj = eventize({});
const unsubscribe = obj.on(13, { foo: listenerFunc });
obj.emit('foo', 'plah', 667);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledWith('plah', 667);
});
it('priority is correct', () => {
expect(unsubscribe.listener.priority).toBe(13);
});
it('eventName is correct', () => {
expect(unsubscribe.listener.eventName).toBe(EVENT_CATCH_EM_ALL);
});
it('isCatchEmAll is correct', () => {
expect(unsubscribe.listener.isCatchEmAll).toBe(true);
});
});
describe('on( object ) => object.on( "*", PRIO_DEFAULT, object )', () => {
const listenerFunc = jest.fn();
const obj = eventize({});
const unsubscribe = obj.on({ foo: listenerFunc });
obj.emit('foo', 'plah', 667);
it('emit() calls the listener', () => {
expect(listenerFunc).toHaveBeenCalledWith('plah', 667);
});
it('priority is correct', () => {
expect(unsubscribe.listener.priority).toBe(PRIO_DEFAULT);
});
it('eventName is correct', () => {
expect(unsubscribe.listener.eventName).toBe(EVENT_CATCH_EM_ALL);
});
it('isCatchEmAll is correct', () => {
expect(unsubscribe.listener.isCatchEmAll).toBe(true);
});
});
});
| spearwolf/eventize | src/__tests__/on.spec.js | JavaScript | apache-2.0 | 17,681 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.index;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.*;
import java.util.concurrent.*;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.config.ConfigurationException;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.filter.IFilter;
import org.apache.cassandra.dht.AbstractBounds;
import org.apache.cassandra.dht.LocalToken;
import org.apache.cassandra.io.sstable.ReducingKeyIterator;
import org.apache.cassandra.io.sstable.SSTableReader;
import org.apache.cassandra.thrift.IndexClause;
import org.apache.cassandra.thrift.IndexExpression;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Manages all the indexes associated with a given CFS
* Different types of indexes can be created across the same CF
*/
public class SecondaryIndexManager
{
    private static final Logger logger = LoggerFactory.getLogger(SecondaryIndexManager.class);

    /**
     * Organizes the indexes by column name
     */
    private final ConcurrentNavigableMap<ByteBuffer, SecondaryIndex> indexesByColumn;

    /**
     * Keeps a single instance of a SecondaryIndex for many columns when the index type
     * has isRowLevelIndex() == true
     *
     * This allows updates to happen to an entire row at once
     */
    private final Map<Class<? extends SecondaryIndex>, SecondaryIndex> rowLevelIndexMap;

    /**
     * The underlying column family containing the source data for these indexes
     */
    public final ColumnFamilyStore baseCfs;

    public SecondaryIndexManager(ColumnFamilyStore baseCfs)
    {
        indexesByColumn = new ConcurrentSkipListMap<ByteBuffer, SecondaryIndex>();
        rowLevelIndexMap = new HashMap<Class<? extends SecondaryIndex>, SecondaryIndex>();
        this.baseCfs = baseCfs;
    }

    /**
     * Drops and adds new indexes associated with the underlying CF
     * @throws IOException
     */
    public void reload() throws IOException
    {
        // figure out what needs to be added and dropped.
        // future: if/when we have modifiable settings for secondary indexes,
        // they'll need to be handled here.
        Collection<ByteBuffer> indexedColumnNames = getIndexedColumns();
        for (ByteBuffer indexedColumn : indexedColumnNames)
        {
            ColumnDefinition def = baseCfs.metadata.getColumn_metadata().get(indexedColumn);
            if (def == null || def.getIndexType() == null)
                removeIndexedColumn(indexedColumn);
        }

        for (ColumnDefinition cdef : baseCfs.metadata.getColumn_metadata().values())
            if (cdef.getIndexType() != null && !indexedColumnNames.contains(cdef.name))
                addIndexedColumn(cdef);
    }

    /**
     * Does a full, blocking rebuild of the indexes specified by columns from the sstables.
     * Does nothing if columns is empty.
     *
     * Caller must acquire and release references to the sstables used here.
     *
     * @param sstables the data to build from
     * @param columns the list of columns to index
     * @throws IOException
     */
    public void maybeBuildSecondaryIndexes(Collection<SSTableReader> sstables, SortedSet<ByteBuffer> columns) throws IOException
    {
        if (columns.isEmpty())
            return;

        logger.info(String.format("Submitting index build of %s for data in %s",
                                  baseCfs.metadata.comparator.getString(columns), StringUtils.join(sstables, ", ")));

        SecondaryIndexBuilder builder = new SecondaryIndexBuilder(baseCfs, columns, new ReducingKeyIterator(sstables));
        Future<?> future = CompactionManager.instance.submitIndexBuild(builder);
        try
        {
            future.get();
            flushIndexesBlocking();
        }
        catch (InterruptedException e)
        {
            throw new AssertionError(e);
        }
        catch (ExecutionException e)
        {
            throw new RuntimeException(e);
        }

        logger.info("Index build of " + baseCfs.metadata.comparator.getString(columns) + " complete");
    }

    /**
     * @return the list of indexed columns
     */
    public SortedSet<ByteBuffer> getIndexedColumns()
    {
        return indexesByColumn.keySet();
    }

    /**
     * Removes an existing index
     * @param column the indexed column to remove
     * @throws IOException
     */
    public void removeIndexedColumn(ByteBuffer column) throws IOException
    {
        SecondaryIndex index = indexesByColumn.remove(column);

        if (index == null)
            return;

        // Remove this column from the row level index map
        if (index instanceof PerRowSecondaryIndex)
        {
            index.removeColumnDef(column);

            // If no columns are left on this CF, remove it from the row level lookup
            if (index.getColumnDefs().isEmpty())
                rowLevelIndexMap.remove(index.getClass());
        }

        index.removeIndex(column);
        SystemTable.setIndexRemoved(baseCfs.metadata.ksName, index.getNameForSystemTable(column));
    }

    /**
     * Adds and builds a index for a column
     * @param cdef the column definition holding the index data
     * @return a future which the caller can optionally block on signaling the index is built
     */
    public synchronized Future<?> addIndexedColumn(ColumnDefinition cdef)
    {
        if (indexesByColumn.containsKey(cdef.name))
            return null;

        assert cdef.getIndexType() != null;

        logger.info("Creating new index : {}", cdef);

        SecondaryIndex index;
        try
        {
            index = SecondaryIndex.createInstance(baseCfs, cdef);
        }
        catch (ConfigurationException e)
        {
            throw new RuntimeException(e);
        }

        // Keep a single instance of the index per-cf for row level indexes
        // since we want all columns to be under the index
        if (index instanceof PerRowSecondaryIndex)
        {
            SecondaryIndex currentIndex = rowLevelIndexMap.get(index.getClass());

            if (currentIndex == null)
            {
                rowLevelIndexMap.put(index.getClass(), index);
                index.init();
            }
            else
            {
                index = currentIndex;
                index.addColumnDef(cdef);
            }
        }
        else
        {
            index.init();
        }

        // link in indexedColumns. this means that writes will add new data to
        // the index immediately,
        // so we don't have to lock everything while we do the build. it's up to
        // the operator to wait
        // until the index is actually built before using in queries.
        indexesByColumn.put(cdef.name, index);

        // if we're just linking in the index to indexedColumns on an
        // already-built index post-restart, we're done
        if (index.isIndexBuilt(cdef.name))
            return null;

        return index.buildIndexAsync();
    }

    /**
     * @param column the name of the indexed column
     * @return the index for the column, or null if the column is not indexed
     */
    public SecondaryIndex getIndexForColumn(ByteBuffer column)
    {
        return indexesByColumn.get(column);
    }

    /**
     * Remove all index MBeans
     */
    public void unregisterMBeans()
    {
        for (Map.Entry<ByteBuffer, SecondaryIndex> entry : indexesByColumn.entrySet())
            entry.getValue().invalidate();
    }

    /**
     * Remove all underlying index data
     * @throws IOException
     */
    public void removeAllIndexes() throws IOException
    {
        for (Map.Entry<ByteBuffer, SecondaryIndex> entry : indexesByColumn.entrySet())
            entry.getValue().removeIndex(entry.getKey());
    }

    /**
     * Rename all underlying index files
     * @param newCfName the new index Name
     */
    public void renameIndexes(String newCfName) throws IOException
    {
        for (Map.Entry<ByteBuffer, SecondaryIndex> entry : indexesByColumn.entrySet())
            entry.getValue().renameIndex(newCfName);
    }

    /**
     * Flush all indexes to disk
     * @throws IOException if an underlying index flush fails
     */
    public void flushIndexesBlocking() throws IOException
    {
        for (Map.Entry<ByteBuffer, SecondaryIndex> entry : indexesByColumn.entrySet())
            entry.getValue().forceBlockingFlush();
    }

    /**
     * Returns the decoratedKey for a column value
     * @param name column name
     * @param value column value
     * @return decorated key
     */
    public DecoratedKey<LocalToken> getIndexKeyFor(ByteBuffer name, ByteBuffer value)
    {
        return new DecoratedKey<LocalToken>(new LocalToken(baseCfs.metadata.getColumnDefinition(name).getValidator(), value), value);
    }

    /**
     * @return all built indexes (ready to use)
     */
    public List<String> getBuiltIndexes()
    {
        List<String> indexList = new ArrayList<String>();

        for (Map.Entry<ByteBuffer, SecondaryIndex> entry : indexesByColumn.entrySet())
        {
            SecondaryIndex index = entry.getValue();

            if (index.isIndexBuilt(entry.getKey()))
            {
                indexList.add(entry.getValue().getIndexName());
            }
        }

        return indexList;
    }

    /**
     * @return all CFS from indexes which use a backing CFS internally (KEYS)
     */
    public Collection<ColumnFamilyStore> getIndexesBackedByCfs()
    {
        ArrayList<ColumnFamilyStore> cfsList = new ArrayList<ColumnFamilyStore>();

        for (Map.Entry<ByteBuffer, SecondaryIndex> entry : indexesByColumn.entrySet())
        {
            ColumnFamilyStore cfs = entry.getValue().getUnderlyingCfs();
            if (cfs != null)
                cfsList.add(cfs);
        }

        return cfsList;
    }

    /**
     * Removes obsolete index entries and creates new ones for the given row key
     * and mutated columns.
     *
     * For columns whose underlying index type has the isRowLevelIndex() flag set to
     * true this function will invoke the index's applyIndexUpdates() at most once
     * for the entire row, regardless of how many of its columns were touched.
     *
     * @param rowKey the row key
     * @param cf the current rows data
     * @param mutatedIndexedColumns the set of columns that were changed or added
     * @param oldIndexedColumns the columns what were deleted
     * @throws IOException
     */
    public void applyIndexUpdates(ByteBuffer rowKey,
                                  ColumnFamily cf,
                                  SortedSet<ByteBuffer> mutatedIndexedColumns,
                                  ColumnFamily oldIndexedColumns) throws IOException
    {
        // Tracks which PerRowSecondaryIndex classes were already invoked,
        // so each one is called exactly once per row update.
        Set<Class<? extends SecondaryIndex>> appliedRowLevelIndexes = null;

        // remove the old index entries
        if (oldIndexedColumns != null)
        {
            for (ByteBuffer columnName : oldIndexedColumns.getColumnNames())
            {
                IColumn column = oldIndexedColumns.getColumn(columnName);

                if (column == null)
                    continue;

                //this was previously deleted so should not be in index
                if (column.isMarkedForDelete())
                    continue;

                SecondaryIndex index = getIndexForColumn(columnName);
                if (index == null)
                {
                    logger.debug("Looks like index got dropped mid-update. Skipping");
                    continue;
                }

                // Update entire row if we encounter a row level index
                if (index instanceof PerRowSecondaryIndex)
                {
                    if (appliedRowLevelIndexes == null)
                        appliedRowLevelIndexes = new HashSet<Class<? extends SecondaryIndex>>();

                    // BUGFIX: previously this add/apply was on an 'else' branch, so the
                    // first row-level index encountered created the set but was never
                    // invoked (and was skipped entirely when only one of its columns
                    // appeared in the update).
                    if (appliedRowLevelIndexes.add(index.getClass()))
                        ((PerRowSecondaryIndex) index).applyIndexUpdates(rowKey, cf, mutatedIndexedColumns, oldIndexedColumns);
                }
                else
                {
                    DecoratedKey<LocalToken> valueKey = getIndexKeyFor(columnName, column.value());
                    ((PerColumnSecondaryIndex) index).deleteColumn(valueKey, rowKey, column);
                }
            }
        }

        //insert new columns
        for (ByteBuffer columnName : mutatedIndexedColumns)
        {
            IColumn column = cf.getColumn(columnName);
            if (column == null || column.isMarkedForDelete())
                continue; // null column == row deletion

            SecondaryIndex index = getIndexForColumn(columnName);
            if (index == null)
            {
                logger.debug("index on {} removed; skipping insert of new value for {}", columnName, ByteBufferUtil.bytesToHex(rowKey));
                continue;
            }

            // Update entire row if we encounter a row level index
            if (index instanceof PerRowSecondaryIndex)
            {
                if (appliedRowLevelIndexes == null)
                    appliedRowLevelIndexes = new HashSet<Class<? extends SecondaryIndex>>();

                // same fix as above: apply on first encounter, once per index class
                if (appliedRowLevelIndexes.add(index.getClass()))
                    ((PerRowSecondaryIndex) index).applyIndexUpdates(rowKey, cf, mutatedIndexedColumns, oldIndexedColumns);
            }
            else
            {
                DecoratedKey<LocalToken> valueKey = getIndexKeyFor(columnName, column.value());
                ((PerColumnSecondaryIndex) index).insertColumn(valueKey, rowKey, column);
            }
        }
    }

    /**
     * Delete all columns from all indexes for this row
     * @param key the row key
     * @param indexedColumnsInRow all column names in row
     */
    public void deleteFromIndexes(DecoratedKey<?> key, List<IColumn> indexedColumnsInRow) throws IOException
    {
        // Tracks which PerRowSecondaryIndex classes were already invoked,
        // so each one is called exactly once per row deletion.
        Set<Class<? extends SecondaryIndex>> cleanedRowLevelIndexes = null;

        for (IColumn column : indexedColumnsInRow)
        {
            SecondaryIndex index = indexesByColumn.get(column.name());

            if (index == null)
                continue;

            //Update entire row if we encounter a row level index
            if (index instanceof PerRowSecondaryIndex)
            {
                if (cleanedRowLevelIndexes == null)
                    cleanedRowLevelIndexes = new HashSet<Class<? extends SecondaryIndex>>();

                // BUGFIX: previously the add/delete was on an 'else' branch, so the
                // first row-level index encountered was never cleaned.
                if (cleanedRowLevelIndexes.add(index.getClass()))
                    ((PerRowSecondaryIndex) index).deleteFromIndex(key, indexedColumnsInRow);
            }
            else
            {
                DecoratedKey<LocalToken> valueKey = getIndexKeyFor(column.name(), column.value());
                ((PerColumnSecondaryIndex) index).deleteColumn(valueKey, key.key, column);
            }
        }
    }

    /**
     * Get a list of IndexSearchers from the union of expression index types
     * @param clause the query clause
     * @return the searchers to needed to query the index
     */
    private List<SecondaryIndexSearcher> getIndexSearchersForQuery(IndexClause clause)
    {
        List<SecondaryIndexSearcher> indexSearchers = new ArrayList<SecondaryIndexSearcher>();

        Map<String, Set<ByteBuffer>> groupByIndexType = new HashMap<String, Set<ByteBuffer>>();

        //Group columns by type
        for (IndexExpression ix : clause.expressions)
        {
            SecondaryIndex index = getIndexForColumn(ix.column_name);

            if (index == null)
                continue;

            Set<ByteBuffer> columns = groupByIndexType.get(index.getClass().getCanonicalName());

            if (columns == null)
            {
                columns = new HashSet<ByteBuffer>();
                groupByIndexType.put(index.getClass().getCanonicalName(), columns);
            }

            columns.add(ix.column_name);
        }

        //create searcher per type
        for (Map.Entry<String, Set<ByteBuffer>> entry : groupByIndexType.entrySet())
        {
            indexSearchers.add(getIndexForColumn(entry.getValue().iterator().next()).createSecondaryIndexSearcher(entry.getValue()));
        }

        return indexSearchers;
    }

    /**
     * Performs a search across a number of column indexes
     * TODO: add support for querying across index types
     *
     * @param clause the index query clause
     * @param range the row range to restrict to
     * @param dataFilter the column range to restrict to
     * @return found indexed rows
     */
    public List<Row> search(IndexClause clause, AbstractBounds range, IFilter dataFilter)
    {
        List<SecondaryIndexSearcher> indexSearchers = getIndexSearchersForQuery(clause);

        if (indexSearchers.isEmpty())
            return Collections.emptyList();

        //We currently don't support searching across multiple index types
        if (indexSearchers.size() > 1)
            throw new RuntimeException("Unable to search across multiple secondary index types");

        return indexSearchers.get(0).search(clause, range, dataFilter);
    }
}
| Mandar-Shinde/cassandra | src/java/org/apache/cassandra/db/index/SecondaryIndexManager.java | Java | apache-2.0 | 18,876 |
namespace ts {
    // Stripped-down default library ("/a/lib/lib.d.ts", flagged
    // no-default-lib) for the virtual file system: just enough global
    // declarations (primitive interfaces, fetch/Response/Body, and the full
    // Promise/PromiseLike/PromiseConstructor surface) for the async-function
    // conversion fixtures to type-check without loading the real lib.d.ts.
    // NOTE: the template literal below is verbatim .d.ts source — do not
    // reformat or comment inside it.
    const libFile: TestFSWithWatch.File = {
        path: "/a/lib/lib.d.ts",
        content: `/// <reference no-default-lib="true"/>
interface Boolean {}
interface Function {}
interface IArguments {}
interface Number { toExponential: any; }
interface Object {}
declare function fetch(input?, init?): Promise<Response>;
interface Response extends Body {
    readonly headers: Headers;
    readonly ok: boolean;
    readonly redirected: boolean;
    readonly status: number;
    readonly statusText: string;
    readonly trailer: Promise<Headers>;
    readonly type: ResponseType;
    readonly url: string;
    clone(): Response;
}
interface Body {
    readonly body: ReadableStream | null;
    readonly bodyUsed: boolean;
    arrayBuffer(): Promise<ArrayBuffer>;
    blob(): Promise<Blob>;
    formData(): Promise<FormData>;
    json(): Promise<any>;
    text(): Promise<string>;
}
declare type PromiseConstructorLike = new <T>(executor: (resolve: (value?: T | PromiseLike<T>) => void, reject: (reason?: any) => void) => void) => PromiseLike<T>;
interface PromiseLike<T> {
    /**
     * Attaches callbacks for the resolution and/or rejection of the Promise.
     * @param onfulfilled The callback to execute when the Promise is resolved.
     * @param onrejected The callback to execute when the Promise is rejected.
     * @returns A Promise for the completion of which ever callback is executed.
     */
    then<TResult1 = T, TResult2 = never>(onfulfilled?: ((value: T) => TResult1 | PromiseLike<TResult1>) | undefined | null, onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | undefined | null): PromiseLike<TResult1 | TResult2>;
}
interface Promise<T> {
    /**
     * Attaches callbacks for the resolution and/or rejection of the Promise.
     * @param onfulfilled The callback to execute when the Promise is resolved.
     * @param onrejected The callback to execute when the Promise is rejected.
     * @returns A Promise for the completion of which ever callback is executed.
     */
    then<TResult1 = T, TResult2 = never>(onfulfilled?: ((value: T) => TResult1 | PromiseLike<TResult1>) | undefined | null, onrejected?: ((reason: any) => TResult2 | PromiseLike<TResult2>) | undefined | null): Promise<TResult1 | TResult2>;
    /**
     * Attaches a callback for only the rejection of the Promise.
     * @param onrejected The callback to execute when the Promise is rejected.
     * @returns A Promise for the completion of the callback.
     */
    catch<TResult = never>(onrejected?: ((reason: any) => TResult | PromiseLike<TResult>) | undefined | null): Promise<T | TResult>;
}
interface PromiseConstructor {
    /**
     * A reference to the prototype.
     */
    readonly prototype: Promise<any>;
    /**
     * Creates a new Promise.
     * @param executor A callback used to initialize the promise. This callback is passed two arguments:
     * a resolve callback used resolve the promise with a value or the result of another promise,
     * and a reject callback used to reject the promise with a provided reason or error.
     */
    new <T>(executor: (resolve: (value?: T | PromiseLike<T>) => void, reject: (reason?: any) => void) => void): Promise<T>;
    /**
     * Creates a Promise that is resolved with an array of results when all of the provided Promises
     * resolve, or rejected when any Promise is rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    all<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike <T4>, T5 | PromiseLike<T5>, T6 | PromiseLike<T6>, T7 | PromiseLike<T7>, T8 | PromiseLike<T8>, T9 | PromiseLike<T9>, T10 | PromiseLike<T10>]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10]>;
    /**
     * Creates a Promise that is resolved with an array of results when all of the provided Promises
     * resolve, or rejected when any Promise is rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    all<T1, T2, T3, T4, T5, T6, T7, T8, T9>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike <T4>, T5 | PromiseLike<T5>, T6 | PromiseLike<T6>, T7 | PromiseLike<T7>, T8 | PromiseLike<T8>, T9 | PromiseLike<T9>]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8, T9]>;
    /**
     * Creates a Promise that is resolved with an array of results when all of the provided Promises
     * resolve, or rejected when any Promise is rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    all<T1, T2, T3, T4, T5, T6, T7, T8>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike <T4>, T5 | PromiseLike<T5>, T6 | PromiseLike<T6>, T7 | PromiseLike<T7>, T8 | PromiseLike<T8>]): Promise<[T1, T2, T3, T4, T5, T6, T7, T8]>;
    /**
     * Creates a Promise that is resolved with an array of results when all of the provided Promises
     * resolve, or rejected when any Promise is rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    all<T1, T2, T3, T4, T5, T6, T7>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike <T4>, T5 | PromiseLike<T5>, T6 | PromiseLike<T6>, T7 | PromiseLike<T7>]): Promise<[T1, T2, T3, T4, T5, T6, T7]>;
    /**
     * Creates a Promise that is resolved with an array of results when all of the provided Promises
     * resolve, or rejected when any Promise is rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    all<T1, T2, T3, T4, T5, T6>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike <T4>, T5 | PromiseLike<T5>, T6 | PromiseLike<T6>]): Promise<[T1, T2, T3, T4, T5, T6]>;
    /**
     * Creates a Promise that is resolved with an array of results when all of the provided Promises
     * resolve, or rejected when any Promise is rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    all<T1, T2, T3, T4, T5>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike <T4>, T5 | PromiseLike<T5>]): Promise<[T1, T2, T3, T4, T5]>;
    /**
     * Creates a Promise that is resolved with an array of results when all of the provided Promises
     * resolve, or rejected when any Promise is rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    all<T1, T2, T3, T4>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike <T4>]): Promise<[T1, T2, T3, T4]>;
    /**
     * Creates a Promise that is resolved with an array of results when all of the provided Promises
     * resolve, or rejected when any Promise is rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    all<T1, T2, T3>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>]): Promise<[T1, T2, T3]>;
    /**
     * Creates a Promise that is resolved with an array of results when all of the provided Promises
     * resolve, or rejected when any Promise is rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    all<T1, T2>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>]): Promise<[T1, T2]>;
    /**
     * Creates a Promise that is resolved with an array of results when all of the provided Promises
     * resolve, or rejected when any Promise is rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    all<T>(values: (T | PromiseLike<T>)[]): Promise<T[]>;
    /**
     * Creates a Promise that is resolved or rejected when any of the provided Promises are resolved
     * or rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    race<T1, T2, T3, T4, T5, T6, T7, T8, T9, T10>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike<T4>, T5 | PromiseLike<T5>, T6 | PromiseLike<T6>, T7 | PromiseLike<T7>, T8 | PromiseLike<T8>, T9 | PromiseLike<T9>, T10 | PromiseLike<T10>]): Promise<T1 | T2 | T3 | T4 | T5 | T6 | T7 | T8 | T9 | T10>;
    /**
     * Creates a Promise that is resolved or rejected when any of the provided Promises are resolved
     * or rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    race<T1, T2, T3, T4, T5, T6, T7, T8, T9>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike<T4>, T5 | PromiseLike<T5>, T6 | PromiseLike<T6>, T7 | PromiseLike<T7>, T8 | PromiseLike<T8>, T9 | PromiseLike<T9>]): Promise<T1 | T2 | T3 | T4 | T5 | T6 | T7 | T8 | T9>;
    /**
     * Creates a Promise that is resolved or rejected when any of the provided Promises are resolved
     * or rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    race<T1, T2, T3, T4, T5, T6, T7, T8>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike<T4>, T5 | PromiseLike<T5>, T6 | PromiseLike<T6>, T7 | PromiseLike<T7>, T8 | PromiseLike<T8>]): Promise<T1 | T2 | T3 | T4 | T5 | T6 | T7 | T8>;
    /**
     * Creates a Promise that is resolved or rejected when any of the provided Promises are resolved
     * or rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    race<T1, T2, T3, T4, T5, T6, T7>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike<T4>, T5 | PromiseLike<T5>, T6 | PromiseLike<T6>, T7 | PromiseLike<T7>]): Promise<T1 | T2 | T3 | T4 | T5 | T6 | T7>;
    /**
     * Creates a Promise that is resolved or rejected when any of the provided Promises are resolved
     * or rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    race<T1, T2, T3, T4, T5, T6>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike<T4>, T5 | PromiseLike<T5>, T6 | PromiseLike<T6>]): Promise<T1 | T2 | T3 | T4 | T5 | T6>;
    /**
     * Creates a Promise that is resolved or rejected when any of the provided Promises are resolved
     * or rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    race<T1, T2, T3, T4, T5>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike<T4>, T5 | PromiseLike<T5>]): Promise<T1 | T2 | T3 | T4 | T5>;
    /**
     * Creates a Promise that is resolved or rejected when any of the provided Promises are resolved
     * or rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    race<T1, T2, T3, T4>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>, T4 | PromiseLike<T4>]): Promise<T1 | T2 | T3 | T4>;
    /**
     * Creates a Promise that is resolved or rejected when any of the provided Promises are resolved
     * or rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    race<T1, T2, T3>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>, T3 | PromiseLike<T3>]): Promise<T1 | T2 | T3>;
    /**
     * Creates a Promise that is resolved or rejected when any of the provided Promises are resolved
     * or rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    race<T1, T2>(values: [T1 | PromiseLike<T1>, T2 | PromiseLike<T2>]): Promise<T1 | T2>;
    /**
     * Creates a Promise that is resolved or rejected when any of the provided Promises are resolved
     * or rejected.
     * @param values An array of Promises.
     * @returns A new Promise.
     */
    race<T>(values: (T | PromiseLike<T>)[]): Promise<T>;
    /**
     * Creates a new rejected promise for the provided reason.
     * @param reason The reason the promise was rejected.
     * @returns A new rejected Promise.
     */
    reject<T = never>(reason?: any): Promise<T>;
    /**
     * Creates a new resolved promise for the provided value.
     * @param value A promise.
     * @returns A promise whose internal state matches the provided promise.
     */
    resolve<T>(value: T | PromiseLike<T>): Promise<T>;
    /**
     * Creates a new resolved promise .
     * @returns A resolved promise.
     */
    resolve(): Promise<void>;
}
declare var Promise: PromiseConstructor;
interface RegExp {}
interface String { charAt: any; }
interface Array<T> {}`
    };
// Minimal fixture module mounted into the virtual FS when a test passes `includeModule`;
// test sources can import `fn` from "/module.ts". The template-literal content is the
// exact on-disk text of the fixture, so it must not be reformatted.
const moduleFile: TestFSWithWatch.File = {
path: "/module.ts",
content:
`export function fn(res: any): any {
return res;
}`
};
/**
 * A callable test-registration helper that, mirroring Mocha's API, also carries
 * `skip` and `only` variants taking the same arguments as the direct call.
 */
type WithSkipAndOnly<T extends any[]> = {
    skip: (...args: T) => void;
    only: (...args: T) => void;
} & ((...args: T) => void);
/**
 * Wraps `fn` so it can be invoked directly (registering via Mocha's `it`) or through
 * the `.skip`/`.only` properties, which forward `it.skip`/`it.only` instead.
 */
function createTestWrapper<T extends any[]>(fn: (it: Mocha.PendingTestFunction, ...args: T) => void): WithSkipAndOnly<T> {
    function wrapped(...args: T) {
        return fn(it, ...args);
    }
    wrapped.skip = (...args: T) => fn(it.skip, ...args);
    wrapped.only = (...args: T) => fn(it.only, ...args);
    return wrapped;
}
// Registers one Mocha test per applicable extension that runs the convert-to-async-function
// codefix over `text` and compares the output against a baseline file in `baselineFolder`.
// `text` must contain a `[#| ... |]` marker defining the "selection" range the fix targets.
// `includeLib`/`includeModule` add the Promise lib and "/module.ts" fixtures to the virtual FS.
// When `expectFailure` is set, the test asserts the fix/suggestion is NOT offered;
// `onlyProvideAction` relaxes that to "diagnostic absent but the action may still exist".
function testConvertToAsyncFunction(it: Mocha.PendingTestFunction, caption: string, text: string, baselineFolder: string, includeLib?: boolean, includeModule?: boolean, expectFailure = false, onlyProvideAction = false) {
const t = extractTest(text);
const selectionRange = t.ranges.get("selection")!;
if (!selectionRange) {
throw new Error(`Test ${caption} does not specify selection range`);
}
// Failure cases only need checking as TypeScript; success cases also baseline JS behavior.
const extensions = expectFailure ? [Extension.Ts] : [Extension.Ts, Extension.Js];
extensions.forEach(extension =>
it(`${caption} [${extension}]`, () => runBaseline(extension)));
// Runs the codefix for a single extension and writes/compares the baseline.
function runBaseline(extension: Extension) {
const path = "/a" + extension;
const languageService = makeLanguageService({ path, content: t.source }, includeLib, includeModule);
const program = languageService.getProgram()!;
if (hasSyntacticDiagnostics(program)) {
// Don't bother generating JS baselines for inputs that aren't valid JS.
assert.equal(Extension.Js, extension, "Syntactic diagnostics found in non-JS file");
return;
}
const f = {
path,
content: t.source
};
const sourceFile = program.getSourceFile(path)!;
// 80006 is the "This may be converted to an async function" suggestion code.
const context: CodeFixContext = {
errorCode: 80006,
span: { start: selectionRange.pos, length: selectionRange.end - selectionRange.pos },
sourceFile,
program,
cancellationToken: { throwIfCancellationRequested: noop, isCancellationRequested: returnFalse },
preferences: emptyOptions,
host: notImplementedHost,
formatContext: formatting.getFormatContext(testFormatSettings, notImplementedHost)
};
// The suggestion diagnostic must exactly cover the marked selection span.
const diagnostics = languageService.getSuggestionDiagnostics(f.path);
const diagnostic = find(diagnostics, diagnostic => diagnostic.messageText === Diagnostics.This_may_be_converted_to_an_async_function.message &&
diagnostic.start === context.span.start && diagnostic.length === context.span.length);
if (expectFailure) {
assert.isUndefined(diagnostic);
}
else {
assert.exists(diagnostic);
}
const actions = codefix.getFixes(context);
const action = find(actions, action => action.description === Diagnostics.Convert_to_async_function.message);
// With `onlyProvideAction`, the action may legitimately exist even though no
// suggestion diagnostic was reported above.
if (expectFailure && !onlyProvideAction) {
assert.isNotTrue(action && action.changes.length > 0);
return;
}
assert.isTrue(action && action.changes.length > 0);
// Assemble the baseline: original source (selection markers re-wrapped as comments)
// followed by the converted output.
const data: string[] = [];
data.push(`// ==ORIGINAL==`);
data.push(text.replace("[#|", "/*[#|*/").replace("|]", "/*|]*/"));
const changes = action!.changes;
assert.lengthOf(changes, 1);
data.push(`// ==ASYNC FUNCTION::${action!.description}==`);
const newText = textChanges.applyChanges(sourceFile.text, changes[0].textChanges);
data.push(newText);
// Re-parse the converted output to guarantee the fix produced syntactically valid code.
const diagProgram = makeLanguageService({ path, content: newText }, includeLib, includeModule).getProgram()!;
assert.isFalse(hasSyntacticDiagnostics(diagProgram));
Harness.Baseline.runBaseline(`${baselineFolder}/${caption}${extension}`, data.join(newLineCharacter));
}
// Builds an inferred-project language service over a virtual FS containing `file`
// plus the optional lib/module fixtures.
function makeLanguageService(file: TestFSWithWatch.File, includeLib?: boolean, includeModule?: boolean) {
const files = [file];
if (includeLib) {
files.push(libFile); // libFile is expensive to parse repeatedly - only test when required
}
if (includeModule) {
files.push(moduleFile);
}
const host = projectSystem.createServerHost(files);
const projectService = projectSystem.createProjectService(host);
projectService.openClientFile(file.path);
return first(projectService.inferredProjects).getLanguageService();
}
// True when the program reports any parse-level (syntactic) diagnostics.
function hasSyntacticDiagnostics(program: Program) {
const diags = program.getSyntacticDiagnostics();
return length(diags) > 0;
}
}
// Success-case runner: the codefix must apply, and the result is baselined.
const _testConvertToAsyncFunction = createTestWrapper((it, caption: string, text: string) =>
    testConvertToAsyncFunction(it, caption, text, "convertToAsyncFunction", /*includeLib*/ true));
// Failure-case runner: neither the suggestion diagnostic nor the action may be offered.
const _testConvertToAsyncFunctionFailed = createTestWrapper((it, caption: string, text: string) =>
    testConvertToAsyncFunction(it, caption, text, "convertToAsyncFunction", /*includeLib*/ true, /*includeModule*/ false, /*expectFailure*/ true));
// Failure-case runner where the suggestion diagnostic is absent but the action may still exist.
const _testConvertToAsyncFunctionFailedSuggestion = createTestWrapper((it, caption: string, text: string) =>
    testConvertToAsyncFunction(it, caption, text, "convertToAsyncFunction", /*includeLib*/ true, /*includeModule*/ false, /*expectFailure*/ true, /*onlyProvideAction*/ true));
// Success-case runner that additionally mounts the "/module.ts" fixture for import tests.
const _testConvertToAsyncFunctionWithModule = createTestWrapper((it, caption: string, text: string) =>
    testConvertToAsyncFunction(it, caption, text, "convertToAsyncFunction", /*includeLib*/ true, /*includeModule*/ true));
describe("unittests:: services:: convertToAsyncFunction", () => {
_testConvertToAsyncFunction("convertToAsyncFunction_basic", `
function [#|f|](): Promise<void>{
return fetch('https://typescriptlang.org').then(result => { console.log(result) });
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_arrayBindingPattern", `
function [#|f|](): Promise<void>{
return fetch('https://typescriptlang.org').then(([result]) => { console.log(result) });
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_objectBindingPattern", `
function [#|f|](): Promise<void>{
return fetch('https://typescriptlang.org').then(({ result }) => { console.log(result) });
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_arrayBindingPatternRename", `
function [#|f|](): Promise<void>{
const result = getResult();
return fetch('https://typescriptlang.org').then(([result]) => { console.log(result) });
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_objectBindingPatternRename", `
function [#|f|](): Promise<void>{
const result = getResult();
return fetch('https://typescriptlang.org').then(({ result }) => { console.log(result) });
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_basicNoReturnTypeAnnotation", `
function [#|f|]() {
return fetch('https://typescriptlang.org').then(result => { console.log(result) });
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_basicWithComments", `
function [#|f|](): Promise<void>{
/* Note - some of these comments are removed during the refactor. This is not ideal. */
// a
/*b*/ return /*c*/ fetch( /*d*/ 'https://typescriptlang.org' /*e*/).then( /*f*/ result /*g*/ => { /*h*/ console.log(/*i*/ result /*j*/) /*k*/}/*l*/);
// m
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_ArrowFunction", `
[#|():Promise<void> => {|]
return fetch('https://typescriptlang.org').then(result => console.log(result));
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_ArrowFunctionNoAnnotation", `
[#|() => {|]
return fetch('https://typescriptlang.org').then(result => console.log(result));
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_Catch", `
function [#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(result => { console.log(result); }).catch(err => { console.log(err); });
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchAndRej", `
function [#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(result => { console.log(result); }, rejection => { console.log("rejected:", rejection); }).catch(err => { console.log(err) });
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchAndRejRef", `
function [#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(res, rej).catch(catch_err)
}
function res(result){
console.log(result);
}
function rej(rejection){
return rejection.ok;
}
function catch_err(err){
console.log(err);
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchRef", `
function [#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(res).catch(catch_err)
}
function res(result){
console.log(result);
}
function catch_err(err){
console.log(err);
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchNoBrackets", `
function [#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(result => console.log(result)).catch(err => console.log(err));
}`
);
_testConvertToAsyncFunction("convertToAsyncFunction_IgnoreArgs1", `
function [#|f|](): Promise<void> {
return fetch('https://typescriptlang.org').then( _ => { console.log("done"); });
}`
);
_testConvertToAsyncFunction("convertToAsyncFunction_IgnoreArgs2", `
function [#|f|](): Promise<void> {
return fetch('https://typescriptlang.org').then( () => console.log("done") );
}`
);
_testConvertToAsyncFunction("convertToAsyncFunction_IgnoreArgs3", `
function [#|f|](): Promise<void> {
return fetch('https://typescriptlang.org').then( () => console.log("almost done") ).then( () => console.log("done") );
}`
);
_testConvertToAsyncFunction("convertToAsyncFunction_IgnoreArgs4", `
function [#|f|]() {
return fetch('https://typescriptlang.org').then(res);
}
function res(){
console.log("done");
}`
);
_testConvertToAsyncFunction("convertToAsyncFunction_Method", `
class Parser {
[#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(result => console.log(result));
}
}`
);
_testConvertToAsyncFunction("convertToAsyncFunction_MultipleCatches", `
function [#|f|](): Promise<void> {
return fetch('https://typescriptlang.org').then(res => console.log(res)).catch(err => console.log("err", err)).catch(err2 => console.log("err2", err2));
}`
);
_testConvertToAsyncFunction("convertToAsyncFunction_MultipleThens", `
function [#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(res).then(res2);
}
function res(result){
return result.ok;
}
function res2(result2){
console.log(result2);
}`
);
_testConvertToAsyncFunction("convertToAsyncFunction_MultipleThensSameVarName", `
function [#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(res).then(res2);
}
function res(result){
return result.ok;
}
function res2(result){
return result.bodyUsed;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_NoRes", `
function [#|f|]():Promise<void | Response> {
return fetch('https://typescriptlang.org').then(null, rejection => console.log("rejected:", rejection));
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_NoRes2", `
function [#|f|]():Promise<void | Response> {
return fetch('https://typescriptlang.org').then(undefined).catch(rej => console.log(rej));
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_NoRes3", `
function [#|f|]():Promise<void | Response> {
return fetch('https://typescriptlang.org').catch(rej => console.log(rej));
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_NoRes4", `
function [#|f|]() {
return fetch('https://typescriptlang.org').then(undefined, rejection => console.log("rejected:", rejection));
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_NoCatchHandler", `
function [#|f|]() {
return fetch('https://typescriptlang.org').then(x => x.statusText).catch(undefined);
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_NoSuggestion", `
function [#|f|]():Promise<Response> {
return fetch('https://typescriptlang.org');
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_PromiseDotAll", `
function [#|f|]():Promise<void>{
return Promise.all([fetch('https://typescriptlang.org'), fetch('https://microsoft.com'), fetch('https://youtube.com')]).then(function(vals){
vals.forEach(console.log);
});
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_NoSuggestionNoPromise", `
function [#|f|]():void{
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_Rej", `
function [#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(result => { console.log(result); }, rejection => { console.log("rejected:", rejection); });
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_RejRef", `
function [#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(res, rej);
}
function res(result){
console.log(result);
}
function rej(err){
console.log(err);
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_RejNoBrackets", `
function [#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(result => console.log(result), rejection => console.log("rejected:", rejection));
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_ResRef", `
function [#|f|]():Promise<boolean> {
return fetch('https://typescriptlang.org').then(res);
}
function res(result){
return result.ok;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_ResRef1", `
class Foo {
public [#|method|](): Promise<boolean> {
return fetch('a').then(this.foo);
}
private foo(res) {
return res.ok;
}
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_ResRef2", `
class Foo {
public [#|method|](): Promise<Response> {
return fetch('a').then(this.foo);
}
private foo = res => res;
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_ResRef3", `
const res = (result) => {
return result.ok;
}
function [#|f|](): Promise<boolean> {
return fetch('https://typescriptlang.org').then(res);
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_NoSuggestionResRef1", `
const res = 1;
function [#|f|]() {
return fetch('https://typescriptlang.org').then(res);
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_NoSuggestionResRef2", `
class Foo {
private foo = 1;
public [#|method|](): Promise<boolean> {
return fetch('a').then(this.foo);
}
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_NoSuggestionResRef3", `
const res = undefined;
function [#|f|]() {
return fetch('https://typescriptlang.org').then(res);
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_NoSuggestionResRef4", `
class Foo {
private foo = undefined;
public [#|method|](): Promise<boolean> {
return fetch('a').then(this.foo);
}
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_ResRefNoReturnVal", `
function [#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(res);
}
function res(result){
console.log(result);
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_ResRefNoReturnVal1", `
class Foo {
public [#|method|](): Promise<void> {
return fetch('a').then(this.foo);
}
private foo(res) {
console.log(res);
}
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_NoBrackets", `
function [#|f|]():Promise<void> {
return fetch('https://typescriptlang.org').then(result => console.log(result));
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_Finally1", `
function [#|finallyTest|](): Promise<void> {
return fetch("https://typescriptlang.org").then(res => console.log(res)).catch(rej => console.log("error", rej)).finally(console.log("finally!"));
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_Finally2", `
function [#|finallyTest|](): Promise<void> {
return fetch("https://typescriptlang.org").then(res => console.log(res)).finally(console.log("finally!"));
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_Finally3", `
function [#|finallyTest|](): Promise<void> {
return fetch("https://typescriptlang.org").finally(console.log("finally!"));
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_InnerPromise", `
function [#|innerPromise|](): Promise<string> {
return fetch("https://typescriptlang.org").then(resp => {
var blob2 = resp.blob().then(blob => blob.byteOffset).catch(err => 'Error');
return blob2;
}).then(blob => {
return blob.toString();
});
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_InnerPromiseRet", `
function [#|innerPromise|](): Promise<string> {
return fetch("https://typescriptlang.org").then(resp => {
return resp.blob().then(blob => blob.byteOffset).catch(err => 'Error');
}).then(blob => {
return blob.toString();
});
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_InnerPromiseRetBinding1", `
function [#|innerPromise|](): Promise<string> {
return fetch("https://typescriptlang.org").then(resp => {
return resp.blob().then(({ blob }) => blob.byteOffset).catch(({ message }) => 'Error ' + message);
}).then(blob => {
return blob.toString();
});
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_InnerPromiseRetBinding2", `
function [#|innerPromise|](): Promise<string> {
return fetch("https://typescriptlang.org").then(resp => {
return resp.blob().then(blob => blob.byteOffset).catch(err => 'Error');
}).then(({ x }) => {
return x.toString();
});
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_InnerPromiseRetBinding3", `
function [#|innerPromise|](): Promise<string> {
return fetch("https://typescriptlang.org").then(resp => {
return resp.blob().then(({ blob }) => blob.byteOffset).catch(({ message }) => 'Error ' + message);
}).then(([x, y]) => {
return (x || y).toString();
});
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_InnerPromiseRetBinding4", `
function [#|innerPromise|](): Promise<string> {
return fetch("https://typescriptlang.org").then(resp => {
return resp.blob().then(({ blob }: { blob: { byteOffset: number } }) => [0, blob.byteOffset]).catch(({ message }: Error) => ['Error ', message]);
}).then(([x, y]) => {
return (x || y).toString();
});
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_VarReturn01", `
function [#|f|]() {
let blob = fetch("https://typescriptlang.org").then(resp => console.log(resp));
return blob;
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_VarReturn02", `
function [#|f|]() {
let blob = fetch("https://typescriptlang.org");
blob.then(resp => console.log(resp));
return blob;
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_VarReturn03", `
function [#|f|]() {
let blob = fetch("https://typescriptlang.org")
let blob2 = blob.then(resp => console.log(resp));
blob2.catch(err);
return blob;
}
function err (rej) {
console.log(rej)
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_VarReturn04", `
function [#|f|]() {
var blob = fetch("https://typescriptlang.org").then(res => console.log(res)), blob2 = fetch("https://microsoft.com").then(res => res.ok).catch(err);
return blob;
}
function err (rej) {
console.log(rej)
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_VarReturn05", `
function [#|f|]() {
var blob = fetch("https://typescriptlang.org").then(res => console.log(res));
blob.then(x => x);
return blob;
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_VarReturn06", `
function [#|f|]() {
var blob = fetch("https://typescriptlang.org");
return blob;
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_VarReturn07", `
function [#|f|]() {
let blob = fetch("https://typescriptlang.org");
let blob2 = fetch("https://microsoft.com");
blob2.then(res => console.log("res:", res));
blob.then(resp => console.log(resp));
return blob;
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_VarReturn08", `
function [#|f|]() {
let blob = fetch("https://typescriptlang.org");
if (!blob.ok){
return blob;
}
blob.then(resp => console.log(resp));
return blob;
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_VarReturn09", `
function [#|f|]() {
let blob3;
let blob = fetch("https://typescriptlang.org");
let blob2 = fetch("https://microsoft.com");
blob2.then(res => console.log("res:", res));
blob.then(resp => console.log(resp));
blob3 = blob2.catch(rej => rej.ok);
return blob;
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_VarReturn10", `
function [#|f|]() {
let blob3;
let blob = fetch("https://typescriptlang.org");
let blob2 = fetch("https://microsoft.com");
blob2.then(res => console.log("res:", res));
blob.then(resp => console.log(resp));
blob3 = fetch("test.com");
blob3 = blob2;
return blob;
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_VarReturn11", `
function [#|f|]() {
let blob;
return blob;
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_Param1", `
function [#|f|]() {
return my_print(fetch("https://typescriptlang.org").then(res => console.log(res)));
}
function my_print (resp) {
if (resp.ok) {
console.log(resp.buffer);
}
return resp;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_Param2", `
function [#|f|]() {
return my_print(fetch("https://typescriptlang.org").then(res => console.log(res))).catch(err => console.log("Error!", err));
}
function my_print (resp): Promise<void> {
if (resp.ok) {
console.log(resp.buffer);
}
return resp;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_MultipleReturns1", `
function [#|f|](): Promise<void> {
let x = fetch("https://microsoft.com").then(res => console.log("Microsoft:", res));
if (x.ok) {
return fetch("https://typescriptlang.org").then(res => console.log(res));
}
return x.then(resp => {
var blob = resp.blob().then(blob => blob.byteOffset).catch(err => 'Error');
});
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_MultipleReturns2", `
function [#|f|](): Promise<void> {
let x = fetch("https://microsoft.com").then(res => console.log("Microsoft:", res));
if (x.ok) {
return fetch("https://typescriptlang.org").then(res => console.log(res));
}
return x.then(resp => {
var blob = resp.blob().then(blob => blob.byteOffset).catch(err => 'Error');
return fetch("https://microsoft.com").then(res => console.log("Another one!"));
});
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_SeperateLines", `
function [#|f|](): Promise<string> {
var blob = fetch("https://typescriptlang.org")
blob.then(resp => {
var blob = resp.blob().then(blob => blob.byteOffset).catch(err => 'Error');
});
blob.then(blob => {
return blob.toString();
});
return blob;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_InnerVarNameConflict", `
function [#|f|](): Promise<string> {
return fetch("https://typescriptlang.org").then(resp => {
var blob = resp.blob().then(blob => blob.byteOffset).catch(err => 'Error');
}).then(blob => {
return blob.toString();
});
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_InnerPromiseSimple", `
function [#|f|](): Promise<string> {
return fetch("https://typescriptlang.org").then(resp => {
return resp.blob().then(blob => blob.byteOffset);
}).then(blob => {
return blob.toString();
});
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_PromiseAllAndThen1", `
function [#|f|]() {
return Promise.resolve().then(function () {
return Promise.all([fetch("https://typescriptlang.org"), fetch("https://microsoft.com"), Promise.resolve().then(function () {
return fetch("https://github.com");
}).then(res => res.toString())]);
});
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_PromiseAllAndThen2", `
function [#|f|]() {
return Promise.resolve().then(function () {
return Promise.all([fetch("https://typescriptlang.org"), fetch("https://microsoft.com"), Promise.resolve().then(function () {
return fetch("https://github.com");
})]).then(res => res.toString());
});
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_PromiseAllAndThen3", `
function [#|f|]() {
return Promise.resolve().then(() =>
Promise.all([fetch("https://typescriptlang.org"), fetch("https://microsoft.com"), Promise.resolve().then(function () {
return fetch("https://github.com");
}).then(res => res.toString())]));
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_PromiseAllAndThen4", `
function [#|f|]() {
return Promise.resolve().then(() =>
Promise.all([fetch("https://typescriptlang.org"), fetch("https://microsoft.com"), Promise.resolve().then(function () {
return fetch("https://github.com");
})]).then(res => res.toString()));
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_Scope1", `
function [#|f|]() {
var var1: Response, var2;
return fetch('https://typescriptlang.org').then( _ =>
Promise.resolve().then( res => {
var2 = "test";
return fetch("https://microsoft.com");
}).then(res =>
var1 === res
)
).then(res);
}
function res(response){
console.log(response);
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_Conditionals", `
function [#|f|](){
return fetch("https://typescriptlang.org").then(res => {
if (res.ok) {
return fetch("https://microsoft.com");
}
else {
if (res.buffer.length > 5) {
return res;
}
else {
return fetch("https://github.com");
}
}
});
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchFollowedByThen", `
function [#|f|](){
return fetch("https://typescriptlang.org").then(res).catch(rej).then(res);
}
function res(result){
return result;
}
function rej(reject){
return reject;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchFollowedByThenMatchingTypes01", `
function [#|f|](){
return fetch("https://typescriptlang.org").then(res).catch(rej).then(res);
}
function res(result): number {
return 5;
}
function rej(reject): number {
return 3;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchFollowedByThenMatchingTypes01NoAnnotations", `
function [#|f|](){
return fetch("https://typescriptlang.org").then(res).catch(rej).then(res);
}
function res(result){
return 5;
}
function rej(reject){
return 3;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchFollowedByThenMatchingTypes02", `
function [#|f|](){
return fetch("https://typescriptlang.org").then(res => 0).catch(rej => 1).then(res);
}
function res(result): number {
return 5;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchFollowedByThenMatchingTypes02NoAnnotations", `
function [#|f|](){
return fetch("https://typescriptlang.org").then(res => 0).catch(rej => 1).then(res);
}
function res(result){
return 5;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchFollowedByThenMismatchTypes01", `
function [#|f|](){
return fetch("https://typescriptlang.org").then(res).catch(rej).then(res);
}
function res(result){
return 5;
}
function rej(reject){
return "Error";
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchFollowedByThenMismatchTypes02", `
function [#|f|](){
return fetch("https://typescriptlang.org").then(res).catch(rej).then(res);
}
function res(result){
return 5;
}
function rej(reject): Response{
return reject;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchFollowedByThenMismatchTypes02NoAnnotations", `
function [#|f|](){
return fetch("https://typescriptlang.org").then(res).catch(rej).then(res);
}
function res(result){
return 5;
}
function rej(reject){
return reject;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchFollowedByThenMismatchTypes03", `
function [#|f|](){
return fetch("https://typescriptlang.org").then(res).catch(rej).then(res);
}
function res(result){
return 5;
}
function rej(reject){
return Promise.resolve(1);
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_CatchFollowedByThenMismatchTypes04", `
interface a {
name: string;
age: number;
}
interface b extends a {
color: string;
}
function [#|f|](){
return fetch("https://typescriptlang.org").then(res).catch(rej).then(res);
}
function res(result): b{
return {name: "myName", age: 22, color: "red"};
}
function rej(reject): a{
return {name: "myName", age: 27};
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_ParameterNameCollision", `
async function foo<T>(x: T): Promise<T> {
return x;
}
function [#|bar|]<T>(x: T): Promise<T> {
return foo(x).then(foo)
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_Return1", `
function [#|f|](p: Promise<unknown>) {
return p.catch((error: Error) => {
return Promise.reject(error);
});
}`
);
_testConvertToAsyncFunction("convertToAsyncFunction_Return2", `
function [#|f|](p: Promise<unknown>) {
return p.catch((error: Error) => Promise.reject(error));
}`
);
_testConvertToAsyncFunction("convertToAsyncFunction_Return3", `
function [#|f|](p: Promise<unknown>) {
return p.catch(function (error: Error) {
return Promise.reject(error);
});
}`
);
_testConvertToAsyncFunction("convertToAsyncFunction_LocalReturn", `
function [#|f|]() {
let x = fetch("https://typescriptlang.org").then(res => console.log(res));
return x.catch(err => console.log("Error!", err));
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_PromiseCallInner", `
function [#|f|]() {
return fetch(Promise.resolve(1).then(res => "https://typescriptlang.org")).catch(err => console.log(err));
}
`);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_CatchFollowedByCall", `
function [#|f|](){
return fetch("https://typescriptlang.org").then(res).catch(rej).toString();
}
function res(result){
return result;
}
function rej(reject){
return reject;
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_Scope2", `
function [#|f|](){
var i:number;
return fetch("https://typescriptlang.org").then(i => i.ok).then(res => i+1).catch(err => i-1)
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_Loop", `
function [#|f|](){
return fetch("https://typescriptlang.org").then(res => { for(let i=0; i<10; i++){
console.log(res);
}})
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_Conditional2", `
function [#|f|](){
var res = 100;
if (res > 50) {
return fetch("https://typescriptlang.org").then(res => console.log(res));
}
else {
return fetch("https://typescriptlang.org").then(res_func);
}
}
function res_func(result){
console.log(result);
}
`
);
_testConvertToAsyncFunction("convertToAsyncFunction_Scope3", `
function [#|f|]() {
var obj;
return fetch("https://typescriptlang.org").then(function (res) {
obj = {
func: function f() {
console.log(res);
}
};
});
}
`
);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_NestedFunctionWrongLocation", `
function [#|f|]() {
function fn2(){
function fn3(){
return fetch("https://typescriptlang.org").then(res => console.log(res));
}
return fn3();
}
return fn2();
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_NestedFunctionRightLocation", `
function f() {
function fn2(){
function [#|fn3|](){
return fetch("https://typescriptlang.org").then(res => console.log(res));
}
return fn3();
}
return fn2();
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_UntypedFunction", `
function [#|f|]() {
return Promise.resolve().then(res => console.log(res));
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_TernaryConditional", `
function [#|f|]() {
let i;
return Promise.resolve().then(res => res ? i = res : i = 100);
}
`);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_ResRejNoArgsArrow", `
function [#|f|]() {
return Promise.resolve().then(() => 1, () => "a");
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_simpleFunctionExpression", `
const [#|foo|] = function () {
return fetch('https://typescriptlang.org').then(result => { console.log(result) });
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_simpleFunctionExpressionWithName", `
const foo = function [#|f|]() {
return fetch('https://typescriptlang.org').then(result => { console.log(result) });
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_simpleFunctionExpressionAssignedToBindingPattern", `
const { length } = [#|function|] () {
return fetch('https://typescriptlang.org').then(result => { console.log(result) });
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_catchBlockUniqueParams", `
function [#|f|]() {
return Promise.resolve().then(x => 1).catch(x => "a").then(x => !!x);
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_catchBlockUniqueParamsBindingPattern", `
function [#|f|]() {
return Promise.resolve().then(() => ({ x: 3 })).catch(() => ({ x: "a" })).then(({ x }) => !!x);
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_bindingPattern", `
function [#|f|]() {
return fetch('https://typescriptlang.org').then(res);
}
function res({ status, trailer }){
console.log(status);
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_bindingPatternNameCollision", `
function [#|f|]() {
const result = 'https://typescriptlang.org';
return fetch(result).then(res);
}
function res({ status, trailer }){
console.log(status);
}
`);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_thenArgumentNotFunction", `
function [#|f|]() {
return Promise.resolve().then(f ? (x => x) : (y => y));
}
`);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_thenArgumentNotFunctionNotLastInChain", `
function [#|f|]() {
return Promise.resolve().then(f ? (x => x) : (y => y)).then(q => q);
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_runEffectfulContinuation", `
function [#|f|]() {
return fetch('https://typescriptlang.org').then(res).then(_ => console.log("done"));
}
function res(result) {
return Promise.resolve().then(x => console.log(result));
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_callbackReturnsPromise", `
function [#|f|]() {
return fetch('https://typescriptlang.org').then(s => Promise.resolve(s.statusText.length)).then(x => console.log(x + 5));
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_callbackReturnsPromiseInBlock", `
function [#|f|]() {
return fetch('https://typescriptlang.org').then(s => { return Promise.resolve(s.statusText.length) }).then(x => x + 5);
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_callbackReturnsFixablePromise", `
function [#|f|]() {
return fetch('https://typescriptlang.org').then(s => Promise.resolve(s.statusText).then(st => st.length)).then(x => console.log(x + 5));
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_callbackReturnsPromiseLastInChain", `
function [#|f|]() {
return fetch('https://typescriptlang.org').then(s => Promise.resolve(s.statusText.length));
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_callbackReturnsRejectedPromiseInTryBlock", `
function [#|f|]() {
return Promise.resolve(1)
.then(x => Promise.reject(x))
.catch(err => console.log(err));
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_nestedPromises", `
function [#|f|]() {
return fetch('https://typescriptlang.org').then(x => Promise.resolve(3).then(y => Promise.resolve(x.statusText.length + y)));
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_noArgs1", `
function delay(millis: number): Promise<void> {
throw "no"
}
function [#|main2|]() {
console.log("Please wait. Loading.");
return delay(500)
.then(() => { console.log("."); return delay(500); })
.then(() => { console.log("."); return delay(500); })
.then(() => { console.log("."); return delay(500); })
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_noArgs2", `
function delay(millis: number): Promise<void> {
throw "no"
}
function [#|main2|]() {
console.log("Please wait. Loading.");
return delay(500)
.then(() => delay(500))
.then(() => delay(500))
.then(() => delay(500))
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_exportModifier", `
export function [#|foo|]() {
return fetch('https://typescriptlang.org').then(s => console.log(s));
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_OutermostOnlySuccess", `
function [#|foo|]() {
return fetch('a').then(() => {
return fetch('b').then(() => 'c');
})
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_decoratedMethod", `
function decorator() {
return (target: any, key: any, descriptor: PropertyDescriptor) => descriptor;
}
class Foo {
@decorator()
[#|method|]() {
return fetch('a').then(x => x);
}
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_decoratedMethodWithSingleLineComment", `
function decorator() {
return (target: any, key: any, descriptor: PropertyDescriptor) => descriptor;
}
class Foo {
@decorator()
// comment
[#|method|]() {
return fetch('a').then(x => x);
}
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_decoratedMethodWithMultipleLineComment", `
function decorator() {
return (target: any, key: any, descriptor: PropertyDescriptor) => descriptor;
}
class Foo {
@decorator()
/**
* comment
*/
[#|method|]() {
return fetch('a').then(x => x);
}
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_decoratedMethodWithModifier", `
function decorator() {
return (target: any, key: any, descriptor: PropertyDescriptor) => descriptor;
}
class Foo {
@decorator()
public [#|method|]() {
return fetch('a').then(x => x);
}
}
`);
_testConvertToAsyncFunctionFailedSuggestion("convertToAsyncFunction_OutermostOnlyFailure", `
function foo() {
return fetch('a').then([#|() => {|]
return fetch('b').then(() => 'c');
})
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_thenTypeArgument1", `
type APIResponse<T> = { success: true, data: T } | { success: false };
function wrapResponse<T>(response: T): APIResponse<T> {
return { success: true, data: response };
}
function [#|get|]() {
return Promise.resolve(undefined!).then<APIResponse<{ email: string }>>(wrapResponse);
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_thenTypeArgument2", `
type APIResponse<T> = { success: true, data: T } | { success: false };
function wrapResponse<T>(response: T): APIResponse<T> {
return { success: true, data: response };
}
function [#|get|]() {
return Promise.resolve(undefined!).then<APIResponse<{ email: string }>>(d => wrapResponse(d));
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_thenTypeArgument3", `
type APIResponse<T> = { success: true, data: T } | { success: false };
function wrapResponse<T>(response: T): APIResponse<T> {
return { success: true, data: response };
}
function [#|get|]() {
return Promise.resolve(undefined!).then<APIResponse<{ email: string }>>(d => {
console.log(d);
return wrapResponse(d);
});
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_catchTypeArgument1", `
type APIResponse<T> = { success: true, data: T } | { success: false };
function [#|get|]() {
return Promise
.resolve<APIResponse<{ email: string }>>({ success: true, data: { email: "" } })
.catch<APIResponse<{ email: string }>>(() => ({ success: false }));
}
`);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction_threeArguments", `
function [#|f|]() {
return Promise.resolve().then(undefined, undefined, () => 1);
}`);
_testConvertToAsyncFunction("convertToAsyncFunction_callbackArgument", `
function foo(props: any): void {
return props;
}
const fn = (): Promise<(message: string) => void> =>
new Promise(resolve => resolve((message: string) => foo(message)));
function [#|f|]() {
return fn().then(res => res("test"));
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_emptyCatch1", `
function [#|f|]() {
return Promise.resolve().catch();
}
`);
_testConvertToAsyncFunction("convertToAsyncFunction_emptyCatch2", `
function [#|f|]() {
return Promise.resolve(0).then(x => x).catch();
}
`);
_testConvertToAsyncFunctionWithModule("convertToAsyncFunction_importedFunction", `
import { fn } from "./module";
function [#|f|]() {
return Promise.resolve(0).then(fn);
}
`);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction__NoSuggestionInFunctionsWithNonFixableReturnStatements1", `
function f(x: number): Promise<void>;
function f(): void;
function [#|f|](x?: number): Promise<void> | void {
if (!x) return;
return fetch('').then(() => {});
}
`);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction__NoSuggestionInFunctionsWithNonFixableReturnStatements2", `
function f(x: number): Promise<void>;
function f(): number;
function [#|f|](x?: number): Promise<void> | number {
if (x) return x;
return fetch('').then(() => {});
}
`);
_testConvertToAsyncFunctionFailed("convertToAsyncFunction__NoSuggestionInGetters", `
class Foo {
get [#|m|](): Promise<number> {
return Promise.resolve(1).then(n => n);
}
}
`);
});
}
| RyanCavanaugh/TypeScript | src/testRunner/unittests/services/convertToAsyncFunction.ts | TypeScript | apache-2.0 | 58,863 |
package com.uweather.app.util;
import android.util.Log;
/**
 * Minimal level-gated logging facade over {@code android.util.Log}.
 *
 * <p>A message is forwarded to the platform logger only when its severity is
 * at or above the compile-time threshold {@link #LEVEL}. Setting {@code LEVEL}
 * to {@link #NOTHING} silences all output (useful for release builds).
 */
public class LogUtil {

	public static final int VERBOSE = 1;
	public static final int DEBUG = 2;
	public static final int INFO = 3;
	public static final int WARN = 4;
	public static final int ERROR = 5;
	/** Sentinel above {@link #ERROR}: disables every log call. */
	public static final int NOTHING = 6;

	/** Current threshold; calls below this level become no-ops. */
	public static final int LEVEL = VERBOSE;

	// Utility class: prevent instantiation.
	private LogUtil() {
	}

	/** Logs {@code msg} at VERBOSE level when VERBOSE is enabled. */
	public static void v(String tag, String msg) {
		if (LEVEL <= VERBOSE) {
			Log.v(tag, msg);
		}
	}

	/** Logs {@code msg} at DEBUG level when DEBUG is enabled. */
	public static void d(String tag, String msg) {
		if (LEVEL <= DEBUG) {
			Log.d(tag, msg);
		}
	}

	/** Logs {@code msg} at INFO level when INFO is enabled. */
	public static void i(String tag, String msg) {
		if (LEVEL <= INFO) {
			Log.i(tag, msg);
		}
	}

	/** Logs {@code msg} at WARN level when WARN is enabled. */
	public static void w(String tag, String msg) {
		if (LEVEL <= WARN) {
			Log.w(tag, msg);
		}
	}

	/** Logs {@code msg} at ERROR level when ERROR is enabled. */
	public static void e(String tag, String msg) {
		if (LEVEL <= ERROR) {
			Log.e(tag, msg);
		}
	}
}
| EmpKixs/UWeather | src/com/uweather/app/util/LogUtil.java | Java | apache-2.0 | 829 |
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
from .utils import disable_for_loaddata
from . import settings
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
@disable_for_loaddata
def create_auth_token(sender, instance=None, created=False, **kwargs):
    # post_save receiver: mint a DRF auth Token exactly once, when the user
    # row is first inserted; later saves of the same user do not create new
    # tokens. The disable_for_loaddata wrapper presumably skips fixture
    # (loaddata) saves — see .utils.disable_for_loaddata to confirm.
    if created:
        Token.objects.create(user=instance)
| cdelguercio/slothauth | slothauth/signals.py | Python | apache-2.0 | 409 |
package io.yosper.iot.message_router;
import org.apache.activemq.camel.component.ActiveMQComponent;
import org.apache.camel.CamelContext;
import org.apache.camel.ProducerTemplate;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.impl.DefaultCamelContext;
import io.yosper.iot.message_router.ProcessCrouton;
import io.yosper.iot.message_router.CANCOM;
import java.util.Properties;
/**
 * Camel-based transport for "Crouton" IoT messages.
 *
 * <p>{@link #start(Properties)} wires two routes on an embedded, non-persistent
 * ActiveMQ broker: the {@code /ingress} topic is fed through
 * {@link ProcessCrouton}, and the {@code /egress/crouton} topic is forwarded to
 * an external MQTT broker whose URI comes from the {@code mqtt_crouton_uri}
 * property (default {@code tcp://localhost:1883}). {@link #stop()} shuts the
 * routes down.
 */
public final class TransportCrouton {

    /**
     * @deprecated This is NOT a constructor: the {@code void} return type makes
     * it an ordinary static method that merely shares the class name, and it
     * does nothing. Instantiate with {@code new TransportCrouton()} instead.
     * Kept only so any existing {@code TransportCrouton.TransportCrouton()}
     * call sites keep compiling.
     */
    @Deprecated
    public static void TransportCrouton() { }

    /** Camel context owning the routes; {@code null} until start() succeeds. */
    private CamelContext context;

    /**
     * Builds and starts the Camel routes.
     *
     * @param prop configuration; only {@code mqtt_crouton_uri} is consulted
     * @throws Exception if the Camel context or a route fails to start
     */
    public void start(Properties prop) throws Exception
    {
        context = new DefaultCamelContext();
        context.addComponent("activemq", ActiveMQComponent.activeMQComponent("vm://localhost?broker.persistent=false"));
        final String mqtt_uri = prop.getProperty("mqtt_crouton_uri", "tcp://localhost:1883");
        context.addRoutes(new RouteBuilder() {
            public void configure()
            {
                from("activemq:topic:/ingress").
                        process(new ProcessCrouton()).
                        end();
                from("activemq:topic:/egress/crouton").
                        to("mqtt:mqtt_egress?host="+mqtt_uri);
            }
        });
        context.start();
    }

    /**
     * Stops the Camel context. Safe to call even if {@link #start(Properties)}
     * was never invoked (previously this threw a NullPointerException).
     *
     * @throws Exception if Camel fails to shut down cleanly
     */
    public void stop() throws Exception
    {
        if (context != null) {
            context.stop();
        }
    }
}
| veestr/gemini | services/message-router/src/main/java/io/yosper/iot/message_router/TransportCrouton.java | Java | apache-2.0 | 1,163 |
from common import TestID, load_common_properties, get_proxy_file_path
from eu.emi.security.authn.x509.impl import PEMCredential
from exceptions import Exception
from jarray import array
from net.grinder.script import Test
from net.grinder.script.Grinder import grinder
from org.italiangrid.dav.client import WebDAVClient, WebDAVClientFactory
import mkcol, http_put, http_get, move, delete
import random
import string
import time
import traceback
import uuid
import os
## This loads the base properties inside grinder properties
## Should be left at the top of the script execution
load_common_properties()
# Shortcuts to the Grinder logger and property store.
error = grinder.logger.error
info = grinder.logger.info
debug = grinder.logger.debug
props = grinder.properties
# Proxy authorized to write on SRM/WEBDAV endpoints
PROXY_FILE = get_proxy_file_path()
# Test specific variables (all read from grinder.properties)
TEST_DIRECTORY = "getdav"
TEST_STORAGEAREA = props['getdav.storagearea']
TEST_ENDPOINT = props['getdav.endpoint']
TEST_NUMFILES = int(props['getdav.numfiles'])
TEST_FILESIZE = int(props['getdav.filesize'])  # size of each uploaded file, in KiB
# Computed variables
TEST_DIRECTORY_URL = "%s/webdav/%s/%s" % (TEST_ENDPOINT, TEST_STORAGEAREA, TEST_DIRECTORY)
# HTTP Client authenticated with the proxy above; shared by all worker threads.
DAV_CLIENT = WebDAVClientFactory.newWebDAVClient(TEST_ENDPOINT,PROXY_FILE)
# URLs of the files uploaded during setup(); populated once by thread 0.
FILE_URLS = []
def getURLFile(filename):
    # Absolute WebDAV URL of a file inside the shared test directory.
    return "{0}/{1}".format(TEST_DIRECTORY_URL, filename)
def check_http_success(statusCode, expected_code, error_msg):
    """Raise Exception unless statusCode matches expected_code.

    The exception message embeds error_msg plus both status codes so the
    failure is self-describing in the Grinder logs.
    """
    if statusCode == expected_code:
        return
    raise Exception("%s. Status code is %s instead of %s"
                    % (error_msg, statusCode, expected_code))
def create_test_directory_if_needed():
    # Probe the test directory with an HTTP GET; any non-200 answer (404,
    # 401, ...) is treated as "missing" and triggers a MKCOL to create it.
    http_get_runner=http_get.TestRunner()
    statusCode = http_get_runner(TEST_DIRECTORY_URL,DAV_CLIENT)
    if (statusCode != 200):
        DAV_CLIENT.mkcol(TEST_DIRECTORY_URL)
def create_local_file_to_upload():
    # Create a scratch file of TEST_FILESIZE KiB under /tmp with a random
    # name, by seeking to the last byte and writing a single NUL (fast, and
    # possibly sparse on disk). Returns the file's path.
    # NOTE(review): the handle leaks if seek/write raises, and the temp file
    # is never deleted by this function — callers are expected to clean up.
    local_file_path = "/tmp/%s" % str(uuid.uuid4());
    info("Creating local file %s" % local_file_path)
    file = open(local_file_path, "w")
    file.seek(TEST_FILESIZE*1024-1)
    file.write("\0")
    file.close()
    size = os.stat(local_file_path).st_size
    info("Local file size is %i bytes" % size)
    return local_file_path
def upload_file(local_file_path, destination_URL):
    # PUT the local file to the WebDAV URL; 201 Created is the only
    # accepted outcome, anything else raises via check_http_success.
    http_put_runner=http_put.TestRunner()
    statusCode = http_put_runner(destination_URL,local_file_path,DAV_CLIENT)
    check_http_success(statusCode, 201, "Error in HTTP PUT")
    return
def setup():
    """Upload TEST_NUMFILES copies of a locally generated scratch file and
    register their URLs in the module-level FILE_URLS list.

    Returns FILE_URLS, so callers that bind the result (TestRunner.__init__
    does ``FILE_URLS = setup()``) receive the populated list instead of the
    None the original ``return`` produced. Also removes the scratch file
    once all uploads are done, instead of leaking it in /tmp.
    """
    info("Setting up GET-WebDAV test.")
    local_file_path = create_local_file_to_upload()
    for i in range(0, TEST_NUMFILES):
        fileURL = getURLFile(str(uuid.uuid4()))
        upload_file(local_file_path, fileURL)
        FILE_URLS.append(fileURL)
    try:
        os.remove(local_file_path)  # scratch file no longer needed
    except OSError:
        pass  # best-effort cleanup; a leftover temp file is not fatal
    info("FILE_URLS contains %i elements" % len(FILE_URLS))
    info("GET-WebDAV test setup completed.")
    return FILE_URLS
def get_dav(fileURL):
    # Single timed transaction: GET the file through the shared WebDAV
    # client and require a 200 OK, raising otherwise.
    http_get_runner=http_get.TestRunner()
    statusCode = http_get_runner(fileURL,DAV_CLIENT)
    check_http_success(statusCode, 200, "Error in HTTP GET")
class TestRunner:
    # Grinder worker-thread entry point: __init__ runs once per thread,
    # __call__ once per run, __del__ at thread teardown.
    def __init__(self):
        # Barriers synchronize all worker threads so no thread issues GETs
        # before thread 0 finishes uploading, and no thread deletes before
        # everyone is done.
        self.initBarrier = grinder.barrier("InitializationDone")
        self.delBarrier = grinder.barrier("ExecutionDone")
        if (grinder.threadNumber == 0):
            create_test_directory_if_needed()
            # NOTE(review): setup() ends with a bare ``return`` (None), so this
            # binds a *local* FILE_URLS to None; the script works only because
            # setup() mutates the module-level FILE_URLS — consider having
            # setup() return the list.
            FILE_URLS = setup()
    def __call__(self):
        # Wait for setup completion on the first run only.
        if (grinder.runNumber == 0):
            self.initBarrier.await()
        try:
            test = Test(TestID.GET_DAV, "StoRM GET WebDAV test")
            test.record(get_dav)
            get_dav(random.choice(FILE_URLS))
        except Exception, e:
            error("Error executing get-dav: %s" % traceback.format_exc())
    def __del__(self):
        # All threads rendezvous, then thread 0 deletes the uploaded files.
        self.delBarrier.await()
        if (grinder.threadNumber == 0):
            info("Thread num. %i is the deleter" % grinder.threadNumber)
            for i in range(0,TEST_NUMFILES):
                info("file to remove: %s" % FILE_URLS[i])
                DAV_CLIENT.delete(FILE_URLS[i])
/*
* Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
*/
package com.intellij.find.actions;
import com.intellij.codeInsight.TargetElementUtil;
import com.intellij.codeInsight.hint.HintManager;
import com.intellij.codeInsight.hint.HintUtil;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.find.FindManager;
import com.intellij.find.FindSettings;
import com.intellij.find.UsagesPreviewPanelProvider;
import com.intellij.find.findUsages.*;
import com.intellij.find.impl.FindManagerImpl;
import com.intellij.icons.AllIcons;
import com.intellij.ide.DataManager;
import com.intellij.ide.util.gotoByName.ModelDiff;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.application.TransactionGuard;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.FileEditorLocation;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.fileEditor.impl.text.AsyncEditorLoader;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.preview.PreviewManager;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.project.DumbAwareAction;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.ui.popup.JBPopup;
import com.intellij.openapi.ui.popup.JBPopupFactory;
import com.intellij.openapi.ui.popup.PopupChooserBuilder;
import com.intellij.openapi.ui.popup.util.PopupUtil;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.registry.Registry;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.wm.IdeFocusManager;
import com.intellij.pom.Navigatable;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiReference;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.SearchScope;
import com.intellij.ui.*;
import com.intellij.ui.awt.RelativePoint;
import com.intellij.ui.popup.AbstractPopup;
import com.intellij.ui.popup.HintUpdateSupply;
import com.intellij.ui.table.JBTable;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewBundle;
import com.intellij.usageView.UsageViewUtil;
import com.intellij.usages.*;
import com.intellij.usages.impl.*;
import com.intellij.usages.rules.UsageFilteringRuleProvider;
import com.intellij.util.Alarm;
import com.intellij.util.ArrayUtil;
import com.intellij.util.PlatformIcons;
import com.intellij.util.Processor;
import com.intellij.util.messages.MessageBusConnection;
import com.intellij.util.ui.*;
import com.intellij.xml.util.XmlStringUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableColumn;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.*;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
public class ShowUsagesAction extends AnAction implements PopupAction {
  /** Action id as registered in the platform (plugin.xml / keymap). */
  public static final String ID = "ShowUsages";
  /** Max rows shown per popup page; registry-tunable, clamped to at least 1. */
  public static int getUsagesPageSize() {
    return Math.max(1, Registry.intValue("ide.usages.page.size", 100));
  }
  /** Sentinel row appended when results were truncated at maxUsages. */
  static final Usage MORE_USAGES_SEPARATOR = NullUsage.INSTANCE;
  /** Sentinel row appended when some usages fell outside the search scope. */
  static final Usage USAGES_OUTSIDE_SCOPE_SEPARATOR = new UsageAdapter();
  private static final UsageNode MORE_USAGES_SEPARATOR_NODE = UsageViewImpl.NULL_NODE;
  private static final UsageNode USAGES_OUTSIDE_SCOPE_NODE = new UsageNode(null, USAGES_OUTSIDE_SCOPE_SEPARATOR);
  /**
   * Row order for the popup table: plain string rows sort after usage rows,
   * then the sentinels (out-of-scope last, "more usages" before it); real
   * usages compare by their own Comparable order when available, otherwise by
   * file name, then in-file location, then full path.
   */
  private static final Comparator<UsageNode> USAGE_NODE_COMPARATOR = (c1, c2) -> {
    if (c1 instanceof StringNode || c2 instanceof StringNode) {
      if (c1 instanceof StringNode && c2 instanceof StringNode) {
        return Comparing.compare(c1.toString(), c2.toString());
      }
      return c1 instanceof StringNode ? 1 : -1;
    }
    Usage o1 = c1.getUsage();
    Usage o2 = c2.getUsage();
    // Sentinels weigh heavier so they always sink below real usages.
    int weight1 = o1 == USAGES_OUTSIDE_SCOPE_SEPARATOR ? 2 : o1 == MORE_USAGES_SEPARATOR ? 1 : 0;
    int weight2 = o2 == USAGES_OUTSIDE_SCOPE_SEPARATOR ? 2 : o2 == MORE_USAGES_SEPARATOR ? 1 : 0;
    if (weight1 != weight2) return weight1 - weight2;
    if (o1 instanceof Comparable && o2 instanceof Comparable) {
      //noinspection unchecked
      return ((Comparable)o1).compareTo(o2);
    }
    VirtualFile v1 = UsageListCellRenderer.getVirtualFile(o1);
    VirtualFile v2 = UsageListCellRenderer.getVirtualFile(o2);
    String name1 = v1 == null ? null : v1.getName();
    String name2 = v2 == null ? null : v2.getName();
    int i = Comparing.compare(name1, name2);
    if (i != 0) return i;
    if (Comparing.equal(v1, v2)) {
      FileEditorLocation loc1 = o1.getLocation();
      FileEditorLocation loc2 = o2.getLocation();
      return Comparing.compare(loc1, loc2);
    }
    else {
      String path1 = v1 == null ? null : v1.getPath();
      String path2 = v2 == null ? null : v2.getPath();
      return Comparing.compare(path1, path2);
    }
  };
  /** When true, the settings dialog is shown before searching (see showDialogAndFindUsages). */
  private final boolean myShowSettingsDialogBefore;
  /** Set while the hint is showing; re-invoking the action runs this to widen the search. */
  private Runnable mySearchEverywhereRunnable;
  // used from plugin.xml
  @SuppressWarnings("UnusedDeclaration")
  public ShowUsagesAction() {
    this(false);
  }
  private ShowUsagesAction(boolean showDialogBefore) {
    setInjectedContext(true);
    myShowSettingsDialogBefore = showDialogBefore;
  }
  @Override
  public boolean startInTransaction() {
    return true;
  }
  @Override
  public void update(@NotNull AnActionEvent e) {
    // Delegate the base enabled/visible state, then additionally disable when
    // the selected usage target is not PSI-backed (we can only find usages of
    // PsiElement targets).
    FindUsagesInFileAction.updateFindUsagesAction(e);
    if (e.getPresentation().isEnabled()) {
      UsageTarget[] usageTargets = e.getData(UsageView.USAGE_TARGETS_KEY);
      if (usageTargets != null && !(ArrayUtil.getFirstElement(usageTargets) instanceof PsiElementUsageTarget)) {
        e.getPresentation().setEnabled(false);
      }
    }
  }
  @Override
  public void actionPerformed(@NotNull AnActionEvent e) {
    final Project project = e.getProject();
    if (project == null) return;
    // Second invocation while the hint is up: run the pending "search
    // everywhere" continuation instead of starting a fresh search.
    Runnable searchEverywhere = mySearchEverywhereRunnable;
    mySearchEverywhereRunnable = null;
    hideHints();
    if (searchEverywhere != null) {
      searchEverywhere.run();
      return;
    }
    final RelativePoint popupPosition = JBPopupFactory.getInstance().guessBestPopupLocation(e.getDataContext());
    PsiDocumentManager.getInstance(project).commitAllDocuments();
    FeatureUsageTracker.getInstance().triggerFeatureUsed("navigation.goto.usages");
    UsageTarget[] usageTargets = e.getData(UsageView.USAGE_TARGETS_KEY);
    final Editor editor = e.getData(CommonDataKeys.EDITOR);
    if (usageTargets == null) {
      // No explicit target in the context: resolve (possibly ambiguous)
      // element at the caret, then search.
      FindUsagesAction.chooseAmbiguousTargetAndPerform(project, editor, element -> {
        startFindUsages(element, popupPosition, editor, getUsagesPageSize());
        return false;
      });
    }
    else if (ArrayUtil.getFirstElement(usageTargets) instanceof PsiElementUsageTarget) {
      PsiElement element = ((PsiElementUsageTarget)usageTargets[0]).getElement();
      if (element != null) {
        startFindUsages(element, popupPosition, editor, getUsagesPageSize());
      }
    }
  }
  /** Dismisses any lightweight hints currently showing (key-dismissable ones only). */
  private static void hideHints() {
    HintManager.getInstance().hideHints(HintManager.HIDE_BY_ANY_KEY, false, false);
  }
  /**
   * Entry point for showing usages of {@code element} in a popup at
   * {@code popupPosition}, capped at {@code maxUsages} rows. Silently does
   * nothing when no find-usages handler applies to the element. Depending on
   * {@link #myShowSettingsDialogBefore}, either asks for options first or
   * searches immediately with the handler's current options.
   */
  public void startFindUsages(@NotNull PsiElement element, @NotNull RelativePoint popupPosition, Editor editor, int maxUsages) {
    Project project = element.getProject();
    FindUsagesManager findUsagesManager = ((FindManagerImpl)FindManager.getInstance(project)).getFindUsagesManager();
    FindUsagesHandler handler = findUsagesManager.getFindUsagesHandler(element, false);
    if (handler == null) return;
    if (myShowSettingsDialogBefore) {
      showDialogAndFindUsages(handler, popupPosition, editor, maxUsages);
      return;
    }
    showElementUsages(editor, popupPosition, handler, maxUsages, handler.getFindUsagesOptions(DataManager.getInstance().getDataContext()));
  }
  /**
   * Core of the action: runs the usage search in the background, streams
   * results into a table-backed popup, and post-processes the outcome (hint
   * for zero/one usage, popup title update otherwise).
   *
   * <p>Must be called on the EDT. Temporarily swaps the global
   * {@link UsageViewSettings} for the ShowUsages-specific ones and restores
   * them when the usage view is disposed.
   */
  private void showElementUsages(final Editor editor,
                                 @NotNull final RelativePoint popupPosition,
                                 @NotNull final FindUsagesHandler handler,
                                 final int maxUsages,
                                 @NotNull final FindUsagesOptions options) {
    ApplicationManager.getApplication().assertIsDispatchThread();
    final UsageViewSettings usageViewSettings = UsageViewSettings.getInstance();
    final ShowUsagesSettings showUsagesSettings = ShowUsagesSettings.getInstance();
    final UsageViewSettings savedGlobalSettings = new UsageViewSettings();
    savedGlobalSettings.loadState(usageViewSettings);
    usageViewSettings.loadState(showUsagesSettings.getState());
    final Project project = handler.getProject();
    UsageViewManager manager = UsageViewManager.getInstance(project);
    FindUsagesManager findUsagesManager = ((FindManagerImpl)FindManager.getInstance(project)).getFindUsagesManager();
    final UsageViewPresentation presentation = findUsagesManager.createPresentation(handler, options);
    presentation.setDetachedMode(true);
    UsageViewImpl usageView = (UsageViewImpl)manager.createUsageView(UsageTarget.EMPTY_ARRAY, Usage.EMPTY_ARRAY, presentation, null);
    // Remember the usage at the caret (if any) so it can be marked as the origin.
    if (editor != null) {
      PsiReference reference = TargetElementUtil.findReference(editor);
      if (reference != null) {
        UsageInfo2UsageAdapter origin = new UsageInfo2UsageAdapter(new UsageInfo(reference));
        usageView.setOriginUsage(origin);
      }
    }
    // Restore the globally-shared settings once this transient view goes away.
    Disposer.register(usageView, () -> {
      showUsagesSettings.applyUsageViewSettings(usageViewSettings);
      usageViewSettings.loadState(savedGlobalSettings);
    });
    final MyTable table = new MyTable();
    final AsyncProcessIcon processIcon = new AsyncProcessIcon("xxx");
    addUsageNodes(usageView.getRoot(), usageView, new ArrayList<>());
    final List<Usage> usages = new ArrayList<>();
    final Set<UsageNode> visibleNodes = new LinkedHashSet<>();
    final List<UsageNode> data = collectData(usages, visibleNodes, usageView, presentation);
    final AtomicInteger outOfScopeUsages = new AtomicInteger();
    setTableModel(table, usageView, data, outOfScopeUsages, options.searchScope);
    boolean isPreviewMode = Boolean.TRUE == PreviewManager.SERVICE.preview(handler.getProject(), UsagesPreviewPanelProvider.ID, Pair.create(usageView, table), false);
    Runnable itemChosenCallback = prepareTable(table, editor, popupPosition, handler, maxUsages, options, isPreviewMode);
    @Nullable final JBPopup popup = isPreviewMode ? null : createUsagePopup(usages, visibleNodes, handler, editor, popupPosition,
                                                                            maxUsages, usageView, options, table, itemChosenCallback, presentation, processIcon);
    if (popup != null) {
      Disposer.register(popup, usageView);
      // show popup only if find usages takes more than 300ms, otherwise it would flicker needlessly
      Alarm alarm = new Alarm(usageView);
      alarm.addRequest(() -> showPopupIfNeedTo(popup, popupPosition), 300);
    }
    // Coalesces table rebuilds onto the EDT while results stream in.
    final PingEDT pingEDT = new PingEDT("Rebuild popup in EDT", o -> popup != null && popup.isDisposed(), 100, () -> {
      if (popup != null && popup.isDisposed()) return;
      final List<UsageNode> nodes = new ArrayList<>();
      List<Usage> copy;
      synchronized (usages) {
        // open up popup as soon as several usages 've been found
        if (popup != null && !popup.isVisible() && (usages.size() <= 1 || !showPopupIfNeedTo(popup, popupPosition))) {
          return;
        }
        addUsageNodes(usageView.getRoot(), usageView, nodes);
        copy = new ArrayList<>(usages);
      }
      rebuildTable(usageView, copy, nodes, table, popup, presentation, popupPosition, !processIcon.isDisposed(), outOfScopeUsages,
                   options.searchScope);
    });
    final MessageBusConnection messageBusConnection = project.getMessageBus().connect(usageView);
    messageBusConnection.subscribe(UsageFilteringRuleProvider.RULES_CHANGED, pingEDT::ping);
    final UsageTarget[] myUsageTarget = {new PsiElement2UsageTargetAdapter(handler.getPsiElement())};
    // Invoked (possibly on a background thread) for every usage found;
    // returns false to stop the search once maxUsages is reached.
    Processor<Usage> collect = usage -> {
      if (!UsageViewManagerImpl.isInScope(usage, options.searchScope)) {
        if (outOfScopeUsages.getAndIncrement() == 0) {
          visibleNodes.add(USAGES_OUTSIDE_SCOPE_NODE);
          usages.add(USAGES_OUTSIDE_SCOPE_SEPARATOR);
        }
        return true;
      }
      synchronized (usages) {
        if (visibleNodes.size() >= maxUsages) return false;
        if(UsageViewManager.isSelfUsage(usage, myUsageTarget)) return true;
        UsageNode node = ReadAction.compute(() -> usageView.doAppendUsage(usage));
        usages.add(usage);
        if (node != null) {
          visibleNodes.add(node);
          boolean continueSearch = true;
          if (visibleNodes.size() == maxUsages) {
            visibleNodes.add(MORE_USAGES_SEPARATOR_NODE);
            usages.add(MORE_USAGES_SEPARATOR);
            continueSearch = false;
          }
          pingEDT.ping();
          return continueSearch;
        }
      }
      return true;
    };
    // Completion continuation: decide between "no usages" hint, auto-navigate
    // for a single usage, or just refreshing the popup title.
    final ProgressIndicator indicator = FindUsagesManager.startProcessUsages(handler, handler.getPrimaryElements(), handler.getSecondaryElements(), collect,
      options, ()-> ApplicationManager.getApplication().invokeLater(() -> {
        Disposer.dispose(processIcon);
        Container parent = processIcon.getParent();
        if (parent != null) {
          parent.remove(processIcon);
          parent.repaint();
        }
        pingEDT.ping(); // repaint title
        synchronized (usages) {
          if (visibleNodes.isEmpty()) {
            if (usages.isEmpty()) {
              String text = UsageViewBundle.message("no.usages.found.in", searchScopePresentableName(options));
              hint(editor, text, handler, popupPosition, maxUsages, options, false);
              cancel(popup);
            }
            // else all usages filtered out
          }
          else if (visibleNodes.size() == 1) {
            if (usages.size() == 1) {
              //the only usage
              Usage usage = visibleNodes.iterator().next().getUsage();
              if (usage == USAGES_OUTSIDE_SCOPE_SEPARATOR) {
                hint(editor, UsageViewManagerImpl.outOfScopeMessage(outOfScopeUsages.get(), options.searchScope), handler, popupPosition, maxUsages, options, true);
              }
              else {
                String message = UsageViewBundle.message("show.usages.only.usage", searchScopePresentableName(options));
                navigateAndHint(usage, message, handler, popupPosition, maxUsages, options);
              }
              cancel(popup);
            }
            else {
              assert usages.size() > 1 : usages;
              // usage view can filter usages down to one
              Usage visibleUsage = visibleNodes.iterator().next().getUsage();
              if (areAllUsagesInOneLine(visibleUsage, usages)) {
                String hint = UsageViewBundle.message("all.usages.are.in.this.line", usages.size(), searchScopePresentableName(options));
                navigateAndHint(visibleUsage, hint, handler, popupPosition, maxUsages, options);
                cancel(popup);
              }
            }
          }
          else {
            if (popup != null) {
              String title = presentation.getTabText();
              boolean shouldShowMoreSeparator = visibleNodes.contains(MORE_USAGES_SEPARATOR_NODE);
              String fullTitle =
                getFullTitle(usages, title, shouldShowMoreSeparator, visibleNodes.size() - (shouldShowMoreSeparator ? 1 : 0), false);
              ((AbstractPopup)popup).setCaption(fullTitle);
            }
          }
        }
      }, project.getDisposed()));
    if (popup != null) {
      Disposer.register(popup, indicator::cancel);
    }
  }
  /** Wraps an arbitrary value into a plain-text table row (non-usage node). */
  @NotNull
  private static UsageNode createStringNode(@NotNull final Object string) {
    return new StringNode(string);
  }
  /**
   * Table model for the usages popup: every column shares the same
   * {@link ColumnInfo} that simply exposes the row's {@link UsageNode}.
   * Implements {@link ModelDiff.Model} so the table can be patched
   * incrementally instead of being rebuilt from scratch.
   */
  private static class MyModel extends ListTableModel<UsageNode> implements ModelDiff.Model<Object> {
    private MyModel(@NotNull List<UsageNode> data, int cols) {
      super(cols(cols), data, 0);
    }
    @NotNull
    private static ColumnInfo[] cols(int cols) {
      ColumnInfo<UsageNode, UsageNode> o = new ColumnInfo<UsageNode, UsageNode>("") {
        @Nullable
        @Override
        public UsageNode valueOf(UsageNode node) {
          return node;
        }
      };
      // The same identity column is replicated for every requested column.
      List<ColumnInfo<UsageNode, UsageNode>> list = Collections.nCopies(cols, o);
      return list.toArray(new ColumnInfo[list.size()]);
    }
    @Override
    public void addToModel(int idx, Object element) {
      // Non-UsageNode payloads (plain strings) are wrapped on the fly.
      UsageNode node = element instanceof UsageNode ? (UsageNode)element : createStringNode(element);
      if (idx < getRowCount()) {
        insertRow(idx, node);
      }
      else {
        addRow(node);
      }
    }
    @Override
    public void removeRangeFromModel(int start, int end) {
      // Remove back-to-front so earlier indices stay valid.
      for (int i=end; i>=start; i--) {
        removeRow(i);
      }
    }
  }
private static boolean showPopupIfNeedTo(@NotNull JBPopup popup, @NotNull RelativePoint popupPosition) {
if (!popup.isDisposed() && !popup.isVisible()) {
popup.show(popupPosition);
return true;
}
else {
return false;
}
}
  /**
   * Builds the lightweight hint panel: an information label (with the
   * "press again to search everywhere" suffix), plus a settings button on the
   * right. While the panel is showing, {@link #mySearchEverywhereRunnable} is
   * armed so a repeated action invocation widens the search.
   */
  @NotNull
  private JComponent createHintComponent(@NotNull String text,
                                         @NotNull final FindUsagesHandler handler,
                                         @NotNull final RelativePoint popupPosition,
                                         final Editor editor,
                                         @NotNull final Runnable cancelAction,
                                         final int maxUsages,
                                         @NotNull final FindUsagesOptions options,
                                         boolean isWarning) {
    JComponent label = HintUtil.createInformationLabel(suggestSecondInvocation(options, handler, text + " "));
    if (isWarning) {
      label.setBackground(MessageType.WARNING.getPopupBackground());
    }
    InplaceButton button = createSettingsButton(handler, popupPosition, editor, maxUsages, cancelAction);
    JPanel panel = new JPanel(new BorderLayout()) {
      // Arm/disarm the "search everywhere on second invocation" hook with the
      // hint's lifetime.
      @Override
      public void addNotify() {
        mySearchEverywhereRunnable = () -> searchEverywhere(options, handler, editor, popupPosition, maxUsages);
        super.addNotify();
      }
      @Override
      public void removeNotify() {
        mySearchEverywhereRunnable = null;
        super.removeNotify();
      }
    };
    button.setBackground(label.getBackground());
    panel.setBackground(label.getBackground());
    label.setOpaque(false);
    label.setBorder(null);
    panel.setBorder(HintUtil.createHintBorder());
    panel.add(label, BorderLayout.CENTER);
    panel.add(button, BorderLayout.EAST);
    return panel;
  }
  /**
   * Creates the "Settings..." inline button shown in the hint; clicking it
   * cancels the hint and opens the find-usages options dialog (appending the
   * keyboard shortcut to the label when one is bound).
   */
  @NotNull
  private InplaceButton createSettingsButton(@NotNull final FindUsagesHandler handler,
                                             @NotNull final RelativePoint popupPosition,
                                             final Editor editor,
                                             final int maxUsages,
                                             @NotNull final Runnable cancelAction) {
    String shortcutText = "";
    KeyboardShortcut shortcut = UsageViewImpl.getShowUsagesWithSettingsShortcut();
    if (shortcut != null) {
      shortcutText = "(" + KeymapUtil.getShortcutText(shortcut) + ")";
    }
    return new InplaceButton("Settings..." + shortcutText, AllIcons.General.Settings, e -> {
      // Defer dialog opening so the hint can close first.
      ApplicationManager.getApplication().invokeLater(() -> showDialogAndFindUsages(handler, popupPosition, editor, maxUsages));
      cancelAction.run();
    });
  }
  /**
   * Opens the find-usages options dialog and, if the user confirms, reruns the
   * popup search with the freshly chosen options. Cancelling the dialog does
   * nothing.
   */
  private void showDialogAndFindUsages(@NotNull FindUsagesHandler handler,
                                       @NotNull RelativePoint popupPosition,
                                       Editor editor,
                                       int maxUsages) {
    AbstractFindUsagesDialog dialog = handler.getFindUsagesDialog(false, false, false);
    if (dialog.showAndGet()) {
      dialog.calcFindUsagesOptions();
      FindUsagesOptions options = handler.getFindUsagesOptions(DataManager.getInstance().getDataContext());
      showElementUsages(editor, popupPosition, handler, maxUsages, options);
    }
  }
  /** Human-readable name of the options' search scope, for hint/title messages. */
  @NotNull
  private static String searchScopePresentableName(@NotNull FindUsagesOptions options) {
    return options.searchScope.getDisplayName();
  }
/**
 * Configures the usages table (look, speed search, selection tracking) and returns
 * the callback to run when the user "chooses" the current selection (Enter/click).
 */
@NotNull
private Runnable prepareTable(final MyTable table,
final Editor editor,
final RelativePoint popupPosition,
final FindUsagesHandler handler,
final int maxUsages,
@NotNull final FindUsagesOptions options,
final boolean previewMode) {
SpeedSearchBase<JTable> speedSearch = new MySpeedSearch(table);
speedSearch.setComparator(new SpeedSearchComparator(false));
// Compact list-like presentation: no grid lines, no header, icon-sized rows.
table.setRowHeight(PlatformIcons.CLASS_ICON.getIconHeight()+2);
table.setShowGrid(false);
table.setShowVerticalLines(false);
table.setShowHorizontalLines(false);
table.setTableHeader(null);
table.setAutoResizeMode(JTable.AUTO_RESIZE_LAST_COLUMN);
table.setIntercellSpacing(new Dimension(0, 0));
// Snapshot of the current selection, recomputed on every selection change: either a
// list of selected usages, or a flag saying one of the special separator rows is selected.
final AtomicReference<List<Object>> selectedUsages = new AtomicReference<>();
final AtomicBoolean moreUsagesSelected = new AtomicBoolean();
final AtomicBoolean outsideScopeUsagesSelected = new AtomicBoolean();
table.getSelectionModel().addListSelectionListener(e -> {
selectedUsages.set(null);
outsideScopeUsagesSelected.set(false);
moreUsagesSelected.set(false);
List<Object> usages = null;
for (int i : table.getSelectedRows()) {
Object value = table.getValueAt(i, 0);
if (value instanceof UsageNode) {
Usage usage = ((UsageNode)value).getUsage();
if (usage == USAGES_OUTSIDE_SCOPE_SEPARATOR) {
// A separator row wins over any concrete usages in the selection.
outsideScopeUsagesSelected.set(true);
usages = null;
break;
}
else if (usage == MORE_USAGES_SEPARATOR) {
moreUsagesSelected.set(true);
usages = null;
break;
}
else {
if (usages == null) usages = new ArrayList<>();
// Store a copy of the UsageInfo rather than the live object from the view.
usages.add(usage instanceof UsageInfo2UsageAdapter ? ((UsageInfo2UsageAdapter)usage).getUsageInfo().copy() : usage);
}
}
}
selectedUsages.set(usages);
});
// Choosing a row either loads the next page, widens the scope, or navigates to the usages.
final Runnable itemChosenCallback = () -> {
if (moreUsagesSelected.get()) {
appendMoreUsages(editor, popupPosition, handler, maxUsages, options);
return;
}
if (outsideScopeUsagesSelected.get()) {
// Re-run the search over the whole project.
options.searchScope = GlobalSearchScope.projectScope(handler.getProject());
showElementUsages(editor, popupPosition, handler, maxUsages, options);
return;
}
List<Object> usages = selectedUsages.get();
if (usages != null) {
for (Object usage : usages) {
if (usage instanceof UsageInfo) {
UsageViewUtil.navigateTo((UsageInfo)usage, true);
}
else if (usage instanceof Navigatable) {
((Navigatable)usage).navigate(true);
}
}
}
};
// In preview mode mouse clicks and Enter are wired up manually here.
if (previewMode) {
table.addMouseListener(new MouseAdapter() {
@Override
public void mouseReleased(MouseEvent e) {
if (UIUtil.isActionClick(e, MouseEvent.MOUSE_RELEASED) && !UIUtil.isSelectionButtonDown(e) && !e.isConsumed()) {
itemChosenCallback.run();
}
}
});
table.addKeyListener(new KeyAdapter() {
@Override
public void keyPressed(KeyEvent e) {
if (e.getKeyCode() == KeyEvent.VK_ENTER) {
itemChosenCallback.run();
}
}
});
}
return itemChosenCallback;
}
/**
 * Builds the main usages popup: title and ad text, shortcut handlers, the
 * settings/pin buttons and the filtering toolbar.
 * The popup is created last, so button callbacks capture it through the
 * one-element {@code popup} array.
 *
 * Fixes: removed a duplicated {@code builder.setMovable(true).setResizable(true)}
 * call, and removed a dead {@code myWidth} computation whose result was
 * immediately discarded by {@code myWidth = -1}.
 */
@NotNull
private JBPopup createUsagePopup(@NotNull final List<Usage> usages,
@NotNull Set<UsageNode> visibleNodes,
@NotNull final FindUsagesHandler handler,
final Editor editor,
@NotNull final RelativePoint popupPosition,
final int maxUsages,
@NotNull final UsageViewImpl usageView,
@NotNull final FindUsagesOptions options,
@NotNull final JTable table,
@NotNull final Runnable itemChoseCallback,
@NotNull final UsageViewPresentation presentation,
@NotNull final AsyncProcessIcon processIcon) {
ApplicationManager.getApplication().assertIsDispatchThread();
PopupChooserBuilder builder = new PopupChooserBuilder(table);
final String title = presentation.getTabText();
if (title != null) {
String result = getFullTitle(usages, title, false, visibleNodes.size() - 1, true);
builder.setTitle(result);
builder.setAdText(getSecondInvocationTitle(options, handler));
}
builder.setMovable(true).setResizable(true);
builder.setItemChoosenCallback(itemChoseCallback);
final JBPopup[] popup = new JBPopup[1];
// "Show usages with settings" shortcut: close this popup and open the settings dialog.
KeyboardShortcut shortcut = UsageViewImpl.getShowUsagesWithSettingsShortcut();
if (shortcut != null) {
new DumbAwareAction() {
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
cancel(popup[0]);
showDialogAndFindUsages(handler, popupPosition, editor, maxUsages);
}
@Override
public boolean startInTransaction() {
return true;
}
}.registerCustomShortcutSet(new CustomShortcutSet(shortcut.getFirstKeyStroke()), table);
}
// Pressing the Show Usages shortcut again re-runs the search with the maximal scope.
shortcut = getShowUsagesShortcut();
if (shortcut != null) {
new DumbAwareAction() {
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
cancel(popup[0]);
searchEverywhere(options, handler, editor, popupPosition, maxUsages);
}
@Override
public boolean startInTransaction() {
return true;
}
}.registerCustomShortcutSet(new CustomShortcutSet(shortcut.getFirstKeyStroke()), table);
}
InplaceButton settingsButton = createSettingsButton(handler, popupPosition, editor, maxUsages, () -> cancel(popup[0]));
ActiveComponent spinningProgress = new ActiveComponent.Adapter() {
@Override
public JComponent getComponent() {
return processIcon;
}
};
final DefaultActionGroup pinGroup = new DefaultActionGroup();
final ActiveComponent pin = createPinButton(handler, usageView, options, popup, pinGroup);
builder.setCommandButton(new CompositeActiveComponent(spinningProgress, settingsButton, pin));
DefaultActionGroup toolbar = new DefaultActionGroup();
usageView.addFilteringActions(toolbar);
toolbar.add(UsageGroupingRuleProviderImpl.createGroupByFileStructureAction(usageView));
ActionToolbar actionToolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.USAGE_VIEW_TOOLBAR, toolbar, true);
actionToolbar.setReservePlaceAutoPopupIcon(false);
final JComponent toolBar = actionToolbar.getComponent();
toolBar.setOpaque(false);
builder.setSettingButton(toolBar);
builder.setCancelKeyEnabled(false);
popup[0] = builder.createPopup();
JComponent content = popup[0].getContent();
// -1 marks the width as "not measured yet"; setSizeAndDimensions computes the real value.
myWidth = -1;
// Re-register toolbar/pin shortcuts on the popup content so they work while it has focus.
for (AnAction action : toolbar.getChildren(null)) {
action.unregisterCustomShortcutSet(usageView.getComponent());
action.registerCustomShortcutSet(action.getShortcutSet(), content);
}
for (AnAction action : pinGroup.getChildren(null)) {
action.unregisterCustomShortcutSet(usageView.getComponent());
action.registerCustomShortcutSet(action.getShortcutSet(), content);
}
return popup[0];
}
/**
 * Builds the "pin" button that moves the search out of the popup and into the
 * regular Find Usages toolwindow. The action is added to {@code pinGroup} so the
 * caller can later re-register its shortcuts on the popup content.
 */
private ActiveComponent createPinButton(@NotNull final FindUsagesHandler handler,
@NotNull final UsageViewImpl usageView,
@NotNull final FindUsagesOptions options,
@NotNull final JBPopup[] popup,
@NotNull DefaultActionGroup pinGroup) {
final AnAction pinAction =
new AnAction("Open Find Usages Toolwindow", "Show all usages in a separate toolwindow", AllIcons.General.AutohideOff) {
{
// Instance initializer: reuse the shortcut of the regular Find Usages action.
AnAction action = ActionManager.getInstance().getAction(IdeActions.ACTION_FIND_USAGES);
setShortcutSet(action.getShortcutSet());
}
@Override
public boolean startInTransaction() {
return true;
}
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
// Close the lightweight UI, then run the full toolwindow-based search.
hideHints();
cancel(popup[0]);
FindUsagesManager findUsagesManager = ((FindManagerImpl)FindManager.getInstance(usageView.getProject())).getFindUsagesManager();
findUsagesManager.findUsages(handler.getPrimaryElements(), handler.getSecondaryElements(), handler, options,
FindSettings.getInstance().isSkipResultsWithOneUsage());
}
};
pinGroup.add(pinAction);
final ActionToolbar pinToolbar = ActionManager.getInstance().createActionToolbar(ActionPlaces.USAGE_VIEW_TOOLBAR, pinGroup, true);
pinToolbar.setReservePlaceAutoPopupIcon(false);
final JComponent pinToolBar = pinToolbar.getComponent();
pinToolBar.setBorder(null);
pinToolBar.setOpaque(false);
// Adapter exposing the toolbar as an ActiveComponent for the popup's command-button area.
return new ActiveComponent.Adapter() {
@Override
public JComponent getComponent() {
return pinToolBar;
}
};
}
/** Cancels the popup; no-op when it was never created. */
private static void cancel(@Nullable JBPopup popup) {
  if (popup == null) return;
  popup.cancel();
}
/**
 * Composes the HTML popup title: either a "Some ... (Only N usages shown)" form when
 * the list is truncated, or the usual "title (N usages)" form. While the search is
 * still running, " so far" is appended to make clear that the numbers are partial.
 */
@NotNull
private static String getFullTitle(@NotNull List<Usage> usages,
                                   @NotNull String title,
                                   boolean hadMoreSeparator,
                                   int visibleNodesCount,
                                   boolean findUsagesInProgress) {
  String soFarSuffix = findUsagesInProgress ? " so far" : "";
  String body = hadMoreSeparator
                ? "<b>Some</b> " + title + " " + "<b>(Only " + visibleNodesCount + " usages shown" + soFarSuffix + ")</b>"
                : title + " (" + UsageViewBundle.message("usages.n", usages.size()) + soFarSuffix + ")";
  return "<html><nobr>" + body + "</nobr></html>";
}
/**
 * Wraps {@code text} in HTML, appending the "press the shortcut again to widen
 * the scope" hint when one applies to the current options.
 */
@NotNull
private static String suggestSecondInvocation(@NotNull FindUsagesOptions options, @NotNull FindUsagesHandler handler, @NotNull String text) {
  String secondInvocationHint = getSecondInvocationTitle(options, handler);
  String full = secondInvocationHint == null ? text : text + "<br><small> " + secondInvocationHint + "</small>";
  return XmlStringUtil.wrapInHtml(UIUtil.convertSpace2Nbsp(full));
}
/**
 * Text for the "press again to search in a wider scope" hint, or null when there is
 * no shortcut bound or the search already runs in the maximal scope.
 */
@Nullable
private static String getSecondInvocationTitle(@NotNull FindUsagesOptions options, @NotNull FindUsagesHandler handler) {
  KeyboardShortcut shortcut = getShowUsagesShortcut();
  if (shortcut == null) return null;
  GlobalSearchScope maximalScope = FindUsagesManager.getMaximalScope(handler);
  if (options.searchScope.equals(maximalScope)) return null;
  return "Press " + KeymapUtil.getShortcutText(shortcut) + " again to search in " + maximalScope.getDisplayName();
}
/** Re-runs the same search with the widest scope available for this handler. */
private void searchEverywhere(@NotNull FindUsagesOptions options,
                              @NotNull FindUsagesHandler handler,
                              Editor editor,
                              @NotNull RelativePoint popupPosition,
                              int maxUsages) {
  // Clone so the caller's options keep their original scope.
  FindUsagesOptions widened = options.clone();
  widened.searchScope = FindUsagesManager.getMaximalScope(handler);
  showElementUsages(editor, popupPosition, handler, maxUsages, widened);
}
// Keyboard shortcut currently bound to this action (looked up by its ID), or null when none is bound.
@Nullable
private static KeyboardShortcut getShowUsagesShortcut() {
return ActionManager.getInstance().getKeyboardShortcut(ID);
}
/** Counts the usages that the view's current filters hide. */
private static int filtered(@NotNull List<Usage> usages, @NotNull UsageViewImpl usageView) {
  int hidden = 0;
  for (Usage usage : usages) {
    if (!usageView.isVisible(usage)) {
      hidden++;
    }
  }
  return hidden;
}
/** Document offset where the usage's element starts, or -1 when it cannot be resolved. */
private static int getUsageOffset(@NotNull Usage usage) {
  if (!(usage instanceof UsageInfo2UsageAdapter)) return -1;
  PsiElement element = ((UsageInfo2UsageAdapter)usage).getElement();
  return element == null ? -1 : element.getTextRange().getStartOffset();
}
/**
 * True when every usage in {@code usages} is in the same editor and on the same
 * document line as {@code visibleUsage}; false as soon as any usage cannot be
 * resolved to an editor/offset or lands elsewhere.
 */
private static boolean areAllUsagesInOneLine(@NotNull Usage visibleUsage, @NotNull List<Usage> usages) {
  Editor editor = getEditorFor(visibleUsage);
  if (editor == null) return false;
  int offset = getUsageOffset(visibleUsage);
  if (offset == -1) return false;
  int line = editor.getDocument().getLineNumber(offset);
  for (Usage other : usages) {
    if (getEditorFor(other) != editor) return false;
    int otherOffset = getUsageOffset(other);
    if (otherOffset == -1) return false;
    if (editor.getDocument().getLineNumber(otherOffset) != line) return false;
  }
  return true;
}
/**
 * Installs (or reuses) the table model for the given rows. A new model and cell
 * renderers are created only when the column count changes; otherwise the existing
 * model instance is returned untouched.
 */
@NotNull
private static MyModel setTableModel(@NotNull JTable table,
@NotNull UsageViewImpl usageView,
@NotNull final List<UsageNode> data,
@NotNull AtomicInteger outOfScopeUsages,
@NotNull SearchScope searchScope) {
ApplicationManager.getApplication().assertIsDispatchThread();
final int columnCount = calcColumnCount(data);
MyModel model = table.getModel() instanceof MyModel ? (MyModel)table.getModel() : null;
if (model == null || model.getColumnCount() != columnCount) {
model = new MyModel(data, columnCount);
table.setModel(model);
// One shared renderer instance for every column.
ShowUsagesTableCellRenderer renderer = new ShowUsagesTableCellRenderer(usageView, outOfScopeUsages, searchScope);
for (int i=0;i<table.getColumnModel().getColumnCount();i++) {
TableColumn column = table.getColumnModel().getColumn(i);
column.setPreferredWidth(0);
column.setCellRenderer(renderer);
}
}
return model;
}
/** A plain message row (StringNode) uses one column; real usage rows use three. */
private static int calcColumnCount(@NotNull List<UsageNode> data) {
  if (data.isEmpty()) return 1;
  if (data.get(0) instanceof StringNode) return 1;
  return 3;
}
/**
 * Assembles the rows to display: an optional "N usages were filtered out" note,
 * then the visible nodes; when nothing is visible yet, a single progress-text row.
 * The result is sorted with the shared usage-node comparator.
 */
@NotNull
private static List<UsageNode> collectData(@NotNull List<Usage> usages,
                                           @NotNull Collection<UsageNode> visibleNodes,
                                           @NotNull UsageViewImpl usageView,
                                           @NotNull UsageViewPresentation presentation) {
  List<UsageNode> rows = new ArrayList<>();
  int hiddenCount = filtered(usages, usageView);
  if (hiddenCount != 0) {
    rows.add(createStringNode(UsageViewBundle.message("usages.were.filtered.out", hiddenCount)));
  }
  rows.addAll(visibleNodes);
  if (rows.isEmpty()) {
    // Nothing to show yet - display the search-progress text instead.
    String progressText = StringUtil.escapeXml(UsageViewManagerImpl.getProgressTitle(presentation));
    rows.add(createStringNode(progressText));
  }
  rows.sort(USAGE_NODE_COMPARATOR);
  return rows;
}
/**
 * Sizes every column but the last to fit its widest rendered cell (pinning min, max,
 * current and preferred width), and returns the total width including the last column's
 * widest cell. The last column itself is left flexible for AUTO_RESIZE_LAST_COLUMN.
 */
private static int calcMaxWidth(JTable table) {
  int columnCount = table.getColumnModel().getColumnCount();
  int totalWidth = 0;
  for (int col = 0; col < columnCount - 1; col++) {
    TableColumn column = table.getColumnModel().getColumn(col);
    int width = Math.max(column.getPreferredWidth(), columnMaxWidth(table, col));
    totalWidth += width;
    column.setMinWidth(width);
    column.setMaxWidth(width);
    column.setWidth(width);
    column.setPreferredWidth(width);
  }
  totalWidth += columnMaxWidth(table, columnCount - 1);
  return totalWidth;
}
/** Widest rendered cell in the given column, including the inter-cell horizontal gap. */
private static int columnMaxWidth(@NotNull JTable table, int col) {
  TableColumn column = table.getColumnModel().getColumn(col);
  int gap = table.getIntercellSpacing().width;
  int maxWidth = 0;
  for (int row = 0; row < table.getRowCount(); row++) {
    Component rendered = table.prepareRenderer(column.getCellRenderer(), row, col);
    maxWidth = Math.max(maxWidth, rendered.getPreferredSize().width + gap);
  }
  return maxWidth;
}
// Cached popup width; -1 means "not measured yet" (set in createUsagePopup, read/updated in setSizeAndDimensions).
private int myWidth;
/**
 * Rebuilds the table contents from the current usage list: appends the special
 * "more usages" / "outside scope" separator rows, refreshes the popup title, diffs
 * the model against the previous rows while preserving the selection, and finally
 * resizes the popup to fit.
 */
private void rebuildTable(@NotNull final UsageViewImpl usageView,
@NotNull final List<Usage> usages,
@NotNull List<UsageNode> nodes,
@NotNull final JTable table,
@Nullable final JBPopup popup,
@NotNull final UsageViewPresentation presentation,
@NotNull final RelativePoint popupPosition,
boolean findUsagesInProgress,
@NotNull AtomicInteger outOfScopeUsages,
@NotNull SearchScope searchScope) {
ApplicationManager.getApplication().assertIsDispatchThread();
boolean shouldShowMoreSeparator = usages.contains(MORE_USAGES_SEPARATOR);
if (shouldShowMoreSeparator) {
nodes.add(MORE_USAGES_SEPARATOR_NODE);
}
// The outside-scope row is only shown when the "more usages" row is absent.
boolean hasOutsideScopeUsages = usages.contains(USAGES_OUTSIDE_SCOPE_SEPARATOR);
if (hasOutsideScopeUsages && !shouldShowMoreSeparator) {
nodes.add(USAGES_OUTSIDE_SCOPE_NODE);
}
String title = presentation.getTabText();
// Separator rows are excluded from the displayed node count.
String fullTitle = getFullTitle(usages, title, shouldShowMoreSeparator || hasOutsideScopeUsages, nodes.size() - (shouldShowMoreSeparator || hasOutsideScopeUsages ? 1 : 0), findUsagesInProgress);
if (popup != null) {
((AbstractPopup)popup).setCaption(fullTitle);
}
List<UsageNode> data = collectData(usages, nodes, usageView, presentation);
MyModel tableModel = setTableModel(table, usageView, data, outOfScopeUsages, searchScope);
List<UsageNode> existingData = tableModel.getItems();
int row = table.getSelectedRow();
// Translate the old selection through the model diff so it survives the update.
int newSelection = updateModel(tableModel, existingData, data, row == -1 ? 0 : row);
if (newSelection < 0 || newSelection >= tableModel.getRowCount()) {
ScrollingUtil.ensureSelectionExists(table);
newSelection = table.getSelectedRow();
}
else {
// do not pre-select the usage under caret by default
if (newSelection == 0 && table.getModel().getRowCount() > 1) {
Object valueInTopRow = table.getModel().getValueAt(0, 0);
if (valueInTopRow instanceof UsageNode && usageView.isOriginUsage(((UsageNode)valueInTopRow).getUsage())) {
newSelection++;
}
}
table.getSelectionModel().setSelectionInterval(newSelection, newSelection);
}
ScrollingUtil.ensureIndexIsVisible(table, newSelection, 0);
if (popup != null) {
setSizeAndDimensions(table, popup, popupPosition, data);
}
}
/**
 * Diffs the old row list against the new one, applies the resulting edit commands
 * to the model, and returns where the old selection index ends up after every
 * command has shifted the rows.
 */
private static int updateModel(@NotNull MyModel tableModel, @NotNull List<UsageNode> listOld, @NotNull List<UsageNode> listNew, int oldSelection) {
  UsageNode[] oldNodes = listOld.toArray(new UsageNode[listOld.size()]);
  UsageNode[] newNodes = listNew.toArray(new UsageNode[listNew.size()]);
  List<ModelDiff.Cmd> commands = ModelDiff.createDiffCmds(tableModel, oldNodes, newNodes);
  int selection = oldSelection;
  if (commands == null) {
    return selection;
  }
  for (ModelDiff.Cmd command : commands) {
    // Translate first: the command's apply() changes the row indices.
    selection = command.translateSelection(selection);
    command.apply();
  }
  return selection;
}
/**
 * Resizes the popup window to fit the table contents: the width only ever grows
 * (tracked in myWidth), the height shows at most 30 rows, and the whole rectangle
 * is clamped to the screen with header/footer/border insets added back on.
 */
private void setSizeAndDimensions(@NotNull JTable table,
@NotNull JBPopup popup,
@NotNull RelativePoint popupPosition,
@NotNull List<UsageNode> data) {
JComponent content = popup.getContent();
Window window = SwingUtilities.windowForComponent(content);
Dimension d = window.getSize();
// Width: widest of current window, table content, header and last remembered width.
int width = calcMaxWidth(table);
width = (int)Math.max(d.getWidth(), width);
Dimension headerSize = ((AbstractPopup)popup).getHeaderPreferredSize();
width = Math.max((int)headerSize.getWidth(), width);
width = Math.max(myWidth, width);
// myWidth == -1 means "first measurement" (see createUsagePopup).
if (myWidth == -1) myWidth = width;
int newWidth = Math.max(width, d.width + width - myWidth);
myWidth = newWidth;
int rowsToShow = Math.min(30, data.size());
Dimension dimension = new Dimension(newWidth, table.getRowHeight() * rowsToShow);
Rectangle rectangle = fitToScreen(dimension, popupPosition, table);
if (!data.isEmpty()) {
ScrollingUtil.ensureSelectionExists(table);
}
table.setSize(rectangle.getSize());
//table.setPreferredSize(dimension);
//table.setMaximumSize(dimension);
//table.setPreferredScrollableViewportSize(dimension);
// Add back the popup chrome (header, footer and border insets) around the table area.
Dimension footerSize = ((AbstractPopup)popup).getFooterPreferredSize();
int footer = footerSize.height;
int footerBorder = footer == 0 ? 0 : 1;
Insets insets = ((AbstractPopup)popup).getPopupBorder().getBorderInsets(content);
rectangle.height += headerSize.height + footer + footerBorder + insets.top + insets.bottom;
ScreenUtil.fitToScreen(rectangle);
Dimension newDim = rectangle.getSize();
window.setBounds(rectangle);
// Pin min == max so the window manager keeps exactly this size.
window.setMinimumSize(newDim);
window.setMaximumSize(newDim);
window.validate();
window.repaint();
}
/**
 * Places a rectangle of the requested size at the popup position and clamps it to
 * the screen; when the height was cut, rounds it down to a whole number of table
 * rows (at least one).
 */
private static Rectangle fitToScreen(@NotNull Dimension newDim, @NotNull RelativePoint popupPosition, JTable table) {
  Rectangle rectangle = new Rectangle(popupPosition.getScreenPoint(), newDim);
  ScreenUtil.fitToScreen(rectangle);
  if (rectangle.getHeight() != newDim.getHeight()) {
    int rowHeight = table.getRowHeight();
    int clippedHeight = (int)rectangle.getHeight();
    int wholeRowsHeight = clippedHeight - clippedHeight % rowHeight;
    rectangle.setSize((int)rectangle.getWidth(), Math.max(wholeRowsHeight, rowHeight));
  }
  return rectangle;
}
/**
 * Loads the next page of results by re-running the search with the usage limit
 * increased by one page size. Submitted through TransactionGuard.
 */
private void appendMoreUsages(Editor editor,
@NotNull RelativePoint popupPosition,
@NotNull FindUsagesHandler handler,
int maxUsages,
@NotNull FindUsagesOptions options) {
TransactionGuard.submitTransaction(handler.getProject(), () ->
showElementUsages(editor, popupPosition, handler, maxUsages + getUsagesPageSize(), options));
}
/**
 * Depth-first walk of the group tree: collects this group's visible usage nodes
 * into {@code outNodes} (re-parenting each to {@code root}), then recurses into
 * the subgroups.
 */
private static void addUsageNodes(@NotNull GroupNode root, @NotNull final UsageViewImpl usageView, @NotNull List<UsageNode> outNodes) {
  for (UsageNode node : root.getUsageNodes()) {
    if (usageView.isVisible(node.getUsage())) {
      node.setParent(root);
      outNodes.add(node);
    }
  }
  for (GroupNode subGroup : root.getSubGroups()) {
    subGroup.setParent(root);
    addUsageNodes(subGroup, usageView, outNodes);
  }
}
/**
 * Navigates to the usage; afterwards, if a hint text was supplied and the usage
 * resolved to a text editor, shows the hint in that editor.
 */
private void navigateAndHint(@NotNull Usage usage,
                             @Nullable final String hint,
                             @NotNull final FindUsagesHandler handler,
                             @NotNull final RelativePoint popupPosition,
                             final int maxUsages,
                             @NotNull final FindUsagesOptions options) {
  usage.navigate(true);
  if (hint == null) return;
  Editor targetEditor = getEditorFor(usage);
  if (targetEditor == null) return;
  hint(targetEditor, hint, handler, popupPosition, maxUsages, options, false);
}
/**
 * Shows the hint component either attached to the editor (information hint) or at
 * the raw popup position when no usable editor is available. When an editor is
 * given, the hint is deferred until that editor has finished loading.
 */
private void showHint(@Nullable final Editor editor,
@NotNull String hint,
@NotNull FindUsagesHandler handler,
@NotNull final RelativePoint popupPosition,
int maxUsages,
@NotNull FindUsagesOptions options,
boolean isWarning) {
Runnable runnable = () -> {
// The target element may have been invalidated while we waited.
if (!handler.getPsiElement().isValid()) return;
JComponent label = createHintComponent(hint, handler, popupPosition, editor, ShowUsagesAction::hideHints, maxUsages, options, isWarning);
if (editor == null || editor.isDisposed() || !editor.getComponent().isShowing()) {
// No live editor to attach to: show a free-floating hint at the popup position.
HintManager.getInstance().showHint(label, popupPosition, HintManager.HIDE_BY_ANY_KEY |
HintManager.HIDE_BY_TEXT_CHANGE | HintManager.HIDE_BY_SCROLLING, 0);
}
else {
HintManager.getInstance().showInformationHint(editor, label);
}
};
if (editor == null) {
runnable.run();
} else {
AsyncEditorLoader.performWhenLoaded(editor, runnable);
}
}
/**
 * Shows the hint once the UI has settled: waits for focus to settle (twice) and,
 * when an editor is involved, for its scrolling to finish, so editor-creation
 * events do not immediately dismiss the hint.
 */
private void hint(@Nullable final Editor editor,
@NotNull final String hint,
@NotNull final FindUsagesHandler handler,
@NotNull final RelativePoint popupPosition,
final int maxUsages,
@NotNull final FindUsagesOptions options,
final boolean isWarning) {
final Project project = handler.getProject();
//opening editor is performing in invokeLater
IdeFocusManager.getInstance(project).doWhenFocusSettlesDown(() -> {
Runnable runnable = () -> {
// after new editor created, some editor resizing events are still bubbling. To prevent hiding hint, invokeLater this
IdeFocusManager.getInstance(project).doWhenFocusSettlesDown(
() -> showHint(editor, hint, handler, popupPosition, maxUsages, options, isWarning));
};
if (editor == null) {
runnable.run();
}
else {
editor.getScrollingModel().runActionOnScrollingFinished(runnable);
}
});
}
/** Resolves the usage's location to a text editor, or null when it is not shown in one. */
@Nullable
private static Editor getEditorFor(@NotNull Usage usage) {
  FileEditorLocation location = usage.getLocation();
  if (location == null) return null;
  FileEditor fileEditor = location.getEditor();
  if (fileEditor instanceof TextEditor) {
    return ((TextEditor)fileEditor).getEditor();
  }
  return null;
}
/**
 * The usages table. Exposes the selected usage's PSI element (and the hosting
 * popup) through DataProvider, and pads each row/cell with a small margin.
 */
private static class MyTable extends JBTable implements DataProvider {
// Vertical/left padding, in pixels, applied around each cell renderer and added to the row height.
private static final int MARGIN = 2;
public MyTable() {
ScrollingUtil.installActions(this);
HintUpdateSupply.installDataContextHintUpdateSupply(this);
}
@Override
public boolean getScrollableTracksViewportWidth() {
return true;
}
@Override
public Object getData(@NonNls String dataId) {
if (CommonDataKeys.PSI_ELEMENT.is(dataId)) {
// Only a single-row selection maps to a PSI element.
final int[] selected = getSelectedRows();
if (selected.length == 1) {
return getPsiElementForHint(getValueAt(selected[0], 0));
}
}
else if (LangDataKeys.POSITION_ADJUSTER_POPUP.is(dataId)) {
return PopupUtil.getPopupContainerFor(this);
}
return null;
}
@Override
public int getRowHeight() {
return super.getRowHeight() + 2 * MARGIN;
}
@NotNull
@Override
public Component prepareRenderer(@NotNull TableCellRenderer renderer, int row, int column) {
Component component = super.prepareRenderer(renderer, row, column);
if (component instanceof JComponent) {
((JComponent)component).setBorder(JBUI.Borders.empty(MARGIN, MARGIN, MARGIN, 0));
}
return component;
}
// Maps a table cell value to the PSI element to show in quick-doc style hints:
// the usage's element, widened to an appropriate parent when one is available.
@Nullable
private static PsiElement getPsiElementForHint(Object selectedValue) {
if (selectedValue instanceof UsageNode) {
final Usage usage = ((UsageNode)selectedValue).getUsage();
if (usage instanceof UsageInfo2UsageAdapter) {
final PsiElement element = ((UsageInfo2UsageAdapter)usage).getElement();
if (element != null) {
final PsiElement view = UsageToPsiElementProvider.findAppropriateParentFrom(element);
return view == null ? element : view;
}
}
}
return null;
}
}
/**
 * A pseudo usage-node that renders a plain message row (e.g. progress or
 * "filtered out" text) instead of a real usage; backed by NullUsage.
 */
static class StringNode extends UsageNode {
// The message to display; toString() of this object is rendered as the row text.
@NotNull private final Object myString;
StringNode(@NotNull Object string) {
super(null, NullUsage.INSTANCE);
myString = string;
}
@Override
public String toString() {
return myString.toString();
}
}
/**
 * Speed search over the usages table. Rows are matched against the group text
 * concatenated with the usage's plain text; message and separator rows match
 * nothing.
 */
private static class MySpeedSearch extends SpeedSearchBase<JTable> {
MySpeedSearch(@NotNull MyTable table) {
super(table);
}
@Override
protected int getSelectedIndex() {
return getTable().getSelectedRow();
}
@Override
protected int convertIndexToModel(int viewIndex) {
return getTable().convertRowIndexToModel(viewIndex);
}
@NotNull
@Override
protected Object[] getAllElements() {
return ((MyModel)getTable().getModel()).getItems().toArray();
}
@Override
protected String getElementText(@NotNull Object element) {
if (!(element instanceof UsageNode)) return element.toString();
UsageNode node = (UsageNode)element;
// Message and separator rows are excluded from matching.
if (node instanceof StringNode) return "";
Usage usage = node.getUsage();
if (usage == MORE_USAGES_SEPARATOR || usage == USAGES_OUTSIDE_SCOPE_SEPARATOR) return "";
GroupNode group = (GroupNode)node.getParent();
String groupText = group == null ? "" : group.getGroup().getText(null);
return groupText + usage.getPresentation().getPlainText();
}
@Override
protected void selectElement(Object element, String selectedText) {
List<UsageNode> data = ((MyModel)getTable().getModel()).getItems();
int i = data.indexOf(element);
if (i == -1) return;
// Model index -> view index before touching the selection.
final int viewRow = getTable().convertRowIndexToView(i);
getTable().getSelectionModel().setSelectionInterval(viewRow, viewRow);
TableUtil.scrollSelectionToVisible(getTable());
}
private MyTable getTable() {
return (MyTable)myComponent;
}
}
}
| ThiagoGarciaAlves/intellij-community | platform/lang-impl/src/com/intellij/find/actions/ShowUsagesAction.java | Java | apache-2.0 | 51,268 |
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.observable;
import static org.junit.Assert.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
import java.util.*;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Test;
import io.reactivex.*;
import io.reactivex.Observable;
import io.reactivex.Observer;
import io.reactivex.disposables.*;
import io.reactivex.exceptions.TestException;
import io.reactivex.functions.*;
import io.reactivex.observers.TestObserver;
import io.reactivex.schedulers.Schedulers;
import io.reactivex.subjects.PublishSubject;
/**
 * Tests for {@code Observable.repeat()} and its variants: bounded {@code repeat(n)},
 * conditional {@code repeatUntil}, and handler-driven {@code repeatWhen}.
 */
public class ObservableRepeatTest {
// Unbounded repeat resubscribes to the source; take(num) bounds the sequence,
// so the last emitted value equals the number of subscriptions.
@Test(timeout = 2000)
public void testRepetition() {
int num = 10;
final AtomicInteger count = new AtomicInteger();
int value = Observable.unsafeCreate(new ObservableSource<Integer>() {
@Override
public void subscribe(final Observer<? super Integer> o) {
o.onNext(count.incrementAndGet());
o.onComplete();
}
}).repeat().subscribeOn(Schedulers.computation())
.take(num).blockingLast();
assertEquals(num, value);
}
@Test(timeout = 2000)
public void testRepeatTake() {
Observable<Integer> xs = Observable.just(1, 2);
Object[] ys = xs.repeat().subscribeOn(Schedulers.newThread()).take(4).toList().blockingGet().toArray();
assertArrayEquals(new Object[] { 1, 2, 1, 2 }, ys);
}
// repeat() must loop iteratively, not recursively: 100k iterations must not blow the stack.
@Test(timeout = 20000)
public void testNoStackOverFlow() {
Observable.just(1).repeat().subscribeOn(Schedulers.newThread()).take(100000).blockingLast();
}
// take(4) over a 2-element source needs exactly 2 subscriptions to the upstream.
@Test
public void testRepeatTakeWithSubscribeOn() throws InterruptedException {
final AtomicInteger counter = new AtomicInteger();
Observable<Integer> oi = Observable.unsafeCreate(new ObservableSource<Integer>() {
@Override
public void subscribe(Observer<? super Integer> sub) {
sub.onSubscribe(Disposables.empty());
counter.incrementAndGet();
sub.onNext(1);
sub.onNext(2);
sub.onComplete();
}
}).subscribeOn(Schedulers.newThread());
Object[] ys = oi.repeat().subscribeOn(Schedulers.newThread()).map(new Function<Integer, Integer>() {
@Override
public Integer apply(Integer t1) {
try {
Thread.sleep(50);
} catch (InterruptedException e) {
e.printStackTrace();
}
return t1;
}
}).take(4).toList().blockingGet().toArray();
assertEquals(2, counter.get());
assertArrayEquals(new Object[] { 1, 2, 1, 2 }, ys);
}
@Test(timeout = 2000)
public void testRepeatAndTake() {
Observer<Object> o = TestHelper.mockObserver();
Observable.just(1).repeat().take(10).subscribe(o);
verify(o, times(10)).onNext(1);
verify(o).onComplete();
verify(o, never()).onError(any(Throwable.class));
}
@Test(timeout = 2000)
public void testRepeatLimited() {
Observer<Object> o = TestHelper.mockObserver();
Observable.just(1).repeat(10).subscribe(o);
verify(o, times(10)).onNext(1);
verify(o).onComplete();
verify(o, never()).onError(any(Throwable.class));
}
// An error terminates the repetition immediately: no values, no completion.
@Test(timeout = 2000)
public void testRepeatError() {
Observer<Object> o = TestHelper.mockObserver();
Observable.error(new TestException()).repeat(10).subscribe(o);
verify(o).onError(any(TestException.class));
verify(o, never()).onNext(any());
verify(o, never()).onComplete();
}
// repeat(0) completes without subscribing for values.
@Test(timeout = 2000)
public void testRepeatZero() {
Observer<Object> o = TestHelper.mockObserver();
Observable.just(1).repeat(0).subscribe(o);
verify(o).onComplete();
verify(o, never()).onNext(any());
verify(o, never()).onError(any(Throwable.class));
}
// repeat(1) behaves like the plain source.
@Test(timeout = 2000)
public void testRepeatOne() {
Observer<Object> o = TestHelper.mockObserver();
Observable.just(1).repeat(1).subscribe(o);
verify(o).onComplete();
verify(o, times(1)).onNext(any());
verify(o, never()).onError(any(Throwable.class));
}
/** Issue #2587. */
@Test
public void testRepeatAndDistinctUnbounded() {
Observable<Integer> src = Observable.fromIterable(Arrays.asList(1, 2, 3, 4, 5))
.take(3)
.repeat(3)
.distinct();
TestObserver<Integer> to = new TestObserver<Integer>();
src.subscribe(to);
to.assertNoErrors();
to.assertTerminated();
to.assertValues(1, 2, 3);
}
/** Issue #2844: wrong target of request. */
@Test(timeout = 3000)
public void testRepeatRetarget() {
final List<Integer> concatBase = new ArrayList<Integer>();
TestObserver<Integer> to = new TestObserver<Integer>();
Observable.just(1, 2)
.repeat(5)
.concatMap(new Function<Integer, Observable<Integer>>() {
@Override
public Observable<Integer> apply(Integer x) {
System.out.println("testRepeatRetarget -> " + x);
concatBase.add(x);
// Inner observables emit nothing; only the recorded inputs matter.
return Observable.<Integer>empty()
.delay(200, TimeUnit.MILLISECONDS);
}
})
.subscribe(to);
to.awaitTerminalEvent();
to.assertNoErrors();
to.assertNoValues();
assertEquals(Arrays.asList(1, 2, 1, 2, 1, 2, 1, 2, 1, 2), concatBase);
}
// repeatUntil with a false predicate repeats forever; take(5) bounds the test.
@Test
public void repeatUntil() {
Observable.just(1)
.repeatUntil(new BooleanSupplier() {
@Override
public boolean getAsBoolean() throws Exception {
return false;
}
})
.take(5)
.test()
.assertResult(1, 1, 1, 1, 1);
}
// Negative repeat counts are rejected eagerly, at assembly time.
@Test
public void repeatLongPredicateInvalid() {
try {
Observable.just(1).repeat(-99);
fail("Should have thrown");
} catch (IllegalArgumentException ex) {
assertEquals("times >= 0 required but it was -99", ex.getMessage());
}
}
@Test
public void repeatUntilError() {
Observable.error(new TestException())
.repeatUntil(new BooleanSupplier() {
@Override
public boolean getAsBoolean() throws Exception {
return true;
}
})
.test()
.assertFailure(TestException.class);
}
// A true predicate stops after the first pass.
@Test
public void repeatUntilFalse() {
Observable.just(1)
.repeatUntil(new BooleanSupplier() {
@Override
public boolean getAsBoolean() throws Exception {
return true;
}
})
.test()
.assertResult(1);
}
// A predicate that throws surfaces as onError after the first pass's values.
@Test
public void repeatUntilSupplierCrash() {
Observable.just(1)
.repeatUntil(new BooleanSupplier() {
@Override
public boolean getAsBoolean() throws Exception {
throw new TestException();
}
})
.test()
.assertFailure(TestException.class, 1);
}
// Disposing the repeatWhen chain must also unsubscribe from the handler's inner source.
@Test
public void shouldDisposeInnerObservable() {
final PublishSubject<Object> subject = PublishSubject.create();
final Disposable disposable = Observable.just("Leak")
.repeatWhen(new Function<Observable<Object>, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Observable<Object> completions) throws Exception {
return completions.switchMap(new Function<Object, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Object ignore) throws Exception {
return subject;
}
});
}
})
.subscribe();
assertTrue(subject.hasObservers());
disposable.dispose();
assertFalse(subject.hasObservers());
}
// repeatWhen does not retry errors: the source error passes through immediately.
@Test
public void testRepeatWhen() {
Observable.error(new TestException())
.repeatWhen(new Function<Observable<Object>, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Observable<Object> v) throws Exception {
return v.delay(10, TimeUnit.SECONDS);
}
})
.test()
.awaitDone(5, TimeUnit.SECONDS)
.assertFailure(TestException.class);
}
// The handler bounding itself with take(2) yields exactly two passes of the source.
@Test
public void whenTake() {
Observable.range(1, 3).repeatWhen(new Function<Observable<Object>, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Observable<Object> handler) throws Exception {
return handler.take(2);
}
})
.test()
.assertResult(1, 2, 3, 1, 2, 3);
}
// An exception inside the handler chain fails the sequence after the first pass.
@Test
public void handlerError() {
Observable.range(1, 3)
.repeatWhen(new Function<Observable<Object>, ObservableSource<Object>>() {
@Override
public ObservableSource<Object> apply(Observable<Object> v) throws Exception {
return v.map(new Function<Object, Object>() {
@Override
public Object apply(Object w) throws Exception {
throw new TestException();
}
});
}
})
.test()
.assertFailure(TestException.class, 1, 2, 3);
}
}
| akarnokd/RxJava | src/test/java/io/reactivex/internal/operators/observable/ObservableRepeatTest.java | Java | apache-2.0 | 10,332 |
/*
* Copyright 2010-2011 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import javax.xml.stream.events.XMLEvent;
import com.amazonaws.services.ec2.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.MapEntry;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* Start Instances Result StAX Unmarshaller
*/
public class StartInstancesResultStaxUnmarshaller implements Unmarshaller<StartInstancesResult, StaxUnmarshallerContext> {

    /**
     * Shared singleton. The unmarshaller is stateless, so eager initialization
     * is safe and removes the data race of the previous unsynchronized lazy
     * initialization in {@link #getInstance()}.
     */
    private static final StartInstancesResultStaxUnmarshaller instance = new StartInstancesResultStaxUnmarshaller();

    public static StartInstancesResultStaxUnmarshaller getInstance() {
        return instance;
    }

    /**
     * Reads StAX events until the result element is fully consumed, collecting
     * each {@code instancesSet/item} element into the starting-instances list.
     */
    public StartInstancesResult unmarshall(StaxUnmarshallerContext context) throws Exception {
        StartInstancesResult startInstancesResult = new StartInstancesResult();
        int originalDepth = context.getCurrentDepth();
        int targetDepth = originalDepth + 1;
        // At the document root the payload sits one extra level down.
        if (context.isStartOfDocument()) targetDepth += 1;
        while (true) {
            XMLEvent xmlEvent = context.nextEvent();
            if (xmlEvent.isEndDocument()) return startInstancesResult;
            if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
                if (context.testExpression("instancesSet/item", targetDepth)) {
                    startInstancesResult.getStartingInstances().add(InstanceStateChangeStaxUnmarshaller.getInstance().unmarshall(context));
                    continue;
                }
            } else if (xmlEvent.isEndElement()) {
                // Leaving the element we started in means we are done.
                if (context.getCurrentDepth() < originalDepth) {
                    return startInstancesResult;
                }
            }
        }
    }
}
| apetresc/aws-sdk-for-java-on-gae | src/main/java/com/amazonaws/services/ec2/model/transform/StartInstancesResultStaxUnmarshaller.java | Java | apache-2.0 | 2,478 |
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import binascii
import pytest
from cryptography.exceptions import (
AlreadyFinalized, InvalidKey, _Reasons
)
from cryptography.hazmat.backends.interfaces import HMACBackend
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.kdf.hkdf import HKDF, HKDFExpand
from ...utils import raises_unsupported_algorithm
@pytest.mark.requires_backend_interface(interface=HMACBackend)
class TestHKDF(object):
    """Behavioral tests for the combined extract-and-expand HKDF interface."""

    def test_length_limit(self, backend):
        # NOTE(review): digest_size is measured in bytes, so the "// 8" looks
        # suspicious, but it mirrors the implementation's own limit
        # computation - confirm against RFC 5869 (limit is 255 * HashLen).
        big_length = 255 * (hashes.SHA256().digest_size // 8) + 1
        with pytest.raises(ValueError):
            HKDF(
                hashes.SHA256(),
                big_length,
                salt=None,
                info=None,
                backend=backend
            )

    def test_already_finalized(self, backend):
        # derive() is one-shot per instance.
        hkdf = HKDF(
            hashes.SHA256(),
            16,
            salt=None,
            info=None,
            backend=backend
        )
        hkdf.derive(b"\x01" * 16)
        with pytest.raises(AlreadyFinalized):
            hkdf.derive(b"\x02" * 16)
        # verify() is equally one-shot; a fresh instance is required.
        # (A previously present trailing HKDF construction with no assertions
        # was dead code and has been removed.)
        hkdf = HKDF(
            hashes.SHA256(),
            16,
            salt=None,
            info=None,
            backend=backend
        )
        hkdf.verify(b"\x01" * 16, b"gJ\xfb{\xb1Oi\xc5sMC\xb7\xe4@\xf7u")
        with pytest.raises(AlreadyFinalized):
            hkdf.verify(b"\x02" * 16, b"gJ\xfb{\xb1Oi\xc5sMC\xb7\xe4@\xf7u")

    def test_verify(self, backend):
        hkdf = HKDF(
            hashes.SHA256(),
            16,
            salt=None,
            info=None,
            backend=backend
        )
        hkdf.verify(b"\x01" * 16, b"gJ\xfb{\xb1Oi\xc5sMC\xb7\xe4@\xf7u")

    def test_verify_invalid(self, backend):
        hkdf = HKDF(
            hashes.SHA256(),
            16,
            salt=None,
            info=None,
            backend=backend
        )
        with pytest.raises(InvalidKey):
            hkdf.verify(b"\x02" * 16, b"gJ\xfb{\xb1Oi\xc5sMC\xb7\xe4@\xf7u")

    def test_unicode_typeerror(self, backend):
        # Text (unicode) inputs must be rejected wherever bytes are expected.
        with pytest.raises(TypeError):
            HKDF(
                hashes.SHA256(),
                16,
                salt=u"foo",
                info=None,
                backend=backend
            )
        with pytest.raises(TypeError):
            HKDF(
                hashes.SHA256(),
                16,
                salt=None,
                info=u"foo",
                backend=backend
            )
        with pytest.raises(TypeError):
            hkdf = HKDF(
                hashes.SHA256(),
                16,
                salt=None,
                info=None,
                backend=backend
            )
            hkdf.derive(u"foo")
        with pytest.raises(TypeError):
            hkdf = HKDF(
                hashes.SHA256(),
                16,
                salt=None,
                info=None,
                backend=backend
            )
            hkdf.verify(u"foo", b"bar")
        with pytest.raises(TypeError):
            hkdf = HKDF(
                hashes.SHA256(),
                16,
                salt=None,
                info=None,
                backend=backend
            )
            hkdf.verify(b"foo", u"bar")

    def test_derive_short_output(self, backend):
        hkdf = HKDF(
            hashes.SHA256(),
            4,
            salt=None,
            info=None,
            backend=backend
        )
        assert hkdf.derive(b"\x01" * 16) == b"gJ\xfb{"
@pytest.mark.requires_backend_interface(interface=HMACBackend)
class TestHKDFExpand(object):
    """RFC 5869 test-vector checks for the expand-only HKDF stage."""

    # Shared RFC 5869 test-vector material (pseudo-random key, context info,
    # expected output keying material).
    _PRK = binascii.unhexlify(
        b"077709362c2e32df0ddc3f0dc47bba6390b6c73bb50f9c3122ec844ad7c2b3e5"
    )
    _INFO = binascii.unhexlify(b"f0f1f2f3f4f5f6f7f8f9")
    _OKM = (b"3cb25f25faacd57a90434f64d0362f2a2d2d0a90cf1a5a4c5db02d56ecc4c"
            b"5bf34007208d5b887185865")

    def _kdf(self, backend):
        """Build a fresh 42-byte SHA-256 HKDFExpand for the shared vector."""
        return HKDFExpand(hashes.SHA256(), 42, self._INFO, backend)

    def test_derive(self, backend):
        assert binascii.hexlify(self._kdf(backend).derive(self._PRK)) == self._OKM

    def test_verify(self, backend):
        assert self._kdf(backend).verify(
            self._PRK, binascii.unhexlify(self._OKM)
        ) is None

    def test_invalid_verify(self, backend):
        with pytest.raises(InvalidKey):
            self._kdf(backend).verify(self._PRK, b"wrong key")

    def test_already_finalized(self, backend):
        # derive() is one-shot per instance.
        kdf = self._kdf(backend)
        kdf.derive(b"first")
        with pytest.raises(AlreadyFinalized):
            kdf.derive(b"second")

    def test_unicode_error(self, backend):
        with pytest.raises(TypeError):
            self._kdf(backend).derive(u"first")
def test_invalid_backend():
    """Both HKDF entry points must reject backends without HMAC support."""
    pretend_backend = object()
    for make_kdf in (
        lambda: HKDF(hashes.SHA256(), 16, None, None, pretend_backend),
        lambda: HKDFExpand(hashes.SHA256(), 16, None, pretend_backend),
    ):
        with raises_unsupported_algorithm(_Reasons.BACKEND_MISSING_INTERFACE):
            make_kdf()
| hipnusleo/laserjet | resource/pypi/cryptography-1.7.1/tests/hazmat/primitives/test_hkdf.py | Python | apache-2.0 | 6,389 |
// Package airspace maintains a single, central snapshot of current aircraft status
package airspace
import(
"fmt"
"sort"
"time"
"github.com/skypies/adsb"
fdb "github.com/skypies/flightdb"
)
// DefaultRollAfter is how often the signature dedupe window rolls over;
// DefaultMaxQuietTime is how long an aircraft may stay silent before it is
// dropped from the airspace snapshot.
var DefaultRollAfter = time.Minute * 5
var DefaultMaxQuietTime = time.Minute * 5
// AircraftData is the latest known state for a single aircraft.
type AircraftData struct {
	Msg *adsb.CompositeMsg
	fdb.Airframe // We might get this from an airframe lookup
	fdb.Schedule // We might get this from a schedule lookup
	// Count of deduped messages folded in via MaybeUpdate.
	NumMessagesSeen int64
	Source string // Where this data was sourced
}
// Signatures is a two-generation set of recently seen message signatures.
type Signatures struct {
	//// Data for message deduping.
	// We roll curr into prev (and lose the old prev) as per rollAfter; this is to
	// keep the memory footprint under control.
	CurrMsgs map[adsb.Signature]bool
	PrevMsgs map[adsb.Signature]bool
	RollAfter time.Duration
	// Size threshold for CurrMsgs; <=0 disables size-based rolls
	// (see TooManySignatures).
	RollWhenThisMany int
	TimeOfLastRoll time.Time
}
// TooManySignatures reports whether the current window has hit the configured
// size threshold; a threshold of zero (or less) disables the check entirely.
func (s Signatures)TooManySignatures() bool {
	if s.RollWhenThisMany <= 0 {
		return false
	}
	return len(s.CurrMsgs) >= s.RollWhenThisMany
}
// Airspace is the central snapshot: a dedupe window plus the current aircraft.
type Airspace struct {
	Signatures `json:"-"` // What we've seen "recently"; for deduping
	Aircraft map[adsb.IcaoId]AircraftData // "what is in the sky right now"; for realtime serving
}
// Sizes returns (total signatures across both windows, number of aircraft).
func (a Airspace)Sizes() (int64,int64) {
	return int64(len(a.Signatures.CurrMsgs) + len(a.Signatures.PrevMsgs)), int64(len(a.Aircraft))
}
// {{{ NewAirspace
// NewAirspace returns an Airspace whose maps are initialized and ready to use.
func NewAirspace() Airspace {
	a := Airspace{}
	a.Signatures.CurrMsgs = map[adsb.Signature]bool{}
	a.Signatures.PrevMsgs = map[adsb.Signature]bool{}
	a.Aircraft = map[adsb.IcaoId]AircraftData{}
	return a
}
// }}}
// {{{ a.rollMsgs
// rollMsgs ages the current signature window into the previous one (the old
// previous window is discarded) and prunes aircraft that have gone quiet.
func (a *Airspace)rollMsgs() {
	a.PrevMsgs = a.CurrMsgs
	a.CurrMsgs = map[adsb.Signature]bool{}
	a.TimeOfLastRoll = time.Now()

	// Perhaps this should happen elsewhere, but hey, here works: drop any
	// aircraft whose last message is older than the quiet-time limit.
	for icao := range a.Aircraft {
		if time.Since(a.Aircraft[icao].Msg.GeneratedTimestampUTC) > DefaultMaxQuietTime {
			delete(a.Aircraft, icao)
		}
	}
}
// }}}
// {{{ a.thisIsNewContent
// thisIsNewContent records the message's signature in the current window and
// reports whether the message had not been seen in either window.
func (a *Airspace)thisIsNewContent(msg *adsb.CompositeMsg) (wasNew bool) {
	// Lazy init, so a zero-value Airspace is usable. (Previously the
	// signature maps were not initialized here, so a zero-value Airspace
	// would panic on the map write below; also `time.Minute * 0` is just 0.)
	if a.RollAfter == 0 {
		a.RollAfter = DefaultRollAfter
	}
	if a.Aircraft == nil {
		a.Aircraft = map[adsb.IcaoId]AircraftData{}
	}
	if a.CurrMsgs == nil {
		a.CurrMsgs = map[adsb.Signature]bool{}
	}

	sig := msg.GetSignature()
	if _, existsCurr := a.CurrMsgs[sig]; existsCurr {
		return false
	}

	// Record it in the current window in all cases.
	a.CurrMsgs[sig] = true

	// Reading a nil map is safe in Go, so PrevMsgs needs no nil-check.
	// If the signature was already in the previous window, it isn't new.
	_, existsPrev := a.PrevMsgs[sig]
	return !existsPrev
}
// }}}
// {{{ a.String
// String renders one line per aircraft, sorted by ICAO id, for debugging.
func (a Airspace)String() string {
	icaos := []string{}
	for icao := range a.Aircraft {
		icaos = append(icaos, string(icao))
	}
	sort.Strings(icaos)

	out := ""
	for _, icao := range icaos {
		ad := a.Aircraft[adsb.IcaoId(icao)]
		out += fmt.Sprintf(" %8.8s/%-8.8s/%-6.6s (%s last:%6.1fs at %s/%-13.13s, %5d msgs) %5df, %3dk\n",
			ad.Msg.Callsign, ad.Msg.Icao24, ad.Registration,
			ad.Msg.DataSystem(),
			time.Since(ad.Msg.GeneratedTimestampUTC).Seconds(),
			ad.Source, ad.Msg.ReceiverName,
			ad.NumMessagesSeen,
			ad.Msg.Altitude, ad.Msg.GroundSpeed)
	}
	return out
}
// }}}
// {{{ a.Youngest
// Youngest returns the age of the most recent message across all aircraft;
// with no aircraft it returns the 480h sentinel.
func (a Airspace)Youngest() time.Duration {
	best := time.Hour * 480 // sentinel: effectively "nothing seen"
	for _, ad := range a.Aircraft {
		if age := time.Since(ad.Msg.GeneratedTimestampUTC); age < best {
			best = age
		}
	}
	return best
}
// }}}
// {{{ a.MaybeUpdate
// MaybeUpdate folds a batch of messages into the airspace. Messages whose
// signature has already been seen are ignored; the new ones are returned.
func (a *Airspace) MaybeUpdate(msgs []*adsb.CompositeMsg) []*adsb.CompositeMsg {
	newMsgs := []*adsb.CompositeMsg{}

	// Time to roll (or lazily init) ?
	if time.Since(a.TimeOfLastRoll) > a.RollAfter || a.TooManySignatures() {
		a.rollMsgs()
	}

	for _, msg := range msgs {
		if !a.thisIsNewContent(msg) {
			continue
		}
		// A lookup on a missing key yields the zero value, so the message
		// count naturally starts at zero for unseen aircraft. (This replaces
		// the previous explicit `exists == true` existence check.)
		numSeen := a.Aircraft[msg.Icao24].NumMessagesSeen
		// NOTE(review): replacing the whole struct drops any previously set
		// Airframe/Schedule/Source enrichment, exactly as the original code
		// did - confirm that is intended.
		a.Aircraft[msg.Icao24] = AircraftData{Msg: msg, NumMessagesSeen: numSeen + 1}
		newMsgs = append(newMsgs, msg)
	}
	return newMsgs
}
// }}}
// {{{ -------------------------={ E N D }=----------------------------------
// Local variables:
// folded-file: t
// end:
// }}}
| skypies/pi | airspace/airspace.go | GO | apache-2.0 | 4,236 |
package thereisnospon.acclient.modules.licenses;
import com.google.gson.annotations.SerializedName;
/**
 * Value object describing a third-party library shown on the licenses screen.
 * Field names are bound to JSON keys explicitly via {@code @SerializedName},
 * so the Java field names are free to follow plain-camelCase style.
 */
final class Library {

    @SerializedName("name")
    private String name;

    @SerializedName("owner")
    private String owner;

    @SerializedName("copyright")
    private String copyright;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getOwner() {
        return owner;
    }

    public void setOwner(String owner) {
        this.owner = owner;
    }

    public String getCopyright() {
        return copyright;
    }

    public void setCopyright(String copyright) {
        this.copyright = copyright;
    }
}
| Thereisnospon/AcClient | app/src/main/java/thereisnospon/acclient/modules/licenses/Library.java | Java | apache-2.0 | 625 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.test.spring;
import org.apache.camel.api.management.JmxSystemPropertyKeys;
import org.springframework.test.context.TestContext;
import org.springframework.test.context.support.AbstractTestExecutionListener;
/**
* Provides reset to pre-test state behavior for global enable/disable of JMX
* support in Camel through the use of {@link DisableJmx}.
* Tries to ensure that the pre-test value is restored.
*/
public class DisableJmxTestExecutionListener extends AbstractTestExecutionListener {

    @Override
    public void afterTestClass(TestContext testContext) throws Exception {
        // Restore the JMX-disabled flag captured before the test ran; when no
        // value was captured, remove the system property entirely.
        String originalValue = CamelSpringTestHelper.getOriginalJmxDisabled();
        if (originalValue != null) {
            System.setProperty(JmxSystemPropertyKeys.DISABLED, originalValue);
        } else {
            System.clearProperty(JmxSystemPropertyKeys.DISABLED);
        }
    }
}
| punkhorn/camel-upstream | components/camel-test-spring/src/main/java/org/apache/camel/test/spring/DisableJmxTestExecutionListener.java | Java | apache-2.0 | 1,698 |
package dk.silverbullet.telemed.device.vitalographlungmonitor;
import java.io.IOException;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import dk.silverbullet.telemed.device.vitalographlungmonitor.packet.PacketReceiver;
import dk.silverbullet.telemed.device.vitalographlungmonitor.packet.states.ReceiverState;
@RunWith(MockitoJUnitRunner.class)
public class LungMonitorPacketCollectorTest {
    // A complete, valid measurement frame as received from the device:
    // starts with byte 2 (STX), ends with byte 3 (ETX) followed by a single
    // checksum byte (here 106).
    byte[] measurementBytes = new byte[] { (byte) 2, (byte) 70, (byte) 84,
            (byte) 68, (byte) 48, (byte) 48, (byte) 48, (byte) 48, (byte) 49,
            (byte) 50, (byte) 57, (byte) 52, (byte) 48, (byte) 54, (byte) 51,
            (byte) 54, (byte) 51, (byte) 51, (byte) 57, (byte) 57, (byte) 48,
            (byte) 57, (byte) 49, (byte) 52, (byte) 48, (byte) 52, (byte) 48,
            (byte) 48, (byte) 48, (byte) 48, (byte) 48, (byte) 48, (byte) 48,
            (byte) 57, (byte) 53, (byte) 48, (byte) 57, (byte) 48, (byte) 48,
            (byte) 48, (byte) 48, (byte) 49, (byte) 51, (byte) 48, (byte) 53,
            (byte) 50, (byte) 56, (byte) 49, (byte) 48, (byte) 52, (byte) 52,
            (byte) 49, (byte) 56, (byte) 48, (byte) 57, (byte) 50, (byte) 51,
            (byte) 3, (byte) 106 };
    // Identical payload to measurementBytes except the trailing checksum byte
    // is 107 instead of the expected 106.
    byte[] measurementBytesWithWrongChecksum = new byte[] { (byte) 2, (byte) 70, (byte) 84,
            (byte) 68, (byte) 48, (byte) 48, (byte) 48, (byte) 48, (byte) 49,
            (byte) 50, (byte) 57, (byte) 52, (byte) 48, (byte) 54, (byte) 51,
            (byte) 54, (byte) 51, (byte) 51, (byte) 57, (byte) 57, (byte) 48,
            (byte) 57, (byte) 49, (byte) 52, (byte) 48, (byte) 52, (byte) 48,
            (byte) 48, (byte) 48, (byte) 48, (byte) 48, (byte) 48, (byte) 48,
            (byte) 57, (byte) 53, (byte) 48, (byte) 57, (byte) 48, (byte) 48,
            (byte) 48, (byte) 48, (byte) 49, (byte) 51, (byte) 48, (byte) 53,
            (byte) 50, (byte) 56, (byte) 49, (byte) 48, (byte) 52, (byte) 52,
            (byte) 49, (byte) 56, (byte) 48, (byte) 57, (byte) 50, (byte) 51,
            (byte) 3, (byte) 107 };
    @Mock PacketReceiver receiver;
    // Fresh collector per test, wired to the mocked receiver in before().
    LungMonitorPacketCollector collector;
    @Before
    public void before() {
        collector = new LungMonitorPacketCollector();
        collector.setListener(receiver);
    }
    @Test
    public void canParseSingleMeasurement() throws Exception {
        receiveBytes(measurementBytes);
        // The collector must ACK the frame and hand a parsed packet to the
        // listener with all FEV fields decoded from the payload.
        ArgumentCaptor<FevMeasurementPacket> captor = ArgumentCaptor.forClass(FevMeasurementPacket.class);
        verify(receiver).receive(captor.capture());
        verify(receiver).sendByte(ReceiverState.ACK);
        FevMeasurementPacket measurement = captor.getValue();
        assertEquals("0000129406", measurement.getDeviceId());
        assertEquals(3.63, measurement.getFev1(), 0.005);
        assertEquals(3.99, measurement.getFev6(), 0.005);
        assertEquals(0.91, measurement.getFev1Fev6Ratio(), 0.005);
        assertEquals(4.04, measurement.getFef2575(), 0.005);
        assertTrue(measurement.isGoodTest());
        assertEquals(923, measurement.getSoftwareVersion());
    }
    @Test
    public void sendsErrorWhenChecksumDoesNotMatch() throws Exception {
        receiveBytes(measurementBytesWithWrongChecksum);
        // A checksum mismatch must be NAKed and reported via error(), not
        // delivered as a packet.
        ArgumentCaptor<IOException> captor = ArgumentCaptor.forClass(IOException.class);
        verify(receiver).error(captor.capture());
        verify(receiver).sendByte(ReceiverState.NAK);
        assertEquals("Invalid checksum. Got 107, expected 106", captor.getValue().getMessage());
    }
    @Test
    public void recoversFromBadMessageAndParsesCorrectMessageAfterwards() throws Exception {
        // A corrupt frame must not wedge the collector: the next good frame
        // is still parsed and delivered.
        receiveBytes(measurementBytesWithWrongChecksum);
        receiveBytes(measurementBytes);
        InOrder inOrder = inOrder(receiver);
        inOrder.verify(receiver).sendByte(ReceiverState.NAK);
        inOrder.verify(receiver).error(any(IOException.class));
        inOrder.verify(receiver).sendByte(ReceiverState.ACK);
        inOrder.verify(receiver).receive(any(FevMeasurementPacket.class));
    }
    // Feeds the frame into the collector one byte at a time, as the serial
    // transport would.
    private void receiveBytes(byte[] bytes) {
        for (byte b : bytes) {
            collector.receive(b);
        }
    }
}
| silverbullet-dk/opentele-client-android | questionnaire-test/src/test/java/dk/silverbullet/telemed/device/vitalographlungmonitor/LungMonitorPacketCollectorTest.java | Java | apache-2.0 | 3,992 |
(function () {
'use strict';
angular.module('app').factory('contactService', ['$http', function ($http) {
return {
send : function(data) {
return $http.post('api/contact', data);
}
};
}]);
})(); | bmobsoftwares/bmobsoftwares.github.io | app/contact/contactService.js | JavaScript | apache-2.0 | 267 |
package org.opencds.cqf.r4.builders;
import java.util.Date;
import org.opencds.cqf.common.builders.BaseBuilder;
import org.opencds.cqf.cql.engine.runtime.DateTime;
/**
 * Fluent builder that adapts CQL engine {@link DateTime} values to
 * {@link Date}; starts out holding "now".
 */
public class JavaDateBuilder extends BaseBuilder<Date> {
    public JavaDateBuilder() {
        super(new Date());
    }
    /**
     * Replaces the built value with the instant of the given CQL date-time.
     *
     * @param dateTime source CQL runtime date-time
     * @return this builder, for chaining
     */
    public JavaDateBuilder buildFromDateTime(DateTime dateTime) {
        complexProperty = Date.from(dateTime.getDateTime().toInstant());
        return this;
    }
}
| DBCG/cql_measure_processor | r4/src/main/java/org/opencds/cqf/r4/builders/JavaDateBuilder.java | Java | apache-2.0 | 458 |
"""Support for LIFX lights."""
import asyncio
from datetime import timedelta
from functools import partial
import logging
import math
import sys
import aiolifx as aiolifx_module
import aiolifx_effects as aiolifx_effects_module
import voluptuous as vol
from homeassistant import util
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_BRIGHTNESS_PCT,
ATTR_COLOR_NAME,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_HS_COLOR,
ATTR_KELVIN,
ATTR_RGB_COLOR,
ATTR_TRANSITION,
ATTR_XY_COLOR,
COLOR_GROUP,
DOMAIN,
LIGHT_TURN_ON_SCHEMA,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_TRANSITION,
VALID_BRIGHTNESS,
VALID_BRIGHTNESS_PCT,
Light,
preprocess_turn_on_alternatives,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_MODE,
ENTITY_MATCH_ALL,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
import homeassistant.helpers.device_registry as dr
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.service import async_extract_entity_ids
import homeassistant.util.color as color_util
from . import (
CONF_BROADCAST,
CONF_PORT,
CONF_SERVER,
DATA_LIFX_MANAGER,
DOMAIN as LIFX_DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
# Entity polling and UDP discovery cadence.
SCAN_INTERVAL = timedelta(seconds=10)
DISCOVERY_INTERVAL = 60
# Per-message LIFX protocol tuning: ack timeout (s), retry count, and how
# long a bulb may stay silent before aiolifx unregisters it (s).
MESSAGE_TIMEOUT = 1.0
MESSAGE_RETRIES = 8
UNAVAILABLE_GRACE = 90
SERVICE_LIFX_SET_STATE = "set_state"
ATTR_INFRARED = "infrared"
ATTR_ZONES = "zones"
ATTR_POWER = "power"
# lifx.set_state accepts everything light.turn_on does, plus LIFX extras:
# infrared level, multizone zone list, and an explicit power flag.
LIFX_SET_STATE_SCHEMA = cv.make_entity_service_schema(
    {
        **LIGHT_TURN_ON_SCHEMA,
        ATTR_INFRARED: vol.All(vol.Coerce(int), vol.Clamp(min=0, max=255)),
        ATTR_ZONES: vol.All(cv.ensure_list, [cv.positive_int]),
        ATTR_POWER: cv.boolean,
    }
)
SERVICE_EFFECT_PULSE = "effect_pulse"
SERVICE_EFFECT_COLORLOOP = "effect_colorloop"
SERVICE_EFFECT_STOP = "effect_stop"
ATTR_POWER_ON = "power_on"
ATTR_PERIOD = "period"
ATTR_CYCLES = "cycles"
ATTR_SPREAD = "spread"
ATTR_CHANGE = "change"
# Waveforms accepted by the pulse effect.
PULSE_MODE_BLINK = "blink"
PULSE_MODE_BREATHE = "breathe"
PULSE_MODE_PING = "ping"
PULSE_MODE_STROBE = "strobe"
PULSE_MODE_SOLID = "solid"
PULSE_MODES = [
    PULSE_MODE_BLINK,
    PULSE_MODE_BREATHE,
    PULSE_MODE_PING,
    PULSE_MODE_STROBE,
    PULSE_MODE_SOLID,
]
# Options shared by every effect service call.
LIFX_EFFECT_SCHEMA = vol.Schema(
    {
        vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
        vol.Optional(ATTR_POWER_ON, default=True): cv.boolean,
    }
)
# Pulse accepts exactly one color specification (the COLOR_GROUP exclusives)
# plus waveform parameters.
LIFX_EFFECT_PULSE_SCHEMA = LIFX_EFFECT_SCHEMA.extend(
    {
        ATTR_BRIGHTNESS: VALID_BRIGHTNESS,
        ATTR_BRIGHTNESS_PCT: VALID_BRIGHTNESS_PCT,
        vol.Exclusive(ATTR_COLOR_NAME, COLOR_GROUP): cv.string,
        vol.Exclusive(ATTR_RGB_COLOR, COLOR_GROUP): vol.All(
            vol.ExactSequence((cv.byte, cv.byte, cv.byte)), vol.Coerce(tuple)
        ),
        vol.Exclusive(ATTR_XY_COLOR, COLOR_GROUP): vol.All(
            vol.ExactSequence((cv.small_float, cv.small_float)), vol.Coerce(tuple)
        ),
        vol.Exclusive(ATTR_HS_COLOR, COLOR_GROUP): vol.All(
            vol.ExactSequence(
                (
                    vol.All(vol.Coerce(float), vol.Range(min=0, max=360)),
                    vol.All(vol.Coerce(float), vol.Range(min=0, max=100)),
                )
            ),
            vol.Coerce(tuple),
        ),
        vol.Exclusive(ATTR_COLOR_TEMP, COLOR_GROUP): vol.All(
            vol.Coerce(int), vol.Range(min=1)
        ),
        vol.Exclusive(ATTR_KELVIN, COLOR_GROUP): vol.All(
            vol.Coerce(int), vol.Range(min=0)
        ),
        ATTR_PERIOD: vol.All(vol.Coerce(float), vol.Range(min=0.05)),
        ATTR_CYCLES: vol.All(vol.Coerce(float), vol.Range(min=1)),
        ATTR_MODE: vol.In(PULSE_MODES),
    }
)
# Colorloop cycles hue over time; change/spread are expressed in degrees.
LIFX_EFFECT_COLORLOOP_SCHEMA = LIFX_EFFECT_SCHEMA.extend(
    {
        ATTR_BRIGHTNESS: VALID_BRIGHTNESS,
        ATTR_BRIGHTNESS_PCT: VALID_BRIGHTNESS_PCT,
        ATTR_PERIOD: vol.All(vol.Coerce(float), vol.Clamp(min=0.05)),
        ATTR_CHANGE: vol.All(vol.Coerce(float), vol.Clamp(min=0, max=360)),
        ATTR_SPREAD: vol.All(vol.Coerce(float), vol.Clamp(min=0, max=360)),
        ATTR_TRANSITION: vol.All(vol.Coerce(float), vol.Range(min=0)),
    }
)
LIFX_EFFECT_STOP_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.entity_ids})
def aiolifx():
    """Return the aiolifx module."""
    # Indirection keeps the module reference patchable in tests - TODO confirm.
    return aiolifx_module
def aiolifx_effects():
    """Return the aiolifx_effects module."""
    # Indirection keeps the module reference patchable in tests - TODO confirm.
    return aiolifx_effects_module
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the LIFX light platform. Obsolete."""
    # Kept only to warn users with legacy YAML platform config; setup happens
    # in async_setup_entry instead.
    _LOGGER.warning("LIFX no longer works with light platform configuration.")
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up LIFX from a config entry."""
    if sys.platform == "win32":
        _LOGGER.warning(
            "The lifx platform is known to not work on Windows. "
            "Consider using the lifx_legacy platform instead"
        )
    # Priority 1: manual config
    interfaces = hass.data[LIFX_DOMAIN].get(DOMAIN)
    if not interfaces:
        # Priority 2: scanned interfaces
        lifx_ip_addresses = await aiolifx().LifxScan(hass.loop).scan()
        interfaces = [{CONF_SERVER: ip} for ip in lifx_ip_addresses]
        if not interfaces:
            # Priority 3: default interface
            interfaces = [{}]
    lifx_manager = LIFXManager(hass, async_add_entities)
    hass.data[DATA_LIFX_MANAGER] = lifx_manager
    # One aiolifx discovery task per usable network interface.
    for interface in interfaces:
        lifx_manager.start_discovery(interface)
    return True
def lifx_features(bulb):
    """Return a feature map for this bulb, or a default map if unknown."""
    features = aiolifx().products.features_map.get(bulb.product)
    # Fall back to product 1's feature map for unknown products
    # (`or` keeps the original falsy-fallback semantics).
    return features or aiolifx().products.features_map.get(1)
def find_hsbk(**kwargs):
    """Find the desired color from a number of possible inputs."""
    hue, saturation, brightness, kelvin = [None] * 4
    # Normalize color_name / rgb / xy / brightness_pct etc. into
    # hs_color / brightness entries of kwargs.
    preprocess_turn_on_alternatives(kwargs)
    if ATTR_HS_COLOR in kwargs:
        hue, saturation = kwargs[ATTR_HS_COLOR]
        # LIFX encodes hue and saturation as 16-bit fractions of full scale.
        hue = int(hue / 360 * 65535)
        saturation = int(saturation / 100 * 65535)
        kelvin = 3500
    if ATTR_COLOR_TEMP in kwargs:
        kelvin = int(
            color_util.color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP])
        )
        # A pure color temperature implies zero saturation.
        saturation = 0
    if ATTR_BRIGHTNESS in kwargs:
        brightness = convert_8_to_16(kwargs[ATTR_BRIGHTNESS])
    hsbk = [hue, saturation, brightness, kelvin]
    # None means "no color-related argument was supplied at all".
    return None if hsbk == [None] * 4 else hsbk
def merge_hsbk(base, change):
    """Copy change on top of base, keeping base entries where change is None.

    Returns None when there is no change at all.
    """
    if change is None:
        return None
    merged = []
    for base_value, change_value in zip(base, change):
        merged.append(base_value if change_value is None else change_value)
    return merged
class LIFXManager:
    """Representation of all known LIFX entities."""
    def __init__(self, hass, async_add_entities):
        """Initialize the light."""
        # Maps bulb mac_addr -> LIFX light entity.
        self.entities = {}
        self.hass = hass
        self.async_add_entities = async_add_entities
        self.effects_conductor = aiolifx_effects().Conductor(hass.loop)
        # One aiolifx discovery object per network interface.
        self.discoveries = []
        self.cleanup_unsub = self.hass.bus.async_listen(
            EVENT_HOMEASSISTANT_STOP, self.cleanup
        )
        self.register_set_state()
        self.register_effects()
    def start_discovery(self, interface):
        """Start discovery on a network interface."""
        kwargs = {"discovery_interval": DISCOVERY_INTERVAL}
        broadcast_ip = interface.get(CONF_BROADCAST)
        if broadcast_ip:
            kwargs["broadcast_ip"] = broadcast_ip
        lifx_discovery = aiolifx().LifxDiscovery(self.hass.loop, self, **kwargs)
        # Second kwargs dict: options for start(), not for the constructor.
        kwargs = {}
        listen_ip = interface.get(CONF_SERVER)
        if listen_ip:
            kwargs["listen_ip"] = listen_ip
        listen_port = interface.get(CONF_PORT)
        if listen_port:
            kwargs["listen_port"] = listen_port
        lifx_discovery.start(**kwargs)
        self.discoveries.append(lifx_discovery)
    @callback
    def cleanup(self, event=None):
        """Release resources."""
        # Detach the stop-event listener, stop discovery and drop the
        # services this manager registered.
        self.cleanup_unsub()
        for discovery in self.discoveries:
            discovery.cleanup()
        for service in [
            SERVICE_LIFX_SET_STATE,
            SERVICE_EFFECT_STOP,
            SERVICE_EFFECT_PULSE,
            SERVICE_EFFECT_COLORLOOP,
        ]:
            self.hass.services.async_remove(LIFX_DOMAIN, service)
    def register_set_state(self):
        """Register the LIFX set_state service call."""
        async def service_handler(service):
            """Apply a service."""
            tasks = []
            for light in await self.async_service_to_entities(service):
                if service.service == SERVICE_LIFX_SET_STATE:
                    task = light.set_state(**service.data)
                    tasks.append(self.hass.async_create_task(task))
            if tasks:
                await asyncio.wait(tasks)
        self.hass.services.async_register(
            LIFX_DOMAIN,
            SERVICE_LIFX_SET_STATE,
            service_handler,
            schema=LIFX_SET_STATE_SCHEMA,
        )
    def register_effects(self):
        """Register the LIFX effects as hass service calls."""
        async def service_handler(service):
            """Apply a service, i.e. start an effect."""
            entities = await self.async_service_to_entities(service)
            if entities:
                await self.start_effect(entities, service.service, **service.data)
        self.hass.services.async_register(
            LIFX_DOMAIN,
            SERVICE_EFFECT_PULSE,
            service_handler,
            schema=LIFX_EFFECT_PULSE_SCHEMA,
        )
        self.hass.services.async_register(
            LIFX_DOMAIN,
            SERVICE_EFFECT_COLORLOOP,
            service_handler,
            schema=LIFX_EFFECT_COLORLOOP_SCHEMA,
        )
        self.hass.services.async_register(
            LIFX_DOMAIN,
            SERVICE_EFFECT_STOP,
            service_handler,
            schema=LIFX_EFFECT_STOP_SCHEMA,
        )
    async def start_effect(self, entities, service, **kwargs):
        """Start a light effect on entities."""
        bulbs = [light.bulb for light in entities]
        if service == SERVICE_EFFECT_PULSE:
            effect = aiolifx_effects().EffectPulse(
                power_on=kwargs.get(ATTR_POWER_ON),
                period=kwargs.get(ATTR_PERIOD),
                cycles=kwargs.get(ATTR_CYCLES),
                mode=kwargs.get(ATTR_MODE),
                hsbk=find_hsbk(**kwargs),
            )
            await self.effects_conductor.start(effect, bulbs)
        elif service == SERVICE_EFFECT_COLORLOOP:
            preprocess_turn_on_alternatives(kwargs)
            brightness = None
            if ATTR_BRIGHTNESS in kwargs:
                brightness = convert_8_to_16(kwargs[ATTR_BRIGHTNESS])
            effect = aiolifx_effects().EffectColorloop(
                power_on=kwargs.get(ATTR_POWER_ON),
                period=kwargs.get(ATTR_PERIOD),
                change=kwargs.get(ATTR_CHANGE),
                spread=kwargs.get(ATTR_SPREAD),
                transition=kwargs.get(ATTR_TRANSITION),
                brightness=brightness,
            )
            await self.effects_conductor.start(effect, bulbs)
        elif service == SERVICE_EFFECT_STOP:
            await self.effects_conductor.stop(bulbs)
    async def async_service_to_entities(self, service):
        """Return the known entities that a service call mentions."""
        if service.data.get(ATTR_ENTITY_ID) == ENTITY_MATCH_ALL:
            return self.entities.values()
        entity_ids = await async_extract_entity_ids(self.hass, service)
        return [
            entity
            for entity in self.entities.values()
            if entity.entity_id in entity_ids
        ]
    @callback
    def register(self, bulb):
        """Handle aiolifx detected bulb."""
        self.hass.async_create_task(self.register_new_bulb(bulb))
    async def register_new_bulb(self, bulb):
        """Handle newly detected bulb."""
        if bulb.mac_addr in self.entities:
            # Known bulb reappearing after an absence: mark it available again.
            entity = self.entities[bulb.mac_addr]
            entity.registered = True
            _LOGGER.debug("%s register AGAIN", entity.who)
            await entity.update_hass()
        else:
            _LOGGER.debug("%s register NEW", bulb.ip_addr)
            # Read initial state
            ack = AwaitAioLIFX().wait
            color_resp = await ack(bulb.get_color)
            if color_resp:
                version_resp = await ack(bulb.get_version)
            # Short-circuit: when color_resp is None, version_resp is never
            # evaluated (it would be unbound in that case).
            if color_resp is None or version_resp is None:
                _LOGGER.error("Failed to initialize %s", bulb.ip_addr)
                bulb.registered = False
            else:
                bulb.timeout = MESSAGE_TIMEOUT
                bulb.retry_count = MESSAGE_RETRIES
                bulb.unregister_timeout = UNAVAILABLE_GRACE
                # Pick the entity class from the bulb's capabilities.
                if lifx_features(bulb)["multizone"]:
                    entity = LIFXStrip(bulb, self.effects_conductor)
                elif lifx_features(bulb)["color"]:
                    entity = LIFXColor(bulb, self.effects_conductor)
                else:
                    entity = LIFXWhite(bulb, self.effects_conductor)
                _LOGGER.debug("%s register READY", entity.who)
                self.entities[bulb.mac_addr] = entity
                self.async_add_entities([entity], True)
    @callback
    def unregister(self, bulb):
        """Handle aiolifx disappearing bulbs."""
        # The entity is kept (so it can re-register later) but flagged as
        # unavailable.
        if bulb.mac_addr in self.entities:
            entity = self.entities[bulb.mac_addr]
            _LOGGER.debug("%s unregister", entity.who)
            entity.registered = False
            self.hass.async_create_task(entity.async_update_ha_state())
class AwaitAioLIFX:
    """Wait for an aiolifx callback and return the message."""
    def __init__(self):
        """Initialize the wrapper."""
        # Last response received; None until the callback fires.
        self.message = None
        self.event = asyncio.Event()
    @callback
    def callback(self, bulb, message):
        """Handle responses."""
        self.message = message
        self.event.set()
    async def wait(self, method):
        """Call an aiolifx method and wait for its response."""
        self.message = None
        self.event.clear()
        method(callb=self.callback)
        await self.event.wait()
        # May still be None if aiolifx invoked the callback without a message.
        return self.message
def convert_8_to_16(value):
    """Scale an 8 bit level into 16 bits."""
    # Replicating the byte (0xAB -> 0xABAB) maps 0 -> 0 and 255 -> 65535
    # exactly, unlike a plain multiply by 256.
    return (value << 8) + value
def convert_16_to_8(value):
    """Scale a 16 bit level into 8 bits."""
    # Keep the high byte; floor division by 256 equals an arithmetic
    # right-shift by 8 for integers.
    return value // 256
class LIFXLight(Light):
"""Representation of a LIFX light."""
    def __init__(self, bulb, effects_conductor):
        """Initialize the light."""
        self.bulb = bulb
        self.effects_conductor = effects_conductor
        # Set to False by LIFXManager.unregister when the bulb disappears.
        self.registered = True
        # Unsubscribe callback for a scheduled end-of-transition refresh.
        self.postponed_update = None
        self.lock = asyncio.Lock()
    @property
    def device_info(self):
        """Return information about the device."""
        info = {
            "identifiers": {(LIFX_DOMAIN, self.unique_id)},
            "name": self.name,
            "connections": {(dr.CONNECTION_NETWORK_MAC, self.bulb.mac_addr)},
            "manufacturer": "LIFX",
        }
        # Model name is only included when the product id is known to aiolifx.
        model = aiolifx().products.product_map.get(self.bulb.product)
        if model is not None:
            info["model"] = model
        return info
    @property
    def available(self):
        """Return the availability of the bulb."""
        # Tracks aiolifx register/unregister events via LIFXManager.
        return self.registered
    @property
    def unique_id(self):
        """Return a unique ID."""
        # The bulb's MAC address is stable across IP changes.
        return self.bulb.mac_addr
    @property
    def name(self):
        """Return the name of the bulb."""
        # The label is configured on the bulb itself via the LIFX app.
        return self.bulb.label
    @property
    def who(self):
        """Return a string identifying the bulb."""
        # Used in debug log messages.
        return f"{self.bulb.ip_addr} ({self.name})"
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
kelvin = lifx_features(self.bulb)["max_kelvin"]
return math.floor(color_util.color_temperature_kelvin_to_mired(kelvin))
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
kelvin = lifx_features(self.bulb)["min_kelvin"]
return math.ceil(color_util.color_temperature_kelvin_to_mired(kelvin))
@property
def supported_features(self):
"""Flag supported features."""
support = SUPPORT_BRIGHTNESS | SUPPORT_TRANSITION | SUPPORT_EFFECT
bulb_features = lifx_features(self.bulb)
if bulb_features["min_kelvin"] != bulb_features["max_kelvin"]:
support |= SUPPORT_COLOR_TEMP
return support
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
fade = self.bulb.power_level / 65535
return convert_16_to_8(int(fade * self.bulb.color[2]))
@property
def color_temp(self):
"""Return the color temperature."""
_, sat, _, kelvin = self.bulb.color
if sat:
return None
return color_util.color_temperature_kelvin_to_mired(kelvin)
@property
def is_on(self):
"""Return true if light is on."""
return self.bulb.power_level != 0
@property
def effect(self):
"""Return the name of the currently running effect."""
effect = self.effects_conductor.effect(self.bulb)
if effect:
return "lifx_effect_" + effect.name
return None
async def update_hass(self, now=None):
"""Request new status and push it to hass."""
self.postponed_update = None
await self.async_update()
await self.async_update_ha_state()
async def update_during_transition(self, when):
"""Update state at the start and end of a transition."""
if self.postponed_update:
self.postponed_update()
# Transition has started
await self.update_hass()
# Transition has ended
if when > 0:
self.postponed_update = async_track_point_in_utc_time(
self.hass,
self.update_hass,
util.dt.utcnow() + timedelta(milliseconds=when),
)
async def async_turn_on(self, **kwargs):
"""Turn the light on."""
kwargs[ATTR_POWER] = True
self.hass.async_create_task(self.set_state(**kwargs))
async def async_turn_off(self, **kwargs):
"""Turn the light off."""
kwargs[ATTR_POWER] = False
self.hass.async_create_task(self.set_state(**kwargs))
async def set_state(self, **kwargs):
"""Set a color on the light and turn it on/off."""
async with self.lock:
bulb = self.bulb
await self.effects_conductor.stop([bulb])
if ATTR_EFFECT in kwargs:
await self.default_effect(**kwargs)
return
if ATTR_INFRARED in kwargs:
bulb.set_infrared(convert_8_to_16(kwargs[ATTR_INFRARED]))
if ATTR_TRANSITION in kwargs:
fade = int(kwargs[ATTR_TRANSITION] * 1000)
else:
fade = 0
# These are both False if ATTR_POWER is not set
power_on = kwargs.get(ATTR_POWER, False)
power_off = not kwargs.get(ATTR_POWER, True)
hsbk = find_hsbk(**kwargs)
# Send messages, waiting for ACK each time
ack = AwaitAioLIFX().wait
if not self.is_on:
if power_off:
await self.set_power(ack, False)
if hsbk:
await self.set_color(ack, hsbk, kwargs)
if power_on:
await self.set_power(ack, True, duration=fade)
else:
if power_on:
await self.set_power(ack, True)
if hsbk:
await self.set_color(ack, hsbk, kwargs, duration=fade)
if power_off:
await self.set_power(ack, False, duration=fade)
# Avoid state ping-pong by holding off updates as the state settles
await asyncio.sleep(0.3)
# Update when the transition starts and ends
await self.update_during_transition(fade)
async def set_power(self, ack, pwr, duration=0):
"""Send a power change to the bulb."""
await ack(partial(self.bulb.set_power, pwr, duration=duration))
async def set_color(self, ack, hsbk, kwargs, duration=0):
"""Send a color change to the bulb."""
hsbk = merge_hsbk(self.bulb.color, hsbk)
await ack(partial(self.bulb.set_color, hsbk, duration=duration))
async def default_effect(self, **kwargs):
"""Start an effect with default parameters."""
service = kwargs[ATTR_EFFECT]
data = {ATTR_ENTITY_ID: self.entity_id}
await self.hass.services.async_call(LIFX_DOMAIN, service, data)
async def async_update(self):
"""Update bulb status."""
if self.available and not self.lock.locked():
await AwaitAioLIFX().wait(self.bulb.get_color)
class LIFXWhite(LIFXLight):
    """A LIFX bulb limited to brightness and color temperature."""

    @property
    def effect_list(self):
        """Return the effects available on a white-only bulb."""
        effects = [SERVICE_EFFECT_PULSE, SERVICE_EFFECT_STOP]
        return effects
class LIFXColor(LIFXLight):
    """A LIFX light with full color support."""

    @property
    def supported_features(self):
        """Flag supported features: the base set plus color."""
        return super().supported_features | SUPPORT_COLOR

    @property
    def effect_list(self):
        """Return the effects available on a color bulb."""
        return [SERVICE_EFFECT_COLORLOOP, SERVICE_EFFECT_PULSE, SERVICE_EFFECT_STOP]

    @property
    def hs_color(self):
        """Return the hue/saturation pair, or None when fully desaturated."""
        hue, sat, _, _ = self.bulb.color
        if not sat:
            # Zero saturation means the bulb is in white mode.
            return None
        return (hue / 65535 * 360, sat / 65535 * 100)
class LIFXStrip(LIFXColor):
    """Representation of a LIFX light strip with multiple zones."""

    async def set_color(self, ack, hsbk, kwargs, duration=0):
        """Send a color change to the bulb.

        Honors an optional ATTR_ZONES list in kwargs; without it the
        change applies to every zone of the strip.
        """
        bulb = self.bulb
        num_zones = len(bulb.color_zones)

        zones = kwargs.get(ATTR_ZONES)
        if zones is None:
            # Fast track: setting all zones to the same brightness and color
            # can be treated as a single-zone bulb.
            if hsbk[2] is not None and hsbk[3] is not None:
                await super().set_color(ack, hsbk, kwargs, duration)
                return

            zones = list(range(0, num_zones))
        else:
            # Drop duplicates and out-of-range zone indices.
            zones = [x for x in set(zones) if x < num_zones]

        # Zone brightness is not reported when powered off
        if not self.is_on and hsbk[2] is None:
            # Briefly power the strip on to read the per-zone brightness
            # that merge_hsbk below needs, then power it back off.
            await self.set_power(ack, True)
            await asyncio.sleep(0.3)
            await self.update_color_zones()
            await self.set_power(ack, False)
            await asyncio.sleep(0.3)

        # Send new color to each zone
        for index, zone in enumerate(zones):
            zone_hsbk = merge_hsbk(bulb.color_zones[zone], hsbk)
            # Only the last message carries apply=1 so every queued zone
            # change takes effect at once.
            apply = 1 if (index == len(zones) - 1) else 0
            set_zone = partial(
                bulb.set_color_zones,
                start_index=zone,
                end_index=zone,
                color=zone_hsbk,
                duration=duration,
                apply=apply,
            )
            await ack(set_zone)

    async def async_update(self):
        """Update strip status."""
        if self.available and not self.lock.locked():
            await super().async_update()
            await self.update_color_zones()

    async def update_color_zones(self):
        """Get updated color information for each zone."""
        zone = 0
        top = 1
        while self.available and zone < top:
            # Each get_color_zones can update 8 zones at once
            resp = await AwaitAioLIFX().wait(
                partial(self.bulb.get_color_zones, start_index=zone)
            )
            if resp:
                zone += 8
                top = resp.count

                # We only await multizone responses so don't ask for just one
                if zone == top - 1:
                    zone -= 1
| leppa/home-assistant | homeassistant/components/lifx/light.py | Python | apache-2.0 | 24,645 |
module HttpdCookbook
  # httpd_service provider for RHEL-family platforms booted with systemd.
  # Renders a dedicated systemd unit per Apache instance and manages it
  # through Chef's systemd service provider.
  class HttpdServiceRhelSystemd < HttpdServiceRhel
    use_automatic_resource_name

    # This is Chef-12.0.0 back-compat, it is different from current
    # core chef 12.4.0 declarations
    if defined?(provides)
      provides :httpd_service, platform_family: %w(rhel fedora suse) do
        Chef::Platform::ServiceHelpers.service_resource_providers.include?(:systemd)
      end
    end

    action :start do
      # mod_systemd is needed for httpd's systemd integration.
      httpd_module "#{new_resource.name} :create systemd" do
        module_name 'systemd'
        httpd_version parsed_version
        instance new_resource.instance
        # NOTE(review): the notification target is built from
        # new_resource.name while the service below uses bare `name` —
        # confirm both expand to the same resource key.
        notifies :reload, "service[#{new_resource.name} :create #{apache_name}]"
        action :create
      end

      # Runtime directory (pid file, sockets) for this instance.
      directory "#{name} :create /run/#{apache_name}" do
        path "/run/#{apache_name}"
        owner 'root'
        group 'root'
        mode '0755'
        recursive true
        action :create
      end

      # Instance-specific systemd unit file.
      template "#{name} :create /usr/lib/systemd/system/#{apache_name}.service" do
        path "/usr/lib/systemd/system/#{apache_name}.service"
        source 'systemd/httpd.service.erb'
        owner 'root'
        group 'root'
        mode '0644'
        cookbook 'httpd'
        variables(apache_name: apache_name)
        action :create
      end

      # Drop-in directory for unit overrides.
      directory "#{name} :create /usr/lib/systemd/system/#{apache_name}.service.d" do
        path "/usr/lib/systemd/system/#{apache_name}.service.d"
        owner 'root'
        group 'root'
        mode '0755'
        recursive true
        action :create
      end

      service "#{name} :create #{apache_name}" do
        service_name apache_name
        supports restart: true, reload: true, status: true
        provider Chef::Provider::Service::Init::Systemd
        action [:start, :enable]
      end
    end

    action :stop do
      service "#{name} :stop #{apache_name}" do
        service_name apache_name
        supports restart: true, reload: true, status: true
        provider Chef::Provider::Service::Init::Systemd
        action :stop
      end
    end

    action :restart do
      service "#{name} :restart #{apache_name}" do
        service_name apache_name
        supports restart: true, reload: true, status: true
        provider Chef::Provider::Service::Init::Systemd
        action :restart
      end
    end

    action :reload do
      service "#{name} :reload #{apache_name}" do
        service_name apache_name
        supports restart: true, reload: true, status: true
        provider Chef::Provider::Service::Init::Systemd
        action :reload
      end
    end

    action_class.class_eval do
      # Stops and disables the distribution's stock httpd unit so it does
      # not conflict with the instance-specific unit created above.
      def create_stop_system_service
        service "#{name} :create httpd" do
          service_name 'httpd'
          provider Chef::Provider::Service::Init::Systemd
          action [:stop, :disable]
        end
      end

      # Stops and disables the instance unit on resource deletion.
      # NOTE(review): no service_name is set here, unlike the other service
      # resources — verify the intended unit is actually targeted.
      def delete_stop_service
        service "#{name} :delete #{apache_name}" do
          supports restart: true, reload: true, status: true
          provider Chef::Provider::Service::Init::Systemd
          action [:stop, :disable]
        end
      end
    end
  end
end
| juliandunn/httpd | libraries/httpd_service_rhel_systemd.rb | Ruby | apache-2.0 | 3,113 |
package com.vaadin.book.examples.addons.jpacontainer;
import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.OneToMany;
// BEGIN-EXAMPLE: jpacontainer.basic
/**
 * JPA entity representing a country. The one-to-many relation to
 * {@link Person} is owned by {@code Person.country} (mappedBy).
 */
@Entity
public class Country implements Serializable {
    private static final long serialVersionUID = 508426585088564210L;

    /** Database-generated primary key. */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    private Long id;

    private String name;

    /** People associated with this country (inverse side of the relation). */
    @OneToMany(mappedBy = "country")
    private Set<Person> people;

    /** Default constructor is required by JPA */
    public Country() {
        people = new HashSet<Person>();
    }

    public Country(String name) {
        this.name = name;
        this.people = new HashSet<Person>();
    }

    /** Adds a person to the country. */
    public void addPerson(Person person) {
        // NOTE(review): only this side of the bidirectional relation is
        // updated here; presumably the caller also sets
        // person.setCountry(this) — verify.
        people.add(person);
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Set<Person> getPeople() {
        return people;
    }

    public void setPeople(Set<Person> people) {
        this.people = people;
    }
}
// END-EXAMPLE: jpacontainer.basic
| BillHan/book-examples-v6 | src/main/java/com/vaadin/book/examples/addons/jpacontainer/Country.java | Java | apache-2.0 | 1,457 |
package ova.bean;
import java.io.Serializable;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import javax.enterprise.context.ApplicationScoped;
import javax.enterprise.inject.Any;
import javax.enterprise.inject.Instance;
import javax.faces.application.FacesMessage;
import javax.inject.Inject;
import javax.inject.Named;

import org.apache.commons.lang3.StringUtils;

import ova.api.MessagesProvider;
import ova.util.MessageUtils;
/**
 * Application scoped bean giving unified access to all available messages in a
 * modular JSF web application.
 *
 * <p>Messages are resolved lazily per locale: on first access the bundle is
 * assembled from the standard JSF messages plus every discovered
 * {@link MessagesProvider}, then cached for the lifetime of the application.
 *
 * <p>The {@link Map} interface is implemented only so the bean can be used
 * from EL as {@code #{msgs['some.key']}}; all other Map operations are
 * unsupported.
 */
@ApplicationScoped
@Named(value = "msgs")
public class MessagesProxy implements Map<String, String>, Serializable
{
    @Inject
    private UserSettingsData userSettingsData;

    @Any
    @Inject
    private Instance<MessagesProvider> messagesProviders;

    /** All cached locale specific messages, keyed by locale. */
    private ConcurrentMap<Locale, Map<String, String>> msgs = new ConcurrentHashMap<Locale, Map<String, String>>();

    /**
     * Resolves a message by key for the current user's locale, loading and
     * caching the locale's bundle on first access.
     *
     * @param key message key
     * @return the message text, or {@code null} for an unknown or {@code null} key
     */
    @Override
    public String get(Object key)
    {
        if (key == null) {
            return null;
        }

        Locale locale = userSettingsData.getLocale();
        Map<String, String> messages = msgs.get(locale);
        if (messages == null) {
            // Assemble the bundle locally before publishing it, so concurrent
            // readers of this application scoped bean can never observe a
            // partially filled map (the previous code published the empty map
            // first and filled it afterwards).
            Map<String, String> loaded = new HashMap<String, String>();

            // load messages from JSF impl. first
            loaded.putAll(MessageUtils.getMessages(locale, MessageUtils.FACES_MESSAGES));

            // load messages from providers in JARs; later providers win on key clashes
            for (MessagesProvider messagesProvider : messagesProviders) {
                loaded.putAll(messagesProvider.getMessages(locale));
            }

            // If another thread loaded the same locale concurrently, keep the
            // bundle that was published first.
            Map<String, String> previous = msgs.putIfAbsent(locale, loaded);
            messages = (previous != null) ? previous : loaded;
        }

        return messages.get(key);
    }

    /** Convenience alias for {@link #get(Object)}. */
    public String getText(String key)
    {
        return this.get(key);
    }

    /**
     * Resolves a message and formats it with {@link MessageFormat}.
     *
     * @param key message key
     * @param params positional format parameters
     * @return the formatted text, or {@code null} for an unknown key
     */
    public String getText(String key, Object... params)
    {
        String text = this.get(key);

        if ((text != null) && (params != null)) {
            text = MessageFormat.format(text, params);
        }

        return text;
    }

    /**
     * Builds a {@link FacesMessage} for the given key. The summary comes from
     * the key itself and the optional detail from {@code "<key>_detail"}.
     * Unknown keys yield a {@code ???key???} summary so missing translations
     * are visible in the UI.
     *
     * @param severity message severity
     * @param key message key
     * @param params positional format parameters applied to summary and detail
     * @return a populated FacesMessage, never {@code null}
     */
    public FacesMessage getMessage(FacesMessage.Severity severity, String key, Object... params)
    {
        String summary = this.get(key);
        String detail = this.get(key + "_detail");

        if ((summary != null) && (params != null)) {
            summary = MessageFormat.format(summary, params);
        }

        if ((detail != null) && (params != null)) {
            detail = MessageFormat.format(detail, params);
        }

        if (summary != null) {
            return new FacesMessage(severity, summary, ((detail != null) ? detail : StringUtils.EMPTY));
        }

        return new FacesMessage(severity, "???" + key + "???", ((detail != null) ? detail : StringUtils.EMPTY));
    }

    /** Builds an error-severity {@link FacesMessage} for the given key. */
    public FacesMessage getMessage(String key, Object... params)
    {
        // set severity to error
        return getMessage(FacesMessage.SEVERITY_ERROR, key, params);
    }

    /////////////////////////////////////////////////////////
    // java.util.Map interface — unsupported; the Map contract exists only
    // so EL value expressions can call get() via #{msgs['key']}.
    /////////////////////////////////////////////////////////

    public int size() {
        throw new UnsupportedOperationException();
    }

    public boolean isEmpty() {
        throw new UnsupportedOperationException();
    }

    public boolean containsKey(Object key) {
        throw new UnsupportedOperationException();
    }

    public boolean containsValue(Object value) {
        throw new UnsupportedOperationException();
    }

    public String put(String key, String value) {
        throw new UnsupportedOperationException();
    }

    public String remove(Object key) {
        throw new UnsupportedOperationException();
    }

    public void putAll(Map<? extends String, ? extends String> m) {
        throw new UnsupportedOperationException();
    }

    public void clear() {
        throw new UnsupportedOperationException();
    }

    public Set<String> keySet() {
        throw new UnsupportedOperationException();
    }

    public Collection<String> values() {
        throw new UnsupportedOperationException();
    }

    public Set<Entry<String, String>> entrySet() {
        throw new UnsupportedOperationException();
    }
}
| ova2/jsf-portal | jsftoolkit-jar/src/main/java/ova/bean/MessagesProxy.java | Java | apache-2.0 | 4,126 |
using Esri.ArcGISRuntime.Controls;
using Esri.ArcGISRuntime.Data;
using Esri.ArcGISRuntime.Geometry;
using Esri.ArcGISRuntime.Layers;
using Esri.ArcGISRuntime.Tasks.Query;
using System;
using System.Collections.ObjectModel;
using System.Globalization;
using System.Linq;
using System.Threading.Tasks;
using System.Windows;
using System.Windows.Controls;
namespace ArcGISRuntimeSDKDotNet_DesktopSamples.Samples
{
    /// <summary>
    /// Demonstrates how to update feature geometry in feature layer.
    /// Tapping the map selects a feature from the "Incidents" layer; the
    /// Edit button then opens the geometry editor and pushes the edited
    /// geometry back to the feature service.
    /// </summary>
    /// <title>Feature Layer Edit Geometry</title>
    /// <category>Editing</category>
    public partial class FeatureLayerEditGeometry: UserControl
    {
        public FeatureLayerEditGeometry()
        {
            InitializeComponent();
        }

        /// <summary>
        /// Selects feature for editing.
        /// </summary>
        private async void MyMapView_MapViewTapped(object sender, MapViewInputEventArgs e)
        {
            // Ignore tap events while in edit mode so we do not interfere with edit geometry.
            if (MyMapView.Editor.IsActive)
                return;

            var layer = MyMapView.Map.Layers["Incidents"] as FeatureLayer;
            layer.ClearSelection();
            // Disarm the Edit button until a new feature is hit.
            SetGeometryEditor();
            string message = null;
            try
            {
                // Performs hit test on layer to select feature.
                var features = await layer.HitTestAsync(MyMapView, e.Position);
                if (features == null || !features.Any())
                    return;
                var featureID = features.FirstOrDefault();
                layer.SelectFeatures(new long[] { featureID });
                var feature = await layer.FeatureTable.QueryAsync(featureID);
                // Arm the Edit button with the selected feature.
                SetGeometryEditor(feature);
            }
            catch (Exception ex)
            {
                message = ex.Message;
            }
            if (!string.IsNullOrWhiteSpace(message))
                MessageBox.Show(message);
        }

        /// <summary>
        /// Prepares GeometryEditor for editing. A null feature disables the
        /// Edit button; otherwise the feature is stashed on the button's Tag.
        /// </summary>
        private void SetGeometryEditor(Feature feature = null)
        {
            EditButton.Tag = feature;
            EditButton.IsEnabled = feature == null ? false : true;
        }

        /// <summary>
        /// Enables geometry editing and submits geometry edit back to the server.
        /// </summary>
        private async void EditButton_Click(object sender, RoutedEventArgs e)
        {
            var feature = (Feature)EditButton.Tag;
            var layer = MyMapView.Map.Layers["Incidents"] as FeatureLayer;
            var table = (ArcGISFeatureTable)layer.FeatureTable;

            // Hides feature from feature layer while its geometry is being modified.
            layer.SetFeatureVisibility(layer.SelectedFeatureIDs, false);

            string message = null;
            try
            {
                // Enables geometry editing and update its geometry
                // using GeometryEngine to correct ring orientation.
                var geometry = await MyMapView.Editor.EditGeometryAsync(feature.Geometry);
                feature.Geometry = GeometryEngine.Simplify(geometry);
                await table.UpdateAsync(feature);
                if (table.HasEdits)
                {
                    if (table is ServiceFeatureTable)
                    {
                        var serviceTable = (ServiceFeatureTable)table;
                        // Pushes geometry edits back to the server.
                        var result = await serviceTable.ApplyEditsAsync();
                        if (result.UpdateResults == null || result.UpdateResults.Count < 1)
                            return;
                        var updateResult = result.UpdateResults[0];
                        if (updateResult.Error != null)
                            message = updateResult.Error.Message;
                    }
                }
            }
            catch (TaskCanceledException)
            {
                // Ignore TaskCanceledException - usually happens if the editor gets cancelled or restarted
            }
            catch (Exception ex)
            {
                message = ex.Message;
            }
            finally
            {
                // Always restore visibility/selection state, even on failure
                // or the early return above.
                layer.SetFeatureVisibility(layer.SelectedFeatureIDs, true);
                layer.ClearSelection();
                SetGeometryEditor();
            }
            if (!string.IsNullOrWhiteSpace(message))
                MessageBox.Show(message);
        }
    }
}
| Tyshark9/arcgis-runtime-samples-dotnet | src/Desktop/ArcGISRuntimeSDKDotNet_DesktopSamples/Samples/Editing/FeatureLayerEditGeometry.xaml.cs | C# | apache-2.0 | 4,573 |
#
# Cookbook Name:: oneview_test
# Recipe:: server_profile_template_add_volume_attachments
#
# (c) Copyright 2017 Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# Creates (or updates) a server profile template with two volume
# attachments. Each entry names an existing volume and carries attachment
# data that is passed through to the OneView API as-is.
oneview_server_profile_template 'ServerProfileTemplate1' do
  client node['oneview_test']['client']
  volume_attachments(
    [
      {
        'volume' => 'Volume1',
        'attachment_data' => { 'attr_1' => 'attr 1' }
      },
      {
        'volume' => 'Volume2',
        'attachment_data' => { 'attr_2' => 'attr 2' }
      }
    ]
  )
  action :create
end
| HewlettPackard/oneview-chef | spec/fixtures/cookbooks/oneview_test/recipes/server_profile_template_add_volume_attachments.rb | Ruby | apache-2.0 | 1,057 |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.simpleworkflow.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
 * RequestCancelExternalWorkflowExecutionDecisionAttributes JSON Unmarshaller.
 *
 * <p>Auto-generated code: walks the JSON token stream and populates the
 * attribute object field-by-field until the enclosing object ends.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class RequestCancelExternalWorkflowExecutionDecisionAttributesJsonUnmarshaller implements
        Unmarshaller<RequestCancelExternalWorkflowExecutionDecisionAttributes, JsonUnmarshallerContext> {

    public RequestCancelExternalWorkflowExecutionDecisionAttributes unmarshall(JsonUnmarshallerContext context) throws Exception {
        RequestCancelExternalWorkflowExecutionDecisionAttributes requestCancelExternalWorkflowExecutionDecisionAttributes = new RequestCancelExternalWorkflowExecutionDecisionAttributes();

        // Track where we started so nested/sibling objects are not consumed.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null means the whole attributes object is absent.
        if (token == VALUE_NULL) {
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("workflowId", targetDepth)) {
                    context.nextToken();
                    requestCancelExternalWorkflowExecutionDecisionAttributes.setWorkflowId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("runId", targetDepth)) {
                    context.nextToken();
                    requestCancelExternalWorkflowExecutionDecisionAttributes.setRunId(context.getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("control", targetDepth)) {
                    context.nextToken();
                    requestCancelExternalWorkflowExecutionDecisionAttributes.setControl(context.getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have climbed back out of the object we started in.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return requestCancelExternalWorkflowExecutionDecisionAttributes;
    }

    private static RequestCancelExternalWorkflowExecutionDecisionAttributesJsonUnmarshaller instance;

    /** Lazily created singleton accessor (generated code; not thread-safe, but idempotent). */
    public static RequestCancelExternalWorkflowExecutionDecisionAttributesJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new RequestCancelExternalWorkflowExecutionDecisionAttributesJsonUnmarshaller();
        return instance;
    }
}
| aws/aws-sdk-java | aws-java-sdk-simpleworkflow/src/main/java/com/amazonaws/services/simpleworkflow/model/transform/RequestCancelExternalWorkflowExecutionDecisionAttributesJsonUnmarshaller.java | Java | apache-2.0 | 3,804 |
/* global Buffer */
var dgram = require('dgram');
var client = dgram.createSocket("udp4");
var rfidReader = require("../index.js");
var broadcastAddress = "255.255.255.255";
var deviceBroadcastPort = 39169;
/**
 * Callback helper for dgram send operations: rethrows any error so a
 * failed send aborts the script instead of being silently ignored.
 * @param {Error|null|undefined} err - error passed by the async callback
 */
function failOnError(err) {
    if (!err) {
        return;
    }
    throw err;
}
// Log socket-level failures and release the socket.
client.on("error", function (err) {
    console.log("Server error:\n" + err.stack);
    client.close();
});

//var deviceIpMask = "255.255.255.0";
var deviceIpMask = "255.255.240.0";
//var deviceMask = "255.255.254.0";

// Target network settings per RFID reader, matched by device serial
// number. Entries are indexed below (data[10]) to pick which reader to
// reconfigure.
var data = [
    {
        ip: "192.168.1.218",
        gateway: "192.168.1.108",
        serial: "144-1-31-95"
    },
    {
        ip: "192.168.1.217",
        gateway: "192.168.1.108",
        serial: "199-1-112-40"
    },
    // 01
    {
        ip: "10.240.66.10",
        gateway: "10.240.66.1",
        serial: "193-1-110-217"
    },
    // 02
    {
        ip: "10.240.66.11",
        gateway: "10.240.66.1",
        serial: "141-1-33-145"
    },
    // 03
    {
        ip: "10.240.66.12",
        gateway: "10.240.66.1",
        serial: "193-1-110-141"
    },
    // 04
    {
        ip: "10.240.66.13",
        gateway: "10.240.66.1",
        serial: "144-1-31-91"
    },
    // 05
    {
        ip: "10.240.66.14",
        gateway: "10.240.66.1",
        serial: "144-1-31-84"
    },
    // 06
    {
        ip: "10.240.66.15",
        gateway: "10.240.66.1",
        serial: "144-1-31-119"
    },
    // 07
    {
        ip: "10.240.66.16",
        gateway: "10.240.66.1",
        serial: "152-1-34-192"
    },
    // 08
    {
        ip: "10.240.66.17",
        gateway: "10.240.66.1",
        serial: "193-1-110-184"
    },
    // 09
    {
        ip: "10.240.66.18",
        gateway: "10.240.66.1",
        serial: "172-1-115-52"
    },
    // 10
    {
        ip: "10.240.66.19",
        gateway: "10.240.66.1",
        serial: "146-1-34-58"
    },
    // 11
    {
        ip: "10.240.66.20",
        gateway: "10.240.66.1",
        serial: "172-1-115-88"
    },
    // 12
    {
        ip: "10.240.66.21",
        gateway: "10.240.66.1",
        serial: "141-1-33-202"
    },
    // 13
    {
        ip: "10.240.66.22",
        gateway: "10.240.66.1",
        serial: "199-1-112-40"
    },
    // 14
    {
        ip: "10.240.66.23",
        gateway: "10.240.66.1",
        serial: "144-1-31-95"
    }
];

// Once the socket is bound: beep all readers, then broadcast the new
// network settings to the one reader selected from `data`, and beep
// again as an audible confirmation of a successful send.
client.on("listening", function () {
    var address = client.address();
    console.log("Server listening " + address.address + ":" + address.port);
    console.log("Start listening.")
    // Broadcast mode is needed to reach readers on 255.255.255.255.
    client.setBroadcast(true);
    var setSound = rfidReader.setSoundCommand(0, rfidReader.soundType.shortBeepOnce);
    client.send(setSound, 0, setSound.length, deviceBroadcastPort, broadcastAddress, failOnError);
    // NOTE(review): data[10] hard-codes the reader being reconfigured
    // (the entry commented "// 09") — confirm before running.
    var deviceData = data[10];
    var updateReaderCommand = rfidReader.updateReaderCommand(deviceData.ip, deviceIpMask, deviceData.gateway, 0, deviceData.serial, 1);
    client.send(updateReaderCommand, 0, updateReaderCommand.length, deviceBroadcastPort, broadcastAddress, function (err) {
        console.log(arguments);
        if (err) {
            throw err;
        }
        var setSound = rfidReader.setSoundCommand(0, rfidReader.soundType.shortBeepOnce);
        client.send(setSound, 0, setSound.length, deviceBroadcastPort, broadcastAddress, failOnError);
    });
});

// Bind to the device broadcast port on the link-local interface address.
client.bind({
    port: deviceBroadcastPort,
    address: "169.254.167.154"
});
| kant2002/node-rfidreader | tools/updateSettings.js | JavaScript | apache-2.0 | 2,783 |
<?php
namespace CourseHero\ResponsysPHP\Api\Types;
/**
 * Simple result wrapper for a Responsys launch API call.
 */
class LaunchResult
{
    /**
     * Identifier of the triggered launch, or null when not provided.
     *
     * @var int $launchId
     * @access public
     */
    public $launchId = null;

    /**
     * @param int $launchId identifier of the triggered launch
     * @access public
     */
    public function __construct($launchId = null)
    {
        $this->launchId = $launchId;
    }
}
| course-hero/responsys-php | src/CourseHero/ResponsysPHP/Api/Types/LaunchResult.php | PHP | apache-2.0 | 321 |
namespace CompositeDiagrammer
{
    using System.Drawing.Drawing2D;

    /// <summary>
    /// Abstraction over a drawable shape outline.
    /// </summary>
    public interface Path
    {
        /// <summary>
        /// Returns the GDI+ graphics path describing this shape.
        /// </summary>
        GraphicsPath Get();
    }
}
/*
* Licensed to STRATIO (C) under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. The STRATIO (C) licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.stratio.connector.mongodb.core.configuration;
import static com.stratio.connector.mongodb.core.configuration.ConfigurationOptions.HOST;
import static com.stratio.connector.mongodb.core.configuration.ConfigurationOptions.MAX_CONNECTIONS_PER_HOST;
import static com.stratio.connector.mongodb.core.configuration.ConfigurationOptions.PORT;
import static com.stratio.connector.mongodb.core.configuration.ConfigurationOptions.READ_PREFERENCE;
import static com.stratio.connector.mongodb.core.configuration.ConfigurationOptions.WRITE_CONCERN;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.HashMap;
import java.util.Map;
import org.junit.Test;
import com.mongodb.ReadPreference;
import com.mongodb.WriteConcern;
import com.stratio.connector.mongodb.core.exceptions.MongoValidationException;
import com.stratio.crossdata.common.connector.ConnectorClusterConfig;
import com.stratio.crossdata.common.data.ClusterName;
import com.stratio.crossdata.common.exceptions.ConnectionException;
/**
* The Test for the configuration for Mongo connector.
*/
public class MongoClientConfigurationTest {
private final static ClusterName CLUSTER_NAME = new ClusterName("clusterMongo");
private final static String SERVER_IP = "125.0.1.1";
private final static String SERVER_PORT = "12";
private final static String SERVER_IP_LIST = "125.0.1.1, 127.0.1.1";
private final static String SERVER_PORT_LIST = "2700, 2701";
private final static String CUSTOM_READ_PREFERENCE = "secondaryPreferred";
private final static String CUSTOM_WRITE_PREFERENCE = "unacknowledged";
private final static String CUSTOM_MAX_CONNECTION = "5000";
@Test
public void defaultConfigurationTest() throws MongoValidationException, ConnectionException {
Map<String, String> properties = null;
ConnectorClusterConfig defaultConfig = new ConnectorClusterConfig(CLUSTER_NAME, null, properties);
MongoClientConfiguration config = new MongoClientConfiguration(defaultConfig);
// Review the default options in Mongo
assertEquals("The lattency default value is not the expected", config.getMongoClientOptions()
.getAcceptableLatencyDifference(), 15);
assertEquals("The connections per host default value is not the expected", config.getMongoClientOptions()
.getConnectionsPerHost(), 10000);
assertEquals("The max idle time default value is not the expected", config.getMongoClientOptions()
.getMaxConnectionIdleTime(), 0);
assertEquals("The connection timeout default value is not the expected", config.getMongoClientOptions()
.getConnectTimeout(), 10000);
assertEquals("The read preference default value is not the expected", config.getMongoClientOptions()
.getReadPreference(), ReadPreference.primaryPreferred());
assertEquals("The write concern default value is not the expected", config.getMongoClientOptions()
.getWriteConcern(), WriteConcern.ACKNOWLEDGED);
// Connector properties
assertTrue("There should be only 1 seed by default", config.getSeeds().size() == 1);
assertEquals("The default port should be 27017", config.getSeeds().get(0).getPort(), 27017);
}
@Test
public void customConfigurationTest() throws NumberFormatException, MongoValidationException, ConnectionException {
Map<String, String> properties = new HashMap<String, String>();
properties.put(HOST.getOptionName(), SERVER_IP);
properties.put(PORT.getOptionName(), SERVER_PORT);
properties.put(READ_PREFERENCE.getOptionName(), CUSTOM_READ_PREFERENCE);
properties.put(WRITE_CONCERN.getOptionName(), CUSTOM_WRITE_PREFERENCE);
properties.put(MAX_CONNECTIONS_PER_HOST.getOptionName(), CUSTOM_MAX_CONNECTION);
ConnectorClusterConfig customConfi = new ConnectorClusterConfig(CLUSTER_NAME, null, properties);
MongoClientConfiguration config = new MongoClientConfiguration(customConfi);
// Review the default options in Mongo
assertEquals("The connections per host value is not the expected", config.getMongoClientOptions()
.getConnectionsPerHost(), Integer.parseInt(CUSTOM_MAX_CONNECTION));
assertEquals("The read preference is not the expected", config.getMongoClientOptions().getReadPreference(),
ReadPreference.secondaryPreferred());
assertEquals("The write concern is not the expected", config.getMongoClientOptions().getWriteConcern(),
WriteConcern.UNACKNOWLEDGED);
// Connector properties
assertTrue("There must be 1 seed", config.getSeeds().size() == 1);
assertEquals("The port is not the expected", config.getSeeds().get(0).getPort(), Integer.parseInt(SERVER_PORT));
}
@Test
public void wrongReadPreferenceTest() {
Map<String, String> properties = new HashMap<String, String>();
properties.put(READ_PREFERENCE.getOptionName(), "falseReadPreference");
ConnectorClusterConfig wrongConfig = new ConnectorClusterConfig(CLUSTER_NAME, null, properties);
MongoClientConfiguration config = new MongoClientConfiguration(wrongConfig);
try {
config.getMongoClientOptions();
fail("A exception must be thrown when no match any option");
} catch (MongoValidationException e) {
}
}
@Test
public void wrongIntegerPreferenceTest() {
        // A non-numeric max-connections value must be rejected when options are built.
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(MAX_CONNECTIONS_PER_HOST.getOptionName(), "ten");
        MongoClientConfiguration config = new MongoClientConfiguration(
                new ConnectorClusterConfig(CLUSTER_NAME, null, properties));
        try {
            config.getMongoClientOptions();
            fail("An exception must be thrown when a non integer received");
        } catch (MongoValidationException e) {
            // expected: "ten" is not an integer
        }
}
@Test
public void multipleHostsTest() throws ConnectionException {
        // Comma-separated host and port lists must yield one seed per host, in order.
        Map<String, String> props = new HashMap<String, String>();
        props.put(HOST.getOptionName(), SERVER_IP_LIST);
        props.put(PORT.getOptionName(), SERVER_PORT_LIST);
        MongoClientConfiguration config = new MongoClientConfiguration(
                new ConnectorClusterConfig(CLUSTER_NAME, null, props));
        assertTrue("There must be 2 seeds", config.getSeeds().size() == 2);
        assertEquals("The port is not the expected", 2700, config.getSeeds().get(0).getPort());
        assertEquals("The port is not the expected", 2701, config.getSeeds().get(1).getPort());
}
@Test
public void wrongSeedsTest() {
        // A host list paired with a single port is inconsistent and must fail.
        Map<String, String> props = new HashMap<String, String>();
        props.put(HOST.getOptionName(), SERVER_IP_LIST);
        props.put(PORT.getOptionName(), SERVER_PORT);
        MongoClientConfiguration config = new MongoClientConfiguration(
                new ConnectorClusterConfig(CLUSTER_NAME, null, props));
        try {
            config.getSeeds();
            fail("An exception must be thrown when number of ports and hosts are different");
        } catch (ConnectionException e) {
            // expected: host/port cardinality mismatch
        }
}
}
| Stratio/stratio-connector-mongodb | connector-mongodb/src/test/java/com/stratio/connector/mongodb/core/configuration/MongoClientConfigurationTest.java | Java | apache-2.0 | 8,353 |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package controller
import (
"encoding/json"
osb "github.com/kubernetes-sigs/go-open-service-broker-client/v2"
"github.com/kubernetes-sigs/service-catalog/pkg/apis/servicecatalog/v1beta1"
)
const (
originatingIdentityPlatform = "kubernetes"
)
// buildOriginatingIdentity serializes the given Kubernetes user info into an
// OSB originating-identity value on the "kubernetes" platform.
// A nil userInfo yields (nil, nil); a JSON marshalling failure is returned as-is.
func buildOriginatingIdentity(userInfo *v1beta1.UserInfo) (*osb.OriginatingIdentity, error) {
	if userInfo == nil {
		return nil, nil
	}
	serialized, marshalErr := json.Marshal(userInfo)
	if marshalErr != nil {
		return nil, marshalErr
	}
	return &osb.OriginatingIdentity{
		Platform: originatingIdentityPlatform,
		Value:    string(serialized),
	}, nil
}
| kubernetes-incubator/service-catalog | pkg/controller/originating_identity.go | GO | apache-2.0 | 1,160 |
// Generated from /POI/java/org/apache/poi/poifs/crypt/cryptoapi/CryptoAPIEncryptor.java
#pragma once
#include <fwd-POI.hpp>
#include <java/io/fwd-POI.hpp>
#include <java/lang/fwd-POI.hpp>
#include <javax/crypto/fwd-POI.hpp>
#include <org/apache/poi/poifs/crypt/fwd-POI.hpp>
#include <org/apache/poi/poifs/crypt/cryptoapi/fwd-POI.hpp>
#include <org/apache/poi/poifs/filesystem/fwd-POI.hpp>
#include <org/apache/poi/poifs/crypt/Encryptor.hpp>
#include <java/lang/Cloneable.hpp>
#include <org/apache/poi/poifs/crypt/ChunkedCipherOutputStream.hpp>
#include <org/apache/poi/poifs/crypt/cryptoapi/CryptoAPIEncryptor_CryptoAPICipherOutputStream.hpp>
struct default_init_tag;
// Generated C++ transliteration of Apache POI's CryptoAPIEncryptor
// (declaration only — implementations live in the corresponding .cpp).
// Encrypts POIFS streams using the legacy Microsoft CryptoAPI scheme on top
// of the generic Encryptor interface.
class poi::poifs::crypt::cryptoapi::CryptoAPIEncryptor
    : public ::poi::poifs::crypt::Encryptor
    , public virtual ::java::lang::Cloneable
{
public:
    typedef ::poi::poifs::crypt::Encryptor super;
private:
    int32_t chunkSize { };  // chunk size for the chunked cipher output stream
protected:
    void ctor();  // maps the Java constructor body
public:
    // Derive key material / verifier from the password (overloads mirror the Java API).
    void confirmPassword(::java::lang::String* password) override;
    void confirmPassword(::java::lang::String* password, ::int8_tArray* keySpec, ::int8_tArray* keySalt, ::int8_tArray* verifier, ::int8_tArray* verifierSalt, ::int8_tArray* integritySalt) override;
    // Re-initializes the cipher for the given block number.
    virtual ::javax::crypto::Cipher* initCipherForBlock(::javax::crypto::Cipher* cipher, int32_t block) /* throws(GeneralSecurityException) */;
    // Encrypting output streams over a POIFS directory / raw stream.
    ::poi::poifs::crypt::ChunkedCipherOutputStream* getDataStream(::poi::poifs::filesystem::DirectoryNode* dir) /* throws(IOException, GeneralSecurityException) */ override;
    CryptoAPIEncryptor_CryptoAPICipherOutputStream* getDataStream(::java::io::OutputStream* stream, int32_t initialOffset) /* throws(IOException, GeneralSecurityException) */ override;
    // Writes the encrypted summary-information entries into the filesystem.
    virtual void setSummaryEntries(::poi::poifs::filesystem::DirectoryNode* dir, ::java::lang::String* encryptedStream, ::poi::poifs::filesystem::NPOIFSFileSystem* entries) /* throws(IOException, GeneralSecurityException) */;
public: /* protected */
    virtual int32_t getKeySizeInBytes();
public:
    void setChunkSize(int32_t chunkSize) override;
public: /* protected */
    // Serializes the EncryptionInfo record into the directory.
    virtual void createEncryptionInfoEntry(::poi::poifs::filesystem::DirectoryNode* dir) /* throws(IOException) */;
public:
    CryptoAPIEncryptor* clone() /* throws(CloneNotSupportedException) */ override;
    // Generated
public: /* protected */
    CryptoAPIEncryptor();
protected:
    CryptoAPIEncryptor(const ::default_init_tag&);  // two-phase init support
public:
    static ::java::lang::Class *class_();
private:
    void init();
public:
    // Convenience overloads delegating over the various POIFS filesystem types.
    virtual ::java::io::OutputStream* getDataStream(::poi::poifs::filesystem::NPOIFSFileSystem* fs);
    virtual ::java::io::OutputStream* getDataStream(::poi::poifs::filesystem::OPOIFSFileSystem* fs);
    virtual ::java::io::OutputStream* getDataStream(::poi::poifs::filesystem::POIFSFileSystem* fs);
private:
    virtual ::java::lang::Class* getClass0();
    friend class CryptoAPIEncryptor_createEncryptionInfoEntry_1;
    friend class CryptoAPIEncryptor_CryptoAPICipherOutputStream;
};
| pebble2015/cpoi | src/org/apache/poi/poifs/crypt/cryptoapi/CryptoAPIEncryptor.hpp | C++ | apache-2.0 | 3,017 |
package com.dvoiss.sensorannotations;
import com.dvoiss.sensorannotations.exception.ProcessingException;
import java.lang.annotation.Annotation;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import org.checkerframework.checker.nullness.qual.NonNull;
/**
 * This wrapper class holds all the annotations found in a given class {@link
 * #mEnclosingClassName}.
 *
 * The {@link #mItemsMap} is a map with sensor types as the key and a value of a map between the
 * annotation class to the method annotated.
 */
class AnnotatedMethodsPerClass {
    // Name of the class whose annotated methods are collected here.
    @NonNull private String mEnclosingClassName;
    // sensor type -> (annotation class -> annotated method).
    @NonNull private Map<Integer, Map<Class, AnnotatedMethod>> mItemsMap = new LinkedHashMap<>();

    AnnotatedMethodsPerClass(@NonNull String enclosingClassName) {
        this.mEnclosingClassName = enclosingClassName;
    }

    /**
     * Registers an annotated method under its sensor type.
     *
     * @param annotationClass the sensor annotation found on the method
     * @param method the annotated method wrapper
     * @throws ProcessingException if another method in the enclosing class already carries the
     *     same annotation for the same sensor type
     */
    void add(@NonNull Class<? extends Annotation> annotationClass, @NonNull AnnotatedMethod method)
        throws ProcessingException {
        Map<Class, AnnotatedMethod> annotationMap = mItemsMap.get(method.getSensorType());
        if (annotationMap == null) {
            annotationMap = new HashMap<>();
        }

        if (annotationMap.get(annotationClass) != null) {
            // Fix: the second placeholder is documented as the class name, but the original
            // passed the clashing method's own name; report the enclosing class instead.
            String error =
                String.format("@%s is already annotated on a different method in class %s",
                    annotationClass.getSimpleName(), mEnclosingClassName);

            throw new ProcessingException(method.getExecutableElement(), error);
        }

        annotationMap.put(annotationClass, method);
        mItemsMap.put(method.getSensorType(), annotationMap);
    }

    /** @return true if any sensor type has a method annotated with the given annotation. */
    boolean hasAnnotationsOfType(Class<? extends Annotation> annotationClass) {
        for (Map<Class, AnnotatedMethod> values : mItemsMap.values()) {
            if (values.get(annotationClass) != null) {
                return true;
            }
        }
        return false;
    }

    @NonNull String getEnclosingClassName() {
        return mEnclosingClassName;
    }

    @NonNull Map<Integer, Map<Class, AnnotatedMethod>> getItemsMap() {
        return mItemsMap;
    }
}
| dvoiss/SensorAnnotations | sensorannotations-compiler/src/main/java/com/dvoiss/sensorannotations/AnnotatedMethodsPerClass.java | Java | apache-2.0 | 2,149 |
/*
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.samples.apps.ourstreets.data;
import androidx.annotation.NonNull;
import com.google.android.gms.maps.model.LatLng;
import com.google.samples.apps.ourstreets.BuildConfig;
import com.google.samples.apps.ourstreets.model.Detail;
import com.firebase.client.DataSnapshot;
/**
 * Presents gallery details to other components of this app.
 */
public final class DetailPresenter extends DataPresenter<Detail> {

    public DetailPresenter(@NonNull DataView<Detail> view, @NonNull String childId) {
        super(view, BuildConfig.DETAIL_URL + "/" + childId);
    }

    /** Builds a {@link Detail} from the flat fields plus the camera values under "location". */
    @NonNull
    @Override
    protected Detail parseData(DataSnapshot data) {
        final String detailTitle = data.child(JsonKeys.TITLE).getValue(String.class);
        final String detailDescription = data.child(JsonKeys.DESCRIPTION).getValue(String.class);
        final LatLng position = DataUtils.readLatLng(data);
        final DataSnapshot locationNode = data.child(JsonKeys.LOCATION);
        final Float cameraTilt = locationNode.child(JsonKeys.TILT).getValue(Float.class);
        final Float cameraBearing = locationNode.child(JsonKeys.BEARING).getValue(Float.class);
        return new Detail(detailTitle, detailDescription, position, cameraTilt, cameraBearing);
    }
}
| android/animation-samples | OurStreets/app/src/main/java/com/google/samples/apps/ourstreets/data/DetailPresenter.java | Java | apache-2.0 | 1,809 |
// Copyright The OpenTelemetry Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package componenttest // import "go.opentelemetry.io/collector/component/componenttest"
import (
"context"
"go.opentelemetry.io/collector/component"
"go.opentelemetry.io/collector/config"
)
// NewNopExtensionCreateSettings returns a new nop settings for Create*Extension functions.
func NewNopExtensionCreateSettings() component.ExtensionCreateSettings {
	settings := component.ExtensionCreateSettings{
		TelemetrySettings: NewNopTelemetrySettings(),
		BuildInfo:         component.NewDefaultBuildInfo(),
	}
	return settings
}
// nopExtensionConfig is the configuration type produced by the nop extension factory.
type nopExtensionConfig struct {
	config.ExtensionSettings `mapstructure:",squash"` // squash ensures fields are correctly decoded in embedded struct
}
// nopExtensionFactoryInstance is the shared factory returned by NewNopExtensionFactory.
// It produces nopExtensionConfig default configs and always hands back the
// nopExtensionInstance singleton on creation.
var nopExtensionFactoryInstance = component.NewExtensionFactory(
	"nop",
	func() config.Extension {
		return &nopExtensionConfig{
			ExtensionSettings: config.NewExtensionSettings(config.NewComponentID("nop")),
		}
	},
	func(context.Context, component.ExtensionCreateSettings, config.Extension) (component.Extension, error) {
		return nopExtensionInstance, nil
	})
// NewNopExtensionFactory returns a component.ExtensionFactory that constructs nop extensions.
func NewNopExtensionFactory() component.ExtensionFactory {
	factory := nopExtensionFactoryInstance
	return factory
}
// nopExtensionInstance is the singleton returned for every created nop extension.
var nopExtensionInstance = &nopExtension{}

// nopExtension stores consumed traces and metrics for testing purposes.
type nopExtension struct {
	nopComponent
}
| open-telemetry/opentelemetry-collector | component/componenttest/nop_extension.go | GO | apache-2.0 | 1,984 |
package net.floodlightcontroller.qos;
/**
* Copyright 2012 Marist College, New York
* Author Ryan Wallner (ryan.wallner1@marist.edu)
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* Provides Queuing and L2/L3 Quality of Service Policies to a
* virtualized network using DiffServ/ToS class based model, and queuing techniques.
* This module provides overlapping flowspace for policies that governed by their priority.
* This QoS modules acts in a pro-active manner having to abide by existing "Policies"
* within a network.
*
**/
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.openflow.protocol.OFFlowMod;
import org.openflow.protocol.OFMatch;
import org.openflow.protocol.OFMessage;
import org.openflow.protocol.OFPacketOut;
import org.openflow.protocol.OFPort;
import org.openflow.protocol.OFType;
import org.openflow.protocol.action.OFAction;
import org.openflow.protocol.action.OFActionEnqueue;
import org.openflow.protocol.action.OFActionNetworkTypeOfService;
import org.openflow.protocol.action.OFActionType;
import org.openflow.util.HexString;
import net.floodlightcontroller.core.FloodlightContext;
import net.floodlightcontroller.core.IOFMessageListener;
import net.floodlightcontroller.core.IOFSwitch;
import net.floodlightcontroller.core.module.FloodlightModuleContext;
import net.floodlightcontroller.core.module.FloodlightModuleException;
import net.floodlightcontroller.core.module.IFloodlightModule;
import net.floodlightcontroller.core.module.IFloodlightService;
import net.floodlightcontroller.restserver.IRestApiService;
import net.floodlightcontroller.staticflowentry.IStaticFlowEntryPusherService;
import net.floodlightcontroller.storage.IResultSet;
import net.floodlightcontroller.storage.IStorageSourceService;
import net.floodlightcontroller.storage.StorageException;
import net.floodlightcontroller.qos.QoSPolicy;
import net.floodlightcontroller.qos.QoSTypeOfService;
import net.floodlightcontroller.core.IFloodlightProviderService;
import java.util.ArrayList;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class QoS implements IQoSService, IFloodlightModule,
        IOFMessageListener {

    // --- Floodlight services resolved in init() ---------------------------
    protected IFloodlightProviderService floodlightProvider;
    protected IStaticFlowEntryPusherService flowPusher;
    protected List<QoSPolicy> policies; //Synchronized
    protected List<QoSTypeOfService> services; //Synchronized
    protected IRestApiService restApi;
    protected FloodlightContext cntx;
    protected IStorageSourceService storageSource;
    // Contents of tools.properties, loaded in init().
    protected Properties props = new Properties();
    protected String[] tools;
    protected static Logger logger;
    // Master on/off switch for this module; toggled via enableQoS().
    protected boolean enabled;
    //regex for dpid string, this can/needs to be more elegant. Maybe use of a Matcher
    // NOTE(review): [\d|\D] matches any character (digit, non-digit or '|'),
    // so this only validates the xx:xx:...:xx shape, not hex content.
    protected String dpidPattern = "^[\\d|\\D][\\d|\\D]:[\\d|\\D][\\d|\\D]:" +
            "[\\d|\\D][\\d|\\D]:[\\d|\\D][\\d|\\D]:" +
            "[\\d|\\D][\\d|\\D]:[\\d|\\D][\\d|\\D]:" +
            "[\\d|\\D][\\d|\\D]:[\\d|\\D][\\d|\\D]$";

    // --- Policy table schema (one row per QoSPolicy) ----------------------
    public static final String TABLE_NAME = "controller_qos";
    public static final String COLUMN_POLID = "policyid";
    public static final String COLUMN_NAME = "name";
    public static final String COLUMN_MATCH_PROTOCOL = "protocol";
    public static final String COLUMN_MATCH_ETHTYPE = "eth-type";
    public static final String COLUMN_MATCH_INGRESSPRT = "ingressport";
    public static final String COLUMN_MATCH_IPDST = "ipdst";
    public static final String COLUMN_MATCH_IPSRC = "ipsrc";
    public static final String COLUMN_MATCH_VLANID = "vlanid";
    public static final String COLUMN_MATCH_ETHSRC = "ethsrc";
    public static final String COLUMN_MATCH_ETHDST = "ethdst";
    public static final String COLUMN_MATCH_TCPUDP_SRCPRT = "tcpudpsrcport";
    public static final String COLUMN_MATCH_TCPUDP_DSTPRT = "tcpudpdstport";
    public static final String COLUMN_NW_TOS = "nw_tos";
    public static final String COLUMN_SW = "switches";
    public static final String COLUMN_QUEUE = "queue";
    public static final String COLUMN_ENQPORT = "equeueport";
    public static final String COLUMN_PRIORITY = "priority";
    public static final String COLUMN_SERVICE = "service";
    public static String ColumnNames[] = { COLUMN_POLID,
        COLUMN_NAME,COLUMN_MATCH_PROTOCOL, COLUMN_MATCH_ETHTYPE,COLUMN_MATCH_INGRESSPRT,
        COLUMN_MATCH_IPDST,COLUMN_MATCH_IPSRC,COLUMN_MATCH_VLANID,
        COLUMN_MATCH_ETHSRC,COLUMN_MATCH_ETHDST,COLUMN_MATCH_TCPUDP_SRCPRT,
        COLUMN_MATCH_TCPUDP_DSTPRT,COLUMN_NW_TOS,COLUMN_SW,
        COLUMN_QUEUE,COLUMN_ENQPORT,COLUMN_PRIORITY,COLUMN_SERVICE,};

    // --- Type-of-service table schema (one row per QoSTypeOfService) ------
    public static final String TOS_TABLE_NAME = "controller_qos_tos";
    public static final String COLUMN_SID = "serviceid";
    public static final String COLUMN_SNAME = "servicename";
    public static final String COLUMN_TOSBITS = "tosbits";
    public static String TOSColumnNames[] = {COLUMN_SID,
        COLUMN_SNAME,
        COLUMN_TOSBITS};
@Override
public String getName() {
        // Identifier under which this module registers with Floodlight.
        final String moduleName = "qos";
        return moduleName;
}
@Override
public boolean isCallbackOrderingPrereq(OFType type, String name) {
        // QoS requires no listener to run before it.
        final boolean hasPrereq = false;
        return hasPrereq;
}
@Override
public boolean isCallbackOrderingPostreq(OFType type, String name) {
        // QoS requires no listener to run after it.
        final boolean hasPostreq = false;
        return hasPostreq;
}
@Override
public Collection<Class<? extends IFloodlightService>> getModuleServices() {
        // This module exports exactly one service interface.
        Collection<Class<? extends IFloodlightService>> provided =
                new ArrayList<Class<? extends IFloodlightService>>(1);
        provided.add(IQoSService.class);
        return provided;
}
@Override
public Map<Class<? extends IFloodlightService>, IFloodlightService> getServiceImpls() {
        // Map our exported service interface to this implementing instance.
        Map<Class<? extends IFloodlightService>, IFloodlightService> impls =
                new HashMap<Class<? extends IFloodlightService>, IFloodlightService>();
        impls.put(IQoSService.class, this);
        return impls;
}
@Override
public Collection<Class<? extends IFloodlightService>> getModuleDependencies() {
        // QoS depends on the core provider, storage, the REST API and the
        // static flow entry pusher.
        Collection<Class<? extends IFloodlightService>> deps =
                new ArrayList<Class<? extends IFloodlightService>>(4);
        deps.add(IFloodlightProviderService.class);
        deps.add(IStorageSourceService.class);
        deps.add(IRestApiService.class);
        deps.add(IStaticFlowEntryPusherService.class);
        return deps;
}
/**
 * Reads the policies from the storage and creates a sorted
 * ArrayList of QoSPolicy's from them.
 * @return the sorted ArrayList of Policy instances (rules from storage)
 *
 * Based on work from below, Credit to
 * @author Amer Tahir
 * @edited KC Wang
 * @author (re-authored) Ryan Wallner
 */
protected ArrayList<QoSPolicy> readPoliciesFromStorage() {
        ArrayList<QoSPolicy> l = new ArrayList<QoSPolicy>();
        try{
            Map<String, Object> row;
            // Query every row of the policy table (no predicate, no ordering).
            IResultSet policySet = storageSource
                    .executeQuery(TABLE_NAME, ColumnNames, null, null );
            for( Iterator<IResultSet> iter = policySet.iterator(); iter.hasNext();){
                row = iter.next().getRow();
                QoSPolicy p = new QoSPolicy();
                // Mandatory columns: id, switch scope, queue, enqueue port, service.
                if(!row.containsKey(COLUMN_POLID)
                        || !row.containsKey(COLUMN_SW)
                        || !row.containsKey(COLUMN_QUEUE)
                        || !row.containsKey(COLUMN_ENQPORT)
                        || !row.containsKey(COLUMN_SERVICE)){
                    // NOTE(review): message reads oddly — the entry is being
                    // skipped because it is MISSING required fields.
                    logger.error("Skipping entry with required fields {}", row);
                    continue;
                }
                try{
                    // Parse the mandatory columns first.
                    p.policyid = Integer.parseInt((String) row.get(COLUMN_POLID));
                    p.queue = Short.parseShort((String) row.get(COLUMN_QUEUE));
                    p.enqueueport = Short.parseShort((String) row.get(COLUMN_ENQPORT));
                    p.service = (String) row.get(COLUMN_SERVICE);
                    //TODO change for String[] of switches
                    p.sw = (String) row.get(COLUMN_SW);
                    // Then fill in whatever optional match columns are present.
                    for(String key: row.keySet()){
                        if(row.get(key) == null){
                            continue;
                        }
                        else if(key.equals(COLUMN_POLID)
                                || key.equals(COLUMN_SW)
                                || key.equals(COLUMN_QUEUE)
                                || key.equals(COLUMN_ENQPORT)
                                || key.equals(COLUMN_SERVICE)){
                            // already handled above
                            continue;
                        }
                        else if(key.equals(COLUMN_NAME)){
                            p.name = (String) row.get(COLUMN_NAME);
                        }
                        else if(key.equals(COLUMN_MATCH_ETHDST)){
                            p.ethdst = (String) row.get(COLUMN_MATCH_ETHDST);
                        }
                        else if(key.equals(COLUMN_MATCH_ETHSRC)){
                            p.ethsrc = (String) row.get(COLUMN_MATCH_ETHSRC);
                        }
                        else if(key.equals(COLUMN_MATCH_ETHTYPE)){
                            p.ethtype = Short.parseShort((String)
                                    row.get(COLUMN_MATCH_ETHTYPE));
                        }
                        else if(key.equals(COLUMN_MATCH_INGRESSPRT)){
                            p.ingressport = Short.parseShort((String)
                                    row.get(COLUMN_MATCH_INGRESSPRT));
                        }
                        else if(key.equals(COLUMN_MATCH_IPDST)){
                            // Expects a plain decimal integer string; see the
                            // matching writer in addPolicy().
                            p.ipdst = Integer.parseInt((String)
                                    row.get(COLUMN_MATCH_IPDST));
                        }
                        else if(key.equals(COLUMN_MATCH_IPSRC)){
                            p.ipsrc = Integer.parseInt((String)
                                    row.get(COLUMN_MATCH_IPSRC));
                        }
                        else if(key.equals(COLUMN_MATCH_PROTOCOL)){
                            p.protocol = Byte.parseByte((String)
                                    row.get(COLUMN_MATCH_PROTOCOL));
                        }
                        else if(key.equals(COLUMN_MATCH_TCPUDP_DSTPRT)){
                            p.tcpudpdstport = Short.parseShort((String)
                                    row.get(COLUMN_MATCH_TCPUDP_DSTPRT));
                        }
                        else if(key.equals(COLUMN_MATCH_TCPUDP_SRCPRT)){
                            p.tcpudpsrcport = Short.parseShort((String)
                                    row.get(COLUMN_MATCH_TCPUDP_SRCPRT));
                        }
                        else if(key.equals(COLUMN_MATCH_VLANID)){
                            p.vlanid = Short.parseShort((String)
                                    row.get(COLUMN_MATCH_VLANID));
                        }
                        else if(key.equals(COLUMN_NW_TOS)){
                            p.tos = Byte.parseByte((String)
                                    row.get(COLUMN_NW_TOS));
                        }
                        else if(key.equals(COLUMN_PRIORITY)){
                            p.priority = Short.parseShort((String)
                                    row.get(COLUMN_PRIORITY));
                        }
                    }
                }catch(ClassCastException e){
                    // NOTE(review): NumberFormatException from the parse calls
                    // above is NOT caught here and would escape this method.
                    logger.error("Error, Skipping rule, Bad Data "
                            + e.getMessage()+" on Rule {}", p.policyid);
                }
                //make sure its a queueing rule or service rule only.
                if(p.enqueueport != -1 && p.queue != -1 && p.service != null){
                    l.add(p);
                }
                else if(p.enqueueport > -1 && p.queue > -1 && p.service == null){
                    l.add(p);
                }
                else{
                    continue;//not a valid rule
                }
            }
        }catch(StorageException e){
            logger.error("Error with storage source: {}", e);
        }
        // Sort by priority (QoSPolicy implements Comparable).
        Collections.sort(l);
        return l;
}
/**
 * Reads the types of services from the storage and creates a
 * sorted ArrayList of QoSTypeOfService from them
 * @return the sorted ArrayList of Type of Service instances (rules from storage)
 *
 * Based on work from below, Credit to
 * @author Amer Tahir
 * @edited KC Wang
 * @author (re-authored) Ryan Wallner
 */
protected ArrayList<QoSTypeOfService> readServicesFromStorage() {
        ArrayList<QoSTypeOfService> l = new ArrayList<QoSTypeOfService>();
        try{
            Map<String, Object> row;
            // Query every row of the type-of-service table.
            IResultSet serviceSet = storageSource
                    .executeQuery(TOS_TABLE_NAME, TOSColumnNames, null, null );
            for( Iterator<IResultSet> iter = serviceSet.iterator(); iter.hasNext();){
                row = iter.next().getRow();
                QoSTypeOfService s = new QoSTypeOfService();
                // Mandatory columns: service id and the ToS bits.
                if(!row.containsKey(COLUMN_SID)
                        || !row.containsKey(COLUMN_TOSBITS)){
                    // NOTE(review): entry is skipped because it is MISSING
                    // required fields.
                    logger.error("Skipping entry with required fields {}", row);
                    continue;
                }
                try{
                    s.sid = Integer.parseInt((String) row.get(COLUMN_SID));
                    s.tos = Byte.parseByte((String) row.get(COLUMN_TOSBITS));
                    // Optional columns (currently only the display name).
                    for(String key: row.keySet()){
                        if(row.get(key) == null){
                            continue;
                        }
                        else if(key.equals(COLUMN_SID)
                                || key.equals(COLUMN_TOSBITS)){
                            continue;
                        }
                        else if(key.equals(COLUMN_SNAME)){
                            s.name = (String) row.get(COLUMN_SNAME);
                        }
                    }
                }catch(ClassCastException e){
                    logger.error("Error, Skipping rule, Bad Data "
                            + e.getMessage()+" on Rule {}", s.sid);
                }
                // Only keep services whose ToS bits were actually set.
                if(s.tos != -1){
                    l.add(s);
                }
            }
        }catch(StorageException e){
            logger.error("Error with storage source: {}", e);
        }
        // Sort (QoSTypeOfService implements Comparable).
        Collections.sort(l);
        return l;
}
@Override
public void init(FloodlightModuleContext context)
        throws FloodlightModuleException {
        // Resolve the services this module depends on.
        floodlightProvider = context.getServiceImpl(IFloodlightProviderService.class);
        flowPusher = context.getServiceImpl(IStaticFlowEntryPusherService.class);
        storageSource = context.getServiceImpl(IStorageSourceService.class);
        restApi = context.getServiceImpl(IRestApiService.class);
        // Fix: the logger was initialized twice; once is enough.
        logger = LoggerFactory.getLogger(QoS.class);
        policies = new ArrayList<QoSPolicy>();
        services = new ArrayList<QoSTypeOfService>();
        // Start disabled; tools.properties may re-enable QoS below.
        enabled = false;
        // Fix: the original leaked the FileInputStream — use try-with-resources.
        try (FileInputStream propStream =
                new FileInputStream("src/main/resources/tools.properties")) {
            props.load(propStream);
            // Fix: guard against a missing "tools" key (previously an NPE).
            String toolsProperty = props.getProperty("tools");
            if (toolsProperty != null) {
                tools = toolsProperty.split(",");
            }
            // Fix: null-safe comparison — the "qos" key may be absent.
            if ("enabled".equalsIgnoreCase(props.getProperty("qos"))) {
                logger.info("Enabling QoS on Start-up. Edit tools.properties to change this.");
                this.enableQoS(true);
            }
        } catch (IOException e) {
            // FileNotFoundException is an IOException; log instead of
            // printStackTrace so the failure shows up in the module log.
            logger.error("Could not load tools.properties", e);
        }
}
@Override
public void startUp(FloodlightModuleContext context) {
        // initialize REST interface
        restApi.addRestletRoutable(new QoSWebRoutable());
        // Listen for PACKET_INs so receive() is invoked.
        floodlightProvider.addOFMessageListener(OFType.PACKET_IN, this);
        //Storage for policies
        storageSource.createTable(TABLE_NAME, null);
        storageSource.setTablePrimaryKeyName(TABLE_NAME, COLUMN_POLID);
        //avoid thread issues for concurrency
        synchronized (policies) {
            this.policies = readPoliciesFromStorage();
        }
        //Storage for services
        storageSource.createTable(TOS_TABLE_NAME, null);
        storageSource.setTablePrimaryKeyName(TOS_TABLE_NAME, COLUMN_SID);
        //avoid thread issues for concurrency
        synchronized (services) {
            this.services = readServicesFromStorage();
        }
        // create default "Best Effort" service
        // most networks use this as default, adding here for defaulting
        try{
            QoSTypeOfService service = new QoSTypeOfService();
            service.name = "Best Effort";
            service.tos = (byte)0x00;
            service.sid = service.genID();
            this.addService(service);
        }catch(Exception e){
            logger.error("Error adding default Best Effort {}", e);
        }
        // NOTE(review): this unconditionally enables QoS, overriding the
        // tools.properties setting applied in init() — confirm this is intended.
        enabled = true ;
        // test (commented-out manual smoke test; comments translated to English)
        /* String qosJson = new String();
        qosJson = "{\n \"ip-src\": \"10.0.0.1\",\n \"protocol\": \"6\",\n \"name\": \"Qoos.00:00:00:00:00:00:00:02\",\n \"ip-dst\": \"10.0.0.2\",\n \"queue\": \"2\",\n \"eth-type\": \"0x0800\"\n}\n"; // sample JSON
        System.out.println(qosJson); // print it
        try{
            QoSPolicy p = QoSPoliciesResource.jsonToPolicy(qosJson); // convert
            addPolicy(p);
        }
        catch (Exception e){
            System.out.println("\n\n fail\n\n");
        }
        */
}
/**
 * PACKET_IN handler. Currently only reconstructs an OFMatch from the packet;
 * the policy comparison (checkIfQoSApplied) is commented out, so the match is
 * unused and every message falls through with Command.CONTINUE.
 */
@Override
public net.floodlightcontroller.core.IListener.Command receive(
        IOFSwitch sw, OFMessage msg, FloodlightContext cntx) {
        //do not process packet if not enabled
        if (!this.enabled) {
            return Command.CONTINUE;
        }
        //logger.debug("Message Recieved: Type - {}",msg.getType().toString());
        //Listen for Packets that match Policies
        switch (msg.getType()) {
        case PACKET_IN:
            //logger.debug("PACKET_IN recieved");
            byte[] packetData = OFMessage.getData(sw, msg, cntx);
            //Temporary match from packet to compare
            // NOTE(review): tmpMatch is currently dead — the comparison call
            // below is commented out.
            OFMatch tmpMatch = new OFMatch();
            tmpMatch.loadFromPacket(packetData, OFPort.OFPP_NONE.getValue());
            // checkIfQoSApplied(tmpMatch);
            break;
        default:
            return Command.CONTINUE;
        }
        return Command.CONTINUE;
}
/**
 * Enables or disables packet processing for this module.
 *
 * @param enable true to activate QoS handling, false to deactivate it
 */
@Override
public void enableQoS(boolean enable) {
        this.enabled = enable;
        logger.info("Setting QoS to {}", enable);
}
/**
 * Reports whether the QoS module is currently enabled.
 *
 * @return true if QoS processing is active
 */
@Override
public boolean isEnabled() {
        return enabled;
}
/**
 * Returns the in-memory list of Quality of Service policies.
 *
 * @return the (priority-sorted) policy list
 */
@Override
public List<QoSPolicy> getPolicies() {
        return policies;
}
/**
 * Returns the list of services available for Network Type of Service.
 *
 * @return the registered type-of-service entries
 */
@Override
public List<QoSTypeOfService> getServices() {
        return services;
}
/**
 * Adds a service class to use in policies. Used to make ToS/DiffServ bits
 * human readable: bit notation 000000 becomes "Best Effort".
 *
 * @param service the type-of-service entry to register and persist
 */
@Override
public synchronized void addService(QoSTypeOfService service) {
        logger.debug("Adding Service to List and Storage");
        // Create the unique id for this service.
        service.sid = service.genID();
        // Valid ToS/DSCP values occupy six bits: 0x00 .. 0x3F (0-63 inclusive).
        if (service.tos >= (byte) 0x00 && service.tos <= (byte) 0x3F) {
            try {
                // In-memory list is un-ordered and naturally short.
                this.services.add(service);
                // Persist to the storage source.
                Map<String, Object> serviceEntry = new HashMap<String, Object>();
                serviceEntry.put(COLUMN_SID, Integer.toString(service.sid));
                serviceEntry.put(COLUMN_SNAME, service.name);
                serviceEntry.put(COLUMN_TOSBITS, Byte.toString(service.tos));
                storageSource.insertRow(TOS_TABLE_NAME, serviceEntry);
            } catch (Exception e) {
                // Fix: a failed insert is an error, not a debug event.
                logger.error("Error adding service, error: {}", e);
            }
        } else {
            // Fix: the valid inclusive range is 0-63, not 0-64 as previously logged.
            logger.debug("Type of Service must be 0-63");
        }
}
/**
 * Removes a Network Type of Service
 * @category by sid
 */
@Override
public synchronized void deleteService(int sid) {
        // Remove the first (and only) service with a matching id from the
        // in-memory list.
        // NOTE(review): the corresponding row in TOS_TABLE_NAME is NOT removed,
        // so the service re-appears after a restart — TODO confirm intent.
        Iterator<QoSTypeOfService> sIter = this.services.iterator();
        while(sIter.hasNext()){
            QoSTypeOfService s = sIter.next();
            if(s.sid == sid){
                sIter.remove();
                break; //done only one can exist
            }
        }
}
/** Adds a policy to the in-memory priority-sorted list and persists it.
 * @author wallnerryan
 * @overloaded
 **/
@Override
public synchronized void addPolicy(QoSPolicy policy) {
        logger.debug("Adding Policy to List and Storage");
        // Create the unique id for this policy.
        policy.policyid = policy.genID();
        if (checkIfPolicyExists(policy, this.policies)) {
            logger.info("Policy exist, acquire disregarded.");
            return;
        }
        // Insertion sort by priority: find the first slot holding a policy of
        // equal or higher priority. (Larger networks may warrant a merge sort.)
        int p = 0;
        for (p = 0; p < this.policies.size(); p++) {
            if (this.policies.get(p).priority >= policy.priority) {
                break;
            }
        }
        // p is always <= size() when the loop exits, so this insert is safe
        // (the original's extra bounds/isEmpty checks were dead code).
        this.policies.add(p, policy);
        // Persist to the storage source.
        Map<String, Object> policyEntry = new HashMap<String, Object>();
        policyEntry.put(COLUMN_POLID, Long.toString(policy.policyid));
        policyEntry.put(COLUMN_NAME, policy.name);
        policyEntry.put(COLUMN_MATCH_PROTOCOL, Short.toString(policy.protocol));
        policyEntry.put(COLUMN_MATCH_ETHTYPE, Short.toString(policy.ethtype));
        policyEntry.put(COLUMN_MATCH_INGRESSPRT, Short.toString(policy.ingressport));
        policyEntry.put(COLUMN_MATCH_IPSRC, Integer.toString(policy.ipsrc));
        // Fix: ipdst was written with Integer.toBinaryString(), which
        // readPoliciesFromStorage() cannot parse back via Integer.parseInt().
        policyEntry.put(COLUMN_MATCH_IPDST, Integer.toString(policy.ipdst));
        policyEntry.put(COLUMN_MATCH_VLANID, Short.toString(policy.vlanid));
        policyEntry.put(COLUMN_MATCH_ETHSRC, policy.ethsrc);
        policyEntry.put(COLUMN_MATCH_ETHDST, policy.ethdst);
        policyEntry.put(COLUMN_MATCH_TCPUDP_SRCPRT, Short.toString(policy.tcpudpsrcport));
        policyEntry.put(COLUMN_MATCH_TCPUDP_DSTPRT, Short.toString(policy.tcpudpdstport));
        // Fix: COLUMN_NW_TOS was stored as the service *name*, but the reader
        // parses it with Byte.parseByte(); store the ToS byte instead.
        policyEntry.put(COLUMN_NW_TOS, Byte.toString(policy.tos));
        policyEntry.put(COLUMN_SW, policy.sw);
        policyEntry.put(COLUMN_QUEUE, Short.toString(policy.queue));
        policyEntry.put(COLUMN_ENQPORT, Short.toString(policy.enqueueport));
        policyEntry.put(COLUMN_PRIORITY, Short.toString(policy.priority));
        policyEntry.put(COLUMN_SERVICE, policy.service);
        storageSource.insertRow(TABLE_NAME, policyEntry);
}
/**
 * Dispatches a policy to its target scope: the whole network ("all"), the
 * controller only ("none", not yet utilized), or a single switch by DPID.
 * TODO: morph this to use a String[] of switches.
 */
public void addPolicyToSwitch(QoSPolicy policy) {
        final String target = policy.sw;
        if (target.equals("all")) {
            logger.debug("Adding Policy {} to Entire Network", policy.toString());
            addPolicyToNetwork(policy);
        } else if (target.equals("none")) {
            // Future revisions will "save" policies on the controller and push
            // them to switches later.
            logger.debug("Adding Policy {} to Controller", policy.toString());
        } else if (target.matches(dpidPattern)) {
            logger.debug("Adding policy {} to Switch {}", policy.toString(), target);
            addPolicy(policy, target);
        } else {
            logger.error("***Policy {} error at switch input {} ***",
                    policy.toString(), target);
        }
}
/**
 * Pushes the policy's flow-mod to every connected switch in the network.
 *
 * @param policy the policy to convert to a flow-mod and install
 */
@Override
public void addPolicyToNetwork(QoSPolicy policy) {
        OFFlowMod flow = policyToFlowMod(policy);
        logger.info("adding policy-flow {} to all switches", flow.toString());
        Map<Long, IOFSwitch> switches = floodlightProvider.getSwitches();
        if (!(switches.isEmpty())) {
            for (IOFSwitch sw : switches.values()) {
                // Fix: the original 'break' aborted installation on ALL
                // remaining switches at the first disconnected one; skip
                // just that switch instead.
                if (!(sw.isConnected())) {
                    continue; // cannot add to a disconnected switch
                }
                logger.info("Add flow Name: {} Flow: {} Switch " + sw.getStringId(),
                        policy.name, flow.toString());
                // Flow names are made unique per switch via the DPID hashCode.
                flowPusher.addFlow(policy.name + Integer.toString(sw.getStringId().hashCode()),
                        flow, sw.getStringId());
            }
        }
}
/**
 * Adds a policy to a single switch identified by its dpid string.
 * The installed flow name is made unique per switch by appending the
 * hashCode of the dpid string to the policy name.
 *
 * TODO: will change to accept a String[] of switches.
 *
 * @param policy the policy to translate and push
 * @param swid dpid string of the target switch
 */
@Override
public void addPolicy(QoSPolicy policy, String swid) {
    OFFlowMod flow = policyToFlowMod(policy);
    logger.info("Adding policy-flow {} to switch {}", flow.toString(), swid);
    // Unique flow name: policy name + hash of the dpid string.
    String flowName = policy.name + Integer.toString(swid.hashCode());
    flowPusher.addFlow(flowName, flow, swid);
}
/**
 * Removes a named policy's flows from every switch in the network.
 *
 * Disconnected switches are skipped. Flow names were generated per switch
 * as policyName + hashCode of the dpid string (see addPolicyToNetwork), so
 * the same name is reconstructed here for deletion.
 *
 * @param policyName name of the policy whose flows should be removed
 */
@Override
public void deletePolicyFromNetwork(String policyName) {
    // all switches
    Map<Long, IOFSwitch> switches = floodlightProvider.getSwitches();
    // simple check
    if (!(switches.isEmpty())) {
        for (IOFSwitch sw : switches.values()) {
            if (!(sw.isConnected())) {
                // BUG FIX: was "break", which stopped deleting from ALL
                // remaining switches once one disconnected switch was seen.
                continue;
            }
            logger.debug("{} has {}", sw.getStringId(), flowPusher.getFlows());
            flowPusher.deleteFlow(policyName + Integer
                    .toString(sw.getStringId().hashCode()));
        }
    }
}
/**
 * Deletes a policy. Called by @DELETE from the REST API.
 *
 * Routes the deletion according to the policy's switch specifier ("none",
 * "all", or a dpid), then removes the cached copy from this.policies by
 * matching on policyid.
 *
 * @author wallnerryan
 * @param policy the policy to delete
 */
@Override
public synchronized void deletePolicy(QoSPolicy policy) {
    logger.info("Deleting policy {} attached to switches: {}", policy.name, policy.sw);
    String target = policy.sw;
    // A policy attached to no switch only needs to be purged from storage.
    if (target.equalsIgnoreCase("none")) {
        logger.info("policy match no switches, removeing from storage");
    } else if (target.equalsIgnoreCase("all")) {
        logger.info("Delete flows from network!");
        deletePolicyFromNetwork(policy.name);
    } else if (target.matches(dpidPattern)) {
        deletePolicy(target, policy.name);
    } else {
        logger.error("Error!, Unrecognized switches! Switch is : {}", target);
    }
    // Remove the cached copy; policyid is unique, so stop at the first hit.
    for (Iterator<QoSPolicy> it = this.policies.iterator(); it.hasNext();) {
        if (it.next().policyid == policy.policyid) {
            it.remove();
            break;
        }
    }
}
/**
 * Deletes a policy's flow from a single switch identified by its dpid.
 *
 * TODO: will change to accept a String[] of switches.
 *
 * @param switches dpid string of the switch (hex form, parsed via HexString)
 * @param policyName name of the policy whose flow should be removed
 */
@Override
public void deletePolicy(String switches, String policyName) {
    // TODO Morph this to use a String[] of switches
    IOFSwitch sw = floodlightProvider.getSwitches()
            .get(HexString.toLong(switches));
    if (sw == null) {
        // BUG FIX: the original only asserted when sw was non-null and then
        // dereferenced sw unconditionally, throwing a NullPointerException
        // whenever the dpid was not known to the controller.
        logger.error("Cannot delete policy {}, unknown switch {}",
                policyName, switches);
        return;
    }
    assert (sw.isActive());
    // delete flow based on hasCode (name built as in addPolicy)
    flowPusher.deleteFlow(policyName + sw.getStringId().hashCode());
}
/**
 * Translates a QoSPolicy into an OpenFlow flow-mod.
 *
 * Every policy field that is set (not -1 / not null) becomes an exact-match
 * field in the OFMatch; wildcards are cleared up front. The action depends on
 * which of two mutually exclusive outputs the policy carries:
 * - queue > -1 and service == null: enqueue matching packets on policy.queue
 *   at policy.enqueueport (OFActionEnqueue), priority = policy.priority;
 * - queue == -1 and service != null: rewrite the IP ToS bits to the value of
 *   the named QoSTypeOfService (OFActionNetworkTypeOfService), priority =
 *   Short.MAX_VALUE.
 * Any other combination is logged as "Policy Misconfiguration" and a bare
 * FLOW_MOD with no match/actions is returned.
 *
 * @param policy the policy to translate
 * @return the corresponding OFFlowMod
 */
public OFFlowMod policyToFlowMod(QoSPolicy policy){
    //initialize a match structure that matches everything
    OFMatch match = new OFMatch();
    //Based on the policy match appropriately.
    //no wildcards: every field set below is an exact match
    match.setWildcards(0);
    if(policy.ethtype != -1){
        match.setDataLayerType((policy.ethtype));
    }
    if(policy.protocol != -1){
        match.setNetworkProtocol(policy.protocol);
    }
    if(policy.ingressport != -1){
        match.setInputPort(policy.ingressport);
    }
    if(policy.ipdst != -1){
        match.setNetworkDestination(policy.ipdst);
    }
    if(policy.ipsrc != -1){
        match.setNetworkSource(policy.ipsrc);
    }
    if(policy.vlanid != -1){
        match.setDataLayerVirtualLan(policy.vlanid);
    }
    if(policy.tos != -1){
        match.setNetworkTypeOfService(policy.tos);
    }
    if(policy.ethsrc != null){
        match.setDataLayerSource(policy.ethsrc);
    }
    if(policy.ethdst != null){
        match.setDataLayerDestination(policy.ethdst);
    }
    if(policy.tcpudpsrcport != -1){
        match.setTransportSource(policy.tcpudpsrcport);
    }
    if(policy.tcpudpdstport != -1){
        match.setTransportDestination(policy.tcpudpdstport);
    }
    //Create a flow mod using the previous match structure
    OFFlowMod fm = new OFFlowMod();
    fm.setType(OFType.FLOW_MOD);
    //depending on the policy nw_tos or queue the flow mod
    // will change the type of service bits or enqueue the packets
    if(policy.queue > -1 && policy.service == null){
        logger.info("This policy is a queuing policy");
        List<OFAction> actions = new ArrayList<OFAction>();
        //add the queuing action
        OFActionEnqueue enqueue = new OFActionEnqueue();
        enqueue.setLength((short) 0xffff);
        enqueue.setType(OFActionType.OPAQUE_ENQUEUE); // I think this happens anyway in the constructor
        enqueue.setPort(policy.enqueueport);
        enqueue.setQueueId(policy.queue);
        actions.add((OFAction) enqueue);
        logger.info("Match is : {}", match.toString());
        //add the matches and actions and return
        fm.setMatch(match)
                .setActions(actions)
                .setIdleTimeout((short) 0) // 0 = never idle out
                .setHardTimeout((short) 5) // NOTE(review): 5s hard timeout, but the original comment claimed "infinite" -- confirm intended value
                .setBufferId(OFPacketOut.BUFFER_ID_NONE)
                .setFlags((short) 0)
                .setOutPort(OFPort.OFPP_NONE.getValue())
                .setPriority(policy.priority)
                .setLengthU((short)OFFlowMod.MINIMUM_LENGTH + OFActionEnqueue.MINIMUM_LENGTH);
    }
    else if(policy.queue == -1 && policy.service != null){
        logger.info("This policy is a type of service policy");
        List<OFAction> actions = new ArrayList<OFAction>();
        //add the ToS-rewrite action
        OFActionNetworkTypeOfService tosAction = new OFActionNetworkTypeOfService();
        tosAction.setType(OFActionType.SET_NW_TOS);
        tosAction.setLength((short) 0xffff);
        //Find the appropriate type of service bits in policy
        Byte pTos = null;
        List<QoSTypeOfService> serviceList = this.getServices();
        for(QoSTypeOfService s : serviceList){
            if(s.name.equals(policy.service)){
                //policy's service ToS bits
                pTos = s.tos;
            }
        }
        // NOTE(review): if no registered service matches policy.service,
        // pTos stays null and the unboxing below throws a
        // NullPointerException -- confirm services are validated upstream.
        tosAction.setNetworkTypeOfService(pTos);
        actions.add((OFAction)tosAction);
        logger.info("Match is : {}", match.toString());
        //add the matches and actions and return
        fm.setMatch(match)
                .setActions(actions)
                .setIdleTimeout((short) 3) // NOTE(review): comment said "infinite" but this is a 3s idle timeout -- confirm
                .setHardTimeout((short) 5) // NOTE(review): comment said "infinite" but this is a 5s hard timeout -- confirm
                .setBufferId(OFPacketOut.BUFFER_ID_NONE)
                .setFlags((short) 0)
                .setOutPort(OFPort.OFPP_NONE.getValue())
                .setPriority(Short.MAX_VALUE)
                .setLengthU((short)OFFlowMod.MINIMUM_LENGTH + OFActionNetworkTypeOfService.MINIMUM_LENGTH);
    }
    else{
        logger.error("Policy Misconfiguration");
    }
    return fm;
}
/**
 * Logs which, if any, QoS policy matches a PACKET_IN's match structure.
 * Each known policy is converted to its flow-mod and its match is compared
 * against the supplied match; hits are logged at info level.
 *
 * @param tmpMatch match structure extracted from the PACKET_IN
 */
private void checkIfQoSApplied(OFMatch tmpMatch) {
    List<QoSPolicy> pols = this.getPolicies();
    // Policies don't apply to wildcards (yet); nothing to do when empty.
    if (pols.isEmpty()) {
        return;
    }
    for (QoSPolicy policy : pols) {
        OFMatch policyMatch = policyToFlowMod(policy).getMatch();
        if (tmpMatch.equals(policyMatch)) {
            logger.info("PACKET_IN matched, Applied QoS Policy {}", policy.toString());
        }
        // Non-matches are intentionally not logged: too noisy in the console.
    }
}
/**
 * Reports whether an equivalent policy is already present: either one that
 * isSameAs the candidate or one that shares its name.
 *
 * @param policy candidate policy
 * @param policies existing policies to scan
 * @return true if a duplicate (by content or name) exists
 */
private boolean checkIfPolicyExists(QoSPolicy policy,
        List<QoSPolicy> policies) {
    for (QoSPolicy existing : policies) {
        if (policy.isSameAs(existing) || policy.name.equals(existing.name)) {
            return true;
        }
    }
    return false;
}
}
| jimmyoic/floodlight-qosmanager | src/main/java/net/floodlightcontroller/qos/QoS.java | Java | apache-2.0 | 32,011 |
#!/usr/bin/env python
#
# Copyright 2014 cloudysunny14.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from ryu.base import app_manager
from ryu.controller import dpset
from ryu.controller.handler import set_ev_cls
from ryu.exception import OFPUnknownVersion
from ryu.lib import ofctl_v1_0
from ryu.lib import ofctl_v1_2
from ryu.lib import ofctl_v1_3
from ryu.lib import hub
from ryu.lib.ovs import bridge
from ryu.ofproto import ofproto_v1_0
from ryu.ofproto import ofproto_v1_2
from ryu.ofproto import ofproto_v1_3
from lib import qoslib
LOG = logging.getLogger(__name__)
LOG_TEST_FINISH = 'TEST_FINISHED: Tests=[%s] (OK=%s NG=%s SKIP=%s)'
OVSDB_ADDR = 'tcp:127.0.0.1:6632'
class OFMangleTester(app_manager.RyuApp):
    """Ryu app that runs every test_* method against a connected switch.

    Test methods are discovered by name at construction time and executed
    (in reverse-sorted name order) once a datapath joins; results are
    collected per test and printed as a summary.
    """
    OFP_VERSIONS = [ofproto_v1_3.OFP_VERSION]
    _CONTEXTS = {'dpset': dpset.DPSet,
                 'qoslib': qoslib.QoSLib}
    # Maps an OpenFlow protocol version to its ofctl helper module.
    _OFCTL = {ofproto_v1_0.OFP_VERSION: ofctl_v1_0,
              ofproto_v1_2.OFP_VERSION: ofctl_v1_2,
              ofproto_v1_3.OFP_VERSION: ofctl_v1_3}

    def __init__(self, *args, **kwargs):
        super(OFMangleTester, self).__init__(*args, **kwargs)
        self.dpset = kwargs['dpset']
        self.qoslib = kwargs['qoslib']
        self.qoslib.use_switch_flow = False
        self.waiters = {}
        self.pending = []   # names of test_* methods still to run
        self.results = {}   # test name -> result
        # Discover all test methods by naming convention.
        for t in dir(self):
            if t.startswith("test_"):
                self.pending.append(t)
        self.pending.sort(reverse=True)

    @set_ev_cls(dpset.EventDP, dpset.DPSET_EV_DISPATCHER)
    def datapath_handler(self, ev):
        """Records the joining datapath and kicks off the test run."""
        # Target switch datapath
        self.dp = ev.dp
        version = self.dp.ofproto.OFP_VERSION
        if version not in self._OFCTL:
            raise OFPUnknownVersion(version=version)
        self.ofctl = self._OFCTL[version]
        hub.spawn(self._do_test)

    def test_queue_setup(self):
        """Registers queue trees against the switch via OVSDB."""
        self.ovsctl = bridge.OVSBridge(self.dp.id, OVSDB_ADDR)
        queue = qoslib.QoSLib.queue_tree(self.ovsctl, self.dp)
        queue.queue('high-priority', '500', '500')
        self.qoslib.register_queue(queue)
        queue = qoslib.QoSLib.queue_tree(self.ovsctl, self.dp)
        queue.queue('high-priority', '700', '700')
        self.qoslib.register_queue(queue)
        queue = qoslib.QoSLib.queue_tree(self.ovsctl, self.dp)
        queue.queue('best-effort', '10000', '10000')
        # NOTE(review): this last 'best-effort' queue is built but never
        # passed to register_queue -- confirm whether that is intentional.

    def _print_results(self):
        """Prints a per-test result table and an OK/NG/SKIP summary line."""
        LOG.info("TEST_RESULTS:")
        ok = 0
        ng = 0
        skip = 0  # NOTE(review): never incremented; SKIP is always 0.
        for t in sorted(self.results.keys()):
            if self.results[t] is True:
                ok += 1
            else:
                ng += 1
            LOG.info(" %s: %s", t, self.results[t])
        LOG.info(LOG_TEST_FINISH, len(self.pending), ok, ng, skip)

    def _do_test(self):
        """Runs every pending test method and prints the summary."""
        for test in self.pending:
            self.results[test] = getattr(self, test)()
        self._print_results()
| cloudysunny14/faucet | tests/test_ovs_mangle.py | Python | apache-2.0 | 3,411 |
#!/usr/bin/env python
# Copyright 2012 Google Inc. All Rights Reserved.
"""Utils exporting data from AFF4 to the rest of the world."""
import os
import Queue
import stat
import time
import logging
from grr.lib import aff4
from grr.lib import rdfvalue
from grr.lib import serialize
from grr.lib import threadpool
from grr.lib import type_info
from grr.lib import utils
from grr.lib.aff4_objects import aff4_grr
BUFFER_SIZE = 16 * 1024 * 1024
def GetAllClients(token=None):
  """Return a list of all client urns.

  Children of the AFF4 root that do not parse as ClientURNs are skipped.
  """
  client_urns = []
  root = aff4.FACTORY.Open(aff4.ROOT_URN, token=token)
  for child_urn in root.ListChildren():
    try:
      client_urns.append(rdfvalue.ClientURN(child_urn))
    except type_info.TypeValueError:
      pass  # Not a client URN; ignore.
  return client_urns
class IterateAllClientUrns(object):
  """Runs a function over all client URNs using a thread pool.

  GetInput() supplies the work items; each is dispatched to self.func on the
  pool, and any values the tasks push onto self.out_queue are yielded back
  from Run().
  """

  THREAD_POOL_NAME = "ClientUrnIter"
  # Seconds to wait on the output queue before assuming no more results.
  QUEUE_TIMEOUT = 30

  def __init__(self, func=None, max_threads=10, token=None):
    """Iterate over all clients in a threadpool.

    Args:
      func: A function to call with each client urn.
      max_threads: Number of threads to use.
      token: Auth token.
    """
    # NOTE(review): the original docstring claimed RuntimeError is raised
    # when func is unspecified, but no such check exists in this class.
    self.thread_pool = threadpool.ThreadPool.Factory(self.THREAD_POOL_NAME,
                                                     max_threads)
    self.thread_pool.Start()
    self.token = token
    self.func = func
    self.broken_subjects = []  # Entries that are broken or fail to run.
    self.out_queue = Queue.Queue()

  def GetInput(self):
    """Yield client urns."""
    clients = GetAllClients(token=self.token)
    logging.debug("Got %d clients", len(clients))
    return clients

  def Run(self):
    """Run the iteration, yielding whatever the tasks enqueue."""
    count = 0
    for count, input_data in enumerate(self.GetInput()):
      if count % 2000 == 0:
        logging.debug("%d processed.", count)
      args = (input_data, self.out_queue, self.token)
      self.thread_pool.AddTask(target=self.IterFunction, args=args,
                               name=self.THREAD_POOL_NAME)
    # count ends at last-index (n-1); draining from n-1 down to -1 consumes
    # up to n results.
    while count >= 0:
      try:
        # We only use the timeout to wait if we got to the end of the Queue but
        # didn't process everything yet.
        out = self.out_queue.get(timeout=self.QUEUE_TIMEOUT, block=True)
        if out:
          yield out
        count -= 1
      except Queue.Empty:
        break
    # Join and stop to clean up the threadpool.
    self.thread_pool.Stop()

  def IterFunction(self, *args):
    """Function to run on each input. This can be overridden."""
    self.func(*args)
class IterateAllClients(IterateAllClientUrns):
  """Class to iterate over all GRR Client objects.

  Unlike the parent (which yields raw URNs), GetInput opens clients in
  chunks via MultiOpen and yields only VFSGRRClient objects that have been
  seen within max_age seconds.
  """

  def __init__(self, max_age, client_chunksize=25, **kwargs):
    """Iterate over all clients in a threadpool.

    Args:
      max_age: Maximum age in seconds of clients to check.
      client_chunksize: Number of clients to open per MultiOpen call.
      **kwargs: Arguments passed to init.
    """
    super(IterateAllClients, self).__init__(**kwargs)
    self.client_chunksize = client_chunksize
    self.max_age = max_age

  def GetInput(self):
    """Yield client objects seen within the last max_age seconds."""
    client_list = GetAllClients(token=self.token)
    logging.debug("Got %d clients", len(client_list))
    for client_group in utils.Grouper(client_list, self.client_chunksize):
      for fd in aff4.FACTORY.MultiOpen(client_group, mode="r",
                                       aff4_type="VFSGRRClient",
                                       token=self.token):
        if isinstance(fd, aff4_grr.VFSGRRClient):
          # Skip if older than max_age. PING is compared in microseconds.
          oldest_time = (time.time() - self.max_age) * 1e6
          if fd.Get(aff4.VFSGRRClient.SchemaCls.PING) >= oldest_time:
            yield fd
def DownloadFile(file_obj, target_path, buffer_size=BUFFER_SIZE):
  """Download an aff4 file to the local filesystem overwriting it if it exists.

  Args:
    file_obj: An aff4 object that supports the file interface (Read, Seek)
    target_path: Full path of file to write to.
    buffer_size: Read in chunks this size.
  """
  logging.info("Downloading: %s to: %s", file_obj.urn, target_path)
  file_obj.Seek(0)
  count = 0
  # BUG FIX: the file was previously opened in text mode ("w") and never
  # closed on error. Binary mode avoids newline translation corrupting the
  # downloaded bytes on some platforms, and the context manager guarantees
  # the handle is closed even if Read/write raises.
  with open(target_path, "wb") as target_file:
    data_buffer = file_obj.Read(buffer_size)
    while data_buffer:
      target_file.write(data_buffer)
      data_buffer = file_obj.Read(buffer_size)
      count += 1
      if not count % 3:
        # Periodic progress logging every 3 chunks.
        logging.debug("Downloading: %s: %s done", file_obj.urn,
                      utils.FormatNumberAsString(count*buffer_size))
def RecursiveDownload(dir_obj, target_dir, max_depth=10, depth=1,
                      overwrite=False, max_threads=10):
  """Recursively downloads a file entry to the target path.

  Args:
    dir_obj: An aff4 object that contains children.
    target_dir: Full path of the directory to write to.
    max_depth: Depth to download to. 1 means just the directory itself.
    depth: Current depth of recursion.
    overwrite: Should we overwrite files that exist.
    max_threads: Use this many threads to do the downloads.
  """
  if (not isinstance(dir_obj, aff4.AFF4Volume) or
      isinstance(dir_obj, aff4.HashImage)):
    return
  # NOTE(review): a new pool is created at every recursion level but only
  # the depth<=1 call stops its own pool; deeper pools are never stopped --
  # confirm whether Factory() returns a shared singleton per name.
  thread_pool = threadpool.ThreadPool.Factory("Downloader", max_threads)
  thread_pool.Start()
  for sub_file_entry in dir_obj.OpenChildren():
    # NOTE(review): sub_target_dir is always identical to target_dir since
    # path_elements only ever contains target_dir -- it looks like the
    # child's name was meant to be appended here; confirm intended layout.
    path_elements = [target_dir]
    sub_target_dir = u"/".join(path_elements)
    try:
      # Any file-like object with data in AFF4 should inherit AFF4Stream.
      if isinstance(sub_file_entry, aff4.AFF4Stream):
        args = (sub_file_entry.urn, sub_target_dir, sub_file_entry.token,
                overwrite)
        thread_pool.AddTask(target=CopyAFF4ToLocal, args=args,
                            name="Downloader")
      elif "Container" in sub_file_entry.behaviours:
        if depth >= max_depth:  # Don't go any deeper.
          continue
        try:
          os.makedirs(sub_target_dir)
        except OSError:
          pass  # Directory already exists.
        RecursiveDownload(sub_file_entry, sub_target_dir, overwrite=overwrite,
                          depth=depth+1)
    except IOError:
      logging.exception("Unable to download %s", sub_file_entry.urn)
    finally:
      sub_file_entry.Close()
  # Join and stop the threadpool (outermost call only).
  if depth <= 1:
    thread_pool.Stop()
def DownloadCollection(coll_path, target_path, token=None, overwrite=False,
                       dump_client_info=False, max_threads=10):
  """Iterate through a Collection object downloading all files.

  Args:
    coll_path: Path to an AFF4 collection.
    target_path: Base directory to write to.
    token: Token for access.
    overwrite: If True, overwrite existing files.
    dump_client_info: If True, this will detect client paths, and dump a yaml
      version of the client object to the root path. This is useful for seeing
      the hostname/users of the machine the client id refers to.
    max_threads: Use this many threads to do the downloads.
  """
  completed_clients = set()  # Client ids already dumped to yaml.
  try:
    coll = aff4.FACTORY.Open(coll_path, aff4_type="RDFValueCollection",
                             token=token)
  except IOError:
    logging.error("%s is not a valid collection. Typo? "
                  "Are you sure something was written to it?", coll_path)
    return
  thread_pool = threadpool.ThreadPool.Factory("Downloader", max_threads)
  thread_pool.Start()
  logging.info("Expecting to download %s files", coll.size)
  # Collections can include anything they want, but we only handle RDFURN and
  # StatEntry entries in this function.
  for grr_message in coll:
    source = None
    # If a raw message, work out the type from its payload.
    if isinstance(grr_message, rdfvalue.GrrMessage):
      source = grr_message.source
      grr_message = grr_message.payload
    # Collections can contain AFF4ObjectSummary objects which encapsulate
    # RDFURNs and StatEntrys.
    if isinstance(grr_message, rdfvalue.AFF4ObjectSummary):
      urn = grr_message.urn
    elif isinstance(grr_message, rdfvalue.RDFURN):
      urn = grr_message
    elif isinstance(grr_message, rdfvalue.StatEntry):
      urn = rdfvalue.RDFURN(grr_message.aff4path)
    elif isinstance(grr_message, rdfvalue.FileFinderResult):
      urn = rdfvalue.RDFURN(grr_message.stat_entry.aff4path)
    elif isinstance(grr_message, rdfvalue.RDFBytes):
      # Raw bytes: dump them to a file named after the source client id.
      try:
        os.makedirs(target_path)
      except OSError:
        pass  # Directory already exists.
      try:
        # We just dump out bytes and carry on.
        client_id = source.Split()[0]
        with open(os.path.join(target_path, client_id), "wb") as fd:
          fd.write(str(grr_message))
      except AttributeError:
        # source was None (message was not a GrrMessage); nothing to name
        # the file after.
        pass
      continue
    else:
      continue  # Unhandled payload type.
    # Handle dumping client info, but only once per client.
    client_id = urn.Split()[0]
    re_match = aff4.AFF4Object.VFSGRRClient.CLIENT_ID_RE.match(client_id)
    if dump_client_info and re_match and client_id not in completed_clients:
      args = (rdfvalue.RDFURN(client_id), target_path, token, overwrite)
      thread_pool.AddTask(target=DumpClientYaml, args=args,
                          name="ClientYamlDownloader")
      completed_clients.add(client_id)
    # Now queue downloading the actual files.
    args = (urn, target_path, token, overwrite)
    thread_pool.AddTask(target=CopyAFF4ToLocal, args=args,
                        name="Downloader")
  # Join and stop the threadpool.
  thread_pool.Stop()
def CopyAFF4ToLocal(aff4_urn, target_dir, token=None, overwrite=False):
  """Copy an AFF4 object that supports a read interface to local filesystem.

  Args:
    aff4_urn: URN of thing to copy.
    target_dir: Directory to copy the file to.
    token: Auth token.
    overwrite: If True overwrite the file if it exists.

  By default file will only be overwritten if file size differs.

  Raises:
    IOError: re-raised after logging if the AFF4 object cannot be read.
  """
  try:
    fd = aff4.FACTORY.Open(aff4_urn, "AFF4Stream", token=token)
    # Mirror the AFF4 path (minus the leading "/") under target_dir.
    filepath = os.path.join(target_dir, fd.urn.Path()[1:])
    if not os.path.isfile(filepath):
      try:
        # Ensure directory exists.
        os.makedirs(os.path.dirname(filepath))
      except OSError:
        pass  # Already exists (possibly created by another thread).
      DownloadFile(fd, filepath)
    elif (os.stat(filepath)[stat.ST_SIZE] != fd.Get(fd.Schema.SIZE) or
          overwrite):
      # We should overwrite because user said, or file sizes differ.
      DownloadFile(fd, filepath)
    else:
      logging.info("File %s exists, skipping", filepath)
  except IOError as e:
    logging.error("Failed to read %s due to %s", aff4_urn, e)
    raise
def DumpClientYaml(client_urn, target_dir, token=None, overwrite=False):
  """Dump a yaml file containing client info.

  Writes <target_dir>/<client_id>/client_info.yaml, skipping the write when
  the file already exists unless overwrite is set.
  """
  client_fd = aff4.FACTORY.Open(client_urn, "VFSGRRClient", token=token)
  client_dir = os.path.join(target_dir, client_fd.urn.Split()[0])
  try:
    os.makedirs(client_dir)
  except OSError:
    pass  # Due to threading this can actually be created by another thread.
  yaml_path = os.path.join(client_dir, "client_info.yaml")
  if overwrite or not os.path.isfile(yaml_path):
    with open(yaml_path, "w") as out_file:
      out_file.write(serialize.YamlDumper(client_fd))
| simsong/grr-insider | lib/export_utils.py | Python | apache-2.0 | 11,103 |
// Copyright 2017 Pilosa Corp.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package server_test
import (
"reflect"
"testing"
"time"
"github.com/pilosa/pilosa/server"
"github.com/pilosa/pilosa/toml"
)
// Test_NewConfig verifies that a freshly constructed server config does not
// come up with the cluster disabled.
func Test_NewConfig(t *testing.T) {
	cfg := server.NewConfig()
	if cfg.Cluster.Disabled {
		t.Fatalf("unexpected Cluster.Disabled: %v", cfg.Cluster.Disabled)
	}
}
// TestDuration exercises toml.Duration round-tripping: String formatting,
// MarshalText/MarshalTOML output, rejection of unit-less input, and
// UnmarshalText of a valid duration string.
func TestDuration(t *testing.T) {
	d := toml.Duration(time.Second * 182)
	if d.String() != "3m2s" {
		t.Fatalf("Unexpected time Duration %s", d)
	}
	// "3m2s" as raw bytes -- the expected marshalled form.
	b := []byte{51, 109, 50, 115}
	v, _ := d.MarshalText()
	if !reflect.DeepEqual(b, v) {
		t.Fatalf("Unexpected marshalled value %v", v)
	}
	v, _ = d.MarshalTOML()
	if !reflect.DeepEqual(b, v) {
		t.Fatalf("Unexpected marshalled value %v", v)
	}
	// A bare number has no unit and must be rejected.
	err := d.UnmarshalText([]byte("5"))
	if err.Error() != "time: missing unit in duration 5" {
		t.Fatalf("expected time: missing unit in duration: %s", err)
	}
	err = d.UnmarshalText([]byte("3m2s"))
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	// Round-trip: unmarshalled value must marshal back to the same bytes.
	v, _ = d.MarshalText()
	if !reflect.DeepEqual(b, v) {
		t.Fatalf("Unexpected marshalled value %v", v)
	}
}
| travisturner/pilosa | server/config_test.go | GO | apache-2.0 | 1,643 |
/*
* Copyright 2015 Fabien Renaud.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.lympid.core.behaviorstatemachines.impl;
import com.lympid.core.behaviorstatemachines.State;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.function.Consumer;
/**
*
* @author Fabien Renaud
*/
public class OrthogonalStateConfiguration implements MutableStateConfiguration<OrthogonalStateConfiguration> {

  /** Parent node in the configuration tree; null for the root. */
  private OrthogonalStateConfiguration parent;
  /** State held by this node. */
  private State state;
  /** Child configurations (orthogonal regions / substates). */
  private final List<OrthogonalStateConfiguration> children;
  /** Live iterator during forEach, so removeChild can use Iterator.remove. */
  private Iterator<OrthogonalStateConfiguration> iterator;

  public OrthogonalStateConfiguration() {
    this.children = new LinkedList<>();
  }

  /**
   * Deep-copy constructor.
   *
   * @param config the configuration (sub)tree to copy
   */
  private OrthogonalStateConfiguration(final OrthogonalStateConfiguration config) {
    this.parent = config.parent;
    this.state = config.state;
    this.children = new ArrayList<>(config.children.size());
    for (OrthogonalStateConfiguration stateConfig : config.children) {
      OrthogonalStateConfiguration childCopy = new OrthogonalStateConfiguration(stateConfig);
      // BUG FIX: copied children previously kept the ORIGINAL node as their
      // parent (inherited via this.parent = config.parent in the recursive
      // call), so navigating parent() from a copied child escaped into the
      // source tree. Re-anchor each copied child to its copied parent.
      childCopy.parent = this;
      this.children.add(childCopy);
    }
  }

  @Override
  public OrthogonalStateConfiguration parent() {
    return parent;
  }

  @Override
  public State state() {
    return state;
  }

  @Override
  public List<OrthogonalStateConfiguration> children() {
    return children;
  }

  /** Sets the state of an empty, state-less node. */
  @Override
  public void setState(final State state) {
    assert this.state == null;
    assert children.isEmpty();
    this.state = state;
  }

  /** Creates a child node for the given state and links it to this node. */
  @Override
  public OrthogonalStateConfiguration addChild(final State state) {
    OrthogonalStateConfiguration config = new OrthogonalStateConfiguration();
    config.parent = this;
    config.state = state;
    children.add(config);
    return config;
  }

  /**
   * Unlinks a child. When called from inside forEach, removal goes through
   * the active iterator to avoid ConcurrentModificationException.
   */
  @Override
  public void removeChild(final OrthogonalStateConfiguration state) {
    if (iterator == null) {
      children.remove(state);
    } else {
      iterator.remove();
    }
    state.parent = null;
  }

  /** Clears the state of a leaf node (children must already be empty). */
  @Override
  public void clear() {
    assert children.isEmpty();
    this.state = null;
  }

  @Override
  public int size() {
    return children.size();
  }

  @Override
  public boolean isEmpty() {
    return children.isEmpty();
  }

  /**
   * Applies the consumer to every child; the active iterator is exposed so
   * that removeChild may be called safely during iteration.
   */
  @Override
  public void forEach(final Consumer<OrthogonalStateConfiguration> consumer) {
    if (!children.isEmpty()) {
      iterator = children.iterator();
      while (iterator.hasNext()) {
        consumer.accept(iterator.next());
      }
      iterator = null;
    }
  }

  /** Returns a deep copy of this configuration subtree. */
  @Override
  public MutableStateConfiguration copy() {
    return new OrthogonalStateConfiguration(this);
  }
}
| lympid/lympid-core | src/main/java/com/lympid/core/behaviorstatemachines/impl/OrthogonalStateConfiguration.java | Java | apache-2.0 | 3,129 |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.opsworks.model.transform;
import java.math.*;
import javax.annotation.Generated;
import com.amazonaws.services.opsworks.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* DescribeElasticIpsResult JSON Unmarshaller
*/
/**
 * DescribeElasticIpsResult JSON Unmarshaller.
 *
 * Generated code: walks the JSON token stream and populates the ElasticIps
 * list when the "ElasticIps" field is found at the expected depth.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DescribeElasticIpsResultJsonUnmarshaller implements Unmarshaller<DescribeElasticIpsResult, JsonUnmarshallerContext> {

    public DescribeElasticIpsResult unmarshall(JsonUnmarshallerContext context) throws Exception {
        DescribeElasticIpsResult describeElasticIpsResult = new DescribeElasticIpsResult();
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Fields of this result live one level below the enclosing object.
        int targetDepth = originalDepth + 1;
        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            // A JSON null maps to an empty result object.
            return describeElasticIpsResult;
        }
        while (true) {
            if (token == null)
                break;
            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("ElasticIps", targetDepth)) {
                    context.nextToken();
                    describeElasticIpsResult.setElasticIps(new ListUnmarshaller<ElasticIp>(ElasticIpJsonUnmarshaller.getInstance()).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we climb back out of the object we started in.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }
        return describeElasticIpsResult;
    }

    private static DescribeElasticIpsResultJsonUnmarshaller instance;

    // NOTE(review): lazy init without synchronization -- benign here since
    // the unmarshaller is stateless and re-creation is idempotent, and this
    // matches the AWS SDK generated-code pattern.
    public static DescribeElasticIpsResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new DescribeElasticIpsResultJsonUnmarshaller();
        return instance;
    }
}
| dagnir/aws-sdk-java | aws-java-sdk-opsworks/src/main/java/com/amazonaws/services/opsworks/model/transform/DescribeElasticIpsResultJsonUnmarshaller.java | Java | apache-2.0 | 2,905 |
// File: backpropagator.cc
// Author: Karl Moritz Hermann (mail@karlmoritz.com)
// Created: 22-04-2013
// Last Update: Thu 03 Oct 2013 11:38:39 AM BST
/*------------------------------------------------------------------------
* Description: <DESC>
*
*------------------------------------------------------------------------
* History:
* TODO:
*========================================================================
*/
#include "backpropagator.h"
namespace ccaeb
{
// Constructor: allocates one contiguous buffer of n Reals and carves it into
// the individual gradient matrices/vectors via Eigen placement-new maps, so
// all gradients can also be read back as a single flat weight vector.
Backpropagator::Backpropagator (RecursiveAutoencoder* rae, Model &model,int n) : rae_(rae), model(model),
  grad_D(0,0,0), grad_Wd(0,0), grad_Wdr(0,0), grad_Bd(0,0), grad_Bdr(0,0),
  grad_Wl(0,0), grad_Bl(0,0), weights(0,0), correctly_classified_sent(0),
  zero_should_be_one(0), zero_should_be_zero(0), one_should_be_zero(0),
  one_should_be_one(0), is_a_zero(0), is_a_one(0), error_(0.0), count_nodes_(0),
  count_words_(0)
{
  /***************************************************************************
   * Define a couple of frequently needed variables                          *
   ***************************************************************************/
  word_width = rae_->config.word_representation_size;
  dict_size  = rae_->getDictSize();

  /***************************************************************************
   * Define access vectors to the various gradient parts                     *
   ***************************************************************************/
  // Single backing allocation; ptr walks forward by each component's size.
  data = new Real[n];
  Real *ptr = data;

  new (&grad_D) WeightMatrixType(ptr, dict_size, word_width);
  grad_D.setZero();
  ptr += rae_->theta_D_size_;

  new (&grad_Wd) WeightVectorType(ptr, rae_->theta_Wd_size_);
  grad_Wd.setZero();
  ptr += rae_->theta_Wd_size_;

  new (&grad_Wdr) WeightVectorType(ptr, rae_->theta_Wdr_size_);
  grad_Wdr.setZero();
  ptr += rae_->theta_Wdr_size_;

  new (&grad_Bd) WeightVectorType(ptr, rae_->theta_Bd_size_);
  grad_Bd.setZero();
  ptr += rae_->theta_Bd_size_;

  new (&grad_Bdr) WeightVectorType(ptr, rae_->theta_Bdr_size_);
  grad_Bdr.setZero();
  ptr += rae_->theta_Bdr_size_;

  new (&grad_Wl) WeightVectorType(ptr, rae_->theta_Wl_size_);
  grad_Wl.setZero();
  ptr += rae_->theta_Wl_size_;

  new (&grad_Bl) WeightVectorType(ptr, rae_->theta_Bl_size_);
  grad_Bl.setZero();
  ptr += rae_->theta_Bl_size_;

  // All component sizes must exactly tile the allocation.
  assert (data + n == ptr);
  // Flat view over the entire gradient buffer.
  new (&weights) WeightVectorType(data,n);
}
// Destructor: releases the single backing array; the Eigen maps constructed
// over it in the constructor do not own memory.
Backpropagator::~Backpropagator ()
{
  delete [] data;
}
// Forward+backward pass with the label objective for corpus sentence i.
// Accumulates gradients, error and classification statistics under an OpenMP
// critical section (this method runs from parallel loops).
// Returns the propagator's "correct" count for the sentence.
int Backpropagator::backPropagateLbl(int i, VectorReal& x)
{
  SingleProp* propagator = nullptr;
  if(rae_->config.tree == TREE_CCG or rae_->config.tree == TREE_STANFORD)
    propagator = new SingleProp(rae_,model.corpus[i],model.beta,model.bools);
  else
    assert(false);  // unsupported tree type

  propagator->forwardPropagate(true);
  propagator->setToD(x,0);
  int correct = propagator->backPropagate(true);

#pragma omp critical
  {
    // Per-class confusion counts, keyed on the sentence's gold value.
    if(model.corpus[i].value==0)
    {
      is_a_zero += propagator->getJointNodes();
      zero_should_be_zero += propagator->getClassCorrect();
      one_should_be_zero += (propagator->getJointNodes() - propagator->getClassCorrect());
    }
    else
    {
      is_a_one += propagator->getJointNodes();
      one_should_be_one += propagator->getClassCorrect();
      zero_should_be_one += (propagator->getJointNodes() - propagator->getClassCorrect());
    }
    error_ += propagator->getLblError();
    correctly_classified_sent += correct;
    count_words_ += propagator->getSentLength();
    count_nodes_ += propagator->getNodesLength();

    // Only accumulate gradients for the parameter groups enabled in bools.
    if (model.bools.Wd)  grad_Wd += propagator->getWdGradient();
    if (model.bools.Wdr) grad_Wdr += propagator->getWdrGradient();
    if (model.bools.Bd)  grad_Bd += propagator->getBdGradient();
    if (model.bools.Bdr) grad_Bdr += propagator->getBdrGradient();
    if (model.bools.Wl)  grad_Wl += propagator->getWlGradient();
    if (model.bools.Bl)  grad_Bl += propagator->getBlGradient();
    if (model.bools.D)
    {
      // Scatter per-word embedding gradients into the dictionary rows.
      auto tmpD = propagator->getDGradients();
      for (size_t k = 0; k < model.corpus[i].words.size(); ++k)
        grad_D.row(model.corpus[i].words[k]) += tmpD[k];
    }
  }
  delete propagator;
  return correct;
}
// Forward+backward pass with the reconstruction (autoencoder) objective for
// corpus sentence i. Same accumulation pattern as backPropagateLbl, but no
// label gradients (Wl/Bl) and no classification statistics.
void Backpropagator::backPropagateRae(int i, VectorReal& x)
{
  SingleProp* propagator = nullptr;
  if(rae_->config.tree == TREE_CCG or rae_->config.tree == TREE_STANFORD)
    propagator = new SingleProp(rae_,model.corpus[i],model.beta,model.bools);
  else
    assert(false);  // unsupported tree type

  propagator->forwardPropagate(false);
  propagator->setToD(x,0);
  propagator->backPropagate(false);

#pragma omp critical
  {
    error_ += propagator->getRaeError();
    count_words_ += propagator->getSentLength();
    count_nodes_ += propagator->getNodesLength();

    if (model.bools.Wd)  grad_Wd += propagator->getWdGradient();
    if (model.bools.Wdr) grad_Wdr += propagator->getWdrGradient();
    if (model.bools.Bd)  grad_Bd += propagator->getBdGradient();
    if (model.bools.Bdr) grad_Bdr += propagator->getBdrGradient();
    if (model.bools.D)
    {
      // Scatter per-word embedding gradients into the dictionary rows.
      auto tmpD = propagator->getDGradients();
      for (size_t k = 0; k < model.corpus[i].words.size(); ++k)
        grad_D.row(model.corpus[i].words[k]) += tmpD[k];
    }
  }
  delete propagator;
}
// Normalizes the accumulated error and gradients by the number of training
// targets so that magnitudes are comparable across batch sizes:
//   type == 0 (rae): divide by the number of internal (non-leaf) nodes,
//                    i.e. count_nodes_ - count_words_.
//   type == 1 (lbl): divide by the number of sentences in the current
//                    range [model.from, model.to), at least 1.
// Any other type leaves norm at 1 (no scaling).
void Backpropagator::normalize(int type) { // type: 0=rae, 1=lbl
float norm = 1.0;
if (type == 0) norm = count_nodes_ - count_words_;
else if (type == 1) norm = max(1,model.to - model.from);
// Guard against division by zero (e.g. a batch with no internal nodes):
// fall back to no scaling rather than producing inf/NaN gradients.
if (norm <= 0.0f) norm = 1.0f;
error_ /= norm;
grad_D /= norm;
grad_Wd /= norm;
grad_Wdr /= norm;
grad_Bd /= norm;
grad_Bdr /= norm;
grad_Wl /= norm;
grad_Bl /= norm;
}
// Prints classification statistics for the last labelled pass: sentence
// accuracy, then node-level confusion counts for each gold class
// (Z = gold zero, O = gold one).
void Backpropagator::printInfo() {
  cout << " " << correctly_classified_sent << "/" << (model.to - model.from)
       << " "
       << "Z: " << is_a_zero << ": (" << zero_should_be_zero << " / "
       << one_should_be_zero << ")"
       << "O: " << is_a_one << ": (" << one_should_be_one << " / "
       << zero_should_be_one << ")"
       << endl;
}
// Exposes the Eigen map over the flat weight/gradient buffer.
WeightVectorType Backpropagator::dump() { return weights; }
// Returns the accumulated objective value (scaled once normalize() has run).
lbfgsfloatval_t Backpropagator::getError() { return error_; }
}
| karlmoritz/oxcvsm | src/models/ccaeb/backpropagator.cc | C++ | apache-2.0 | 6,141 |
/*
* Copyright 2012 Open Source Robotics Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include <sys/stat.h>
#include "sdf/sdf.hh"
std::vector<std::string> params;
using namespace sdf;
/////////////////////////////////////////////////
// Prints usage information for the gzsdf command-line tool.
// (The "verison" spelling in the last line is preserved from the
// original output on purpose.)
void help()
{
  std::cout
      << "This tool provides information about SDF files.\n\n"
      << "gzsdf <command>\n\n"
      << "Commands:\n"
      << "    describe [SDF version]     Print the SDF format.\n"
      << "    convert [file]             "
         "In place conversion to the latest format.\n"
      << "    doc [SDF version]          Print HTML SDF.\n"
      << "    check [file] [SDF version] Check the SDF format for the"
         " given file.\n"
      << "    print [SDF verison]        Prints SDF, useful for "
         " debugging and as a conversion tool.\n\n";
}
/////////////////////////////////////////////////
// Returns true when _filename refers to an existing filesystem entry
// (regular file, directory, ...) that stat() can reach.
bool file_exists(const std::string &_filename)
{
  struct stat info;
  return ::stat(_filename.c_str(), &info) == 0;
}
/////////////////////////////////////////////////
// Entry point: dispatches on the first command-line parameter to one of
// the gzsdf sub-commands (check / describe / doc / convert / print).
// Returns 0 on success or handled help, -1 on argument or parse errors.
int main(int argc, char** argv)
{
bool success = false;
// Collect whitespace-trimmed command-line parameters into the global list.
for (int i = 1; i < argc; i++)
{
std::string p = argv[i];
boost::trim(p);
params.push_back(p);
}
if (params.empty() || params[0] == "help" || params[0] == "h")
{
help();
return 0;
}
// An optional trailing argument selects the SDF version: third argument
// for check/print/convert, second for the other commands.
if ((params[0] == "check" || params[0] == "print" || params[0] == "convert"))
{
if (params.size() == 3)
SDF::version = params[2];
}
else if (params.size() == 2)
SDF::version = params[1];
// Build the SDF schema description; required by every sub-command.
boost::shared_ptr<SDF> sdf(new SDF());
if (!init(sdf))
{
std::cerr << "ERROR: SDF parsing the xml failed" << std::endl;
return -1;
}
if (params[0] == "check")
{
if (params.size() < 2)
{
help();
std::cerr << "Error: Expecting an xml file to parse\n\n";
return -1;
}
// NOTE(review): a missing file only prints a warning and still falls
// through to readFile below — confirm this is intentional.
if (!file_exists(params[1]))
std::cerr << "Error: File doesn't exist[" << params[1] << "]\n";
if (!readFile(params[1], sdf))
{
std::cerr << "Error: SDF parsing the xml failed\n";
return -1;
}
success = true;
std::cout << "Check complete\n";
}
else if (params[0] == "describe")
{
sdf->PrintDescription();
success = true;
}
else if (params[0] == "doc")
{
sdf->PrintDoc();
success = true;
}
else if (params[0] == "convert")
{
if (params.size() < 2)
{
help();
std::cerr << "Error: Missing SDF file to convert\n\n";
return -1;
}
if (!file_exists(params[1]))
std::cerr << "Error: File doesn't exist[" << params[1] << "]\n";
// In-place conversion: load, convert to SDF::version, write back on success.
TiXmlDocument xmlDoc;
if (xmlDoc.LoadFile(params[1]))
{
if (sdf::Converter::Convert(&xmlDoc, SDF::version, true))
{
success = true;
xmlDoc.SaveFile(params[1]);
}
}
else
std::cerr << "Unable to load file[" << params[1] << "]\n";
}
else if (params[0] == "print")
{
if (params.size() < 2)
{
help();
std::cerr << "Error: Expecting an xml file to parse\n\n";
return -1;
}
if (!file_exists(params[1]))
std::cerr << "Error: File doesn't exist[" << params[1] << "]\n";
if (!readFile(params[1], sdf))
{
std::cerr << "Error: SDF parsing the xml failed\n";
return -1;
}
success = true;
sdf->PrintValues();
}
else
{
help();
std::cerr << "Error: Unknown option[" << params[0] << "]\n";
}
// describe/print/doc already wrote their payload; only the remaining
// commands report an explicit "Success".
if (params[0] != "print" && params[0] != "doc" && success)
std::cout << "Success\n";
return 0;
}
| thomas-moulard/gazebo-deb | tools/gzsdf.cc | C++ | apache-2.0 | 4,148 |
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.provider.pcep.tunnel.impl;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
import static org.onosproject.incubator.net.tunnel.Tunnel.Type.MPLS;
import static org.onosproject.incubator.net.tunnel.Tunnel.State.INIT;
import static org.onosproject.pcep.controller.PcepAnnotationKeys.BANDWIDTH;
import static org.onosproject.pcep.controller.PcepAnnotationKeys.LOCAL_LSP_ID;
import static org.onosproject.pcep.controller.PcepAnnotationKeys.LSP_SIG_TYPE;
import static org.onosproject.pcep.controller.PcepAnnotationKeys.PCC_TUNNEL_ID;
import static org.onosproject.pcep.controller.PcepAnnotationKeys.PLSP_ID;
import static org.onosproject.pcep.controller.PcepAnnotationKeys.DELEGATE;
import static org.onosproject.pcep.controller.LspType.WITHOUT_SIGNALLING_AND_WITHOUT_SR;
import static org.onosproject.pcep.controller.PcepSyncStatus.SYNCED;
import static org.onosproject.net.Device.Type.ROUTER;
import static org.onosproject.net.MastershipRole.MASTER;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.onlab.packet.ChassisId;
import org.onlab.packet.IpAddress;
import org.onosproject.cfg.ComponentConfigAdapter;
import org.onosproject.core.ApplicationId;
import org.onosproject.incubator.net.tunnel.DefaultTunnel;
import org.onosproject.incubator.net.tunnel.IpTunnelEndPoint;
import org.onosproject.incubator.net.tunnel.Tunnel;
import org.onosproject.incubator.net.tunnel.Tunnel.Type;
import org.onosproject.incubator.net.tunnel.TunnelAdminService;
import org.onosproject.incubator.net.tunnel.TunnelDescription;
import org.onosproject.incubator.net.tunnel.TunnelEndPoint;
import org.onosproject.incubator.net.tunnel.TunnelId;
import org.onosproject.incubator.net.tunnel.TunnelName;
import org.onosproject.incubator.net.tunnel.TunnelProvider;
import org.onosproject.incubator.net.tunnel.TunnelProviderService;
import org.onosproject.incubator.net.tunnel.Tunnel.State;
import org.onosproject.mastership.MastershipServiceAdapter;
import org.onosproject.net.AnnotationKeys;
import org.onosproject.net.DefaultAnnotations;
import org.onosproject.net.DefaultDevice;
import org.onosproject.net.Device;
import org.onosproject.net.DeviceId;
import org.onosproject.net.ElementId;
import org.onosproject.net.MastershipRole;
import org.onosproject.net.Path;
import org.onosproject.net.SparseAnnotations;
import org.onosproject.net.device.DeviceServiceAdapter;
import org.onosproject.net.provider.ProviderId;
import org.onosproject.pcepio.exceptions.PcepOutOfBoundMessageException;
import org.onosproject.pcepio.exceptions.PcepParseException;
import org.onosproject.pcepio.protocol.PcepFactories;
import org.onosproject.pcepio.protocol.PcepMessage;
import org.onosproject.pcepio.protocol.PcepMessageReader;
import org.onosproject.pcepio.protocol.PcepVersion;
import org.onosproject.pcep.controller.ClientCapability;
import org.onosproject.pcep.controller.LspKey;
import org.onosproject.pcep.controller.PccId;
import com.google.common.collect.ImmutableSet;
/**
* Tests handling of PCEP report message.
*/
public class PcepTunnelAddedTest {
public static final String PROVIDER_ID = "org.onosproject.provider.tunnel.pcep";
public static final String UNKOWN = "UNKOWN";
PcepTunnelProvider tunnelProvider = new PcepTunnelProvider();
private final MockTunnelProviderRegistryAdapter registry = new MockTunnelProviderRegistryAdapter();
private final PcepClientControllerAdapter controller = new PcepClientControllerAdapter();
private final PcepControllerAdapter ctl = new PcepControllerAdapter();
private final PcepTunnelApiMapper pcepTunnelAPIMapper = new PcepTunnelApiMapper();
private final MockTunnelServiceAdapter tunnelService = new MockTunnelServiceAdapter();
public final MockDeviceService deviceService = new MockDeviceService();
private final MockMasterShipService masterShipService = new MockMasterShipService();
private final MockTunnelAdminService tunnelAdminService = new MockTunnelAdminService();
    /**
     * Minimal {@code TunnelAdminService} mock: only {@link #updateTunnel}
     * does anything; the remove operations are deliberate no-ops for these
     * tests.
     */
    private class MockTunnelAdminService implements TunnelAdminService {

        @Override
        public void removeTunnel(TunnelId tunnelId) {
            // TODO Auto-generated method stub
        }

        @Override
        public void removeTunnels(TunnelEndPoint src, TunnelEndPoint dst, ProviderId producerName) {
            // TODO Auto-generated method stub
        }

        @Override
        public void removeTunnels(TunnelEndPoint src, TunnelEndPoint dst, Type type, ProviderId producerName) {
            // TODO Auto-generated method stub
        }

        @Override
        public void updateTunnel(Tunnel tunnel, Path path) {
            // Replace the stored tunnel only when it is already known;
            // unknown tunnels are silently ignored.
            if (tunnelService.tunnelIdAsKeyStore.containsKey(tunnel.tunnelId())) {
                tunnelService.tunnelIdAsKeyStore.replace(tunnel.tunnelId(), tunnel);
            }
        }
    }
private class MockMasterShipService extends MastershipServiceAdapter {
boolean set;
private void setMaster(boolean isMaster) {
this.set = isMaster;
}
@Override
public MastershipRole getLocalRole(DeviceId deviceId) {
return set ? MastershipRole.MASTER : MastershipRole.STANDBY;
}
@Override
public boolean isLocalMaster(DeviceId deviceId) {
return getLocalRole(deviceId) == MASTER;
}
}
    /**
     * Device-service mock backed by a simple in-memory list; tests register
     * devices via {@link #addDevice}.
     */
    private class MockDeviceService extends DeviceServiceAdapter {
        List<Device> devices = new LinkedList<>();

        private void addDevice(Device dev) {
            devices.add(dev);
        }

        @Override
        public Iterable<Device> getAvailableDevices() {
            return devices;
        }
    }
private class MockTunnelProviderRegistryAdapter extends TunnelProviderRegistryAdapter {
public long tunnelIdCounter;
@Override
public TunnelProviderService register(TunnelProvider provider) {
this.provider = provider;
return new TestProviderService();
}
private class TestProviderService implements TunnelProviderService {
@Override
public TunnelProvider provider() {
return null;
}
@Override
public TunnelId tunnelAdded(TunnelDescription tunnel) {
TunnelId id = TunnelId.valueOf(String.valueOf(++tunnelIdCounter));
Tunnel storedTunnel = new DefaultTunnel(ProviderId.NONE,
tunnel.src(), tunnel.dst(),
tunnel.type(),
tunnel.groupId(),
id,
tunnel.tunnelName(),
tunnel.path(),
tunnel.resource(),
tunnel.annotations());
tunnelService.tunnelIdAsKeyStore.put(id, storedTunnel);
return id;
}
@Override
public TunnelId tunnelAdded(TunnelDescription tunnel, State state) {
TunnelId id = TunnelId.valueOf(String.valueOf(++tunnelIdCounter));
Tunnel storedTunnel = new DefaultTunnel(ProviderId.NONE,
tunnel.src(), tunnel.dst(),
tunnel.type(),
tunnel.groupId(),
id,
tunnel.tunnelName(),
tunnel.path(),
tunnel.resource(),
tunnel.annotations());
tunnelService.tunnelIdAsKeyStore.put(id, storedTunnel);
return id;
}
@Override
public void tunnelRemoved(TunnelDescription tunnel) {
}
@Override
public void tunnelUpdated(TunnelDescription tunnel) {
}
@Override
public void tunnelUpdated(TunnelDescription tunnel, State state) {
}
@Override
public Tunnel tunnelQueryById(TunnelId tunnelId) {
return null;
}
}
}
private class MockTunnelServiceAdapter extends TunnelServiceAdapter {
private HashMap<TunnelId, Tunnel> tunnelIdAsKeyStore = new HashMap<>();
private int tunnelIdCounter = 0;
@Override
public TunnelId setupTunnel(ApplicationId producerId, ElementId srcElementId, Tunnel tunnel, Path path) {
TunnelId tunnelId = TunnelId.valueOf(String.valueOf(++tunnelIdCounter));
tunnelIdAsKeyStore.put(tunnelId, tunnel);
return tunnelId;
}
@Override
public Collection<Tunnel> queryTunnel(TunnelEndPoint src, TunnelEndPoint dst) {
Collection<Tunnel> result = new HashSet<>();
Tunnel tunnel = null;
for (TunnelId tunnelId : tunnelIdAsKeyStore.keySet()) {
tunnel = tunnelIdAsKeyStore.get(tunnelId);
if ((null != tunnel) && (src.equals(tunnel.src())) && (dst.equals(tunnel.dst()))) {
result.add(tunnel);
}
}
return result.isEmpty() ? Collections.emptySet() : ImmutableSet.copyOf(result);
}
@Override
public Collection<Tunnel> queryAllTunnels() {
Collection<Tunnel> result = new HashSet<>();
for (TunnelId tunnelId : tunnelIdAsKeyStore.keySet()) {
result.add(tunnelIdAsKeyStore.get(tunnelId));
}
return result.isEmpty() ? Collections.emptySet() : ImmutableSet.copyOf(result);
}
}
    /**
     * Wires the provider under test to all the mock services above, then
     * registers and activates it.
     */
    @Before
    public void preSetup() {
        tunnelProvider.tunnelProviderRegistry = registry;
        tunnelProvider.pcepClientController = controller;
        tunnelProvider.controller = ctl;
        tunnelProvider.deviceService = deviceService;
        tunnelProvider.mastershipService = masterShipService;
        tunnelProvider.pcepTunnelApiMapper = pcepTunnelAPIMapper;
        tunnelProvider.cfgService = new ComponentConfigAdapter();
        tunnelProvider.tunnelService = tunnelService;
        tunnelProvider.tunnelAdminService = tunnelAdminService;
        tunnelProvider.service = registry.register(tunnelProvider);
        tunnelProvider.activate();
    }
    /**
     * Tests PCRpt msg with sync flag set: processing the report for a known
     * PCC must create exactly one tunnel via the provider registry.
     */
    @Test
    public void tunnelProviderAddedTest1() throws PcepParseException, PcepOutOfBoundMessageException {
        // Raw PCRpt message: SRP (with PATH-SETUP-TYPE TLV), LSP (symbolic
        // path name + IPv4-LSP-IDENTIFIER TLVs), then ERO and RRO objects.
        byte[] reportMsg = new byte[] {0x20, 0x0a, 0x00, (byte) 0x84,
                0x21, 0x10, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, //SRP object
                0x00, 0x1c, 0x00, 0x04, // PATH-SETUP-TYPE TLV
                0x00, 0x00, 0x00, 0x02,
                0x20, 0x10, 0x00, 0x24, 0x00, 0x00, 0x10, 0x03, //LSP object
                0x00, 0x11, 0x00, 0x02, 0x54, 0x31, 0x00, 0x00, //symbolic path tlv
                0x00, 0x12, 0x00, 0x10, // IPv4-LSP-IDENTIFIER-TLV
                0x01, 0x01, 0x01, 0x01,
                0x00, 0x01, 0x00, 0x01,
                0x01, 0x01, 0x01, 0x01,
                0x05, 0x05, 0x05, 0x05,
                0x07, 0x10, 0x00, 0x14, //ERO object
                0x01, 0x08, (byte) 0x01, 0x01, 0x01, 0x01, 0x04, 0x00, // ERO IPv4 sub objects
                0x01, 0x08, (byte) 0x05, 0x05, 0x05, 0x05, 0x04, 0x00,
                0x08, 0x10, 0x00, 0x34, //RRO object
                0x01, 0x08, 0x11, 0x01, 0x01, 0x01, 0x04, 0x00, // RRO IPv4 sub objects
                0x01, 0x08, 0x11, 0x01, 0x01, 0x02, 0x04, 0x00,
                0x01, 0x08, 0x06, 0x06, 0x06, 0x06, 0x04, 0x00,
                0x01, 0x08, 0x12, 0x01, 0x01, 0x02, 0x04, 0x00,
                0x01, 0x08, 0x12, 0x01, 0x01, 0x01, 0x04, 0x00,
                0x01, 0x08, 0x05, 0x05, 0x05, 0x05, 0x04, 0x00
                };

        // Decode the raw bytes into a PcepMessage.
        ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
        buffer.writeBytes(reportMsg);
        PcepMessageReader<PcepMessage> reader = PcepFactories.getGenericReader();
        PcepMessage message = reader.readFrom(buffer);

        // Register an L3 device whose LSR id matches the PCC address 1.1.1.1.
        DefaultAnnotations.Builder newBuilder = DefaultAnnotations.builder();
        newBuilder.set(PcepTunnelProvider.LSRID, "1.1.1.1");
        newBuilder.set(AnnotationKeys.TYPE, "L3");
        Device device = new DefaultDevice(ProviderId.NONE, DeviceId.deviceId("1.1.1.1"), ROUTER,
                UNKOWN, UNKOWN, UNKOWN,
                UNKOWN, new ChassisId(),
                newBuilder.build());

        deviceService.addDevice(device);
        controller.getClient(PccId.pccId(IpAddress.valueOf("1.1.1.1"))).setCapability(
                new ClientCapability(true, true, true, true, true));
        masterShipService.setMaster(true);

        // Inject the report and verify exactly one tunnel was added.
        controller.processClientMessage(PccId.pccId(IpAddress.valueOf("1.1.1.1")), message);

        assertThat(registry.tunnelIdCounter, is((long) 1));
    }
    /**
     * Tests updating an existing tunnel on receiving asynchronous PCRpt msg,
     * i.e. without any SRP id: the stored tunnel is updated in place, so the
     * total tunnel count stays at 1.
     */
    @Test
    public void tunnelProviderAddedTest2() throws PcepParseException, PcepOutOfBoundMessageException {
        // PCRpt with an all-zero SRP id (asynchronous report).
        byte[] reportMsg = new byte[] {0x20, 0x0a, 0x00, (byte) 0x50,
                0x21, 0x10, 0x00, 0x14, //SRP object
                0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00,
                0x00, 0x1c, 0x00, 0x04, // PATH-SETUP-TYPE TLV
                0x00, 0x00, 0x00, 0x02,
                0x20, 0x10, 0x00, 0x24, 0x00, 0x00, 0x10, 0x19, //LSP object
                0x00, 0x11, 0x00, 0x02, 0x54, 0x31, 0x00, 0x00, //symbolic path TLV
                0x00, 0x12, 0x00, 0x10, // IPv4-LSP-IDENTIFIER-TLV
                0x4e, 0x1f, 0x04, 0x00,
                0x00, 0x01, 0x00, 0x01,
                0x4e, 0x1f, 0x04, 0x00,
                0x4e, 0x20, 0x04, 0x00,
                0x07, 0x10, 0x00, 0x14, //ERO object
                0x01, 0x08, (byte) 0xb6, 0x02, 0x4e, 0x1f, 0x04, 0x00, // ERO IPv4 sub objects
                0x01, 0x08, (byte) 0xb6, 0x02, 0x4e, 0x20, 0x04, 0x00,
                };

        ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
        buffer.writeBytes(reportMsg);

        PcepMessageReader<PcepMessage> reader = PcepFactories.getGenericReader();
        PcepMessage message = reader.readFrom(buffer);

        // create an existing tunnel.
        IpTunnelEndPoint tunnelEndPointSrc = IpTunnelEndPoint.ipTunnelPoint(IpAddress.valueOf(0x4e1f0400));
        IpTunnelEndPoint tunnelEndPointDst = IpTunnelEndPoint.ipTunnelPoint(IpAddress.valueOf(0x4e200400));

        // Annotations must match the LSP identifiers in the report above so
        // the provider recognizes the tunnel as the same one.
        SparseAnnotations annotations = DefaultAnnotations.builder()
                .set(BANDWIDTH, (new Integer(1)).toString())
                .set(LSP_SIG_TYPE, WITHOUT_SIGNALLING_AND_WITHOUT_SR.name())
                .set(PCC_TUNNEL_ID, String.valueOf(1))
                .set(PLSP_ID, String.valueOf(1))
                .set(LOCAL_LSP_ID, String.valueOf(1))
                .set(DELEGATE, String.valueOf("true"))
                .build();

        Tunnel tunnel = new DefaultTunnel(null, tunnelEndPointSrc, tunnelEndPointDst, MPLS, INIT, null, null,
                                          TunnelName.tunnelName("T123"), null, annotations);
        tunnelService.setupTunnel(null, null, tunnel, null);

        PccId pccId = PccId.pccId(IpAddress.valueOf(0x4e1f0400));
        PcepClientAdapter pc = new PcepClientAdapter();
        pc.init(pccId, PcepVersion.PCEP_1);
        masterShipService.setMaster(true);
        controller.getClient(pccId).setLspAndDelegationInfo(new LspKey(1, (short) 1), true);
        controller.getClient(pccId).setCapability(new ClientCapability(true, true, true, true, true));
        controller.getClient(pccId).setLspDbSyncStatus(SYNCED);

        // Process update message.
        controller.processClientMessage(pccId, message);
        // Still exactly one tunnel: the report updated, not added.
        assertThat(tunnelService.queryAllTunnels().size(), is(1));
    }
    /**
     * Tests adding a new tunnel on receiving asynchronous PCRpt msg,
     * i.e. without any SRP id: after LSP DB sync is complete, an unknown LSP
     * in a report results in a newly registered tunnel.
     */
    @Test
    public void tunnelProviderAddedTest3() throws PcepParseException, PcepOutOfBoundMessageException {
        // Asynchronous PCRpt (SRP id all zero) for an LSP not yet known.
        byte[] reportMsg = new byte[] {0x20, 0x0a, 0x00, (byte) 0x84,
                0x21, 0x10, 0x00, 0x14, //SRP object
                0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00,
                0x00, 0x1c, 0x00, 0x04, // PATH-SETUP-TYPE TLV
                0x00, 0x00, 0x00, 0x02,
                0x20, 0x10, 0x00, 0x24, 0x00, 0x00, 0x10, 0x1B, // LSP object
                0x00, 0x11, 0x00, 0x02, 0x54, 0x31, 0x00, 0x00, // symbolic path TLV
                0x00, 0x12, 0x00, 0x10, // IPv4-LSP-IDENTIFIER-TLV
                0x01, 0x01, 0x01, 0x01,
                0x00, 0x01, 0x00, 0x01,
                0x01, 0x01, 0x01, 0x01,
                0x05, 0x05, 0x05, 0x05,
                0x07, 0x10, 0x00, 0x14, //ERO object
                0x01, 0x08, (byte) 0x01, 0x01, 0x01, 0x01, 0x04, 0x00, // ERO IPv4 sub objects
                0x01, 0x08, (byte) 0x05, 0x05, 0x05, 0x05, 0x04, 0x00,
                0x08, 0x10, 0x00, 0x34, //RRO object
                0x01, 0x08, 0x11, 0x01, 0x01, 0x01, 0x04, 0x00, // RRO IPv4 sub objects
                0x01, 0x08, 0x11, 0x01, 0x01, 0x02, 0x04, 0x00,
                0x01, 0x08, 0x06, 0x06, 0x06, 0x06, 0x04, 0x00,
                0x01, 0x08, 0x12, 0x01, 0x01, 0x02, 0x04, 0x00,
                0x01, 0x08, 0x12, 0x01, 0x01, 0x01, 0x04, 0x00,
                0x01, 0x08, 0x05, 0x05, 0x05, 0x05, 0x04, 0x00
                };

        ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
        buffer.writeBytes(reportMsg);

        PcepMessageReader<PcepMessage> reader = PcepFactories.getGenericReader();
        PcepMessage message = reader.readFrom(buffer);

        // Register the matching L3 device for PCC 1.1.1.1.
        DefaultAnnotations.Builder newBuilder = DefaultAnnotations.builder();
        newBuilder.set(PcepTunnelProvider.LSRID, "1.1.1.1");
        newBuilder.set(AnnotationKeys.TYPE, "L3");
        Device device = new DefaultDevice(ProviderId.NONE, DeviceId.deviceId("1.1.1.1"), ROUTER,
                UNKOWN, UNKOWN, UNKOWN,
                UNKOWN, new ChassisId(),
                newBuilder.build());

        deviceService.addDevice(device);

        PccId pccId = PccId.pccId(IpAddress.valueOf("1.1.1.1"));
        // DB sync already finished, so the unknown LSP must be added as new.
        controller.getClient(pccId).setLspDbSyncStatus(SYNCED);
        controller.getClient(pccId).setCapability(new ClientCapability(true, true, true, true, true));
        PcepClientAdapter pc = new PcepClientAdapter();
        pc.init(pccId, PcepVersion.PCEP_1);
        controller.getClient(pccId).setLspAndDelegationInfo(new LspKey(1, (short) 1), true);
        masterShipService.setMaster(true);

        controller.processClientMessage(pccId, message);

        assertThat(registry.tunnelIdCounter, is((long) 1));
    }
    /**
     * Tests PCRpt msg with D flag set and delegated to non-master: first a
     * report with D=0 while ONOS is master (tunnel stored with DELEGATE
     * false), then a report with D=1 while this node is not master (the
     * stored tunnel's DELEGATE annotation flips to true, no new tunnel).
     *
     * @throws InterruptedException while waiting for delay
     */
    @Test
    public void tunnelProviderAddedTest4() throws PcepParseException, PcepOutOfBoundMessageException,
            InterruptedException {
        // First report: LSP object flags carry D=0.
        byte[] reportMsg = new byte[] {0x20, 0x0a, 0x00, (byte) 0x84,
                0x21, 0x10, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, //SRP object
                0x00, 0x1c, 0x00, 0x04, // PATH-SETUP-TYPE TLV
                0x00, 0x00, 0x00, 0x02,
                0x20, 0x10, 0x00, 0x24, 0x00, 0x00, 0x10, 0x02, //LSP object
                0x00, 0x11, 0x00, 0x02, 0x54, 0x31, 0x00, 0x00, //symbolic path tlv
                0x00, 0x12, 0x00, 0x10, // IPv4-LSP-IDENTIFIER-TLV
                0x01, 0x01, 0x01, 0x01,
                0x00, 0x01, 0x00, 0x01,
                0x01, 0x01, 0x01, 0x01,
                0x05, 0x05, 0x05, 0x05,
                0x07, 0x10, 0x00, 0x14, //ERO object
                0x01, 0x08, (byte) 0x01, 0x01, 0x01, 0x01, 0x04, 0x00, // ERO IPv4 sub objects
                0x01, 0x08, (byte) 0x05, 0x05, 0x05, 0x05, 0x04, 0x00,
                0x08, 0x10, 0x00, 0x34, //RRO object
                0x01, 0x08, 0x11, 0x01, 0x01, 0x01, 0x04, 0x00, // RRO IPv4 sub objects
                0x01, 0x08, 0x11, 0x01, 0x01, 0x02, 0x04, 0x00,
                0x01, 0x08, 0x06, 0x06, 0x06, 0x06, 0x04, 0x00,
                0x01, 0x08, 0x12, 0x01, 0x01, 0x02, 0x04, 0x00,
                0x01, 0x08, 0x12, 0x01, 0x01, 0x01, 0x04, 0x00,
                0x01, 0x08, 0x05, 0x05, 0x05, 0x05, 0x04, 0x00
                };

        ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
        buffer.writeBytes(reportMsg);

        PcepMessageReader<PcepMessage> reader = PcepFactories.getGenericReader();
        PcepMessage message = reader.readFrom(buffer);

        //PCC 1.1.1.1, D=0, ONOS as master
        masterShipService.setMaster(true);
        DefaultAnnotations.Builder newBuilder = DefaultAnnotations.builder();
        newBuilder.set(PcepTunnelProvider.LSRID, "1.1.1.1");
        newBuilder.set(AnnotationKeys.TYPE, "L3");
        Device device = new DefaultDevice(ProviderId.NONE, DeviceId.deviceId("1.1.1.1"), ROUTER,
                UNKOWN, UNKOWN, UNKOWN,
                UNKOWN, new ChassisId(),
                newBuilder.build());

        deviceService.addDevice(device);
        controller.getClient(PccId.pccId(IpAddress.valueOf("1.1.1.1"))).setCapability(
                new ClientCapability(true, true, true, true, true));

        controller.processClientMessage(PccId.pccId(IpAddress.valueOf("1.1.1.1")), message);
        // D=0 report: tunnel stored without delegation.
        assertThat(tunnelService.tunnelIdAsKeyStore.values().iterator().next().annotations().value(DELEGATE),
                   is("false"));

        //PCC 1.1.1.1, D=1, non-master
        masterShipService.setMaster(false);
        // Second report: identical except the LSP flags now carry D=1.
        reportMsg = new byte[] {0x20, 0x0a, 0x00, (byte) 0x84,
                0x21, 0x10, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, //SRP object
                0x00, 0x1c, 0x00, 0x04, // PATH-SETUP-TYPE TLV
                0x00, 0x00, 0x00, 0x02,
                0x20, 0x10, 0x00, 0x24, 0x00, 0x00, 0x10, 0x03, //LSP object
                0x00, 0x11, 0x00, 0x02, 0x54, 0x31, 0x00, 0x00, //symbolic path tlv
                0x00, 0x12, 0x00, 0x10, // IPv4-LSP-IDENTIFIER-TLV
                0x01, 0x01, 0x01, 0x01,
                0x00, 0x01, 0x00, 0x01,
                0x01, 0x01, 0x01, 0x01,
                0x05, 0x05, 0x05, 0x05,
                0x07, 0x10, 0x00, 0x14, //ERO object
                0x01, 0x08, (byte) 0x01, 0x01, 0x01, 0x01, 0x04, 0x00, // ERO IPv4 sub objects
                0x01, 0x08, (byte) 0x05, 0x05, 0x05, 0x05, 0x04, 0x00,
                0x08, 0x10, 0x00, 0x34, //RRO object
                0x01, 0x08, 0x11, 0x01, 0x01, 0x01, 0x04, 0x00, // RRO IPv4 sub objects
                0x01, 0x08, 0x11, 0x01, 0x01, 0x02, 0x04, 0x00,
                0x01, 0x08, 0x06, 0x06, 0x06, 0x06, 0x04, 0x00,
                0x01, 0x08, 0x12, 0x01, 0x01, 0x02, 0x04, 0x00,
                0x01, 0x08, 0x12, 0x01, 0x01, 0x01, 0x04, 0x00,
                0x01, 0x08, 0x05, 0x05, 0x05, 0x05, 0x04, 0x00
                };

        buffer = ChannelBuffers.dynamicBuffer();
        buffer.writeBytes(reportMsg);

        reader = PcepFactories.getGenericReader();
        message = reader.readFrom(buffer);

        controller.processClientMessage(PccId.pccId(IpAddress.valueOf("1.1.1.1")), message);

        // Wait for the provider's delayed delegation handling to run.
        TimeUnit.MILLISECONDS.sleep(4000);
        assertThat(registry.tunnelIdCounter, is((long) 1));
        assertThat(tunnelService.tunnelIdAsKeyStore.values().iterator().next().annotations().value(DELEGATE),
                   is("true"));
    }
    /**
     * Tests adding PCC Init LSP after LSP DB sync is over: the LSP flags mark
     * a PCE-initiated LSP (create flag), so the provider must NOT register a
     * new tunnel for it.
     */
    @Test
    public void tunnelProviderAddedTest5() throws PcepParseException, PcepOutOfBoundMessageException {
        // Asynchronous PCRpt whose LSP flags (0x19) include the create bit.
        byte[] reportMsg = new byte[] {0x20, 0x0a, 0x00, (byte) 0x84,
                0x21, 0x10, 0x00, 0x14, //SRP object
                0x00, 0x00, 0x00, 0x00,
                0x00, 0x00, 0x00, 0x00,
                0x00, 0x1c, 0x00, 0x04, // PATH-SETUP-TYPE TLV
                0x00, 0x00, 0x00, 0x02,
                0x20, 0x10, 0x00, 0x24, 0x00, 0x00, 0x10, 0x19, // LSP object
                0x00, 0x11, 0x00, 0x02, 0x54, 0x31, 0x00, 0x00, // symbolic path TLV
                0x00, 0x12, 0x00, 0x10, // IPv4-LSP-IDENTIFIER-TLV
                0x01, 0x01, 0x01, 0x01,
                0x00, 0x01, 0x00, 0x01,
                0x01, 0x01, 0x01, 0x01,
                0x05, 0x05, 0x05, 0x05,
                0x07, 0x10, 0x00, 0x14, //ERO object
                0x01, 0x08, (byte) 0x01, 0x01, 0x01, 0x01, 0x04, 0x00, // ERO IPv4 sub objects
                0x01, 0x08, (byte) 0x05, 0x05, 0x05, 0x05, 0x04, 0x00,
                0x08, 0x10, 0x00, 0x34, //RRO object
                0x01, 0x08, 0x11, 0x01, 0x01, 0x01, 0x04, 0x00, // RRO IPv4 sub objects
                0x01, 0x08, 0x11, 0x01, 0x01, 0x02, 0x04, 0x00,
                0x01, 0x08, 0x06, 0x06, 0x06, 0x06, 0x04, 0x00,
                0x01, 0x08, 0x12, 0x01, 0x01, 0x02, 0x04, 0x00,
                0x01, 0x08, 0x12, 0x01, 0x01, 0x01, 0x04, 0x00,
                0x01, 0x08, 0x05, 0x05, 0x05, 0x05, 0x04, 0x00
                };

        ChannelBuffer buffer = ChannelBuffers.dynamicBuffer();
        buffer.writeBytes(reportMsg);

        PcepMessageReader<PcepMessage> reader = PcepFactories.getGenericReader();
        PcepMessage message = reader.readFrom(buffer);

        // Register the matching L3 device for PCC 1.1.1.1.
        DefaultAnnotations.Builder newBuilder = DefaultAnnotations.builder();
        newBuilder.set(PcepTunnelProvider.LSRID, "1.1.1.1");
        newBuilder.set(AnnotationKeys.TYPE, "L3");
        Device device = new DefaultDevice(ProviderId.NONE, DeviceId.deviceId("1.1.1.1"), ROUTER,
                UNKOWN, UNKOWN, UNKOWN,
                UNKOWN, new ChassisId(),
                newBuilder.build());

        deviceService.addDevice(device);

        PccId pccId = PccId.pccId(IpAddress.valueOf("1.1.1.1"));
        controller.getClient(pccId).setLspDbSyncStatus(SYNCED);
        controller.getClient(pccId).setCapability(new ClientCapability(true, true, true, true, true));
        PcepClientAdapter pc = new PcepClientAdapter();
        pc.init(pccId, PcepVersion.PCEP_1);
        controller.getClient(pccId).setLspAndDelegationInfo(new LspKey(1, (short) 1), true);
        masterShipService.setMaster(true);

        controller.processClientMessage(pccId, message);

        // No tunnel may be created for the PCE-initiated LSP.
        assertThat(registry.tunnelIdCounter, is((long) 0));
    }
    /**
     * Deactivates the provider and nulls out every injected reference so no
     * state leaks between test runs.
     */
    @After
    public void tearDown() throws IOException {
        tunnelProvider.deactivate();
        tunnelProvider.controller = null;
        tunnelProvider.pcepClientController = null;
        tunnelProvider.tunnelProviderRegistry = null;
        tunnelProvider.pcepTunnelApiMapper = null;
        tunnelProvider.cfgService = null;
        tunnelProvider.tunnelService = null;
        tunnelProvider.tunnelAdminService = null;
        tunnelProvider.deviceService = null;
        tunnelProvider.mastershipService = null;
        tunnelProvider.service = null;
    }
}
| mengmoya/onos | providers/pcep/tunnel/src/test/java/org/onosproject/provider/pcep/tunnel/impl/PcepTunnelAddedTest.java | Java | apache-2.0 | 29,365 |
// Equipment arc definitions, grouped by category (shields / weapons).
// Per item:
//   orientation — arc centre angle (NOTE(review): assumed degrees relative
//                 to the wielder's facing — confirm against the renderer)
//   deepLevel   — stacking/draw depth of the arc
//   type        — actions the item supports in this arc
//   widthLevels — arc segments [from..to] with a display color and the
//                 cost of using that segment
// NOTE(review): "buclet" looks like a typo for "buckler"; the key may be
// referenced elsewhere at runtime, so it is left unchanged — confirm.
GBG.EquipmentArcs = {
shields:{buclet:{
orientation:-45,
deepLevel:1,
type:['block'],
widthLevels:[{from:40,to:60, color: '#3a3', cost:0},
{from:35,to:40, color: '#8a3', cost:1},
{from:30,to:35, color: '#777', cost:2},
{from:60,to:70, color: '#8a3', cost:1},
{from:70,to:75, color: '#777', cost:3}]
}
},
weapons:{
shortSword:{
orientation:45,
deepLevel:2,
type:['slash','parry'],
widthLevels:[{from:30,to:60, color: '#a33', cost:0},
{from:25,to:30, color: '#a83', cost:1},
{from:20,to:25, color: '#777', cost:2},
{from:60,to:77, color: '#a83', cost:1},
{from:77,to:85, color: '#777', cost:3}]
}
},
};
// Basic movement templates keyed by direction. Each named move gives the
// coordinate delta applied to the mover and the rotation change.
// Only forward ("front") moves are populated; the other directions are
// placeholders.
GBG.Movements = {
front:{
body05: { coordinates : {y:-50,x:0},rotation:0},
body1: { coordinates : {y:-100,x:0},rotation:0},
body15: { coordinates : {y:-150,x:0},rotation:0},
body2: { coordinates : {y:-200,x:0},rotation:0},
run3: { coordinates : {y:-300,x:0},rotation:0},
run4: { coordinates : {y:-400,x:0},rotation:0},
run5: { coordinates : {y:-500,x:0},rotation:0},
run6: { coordinates : {y:-600,x:0},rotation:0}
},
left:{},
right:{},
back:{},
turn:{}
};
/*
X-Wing movement-template distances:
straight:
  width 200mm
  length => 1: 40mm, 2: 80mm, 3: 120mm, 4: 160mm, 5: 200mm
90-degree turn, defined by arc radius:
  inner => 1: 25mm, 2: 53mm, 3: 80mm
  outer => 1: 45mm, 2: 73mm, 3: 100mm
bank turn (45 degrees), defined by arc radius:
  inner => 1: 70mm, 2: 120mm, 3: 170mm
  outer => 1: 90mm, 2: 140mm, 3: 190mm
*/
// X-Wing manoeuvre templates, keyed by direction and speed ("green" names).
// Each move gives the final coordinate offset and the rotation applied to
// the ship. NOTE(review): the y offsets appear to include the ship base on
// top of the raw template length (green1 straight is -200, not -40·scale) —
// confirm the mm-to-unit scale before editing these numbers.
// NOTE(review): "bancoLeft"/"bancoRight" presumably mean "bank"; the keys
// may be referenced at runtime, so they are left unchanged.
GBG.XwingMovements = {
front:{
green1: { coordinates : {y:-200,x:0},rotation:0},
green2: { coordinates : {y:-300,x:0},rotation:0},
green3: { coordinates : {y:-400,x:0},rotation:0},
green4: { coordinates : {y:-500,x:0},rotation:0},
green5: { coordinates : {y:-600,x:0},rotation:0},
},
left:{
green1: { coordinates : {y:-137.5,x:-137.5},rotation:-90},
green2: { coordinates : {y:-207.5,x:-207.5},rotation:-90},
green3: { coordinates : {y:-275,x:-275},rotation:-90},
},
right:{
green1: { coordinates : {y:-137.5,x:137.5},rotation:90},
green2: { coordinates : {y:-207.5,x:207.5},rotation:90},
green3: { coordinates : {y:-275,x:275},rotation:90},
},
bancoLeft:{
green1: { coordinates : {y:-225.5 ,x:-45 },rotation:-45},
green2: { coordinates : {y:-313 ,x:-82.5 },rotation:-45},
green3: { coordinates : {y:-400.5 ,x:-120 },rotation:-45},
},
bancoRight:{
green1: { coordinates : {y:-225.5 ,x:45 },rotation:45},
green2: { coordinates : {y:-313 ,x:82.5 },rotation:45},
green3: { coordinates : {y:-400.5 ,x:120 },rotation:45},
},
back:{},
turn:{}
};
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.time.format;
/**
 * Enumeration of the different approaches to resolving parsed date/time
 * fields, mirroring the JDK's {@code java.time.format.ResolverStyle}.
 */
public enum ResolverStyle {
	/** Resolve strictly: parsed values must be valid for the field. */
	STRICT,
	/** Resolve in an intelligent, per-field manner (the JDK default). */
	SMART,
	/** Resolve leniently, allowing out-of-range values to roll over. */
	LENIENT;
}
| jtransc/jtransc | jtransc-rt/src/java/time/format/ResolverStyle.java | Java | apache-2.0 | 897 |
// j2s (Java-to-JavaScript) runtime port of java.util.Stack. Generated-style
// code: $_L declares the module and its dependencies, $_T defines the class
// (extending java.util.Vector), $_M attaches instance methods and $_e tests
// caught exceptions against a type.
$_L(["java.util.Vector"],"java.util.Stack",["java.util.EmptyStackException"],function(){
c$=$_T(java.util,"Stack",java.util.Vector);
// Returns true when the stack holds no elements.
$_M(c$,"empty",
function(){
return this.elementCount==0;
});
// Returns (without removing) the top element; an empty stack surfaces as
// IndexOutOfBoundsException, which is translated to EmptyStackException.
$_M(c$,"peek",
function(){
try{
return this.elementData[this.elementCount-1];
}catch(e){
if($_e(e,IndexOutOfBoundsException)){
throw new java.util.EmptyStackException();
}else{
throw e;
}
}
});
// Removes and returns the top element, throwing EmptyStackException when
// the stack is empty (same translation as peek).
$_M(c$,"pop",
function(){
try{
var index=this.elementCount-1;
var obj=this.elementData[index];
this.removeElementAt(index);
return obj;
}catch(e){
if($_e(e,IndexOutOfBoundsException)){
throw new java.util.EmptyStackException();
}else{
throw e;
}
}
});
// Pushes object onto the top of the stack and returns it.
$_M(c$,"push",
function(object){
this.addElement(object);
return object;
},"~O");
// Returns the 1-based distance from the top of the stack to the topmost
// occurrence of o, or -1 when o is not on the stack.
$_M(c$,"search",
function(o){
var index=this.lastIndexOf(o);
if(index>=0)return(this.elementCount-index);
return-1;
},"~O");
});
// Copyright Dirk Lemstra https://github.com/dlemstra/Magick.NET.
// Licensed under the Apache License, Version 2.0.
using ImageMagick;
using ImageMagick.Formats;
using Xunit;
namespace Magick.NET.Tests
{
    /// <summary>
    /// Tests for <see cref="HeicReadDefines"/>.
    /// </summary>
    public partial class HeicReadDefinesTests
    {
        public class TheConstructor
        {
            [Fact]
            public void ShouldNotSetAnyDefines()
            {
                using (var image = new MagickImage())
                {
                    // A freshly constructed HeicReadDefines must not register
                    // any heic read defines on the image settings.
                    image.Settings.SetDefines(new HeicReadDefines());

                    Assert.Null(image.Settings.GetDefine(MagickFormat.Heic, "depth-image"));
                    Assert.Null(image.Settings.GetDefine(MagickFormat.Heic, "preserve-orientation"));
                }
            }
        }
    }
}
| dlemstra/Magick.NET | tests/Magick.NET.Tests/Formats/Heic/HeicReadDefinesTests/TheConstructor.cs | C# | apache-2.0 | 778 |
/**
* Copyright 2015 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.zuul.filters.http;
import com.netflix.zuul.filters.BaseSyncFilter;
import com.netflix.zuul.filters.FilterType;
import com.netflix.zuul.message.http.HttpRequestMessage;
/**
 * Base class for synchronous filters applied to inbound HTTP requests
 * (consumes an {@link HttpRequestMessage} and returns a request message),
 * pinned to the {@link FilterType#INBOUND} filter type.
 *
 * User: michaels@netflix.com
 * Date: 5/29/15
 * Time: 3:22 PM
 */
public abstract class HttpInboundSyncFilter extends BaseSyncFilter<HttpRequestMessage, HttpRequestMessage>
{
    /** Always {@link FilterType#INBOUND} for this filter family. */
    @Override
    public FilterType filterType() {
        return FilterType.INBOUND;
    }
}
| NiteshKant/zuul | zuul-core/src/main/java/com/netflix/zuul/filters/http/HttpInboundSyncFilter.java | Java | apache-2.0 | 1,063 |
<?php
/* Copyright (C) 2014-2015 apoyl.com. All Rights Reserved.
/* Author: 凹凸曼
/* Email: jar-c@163.com
/* View class Lyc\View\View.class.php
*/
namespace Lyc\View;
/**
 * Minimal view/template renderer.
 *
 * Templates live under a base directory ($tpldir) with a ".phtml" suffix;
 * variables registered via setVar() are exposed to the template scope
 * before the template file is included.
 */
class View {
	protected $tpldir='';
	protected $suffix='.phtml';
	protected $vars=array();
	/**
	 * @param string $tpldir base directory containing templates;
	 *                       must be non-empty (echoes an error and exits otherwise).
	 */
	public function __construct($tpldir){
		if(empty($tpldir)){
			echo 'VIEW ERROR: Template path can not be empty ';
			exit;
		}
		$this->tpldir=$tpldir;
	}
	/**
	 * Renders the template named $file ($tpldir . $file . '.phtml'),
	 * exposing each registered variable as a local variable, then includes it.
	 * Echoes an error and exits when the template file does not exist.
	 */
	public function tpl($file){
		$fullpath=$this->tpldir.$file.$this->suffix;
		if(!file_exists($fullpath)){
			echo 'VIEW ERROR: Template ['.$file.'] cannot exist';
			exit;
		}
		if($this->vars){
			foreach ($this->vars as $k=>$v){
				$$k=$v;
			}
		}
		include $fullpath;
	}
	/**
	 * Registers a template variable.
	 * @param string $k variable name (must be non-empty)
	 * @param mixed  $v value
	 * NOTE(review): ViewException is not defined in this file — presumably it
	 * is autoloaded from the Lyc\View namespace; verify it exists, otherwise
	 * this throw fails with a "class not found" error.
	 */
	public function setVar($k,$v){
		if(!$k) throw new ViewException('VIEW ERROR: Template variable name can not be empty');
		$this->vars[$k]=$v;
	}
}
?>
| apoyl/LycPHP | Lyc/View/View.class.php | PHP | apache-2.0 | 1,138 |
// Copyright 2017-present Kirill Danshin and Gramework contributors
// Copyright 2019-present Highload LTD (UK CN: 11893420)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
package gramework
import (
"bytes"
"context"
"encoding/xml"
"fmt"
"github.com/microcosm-cc/bluemonday"
"github.com/pquerna/ffjson/ffjson"
"github.com/gocarina/gocsv"
"github.com/gramework/runtimer"
)
// ctypes lists the content-type identifiers this package knows about.
// @TODO: add more
var ctypes = []string{
	jsonCT,
	xmlCT,
	csvCT,
}
// ContextFromValue extracts the *gramework.Context stored under
// gramework.ContextKey from a context.Context, avoiding the cost of a
// standard interface type assertion by reading the eface data pointer
// directly via runtimer.
//
// WARNING: returns nil when ctx is nil or carries no gramework.Context;
// a nil ctx additionally logs a warning.
// NOTE(review): the unchecked pointer cast assumes the stored value really is
// a *Context — confirm all writers store it under ContextKey consistently.
func ContextFromValue(ctx context.Context) *Context {
	if ctx == nil {
		internalLog.Warn("ContextFromValue was called with nil context.Context, returning nil")
		return nil
	}

	return (*Context)(runtimer.GetEfaceDataPtr(ctx.Value(ContextKey)))
}
// MWKill kills the current context and stops any user-defined processing.
// This function is intended for use in middlewares.
func (ctx *Context) MWKill() {
	ctx.middlewareKilledReq = true
}
// Sanitize returns `s` sanitized with the app's current bluemonday policy.
//
// To change the sanitizer policy, see (*App).SetSanitizerPolicy.
func (ctx *Context) Sanitize(s string) string {
	return ctx.App.sanitizerPolicy.Sanitize(s)
}
// Sanitizer returns the app's current bluemonday policy.
//
// To change the sanitizer policy, see (*App).SetSanitizerPolicy.
//
// Callers must not mutate the returned policy at runtime; create and
// install a new policy instead.
func (ctx *Context) Sanitizer() *bluemonday.Policy {
	return ctx.App.sanitizerPolicy
}
// SubPrefixes returns the router prefixes that were created using the .Sub() feature.
func (ctx *Context) SubPrefixes() []string {
	return ctx.subPrefixes
}
// ContentType returns the Content-Type request header of the current request.
func (ctx *Context) ContentType() string {
	return string(ctx.Request.Header.Peek(contentType))
}
// ToContext wraps this gramework.Context into a context.Context under
// gramework.ContextKey (see ContextFromValue for the reverse operation).
//
// When parentCtx is provided, its first element is extended; otherwise
// context.Background() is used as the parent.
func (ctx *Context) ToContext(parentCtx ...context.Context) context.Context {
	parent := context.Background()
	if len(parentCtx) > 0 {
		parent = parentCtx[0]
	}

	return context.WithValue(parent, ContextKey, ctx)
}
// RouteArg returns the value of the named route argument, or the empty
// string when no such argument exists.
func (ctx *Context) RouteArg(argName string) string {
	if v, err := ctx.RouteArgErr(argName); err == nil {
		return v
	}

	return emptyString
}
// ToCSV marshals v to CSV and returns the encoded bytes.
// Nothing is written to the client here — the caller sends the bytes.
func (ctx *Context) ToCSV(v interface{}) ([]byte, error) {
	return gocsv.MarshalBytes(v)
}
// ToXML marshals v to XML and returns the encoded bytes.
// Nothing is written to the client here — the caller sends the bytes.
func (ctx *Context) ToXML(v interface{}) ([]byte, error) {
	b := bytes.NewBuffer(nil)
	err := xml.NewEncoder(b).Encode(v)
	return b.Bytes(), err
}
// GETKeys returns the names of the query arguments (GET parameters)
// of the current request; nil when there are none.
func (ctx *Context) GETKeys() []string {
	var res []string
	ctx.Request.URI().QueryArgs().VisitAll(func(key, value []byte) {
		res = append(res, string(key))
	})
	return res
}
// GETKeysBytes returns the names of the query arguments (GET parameters)
// as []byte slices; nil when there are none.
// NOTE(review): the key slices are appended without copying and so appear to
// alias fasthttp's internal buffers — confirm before retaining them past the
// lifetime of the request.
func (ctx *Context) GETKeysBytes() [][]byte {
	var res [][]byte
	ctx.Request.URI().QueryArgs().VisitAll(func(key, value []byte) {
		res = append(res, key)
	})
	return res
}
// GETParams returns all query arguments (GET parameters) as a
// name -> values map; repeated names accumulate multiple values.
func (ctx *Context) GETParams() map[string][]string {
	res := make(map[string][]string)
	ctx.Request.URI().QueryArgs().VisitAll(func(key, value []byte) {
		res[string(key)] = append(res[string(key)], string(value))
	})
	return res
}
// GETParam returns the values of the named query argument (GET parameter),
// or nil when the argument is not present.
func (ctx *Context) GETParam(argName string) []string {
	if values, ok := ctx.GETParams()[argName]; ok {
		return values
	}

	return nil
}
// RouteArgErr returns the value of the named route argument, and
// ErrArgNotFound when the argument is absent.
// Non-string user values are formatted with fmt.Sprintf.
func (ctx *Context) RouteArgErr(argName string) (string, error) {
	i := ctx.UserValue(argName)
	if i == nil {
		return emptyString, ErrArgNotFound
	}
	switch value := i.(type) {
	case string:
		return value, nil
	default:
		return fmt.Sprintf(fmtV, i), nil
	}
}
// ToTLS redirects the client to the same URI over the HTTPS scheme.
func (ctx *Context) ToTLS() {
	u := ctx.URI()
	u.SetScheme(https)
	ctx.Redirect(u.String(), redirectCode)
}
// Forbidden replies with a 403 Forbidden error.
func (ctx *Context) Forbidden() {
	ctx.Error(forbidden, forbiddenCode)
}
// ToJSON serializes v with ffjson and returns the encoded bytes.
// Nothing is written to the client here — the caller sends the bytes.
func (ctx *Context) ToJSON(v interface{}) ([]byte, error) {
	b := bytes.NewBuffer(nil)
	enc := ffjson.NewEncoder(b)
	err := enc.Encode(v)
	return b.Bytes(), err
}
// UnJSONBytes deserializes the JSON in b into the optional target pointer v,
// or into a freshly allocated value when v is omitted.
// Returns the resulting data and any decode error; one of them may be nil.
func (ctx *Context) UnJSONBytes(b []byte, v ...interface{}) (interface{}, error) {
	return UnJSONBytes(b, v...)
}
// UnJSON deserializes the JSON request body into the given pointer.
func (ctx *Context) UnJSON(v interface{}) error {
	return ffjson.NewDecoder().Decode(ctx.Request.Body(), &v)
}
// UnJSONBytes deserializes the JSON in b into the optional target pointer
// (first element of v), or into a freshly allocated value when v is empty.
// Returns the resulting data and any decode error; one of them may be nil.
func UnJSONBytes(b []byte, v ...interface{}) (interface{}, error) {
	if len(v) == 0 {
		var res interface{}
		err := ffjson.NewDecoder().Decode(b, &res)
		return res, err
	}
	err := ffjson.NewDecoder().Decode(b, &v[0])
	return v[0], err
}
// jsonErrorLog sends a 500 response and writes v as the JSON body,
// logging (rather than returning) any serialization failure.
func (ctx *Context) jsonErrorLog(v interface{}) {
	ctx.Err500()
	if err := ctx.JSON(v); err != nil {
		ctx.Logger.WithError(err).Error("JSONError err")
	}
}
// RequestID returns the request ID of the current context's request.
func (ctx *Context) RequestID() string {
	return ctx.requestID
}
| gramework/gramework | context.go | GO | apache-2.0 | 6,234 |
package me.chanjar.weixin.common.bean.menu;
import java.io.Serializable;
import com.google.gson.annotations.SerializedName;
import lombok.Data;
import me.chanjar.weixin.common.util.json.WxGsonBuilder;
/**
 * Menu rule: the match rule of a WeChat menu.
 *
 * @author Daniel Qian
 */
@Data
public class WxMenuRule implements Serializable {
  private static final long serialVersionUID = -4587181819499286670L;

  /**
   * The WeChat API is inconsistent here: it serializes this field as
   * "tag_id" but may deliver it under the different name "group_id" when
   * deserializing, hence the alternate name.
   */
  @SerializedName(value = "tag_id", alternate = "group_id")
  private String tagId;
  private String sex;
  private String country;
  private String province;
  private String city;
  @SerializedName("client_platform_type")
  private String clientPlatformType;
  private String language;

  @Override
  public String toString() {
    return WxGsonBuilder.create().toJson(this);
  }
}
| Wechat-Group/WxJava | weixin-java-common/src/main/java/me/chanjar/weixin/common/bean/menu/WxMenuRule.java | Java | apache-2.0 | 878 |
#!/usr/bin/python
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse, os
import torch
"""
Checkpoints saved by train.py contain not only model parameters but also
optimizer states, losses, a history of generated images, and other statistics.
This information is very useful for development and debugging models, but makes
the saved checkpoints very large. This utility script strips away all extra
information from saved checkpoints, keeping only the saved models.
"""
# CLI flags: strip a single checkpoint (--input_checkpoint/--output_checkpoint)
# and/or every *.pt file in a directory (--input_dir/--output_dir).
# --keep_discriminators=1 (default) keeps discriminator weights as well.
parser = argparse.ArgumentParser()
parser.add_argument('--input_checkpoint', default=None)
parser.add_argument('--output_checkpoint', default=None)
parser.add_argument('--input_dir', default=None)
parser.add_argument('--output_dir', default=None)
parser.add_argument('--keep_discriminators', type=int, default=1)
def main(args):
  """Dispatch to single-file and/or directory stripping based on CLI flags."""
  if args.input_checkpoint is not None:
    handle_checkpoint(args, args.input_checkpoint, args.output_checkpoint)
  if args.input_dir is not None:
    handle_dir(args, args.input_dir, args.output_dir)
def handle_dir(args, input_dir, output_dir):
  """Strip every *.pt checkpoint in input_dir into output_dir (same filename)."""
  for fn in os.listdir(input_dir):
    if not fn.endswith('.pt'):
      continue
    input_path = os.path.join(input_dir, fn)
    output_path = os.path.join(output_dir, fn)
    handle_checkpoint(args, input_path, output_path)
def handle_checkpoint(args, input_path, output_path):
  """Strip a single checkpoint file.

  Loads the checkpoint at input_path, keeps only the model entries (and,
  when args.keep_discriminators == 1, the discriminator entries), and writes
  the slimmed checkpoint to output_path. Everything else — optimizer states,
  losses, generated-image history, other statistics — is dropped.

  Args:
    args: parsed CLI namespace; only keep_discriminators is read.
    input_path: path of the full checkpoint to read.
    output_path: path the stripped checkpoint is written to.
  """
  # map_location='cpu' lets GPU-trained checkpoints be stripped on CPU-only hosts.
  input_checkpoint = torch.load(input_path, map_location='cpu')
  keep = {'args', 'model_state', 'model_kwargs'}
  if args.keep_discriminators == 1:
    keep |= {'d_img_state', 'd_img_kwargs', 'd_obj_state', 'd_obj_kwargs'}
  # Copy only the whitelisted entries into the new checkpoint.
  output_checkpoint = {k: v for k, v in input_checkpoint.items() if k in keep}
  torch.save(output_checkpoint, output_path)
# Script entry point: parse flags and strip the requested checkpoint(s).
if __name__ == '__main__':
  args = parser.parse_args()
  main(args)
| google/sg2im | scripts/strip_checkpoint.py | Python | apache-2.0 | 2,313 |
package poussecafe.environment;
import java.util.Collection;
import java.util.Objects;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import poussecafe.messaging.Message;
import poussecafe.processing.ListenersSet;
import poussecafe.util.ReflectionUtils;
/**
 * Registers message listeners and indexes them by consumed message class.
 *
 * Wildcard listeners are additionally registered as such; listeners consuming
 * an abstract message class are also registered under the concrete
 * implementation class resolved from the {@link Environment}.
 */
public class MessageListenerRegistrar {

    /**
     * Registers the listener under its consumed message class. Wildcard
     * listeners are also registered as wildcard; otherwise, when the consumed
     * class is abstract, the listener is also registered under the message
     * implementation class resolved from the environment.
     */
    public void registerListener(MessageListener listener) {
        logger.debug("Registering listener {}", listener);
        Class<? extends Message> messageClass = listener.consumedMessageClass();
        messageListenersSetBuilder.registerListenerForMessageClass(listener, messageClass);
        if(listener.isWildcard()) {
            messageListenersSetBuilder.registerWildcardListener(listener);
        } else if(ReflectionUtils.isAbstract(messageClass)) {
            Class<? extends Message> messageImplementationClass = environment.messageImplementationClass(messageClass);
            messageListenersSetBuilder.registerListenerForMessageClass(listener, messageImplementationClass);
        }
    }

    private Logger logger = LoggerFactory.getLogger(getClass());

    private Environment environment;

    /** Injects the environment used to resolve message implementation classes (non-null). */
    public void setEnvironment(Environment environment) {
        Objects.requireNonNull(environment);
        this.environment = environment;
    }

    private MessageListenersSetBuilder messageListenersSetBuilder = new MessageListenersSetBuilder();

    /** Returns the listeners registered for the given message class. */
    public Set<MessageListener> getListeners(Class<? extends Message> messageClass) {
        return messageListenersSetBuilder.messageListenersOf(messageClass);
    }

    /** Returns all registered listeners. */
    public Collection<MessageListener> allListeners() {
        return messageListenersSetBuilder.messageListeners();
    }

    /** Returns the underlying listeners set. */
    public ListenersSet listenersSet() {
        return messageListenersSetBuilder;
    }
}
| pousse-cafe/pousse-cafe | pousse-cafe-core/src/main/java/poussecafe/environment/MessageListenerRegistrar.java | Java | apache-2.0 | 1,787 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class MasterException(Exception):
    """Error raised for master-related failures."""
    pass
class PathException(Exception):
    """Error raised for path-related failures."""
    pass
class CommunicableException(Exception):
    """Error raised for communicable-related failures."""
    pass
class LiveActivityException(Exception):
    """Error raised for live-activity-related failures."""
    pass
class StatusableException(Exception):
    """Error raised for statusable-related failures."""
    pass
class ActivityException(Exception):
    """Error raised for activity-related failures."""
    pass
class SerializerException(Exception):
    """Error raised for serialization failures."""
    pass
class ControllerNotFoundException(Exception):
    """Raised when a controller cannot be found."""
    pass
class LiveActivityGroupNotFoundException(Exception):
    """Raised when a live activity group cannot be found."""
    pass
class LiveActivityNotFoundException(Exception):
    """Raised when a live activity cannot be found."""
    pass
class ActivityNotFoundException(Exception):
    """Raised when an activity cannot be found."""
    pass
class SpaceNotFoundException(Exception):
    """Raised when a space cannot be found."""
    pass
| EndPointCorp/interactivespaces-python-api | interactivespaces/exception.py | Python | apache-2.0 | 656 |
using System;
namespace Typewriter.Configuration
{
    /// <summary>
    /// Determines how partial classes and interfaces are rendered.
    /// </summary>
    public enum PartialRenderingMode
    {
        /// <summary>
        /// Partial types are rendered as defined in the C# source, containing only the parts defined in each file.
        /// (This is the default rendering mode.)
        /// </summary>
        Partial,

        /// <summary>
        /// Partial type definitions are combined into a single type, acting as if the full type was
        /// defined only once in the C# source (using the filename of the first file containing a part of the type).
        /// (Unsupported in Visual Studio 2013.)
        /// </summary>
        Combined,

        /// <summary>
        /// [deprecated] A combined type definition is rendered for each file containing a partial definition.
        /// (Unsupported in Visual Studio 2013.)
        /// </summary>
        [Obsolete]
        Legacy
    }
}
| frhagn/Typewriter | src/CodeModel/Configuration/PartialRenderingMode.cs | C# | apache-2.0 | 1,005 |
/*
This question is from https://leetcode.com/problems/pacific-atlantic-water-flow/
Difficulty: medium
Given an m x n matrix of non-negative integers representing the height of each unit cell in a continent, the "Pacific ocean" touches the left and top edges of the matrix and the "Atlantic ocean" touches the right and bottom edges.
Water can only flow in four directions (up, down, left, or right) from a cell to another one with height equal or lower.
Find the list of grid coordinates where water can flow to both the Pacific and Atlantic ocean.
Note:
The order of returned grid coordinates does not matter.
Both m and n are less than 150.
Example:
Given the following 5x5 matrix:
Pacific ~ ~ ~ ~ ~
~ 1 2 2 3 (5) *
~ 3 2 3 (4) (4) *
~ 2 4 (5) 3 1 *
~ (6) (7) 1 4 5 *
~ (5) 1 1 2 4 *
* * * * * Atlantic
Return:
[[0, 4], [1, 3], [1, 4], [2, 2], [3, 0], [3, 1], [4, 0]] (positions with parentheses in above matrix).
*/
// T:O(M*N), S:O(M*N), 3ms
// DFS from the ocean borders: a cell drains to an ocean iff it is reachable
// from that ocean's border moving only "uphill" (height never decreases along
// the reverse flow). visited[r][c] encodes reachability:
// 0 = neither, PACIFIC (1) = pacific only, ATLANTIC (-1) = atlantic only,
// BOTH (2) = reachable from both oceans (part of the answer).
// T: O(M*N), S: O(M*N)
class Solution {
    private static final int PACIFIC = 1;
    private static final int ATLANTIC = -1;
    private static final int BOTH = 2;

    List<int[]> ans;
    int[][] visited;
    int[][] direction;
    int row;
    int col;
    int[][] graph;

    public List<int[]> pacificAtlantic(int[][] matrix) {
        ans = new LinkedList<>();
        if (matrix == null || matrix.length == 0) {
            return ans;
        }

        graph = matrix;
        row = matrix.length;
        col = matrix[0].length;
        visited = new int[row][col];
        direction = new int[][]{{1, 0}, {-1, 0}, {0, 1}, {0, -1}};

        // Seed the search from every border cell of each ocean.
        for (int r = 0; r < row; r++) {
            dfs(r, 0, PACIFIC);        // left edge touches the Pacific
            dfs(r, col - 1, ATLANTIC); // right edge touches the Atlantic
        }
        for (int c = 0; c < col; c++) {
            dfs(0, c, PACIFIC);        // top edge touches the Pacific
            dfs(row - 1, c, ATLANTIC); // bottom edge touches the Atlantic
        }
        return ans;
    }

    /** Marks (r, c) as reachable by {@code ocean} and climbs to equal-or-higher neighbors. */
    private void dfs(int r, int c, int ocean) {
        int state = visited[r][c];
        if (state + ocean == 0) {
            // Previously reached by the opposite ocean: this cell drains to both.
            ans.add(new int[]{r, c});
            visited[r][c] = BOTH;
        } else if (state != BOTH) {
            visited[r][c] = ocean;
        }

        for (int[] step : direction) {
            int nr = r + step[0];
            int nc = c + step[1];
            if (nr < 0 || nr >= row || nc < 0 || nc >= col) {
                continue; // off the grid
            }
            int next = visited[nr][nc];
            if (next == BOTH || next == ocean) {
                continue; // already known for this ocean
            }
            if (graph[r][c] > graph[nr][nc]) {
                continue; // water cannot flow from the neighbor down to this cell
            }
            dfs(nr, nc, ocean);
        }
    }
}
/*
* K-scope
* Copyright 2012-2013 RIKEN, Japan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.riken.kscope.action;
import java.awt.event.ActionEvent;
import jp.riken.kscope.common.ANALYSIS_PANEL;
import jp.riken.kscope.model.ProjectModel;
import jp.riken.kscope.model.PropertiesTableModel;
import jp.riken.kscope.service.AppController;
import jp.riken.kscope.service.ProjectService;
/**
 * Project property action.
 * @author RIKEN
 */
public class ProjectPropertyAction extends ActionBase {

    /**
     * Constructor.
     * @param controller    application controller
     */
    public ProjectPropertyAction(AppController controller) {
        super(controller);
    }

    /**
     * Action fired event.
     * @param event     event information
     */
    @Override
    public void actionPerformed(ActionEvent event) {
        // Apply the project properties to the properties table model.
        setProperties();

        // Activate the properties tab.
        this.controller.getMainframe().getPanelAnalysisView().setSelectedPanel(ANALYSIS_PANEL.PROPARTIES);
    }

    /**
     * Applies the project's properties to the properties table model.
     */
    public void setProperties() {
        // Get the property settings model.
        PropertiesTableModel model = this.controller.getPropertiesTableModel();

        // Get the project properties and push them into the model.
        ProjectModel project = this.controller.getProjectModel();
        ProjectService service = new ProjectService(project);
        service.setProperties(model);
    }
}
| K-scope/K-scope | src/jp/riken/kscope/action/ProjectPropertyAction.java | Java | apache-2.0 | 2,165 |
/**
*
*/
/**
* @author Jerry Goodnough
*
*/
package org.socraticgrid.hl7.services.eps.accessclients.publication; | SocraticGrid/Services-Backup | VACDS-EPS-Service-Library/src/main/java/org/socraticgrid/hl7/services/eps/accessclients/publication/package-info.java | Java | apache-2.0 | 118 |
// Generated from /POI/java/org/apache/poi/ss/formula/ptg/ExternSheetNameResolver.java
#include <org/apache/poi/ss/formula/ptg/ExternSheetNameResolver.hpp>
#include <java/lang/ClassCastException.hpp>
#include <java/lang/NullPointerException.hpp>
#include <java/lang/Object.hpp>
#include <java/lang/String.hpp>
#include <java/lang/StringBuffer.hpp>
#include <org/apache/poi/ss/formula/EvaluationWorkbook_ExternalSheet.hpp>
#include <org/apache/poi/ss/formula/EvaluationWorkbook_ExternalSheetRange.hpp>
#include <org/apache/poi/ss/formula/FormulaRenderingWorkbook.hpp>
#include <org/apache/poi/ss/formula/SheetNameFormatter.hpp>
// Emulates a Java cast: nullptr passes through unchanged, and a failed
// dynamic_cast throws java.lang.ClassCastException instead of yielding nullptr.
template<typename T, typename U>
static T java_cast(U* u)
{
    if(!u) return static_cast<T>(nullptr);
    auto t = dynamic_cast<T>(u);
    if(!t) throw new ::java::lang::ClassCastException();
    return t;
}
// Null-pointer check: emulates Java's implicit null dereference check by
// throwing java.lang.NullPointerException when the argument is null.
template<typename T>
static T* npc(T* t)
{
    if(!t) throw new ::java::lang::NullPointerException();
    return t;
}
// Generated two-stage construction mirroring the Java constructor chain:
// the default_init_tag overload allocates and runs static init (clinit)
// without executing the Java constructor body; ctor() then performs the
// Java constructor body (here it only chains to the superclass).
poi::ss::formula::ptg::ExternSheetNameResolver::ExternSheetNameResolver(const ::default_init_tag&)
    : super(*static_cast< ::default_init_tag* >(0))
{
    clinit();
}

poi::ss::formula::ptg::ExternSheetNameResolver::ExternSheetNameResolver()
    : ExternSheetNameResolver(*static_cast< ::default_init_tag* >(0))
{
    ctor();
}

void poi::ss::formula::ptg::ExternSheetNameResolver::ctor()
{
    super::ctor();
}
// Builds a cell reference prefixed with its sheet name for the given
// extern-sheet index: "Sheet!ref", "[Workbook]Sheet!ref", or a
// "First:Last!ref" range form. When no ExternalSheet record exists, the
// first/last sheet names are resolved through the workbook; an empty first
// sheet name yields "#REF".
java::lang::String* poi::ss::formula::ptg::ExternSheetNameResolver::prependSheetName(::poi::ss::formula::FormulaRenderingWorkbook* book, int32_t field_1_index_extern_sheet, ::java::lang::String* cellRefText)
{
    clinit();
    auto externalSheet = npc(book)->getExternalSheet(field_1_index_extern_sheet);
    ::java::lang::StringBuffer* sb;
    if(externalSheet != nullptr) {
        auto wbName = npc(externalSheet)->getWorkbookName();
        auto sheetName = npc(externalSheet)->getSheetName();
        if(wbName != nullptr) {
            // External workbook: render "[Workbook]Sheet" via the formatter.
            sb = new ::java::lang::StringBuffer(npc(wbName)->length() + npc(sheetName)->length() + npc(cellRefText)->length()+ int32_t(4));
            ::poi::ss::formula::SheetNameFormatter::appendFormat(sb, wbName, sheetName);
        } else {
            sb = new ::java::lang::StringBuffer(npc(sheetName)->length() + npc(cellRefText)->length() + int32_t(4));
            ::poi::ss::formula::SheetNameFormatter::appendFormat(sb, sheetName);
        }
        if(dynamic_cast< ::poi::ss::formula::EvaluationWorkbook_ExternalSheetRange* >(externalSheet) != nullptr) {
            // Sheet range (e.g. "First:Last"): append the last sheet when it differs.
            auto r = java_cast< ::poi::ss::formula::EvaluationWorkbook_ExternalSheetRange* >(externalSheet);
            if(!npc(npc(r)->getFirstSheetName())->equals(static_cast< ::java::lang::Object* >(npc(r)->getLastSheetName()))) {
                npc(sb)->append(u':');
                ::poi::ss::formula::SheetNameFormatter::appendFormat(sb, npc(r)->getLastSheetName());
            }
        }
    } else {
        // No ExternalSheet record: resolve names directly through the workbook.
        auto firstSheetName = npc(book)->getSheetFirstNameByExternSheet(field_1_index_extern_sheet);
        auto lastSheetName = npc(book)->getSheetLastNameByExternSheet(field_1_index_extern_sheet);
        sb = new ::java::lang::StringBuffer(npc(firstSheetName)->length() + npc(cellRefText)->length() + int32_t(4));
        if(npc(firstSheetName)->length() < 1) {
            // Empty sheet name marks a deleted/broken reference.
            npc(sb)->append(u"#REF"_j);
        } else {
            ::poi::ss::formula::SheetNameFormatter::appendFormat(sb, firstSheetName);
            if(!npc(firstSheetName)->equals(static_cast< ::java::lang::Object* >(lastSheetName))) {
                npc(sb)->append(u':');
                npc(sb)->append(lastSheetName);
            }
        }
    }
    npc(sb)->append(u'!');
    npc(sb)->append(cellRefText);
    return npc(sb)->toString();
}
// Reflection support: lazily-initialized java.lang.Class object for this type.
extern java::lang::Class *class_(const char16_t *c, int n);
java::lang::Class* poi::ss::formula::ptg::ExternSheetNameResolver::class_()
{
    static ::java::lang::Class* c = ::class_(u"org.apache.poi.ss.formula.ptg.ExternSheetNameResolver", 53);
    return c;
}

java::lang::Class* poi::ss::formula::ptg::ExternSheetNameResolver::getClass0()
{
    return class_();
}
| pebble2015/cpoi | src/org/apache/poi/ss/formula/ptg/ExternSheetNameResolver.cpp | C++ | apache-2.0 | 4,078 |
package org.bndtools.rt.browserkit.api;
/**
 * Configuration property names used by the BrowserKit runtime.
 */
public interface BrowserKitConstants {
	/**
	 * Specifies the initial title of the application window.
	 */
	public static final String WINDOW_TITLE = "browserkit.title";
}
| bndtools/bndtools-rt | org.bndtools.rt.browserkit/src/org/bndtools/rt/browserkit/api/BrowserKitConstants.java | Java | apache-2.0 | 215 |
package com.darknova.postcardmailer.scraper.config;
import com.darknova.postcardmailer.scraper.Scraper;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.client.RestTemplate;
/**
 * Spring {@link Configuration} for the data scraper: scans the
 * {@link Scraper} component package and exposes shared beans.
 */
@Configuration
@ComponentScan(basePackageClasses = Scraper.class)
public class ScraperConfig {

    /** Shared {@link RestTemplate} bean for HTTP calls made by scraper components. */
    @Bean
    public RestTemplate restTemplate() {
        return new RestTemplate();
    }
}
| awushensky/postcard_mailer | scraper/src/main/java/com/darknova/postcardmailer/scraper/config/ScraperConfig.java | Java | apache-2.0 | 608 |
package org.swiften.xtestkit.base.type;
import org.jetbrains.annotations.NotNull;
/**
 * Created by haipham on 4/10/17.
 */

/**
 * This interface provides an app package name.
 */
@FunctionalInterface
public interface AppPackageProviderType {
    /**
     * Get the app package name.
     * @return {@link String} value.
     */
    @NotNull String appPackage();
}
| protoman92/XTestKit | src/main/java/org/swiften/xtestkit/base/type/AppPackageProviderType.java | Java | apache-2.0 | 362 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Network Hosts are responsible for allocating ips and setting up network.
There are multiple backend drivers that handle specific types of networking
topologies. All of the network commands are issued to a subclass of
:class:`NetworkManager`.
**Related Flags**
:network_driver: Driver to use for network creation
:flat_network_bridge: Bridge device for simple network instances
:flat_interface: FlatDhcp will bridge into this interface if set
:flat_network_dns: Dns for simple network
:vlan_start: First VLAN for private networks
:vpn_ip: Public IP for the cloudpipe VPN servers
:vpn_start: First Vpn port for private networks
:cnt_vpn_clients: Number of addresses reserved for vpn clients
:network_size: Number of addresses in each private subnet
:floating_range: Floating IP address block
:fixed_range: Fixed IP address block
:date_dhcp_on_disassociate: Whether to update dhcp when fixed_ip
is disassociated
:fixed_ip_disassociate_timeout: Seconds after which a deallocated ip
is disassociated
:create_unique_mac_address_attempts: Number of times to attempt creating
a unique mac address
"""
import datetime
import itertools
import math
import netaddr
import socket
from eventlet import greenpool
from nova import context
from nova import db
from nova import exception
from nova import flags
from nova import ipv6
from nova import log as logging
from nova import manager
from nova import quota
from nova import utils
from nova import rpc
from nova.network import api as network_api
from nova.compute import api as compute_api
import random
LOG = logging.getLogger("nova.network.manager")
FLAGS = flags.FLAGS
flags.DEFINE_string('flat_network_bridge', None,
'Bridge for simple network instances')
flags.DEFINE_string('flat_network_dns', '8.8.4.4',
'Dns for simple network')
flags.DEFINE_bool('flat_injected', False,
'Whether to attempt to inject network setup into guest')
flags.DEFINE_string('flat_interface', None,
'FlatDhcp will bridge into this interface if set')
flags.DEFINE_integer('vlan_start', 100, 'First VLAN for private networks')
flags.DEFINE_string('vlan_interface', None,
'vlans will bridge into this interface if set')
flags.DEFINE_integer('num_networks', 1, 'Number of networks to support')
flags.DEFINE_string('vpn_ip', '$my_ip',
'Public IP for the cloudpipe VPN servers')
flags.DEFINE_integer('vpn_start', 1000, 'First Vpn port for private networks')
flags.DEFINE_bool('multi_host', False,
'Default value for multi_host in networks')
flags.DEFINE_integer('network_size', 256,
'Number of addresses in each private subnet')
flags.DEFINE_string('floating_range', '4.4.4.0/24',
'Floating IP address block')
flags.DEFINE_string('fixed_range', '10.0.0.0/8', 'Fixed IP address block')
flags.DEFINE_string('fixed_range_v6', 'fd00::/48', 'Fixed IPv6 address block')
flags.DEFINE_string('gateway_v6', None, 'Default IPv6 gateway')
flags.DEFINE_integer('cnt_vpn_clients', 0,
'Number of addresses reserved for vpn clients')
flags.DEFINE_string('network_driver', 'nova.network.linux_net',
'Driver to use for network creation')
flags.DEFINE_bool('update_dhcp_on_disassociate', False,
'Whether to update dhcp when fixed_ip is disassociated')
flags.DEFINE_integer('fixed_ip_disassociate_timeout', 600,
'Seconds after which a deallocated ip is disassociated')
flags.DEFINE_integer('create_unique_mac_address_attempts', 5,
'Number of attempts to create unique mac address')
flags.DEFINE_bool('auto_assign_floating_ip', False,
'Autoassigning floating ip to VM')
flags.DEFINE_string('network_host', socket.gethostname(),
'Network host to use for ip allocation in flat modes')
flags.DEFINE_bool('fake_call', False,
'If True, skip using the queue and make local calls')
flags.DEFINE_bool('force_dhcp_release', False,
'If True, send a dhcp release on instance termination')
class AddressAlreadyAllocated(exception.Error):
    """Raised when an address that is already allocated is requested again."""
    pass
class RPCAllocateFixedIP(object):
    """Mixin class originally for FlatDHCP and VLAN network managers.

    Used since they share code to RPC.call allocate_fixed_ip on the
    correct network host to configure dnsmasq.
    """

    def _allocate_fixed_ips(self, context, instance_id, host, networks,
                            **kwargs):
        """Calls allocate_fixed_ip once for each network.

        Optional kwargs:
          vpn: passed through to allocate_fixed_ip.
          requested_networks: iterable of (uuid, fixed_ip) pairs; when a
            pair matches a network's uuid, its fixed_ip is used as the
            address for that network.
        """
        green_pool = greenpool.GreenPool()

        vpn = kwargs.get('vpn')
        requested_networks = kwargs.get('requested_networks')

        for network in networks:
            # Pick the caller-requested address for this network, if any.
            address = None
            if requested_networks is not None:
                address = next((fixed_ip
                                for (uuid, fixed_ip) in requested_networks
                                if network['uuid'] == uuid), None)

            # NOTE(vish): if we are not multi_host pass to the network host
            if not network['multi_host']:
                host = network['host']
            # NOTE(vish): if there is no network host, set one
            if host is None:
                host = rpc.call(context, FLAGS.network_topic,
                                {'method': 'set_network_host',
                                 'args': {'network_ref': network}})
            if host != self.host:
                # need to call allocate_fixed_ip to correct network host
                topic = self.db.queue_get_for(context,
                                              FLAGS.network_topic,
                                              host)
                args = {'instance_id': instance_id,
                        'network_id': network['id'],
                        'address': address,
                        'vpn': vpn}
                green_pool.spawn_n(rpc.call, context, topic,
                                   {'method': '_rpc_allocate_fixed_ip',
                                    'args': args})
            else:
                # i am the correct host, run here
                self.allocate_fixed_ip(context, instance_id, network,
                                       vpn=vpn, address=address)

        # wait for all of the allocates (if any) to finish
        green_pool.waitall()

    def _rpc_allocate_fixed_ip(self, context, instance_id, network_id,
                               **kwargs):
        """Sits in between _allocate_fixed_ips and allocate_fixed_ip to
        perform network lookup on the far side of rpc.
        """
        network = self.db.network_get(context, network_id)
        self.allocate_fixed_ip(context, instance_id, network, **kwargs)
class FloatingIP(object):
    """Mixin class for adding floating IP functionality to a manager.

    Expects the mixing class to provide self.db, self.driver,
    self.network_api and self.host.
    """
    def init_host_floating_ips(self):
        """Configures floating ips owned by host."""
        admin_context = context.get_admin_context()
        try:
            floating_ips = self.db.floating_ip_get_all_by_host(admin_context,
                                                               self.host)
        except exception.NotFound:
            # no floating ips on this host yet; nothing to configure
            return
        for floating_ip in floating_ips:
            # only re-bind addresses that are associated with a fixed ip
            if floating_ip.get('fixed_ip', None):
                fixed_address = floating_ip['fixed_ip']['address']
                # NOTE(vish): The False here is because we ignore the case
                #             that the ip is already bound.
                self.driver.bind_floating_ip(floating_ip['address'], False)
                self.driver.ensure_floating_forward(floating_ip['address'],
                                                    fixed_address)
    def allocate_for_instance(self, context, **kwargs):
        """Handles allocating the floating IP resources for an instance.
        calls super class allocate_for_instance() as well
        rpc.called by network_api
        """
        instance_id = kwargs.get('instance_id')
        project_id = kwargs.get('project_id')
        requested_networks = kwargs.get('requested_networks')
        LOG.debug(_("floating IP allocation for instance |%s|"), instance_id,
                  context=context)
        # call the next inherited class's allocate_for_instance()
        # which is currently the NetworkManager version
        # do this first so fixed ip is already allocated
        ips = super(FloatingIP, self).allocate_for_instance(context, **kwargs)
        if FLAGS.auto_assign_floating_ip:
            # allocate a floating ip (public_ip is just the address string)
            public_ip = self.allocate_floating_ip(context, project_id)
            # set auto_assigned column to true for the floating ip
            self.db.floating_ip_set_auto_assigned(context, public_ip)
            # get the floating ip object from public_ip string
            floating_ip = self.db.floating_ip_get_by_address(context,
                                                             public_ip)
            # get the first fixed_ip belonging to the instance
            fixed_ips = self.db.fixed_ip_get_by_instance(context, instance_id)
            fixed_ip = fixed_ips[0] if fixed_ips else None
            # call to correct network host to associate the floating ip
            self.network_api.associate_floating_ip(context,
                                                   floating_ip,
                                                   fixed_ip,
                                                   affect_auto_assigned=True)
        return ips
    def deallocate_for_instance(self, context, **kwargs):
        """Handles deallocating floating IP resources for an instance.
        calls super class deallocate_for_instance() as well.
        rpc.called by network_api
        """
        instance_id = kwargs.get('instance_id')
        LOG.debug(_("floating IP deallocation for instance |%s|"), instance_id,
                  context=context)
        fixed_ips = self.db.fixed_ip_get_by_instance(context, instance_id)
        # add to kwargs so we can pass to super to save a db lookup there
        kwargs['fixed_ips'] = fixed_ips
        for fixed_ip in fixed_ips:
            # disassociate floating ips related to fixed_ip
            for floating_ip in fixed_ip.floating_ips:
                address = floating_ip['address']
                self.network_api.disassociate_floating_ip(context, address)
                # deallocate if auto_assigned
                if floating_ip['auto_assigned']:
                    self.network_api.release_floating_ip(context,
                                                         address,
                                                         True)
        # call the next inherited class's deallocate_for_instance()
        # which is currently the NetworkManager version
        # call this after so floating IPs are handled first
        super(FloatingIP, self).deallocate_for_instance(context, **kwargs)
    def allocate_floating_ip(self, context, project_id):
        """Gets an floating ip from the pool.

        Raises quota.QuotaError if the project's floating ip quota is
        exhausted.  Returns the allocated address string.
        """
        # NOTE(tr3buchet): all networks hosts in zone now use the same pool
        LOG.debug("QUOTA: %s" % quota.allowed_floating_ips(context, 1))
        if quota.allowed_floating_ips(context, 1) < 1:
            LOG.warn(_('Quota exceeded for %s, tried to allocate '
                       'address'),
                     context.project_id)
            raise quota.QuotaError(_('Address quota exceeded. You cannot '
                                     'allocate any more addresses'))
        # TODO(vish): add floating ips through manage command
        return self.db.floating_ip_allocate_address(context,
                                                    project_id)
    def associate_floating_ip(self, context, floating_address, fixed_address):
        """Associates an floating ip to a fixed ip.

        Raises FloatingIpAlreadyInUse when the floating ip is already
        associated with some fixed ip.
        """
        floating_ip = self.db.floating_ip_get_by_address(context,
                                                         floating_address)
        if floating_ip['fixed_ip']:
            raise exception.FloatingIpAlreadyInUse(
                            address=floating_ip['address'],
                            fixed_ip=floating_ip['fixed_ip']['address'])
        # record the association, then program the local driver (NAT rules)
        self.db.floating_ip_fixed_ip_associate(context,
                                               floating_address,
                                               fixed_address,
                                               self.host)
        self.driver.bind_floating_ip(floating_address)
        self.driver.ensure_floating_forward(floating_address, fixed_address)
    def disassociate_floating_ip(self, context, floating_address):
        """Disassociates a floating ip."""
        fixed_address = self.db.floating_ip_disassociate(context,
                                                         floating_address)
        # tear down the driver binding and forwarding rules
        self.driver.unbind_floating_ip(floating_address)
        self.driver.remove_floating_forward(floating_address, fixed_address)
    def deallocate_floating_ip(self, context, floating_address):
        """Returns an floating ip to the pool."""
        self.db.floating_ip_deallocate(context, floating_address)
class NetworkManager(manager.SchedulerDependentManager):
    """Implements common network manager functionality.

    This class must be subclassed to support specific topologies.

    host management:
        hosts configure themselves for networks they are assigned to in the
        table upon startup. If there are networks in the table which do not
        have hosts, those will be filled in and have hosts configured
        as the hosts pick them up one at time during their periodic task.
        The one at a time part is to flatten the layout to help scale
    """

    # If True, this manager requires VIF to create a bridge.
    SHOULD_CREATE_BRIDGE = False

    # If True, this manager requires VIF to create VLAN tag.
    SHOULD_CREATE_VLAN = False

    # If True, periodic_tasks disassociates stale fixed ips (FlatManager
    # disables this because it never disassociates fixed ips).
    timeout_fixed_ips = True

    def __init__(self, network_driver=None, *args, **kwargs):
        """Load the L3 driver and API helpers.

        :param network_driver: import path of the driver class; defaults
                               to FLAGS.network_driver.
        """
        if not network_driver:
            network_driver = FLAGS.network_driver
        self.driver = utils.import_object(network_driver)
        self.network_api = network_api.API()
        self.compute_api = compute_api.API()
        super(NetworkManager, self).__init__(service_name='network',
                                             *args, **kwargs)

    @utils.synchronized('get_dhcp')
    def _get_dhcp_ip(self, context, network_ref, host=None):
        """Get the proper dhcp address to listen on."""
        # NOTE(vish): this is for compatibility
        if not network_ref['multi_host']:
            return network_ref['gateway']

        if not host:
            host = self.host
        network_id = network_ref['id']
        try:
            fip = self.db.fixed_ip_get_by_network_host(context,
                                                       network_id,
                                                       host)
            return fip['address']
        except exception.FixedIpNotFoundForNetworkHost:
            # no ip reserved for this host on the network yet; grab one
            # from the pool
            elevated = context.elevated()
            return self.db.fixed_ip_associate_pool(elevated,
                                                   network_id,
                                                   host=host)

    def init_host(self):
        """Do any initialization that needs to be run if this is a
        standalone service.
        """
        # NOTE(vish): Set up networks for which this host already has
        #             an ip address.
        ctxt = context.get_admin_context()
        for network in self.db.network_get_all_by_host(ctxt, self.host):
            self._setup_network(ctxt, network)

    def periodic_tasks(self, context=None):
        """Tasks to be run at a periodic interval."""
        super(NetworkManager, self).periodic_tasks(context)
        if self.timeout_fixed_ips:
            # disassociate fixed ips that have been deallocated but not
            # released within the configured timeout
            now = utils.utcnow()
            timeout = FLAGS.fixed_ip_disassociate_timeout
            time = now - datetime.timedelta(seconds=timeout)
            num = self.db.fixed_ip_disassociate_all_by_timeout(context,
                                                               self.host,
                                                               time)
            if num:
                LOG.debug(_('Disassociated %s stale fixed ip(s)'), num)

    def set_network_host(self, context, network_ref):
        """Safely sets the host of the network."""
        LOG.debug(_('setting network host'), context=context)
        host = self.db.network_set_host(context,
                                        network_ref['id'],
                                        self.host)
        return host

    def _do_trigger_security_group_members_refresh_for_instance(self,
                                                                instance_id):
        """Ask compute to refresh rules for the instance's security groups."""
        admin_context = context.get_admin_context()
        instance_ref = self.db.instance_get(admin_context, instance_id)
        groups = instance_ref['security_groups']
        group_ids = [group['id'] for group in groups]
        self.compute_api.trigger_security_group_members_refresh(admin_context,
                                                                group_ids)

    def _get_networks_for_instance(self, context, instance_id, project_id,
                                   requested_networks=None):
        """Determine & return which networks an instance should connect to."""
        # TODO(tr3buchet) maybe this needs to be updated in the future if
        #                 there is a better way to determine which networks
        #                 a non-vlan instance should connect to
        if requested_networks is not None and len(requested_networks) != 0:
            network_uuids = [uuid for (uuid, fixed_ip) in requested_networks]
            networks = self.db.network_get_all_by_uuids(context,
                                                        network_uuids)
        else:
            try:
                networks = self.db.network_get_all(context)
            except exception.NoNetworksFound:
                return []
        # return only networks which are not vlan networks
        return [network for network in networks if
                not network['vlan']]

    def allocate_for_instance(self, context, **kwargs):
        """Handles allocating the various network resources for an instance.

        rpc.called by network_api.  Allocates mac addresses (vifs) and
        fixed ips, then returns the network info list for the instance.
        """
        instance_id = kwargs.pop('instance_id')
        host = kwargs.pop('host')
        project_id = kwargs.pop('project_id')
        type_id = kwargs.pop('instance_type_id')
        requested_networks = kwargs.get('requested_networks')
        vpn = kwargs.pop('vpn')
        admin_context = context.elevated()
        LOG.debug(_("network allocations for instance %s"), instance_id,
                  context=context)
        networks = self._get_networks_for_instance(admin_context,
                                        instance_id, project_id,
                                        requested_networks=requested_networks)
        self._allocate_mac_addresses(context, instance_id, networks)
        self._allocate_fixed_ips(admin_context, instance_id,
                                 host, networks, vpn=vpn,
                                 requested_networks=requested_networks)
        return self.get_instance_nw_info(context, instance_id, type_id, host)

    def deallocate_for_instance(self, context, **kwargs):
        """Handles deallocating various network resources for an instance.

        rpc.called by network_api
        kwargs can contain fixed_ips to circumvent another db lookup
        """
        instance_id = kwargs.pop('instance_id')
        try:
            fixed_ips = (kwargs.get('fixed_ips') or
                         self.db.fixed_ip_get_by_instance(context,
                                                          instance_id))
        except exception.FixedIpNotFoundForInstance:
            fixed_ips = []
        LOG.debug(_("network deallocation for instance |%s|"), instance_id,
                  context=context)
        # deallocate fixed ips
        for fixed_ip in fixed_ips:
            self.deallocate_fixed_ip(context, fixed_ip['address'], **kwargs)

        # deallocate vifs (mac addresses)
        self.db.virtual_interface_delete_by_instance(context, instance_id)

    def get_instance_nw_info(self, context, instance_id,
                             instance_type_id, host):
        """Creates network info list for instance.

        called by allocate_for_instance and network_api
        context needs to be elevated
        :returns: network info list [(network,info),(network,info)...]
        where network = dict containing pertinent data from a network db object
        and info = dict containing pertinent networking data
        """
        # TODO(tr3buchet) should handle floating IPs as well?
        try:
            fixed_ips = self.db.fixed_ip_get_by_instance(context, instance_id)
        except exception.FixedIpNotFoundForInstance:
            LOG.warn(_('No fixed IPs for instance %s'), instance_id)
            fixed_ips = []

        vifs = self.db.virtual_interface_get_by_instance(context, instance_id)
        flavor = self.db.instance_type_get(context, instance_type_id)
        network_info = []
        # a vif has an address, instance_id, and network_id
        # it is also joined to the instance and network given by those IDs
        for vif in vifs:
            network = vif['network']

            if network is None:
                continue

            # determine which of the instance's IPs belong to this network
            network_IPs = [fixed_ip['address'] for fixed_ip in fixed_ips if
                           fixed_ip['network_id'] == network['id']]

            # TODO(tr3buchet) eventually "enabled" should be determined
            def ip_dict(ip):
                # per-address info for an ipv4 address on this network
                return {
                    'ip': ip,
                    'netmask': network['netmask'],
                    'enabled': '1'}

            def ip6_dict():
                # the single ipv6 address is derived from the mac address
                return {
                    'ip': ipv6.to_global(network['cidr_v6'],
                                         vif['address'],
                                         network['project_id']),
                    'netmask': network['netmask_v6'],
                    'enabled': '1'}

            network_dict = {
                'bridge': network['bridge'],
                'id': network['id'],
                'cidr': network['cidr'],
                'cidr_v6': network['cidr_v6'],
                'injected': network['injected'],
                'vlan': network['vlan'],
                'bridge_interface': network['bridge_interface'],
                'multi_host': network['multi_host']}
            if network['multi_host']:
                dhcp_server = self._get_dhcp_ip(context, network, host)
            else:
                dhcp_server = self._get_dhcp_ip(context,
                                                network,
                                                network['host'])
            info = {
                'label': network['label'],
                'gateway': network['gateway'],
                'dhcp_server': dhcp_server,
                'broadcast': network['broadcast'],
                'mac': vif['address'],
                'vif_uuid': vif['uuid'],
                'rxtx_cap': flavor['rxtx_cap'],
                'dns': [],
                'ips': [ip_dict(ip) for ip in network_IPs],
                'should_create_bridge': self.SHOULD_CREATE_BRIDGE,
                'should_create_vlan': self.SHOULD_CREATE_VLAN}

            if network['cidr_v6']:
                info['ip6s'] = [ip6_dict()]
            # TODO(tr3buchet): handle ip6 routes here as well
            if network['gateway_v6']:
                info['gateway6'] = network['gateway_v6']
            if network['dns1']:
                info['dns'].append(network['dns1'])
            if network['dns2']:
                info['dns'].append(network['dns2'])

            network_info.append((network_dict, info))
        return network_info

    def _allocate_mac_addresses(self, context, instance_id, networks):
        """Generates mac addresses and creates vif rows in db for them."""
        for network in networks:
            self.add_virtual_interface(context, instance_id, network['id'])

    def add_virtual_interface(self, context, instance_id, network_id):
        """Create a vif row with a unique, randomly generated mac address.

        Retries FLAGS.create_unique_mac_address_attempts times on mac
        collision; raises VirtualInterfaceMacAddressException if every
        attempt collides.
        """
        vif = {'address': self.generate_mac_address(),
               'instance_id': instance_id,
               'network_id': network_id,
               'uuid': str(utils.gen_uuid())}
        # try FLAG times to create a vif record with a unique mac_address
        for _ in xrange(FLAGS.create_unique_mac_address_attempts):
            try:
                return self.db.virtual_interface_create(context, vif)
            except exception.VirtualInterfaceCreateException:
                vif['address'] = self.generate_mac_address()
        else:
            # all attempts exhausted: clean up and signal failure
            self.db.virtual_interface_delete_by_instance(context,
                                                         instance_id)
            raise exception.VirtualInterfaceMacAddressException()

    def generate_mac_address(self):
        """Generate an Ethernet MAC address."""
        # 02:16:3e prefix marks a locally administered address
        mac = [0x02, 0x16, 0x3e,
               random.randint(0x00, 0x7f),
               random.randint(0x00, 0xff),
               random.randint(0x00, 0xff)]
        return ':'.join(map(lambda x: "%02x" % x, mac))

    def add_fixed_ip_to_instance(self, context, instance_id, host, network_id):
        """Adds a fixed ip to an instance from specified network."""
        networks = [self.db.network_get(context, network_id)]
        self._allocate_fixed_ips(context, instance_id, host, networks)

    def remove_fixed_ip_from_instance(self, context, instance_id, address):
        """Removes a fixed ip from an instance from specified network.

        Raises FixedIpNotFoundForSpecificInstance if the instance does not
        own the address.
        """
        fixed_ips = self.db.fixed_ip_get_by_instance(context, instance_id)
        for fixed_ip in fixed_ips:
            if fixed_ip['address'] == address:
                self.deallocate_fixed_ip(context, address)
                return
        raise exception.FixedIpNotFoundForSpecificInstance(
                                    instance_id=instance_id, ip=address)

    def allocate_fixed_ip(self, context, instance_id, network, **kwargs):
        """Gets a fixed ip from the pool."""
        # TODO(vish): when this is called by compute, we can associate compute
        #             with a network, or a cluster of computes with a network
        #             and use that network here with a method like
        #             network_get_by_compute_host
        address = None
        if network['cidr']:
            address = kwargs.get('address', None)
            if address:
                # caller asked for a specific address
                address = self.db.fixed_ip_associate(context,
                                                     address, instance_id,
                                                     network['id'])
            else:
                address = self.db.fixed_ip_associate_pool(context.elevated(),
                                                          network['id'],
                                                          instance_id)
            self._do_trigger_security_group_members_refresh_for_instance(
                                                                   instance_id)
            get_vif = self.db.virtual_interface_get_by_instance_and_network
            vif = get_vif(context, instance_id, network['id'])
            values = {'allocated': True,
                      'virtual_interface_id': vif['id']}
            self.db.fixed_ip_update(context, address, values)
        self._setup_network(context, network)
        return address

    def deallocate_fixed_ip(self, context, address, **kwargs):
        """Returns a fixed ip to the pool."""
        self.db.fixed_ip_update(context, address,
                                {'allocated': False,
                                 'virtual_interface_id': None})
        fixed_ip_ref = self.db.fixed_ip_get_by_address(context, address)
        instance_ref = fixed_ip_ref['instance']
        instance_id = instance_ref['id']
        self._do_trigger_security_group_members_refresh_for_instance(
                                                                   instance_id)
        if FLAGS.force_dhcp_release:
            # send a dhcp release to the server so the lease is dropped
            # immediately instead of waiting for it to expire
            dev = self.driver.get_dev(fixed_ip_ref['network'])
            vif = self.db.virtual_interface_get_by_instance_and_network(
                    context, instance_ref['id'], fixed_ip_ref['network']['id'])
            self.driver.release_dhcp(dev, address, vif['address'])

    def lease_fixed_ip(self, context, address):
        """Called by dhcp-bridge when ip is leased."""
        LOG.debug(_('Leased IP |%(address)s|'), locals(), context=context)
        fixed_ip = self.db.fixed_ip_get_by_address(context, address)
        instance = fixed_ip['instance']
        if not instance:
            raise exception.Error(_('IP %s leased that is not associated') %
                                  address)
        now = utils.utcnow()
        self.db.fixed_ip_update(context,
                                fixed_ip['address'],
                                {'leased': True,
                                 'updated_at': now})
        if not fixed_ip['allocated']:
            LOG.warn(_('IP |%s| leased that isn\'t allocated'), address,
                     context=context)

    def release_fixed_ip(self, context, address):
        """Called by dhcp-bridge when ip is released."""
        LOG.debug(_('Released IP |%(address)s|'), locals(), context=context)
        fixed_ip = self.db.fixed_ip_get_by_address(context, address)
        instance = fixed_ip['instance']
        if not instance:
            raise exception.Error(_('IP %s released that is not associated') %
                                  address)
        if not fixed_ip['leased']:
            LOG.warn(_('IP %s released that was not leased'), address,
                     context=context)
        self.db.fixed_ip_update(context,
                                fixed_ip['address'],
                                {'leased': False})
        if not fixed_ip['allocated']:
            self.db.fixed_ip_disassociate(context, address)
            # NOTE(vish): dhcp server isn't updated until next setup, this
            #             means there will stale entries in the conf file
            #             the code below will update the file if necessary
            if FLAGS.update_dhcp_on_disassociate:
                network_ref = self.db.fixed_ip_get_network(context, address)
                self._setup_network(context, network_ref)

    def create_networks(self, context, label, cidr, multi_host, num_networks,
                        network_size, cidr_v6, gateway_v6, bridge,
                        bridge_interface, dns1=None, dns2=None, **kwargs):
        """Create networks based on parameters.

        Splits the given v4/v6 cidrs into num_networks subnets of
        network_size addresses, skipping subnets already in use, and
        creates a db row (plus fixed ips) for each.

        :raises ValueError: on cidr conflicts or duplicate networks.
        """
        # NOTE(jkoelker): these are dummy values to make sure iter works
        fixed_net_v4 = netaddr.IPNetwork('0/32')
        fixed_net_v6 = netaddr.IPNetwork('::0/128')
        subnets_v4 = []
        subnets_v6 = []

        subnet_bits = int(math.ceil(math.log(network_size, 2)))

        if cidr_v6:
            fixed_net_v6 = netaddr.IPNetwork(cidr_v6)
            prefixlen_v6 = 128 - subnet_bits
            subnets_v6 = fixed_net_v6.subnet(prefixlen_v6, count=num_networks)

        if cidr:
            fixed_net_v4 = netaddr.IPNetwork(cidr)
            prefixlen_v4 = 32 - subnet_bits
            subnets_v4 = list(fixed_net_v4.subnet(prefixlen_v4,
                                                  count=num_networks))

        # NOTE(jkoelker): This replaces the _validate_cidrs call and
        #                 prevents looping multiple times
        try:
            nets = self.db.network_get_all(context)
        except exception.NoNetworksFound:
            nets = []
        used_subnets = [netaddr.IPNetwork(net['cidr']) for net in nets]

        def find_next(subnet):
            # next free subnet after `subnet` that still fits in the range
            next_subnet = subnet.next()
            while next_subnet in subnets_v4:
                next_subnet = next_subnet.next()
            if next_subnet in fixed_net_v4:
                return next_subnet

        for subnet in list(subnets_v4):
            if subnet in used_subnets:
                next_subnet = find_next(subnet)
                if next_subnet:
                    subnets_v4.remove(subnet)
                    subnets_v4.append(next_subnet)
                    subnet = next_subnet
                else:
                    raise ValueError(_('cidr already in use'))
            for used_subnet in used_subnets:
                if subnet in used_subnet:
                    msg = _('requested cidr (%(cidr)s) conflicts with '
                            'existing supernet (%(super)s)')
                    raise ValueError(msg % {'cidr': subnet,
                                            'super': used_subnet})
                if used_subnet in subnet:
                    next_subnet = find_next(subnet)
                    if next_subnet:
                        subnets_v4.remove(subnet)
                        subnets_v4.append(next_subnet)
                        subnet = next_subnet
                    else:
                        msg = _('requested cidr (%(cidr)s) conflicts '
                                'with existing smaller cidr '
                                '(%(smaller)s)')
                        raise ValueError(msg % {'cidr': subnet,
                                                'smaller': used_subnet})

        networks = []
        subnets = itertools.izip_longest(subnets_v4, subnets_v6)
        for index, (subnet_v4, subnet_v6) in enumerate(subnets):
            net = {}
            net['bridge'] = bridge
            net['bridge_interface'] = bridge_interface
            net['multi_host'] = multi_host
            net['dns1'] = dns1
            net['dns2'] = dns2

            if num_networks > 1:
                net['label'] = '%s_%d' % (label, index)
            else:
                net['label'] = label

            if cidr and subnet_v4:
                net['cidr'] = str(subnet_v4)
                net['netmask'] = str(subnet_v4.netmask)
                net['gateway'] = str(subnet_v4[1])
                net['broadcast'] = str(subnet_v4.broadcast)
                net['dhcp_start'] = str(subnet_v4[2])

            if cidr_v6 and subnet_v6:
                net['cidr_v6'] = str(subnet_v6)
                if gateway_v6:
                    # use a pre-defined gateway if one is provided
                    net['gateway_v6'] = str(gateway_v6)
                else:
                    net['gateway_v6'] = str(subnet_v6[1])

                net['netmask_v6'] = str(subnet_v6._prefixlen)

            if kwargs.get('vpn', False):
                # this bit here is for vlan-manager
                del net['dns1']
                del net['dns2']
                vlan = kwargs['vlan_start'] + index
                net['vpn_private_address'] = str(subnet_v4[2])
                net['dhcp_start'] = str(subnet_v4[3])
                net['vlan'] = vlan
                net['bridge'] = 'br%s' % vlan

                # NOTE(vish): This makes ports unique accross the cloud, a more
                #             robust solution would be to make them uniq per ip
                net['vpn_public_port'] = kwargs['vpn_start'] + index

            # None if network with cidr or cidr_v6 already exists
            network = self.db.network_create_safe(context, net)

            if not network:
                raise ValueError(_('Network already exists!'))
            else:
                networks.append(network)

            if network and cidr and subnet_v4:
                self._create_fixed_ips(context, network['id'])
        return networks

    def delete_network(self, context, fixed_range, require_disassociated=True):
        """Delete the network matching fixed_range.

        :raises ValueError: if require_disassociated and the network is
                            still associated with a project.
        """
        network = db.network_get_by_cidr(context, fixed_range)
        if require_disassociated and network.project_id is not None:
            raise ValueError(_('Network must be disassociated from project %s'
                               ' before delete' % network.project_id))
        db.network_delete_safe(context, network.id)

    @property
    def _bottom_reserved_ips(self):  # pylint: disable=R0201
        """Number of reserved ips at the bottom of the range."""
        return 2  # network, gateway

    @property
    def _top_reserved_ips(self):  # pylint: disable=R0201
        """Number of reserved ips at the top of the range."""
        return 1  # broadcast

    def _create_fixed_ips(self, context, network_id):
        """Create all fixed ips for network."""
        network = self.db.network_get(context, network_id)
        # NOTE(vish): Should these be properties of the network as opposed
        #             to properties of the manager class?
        bottom_reserved = self._bottom_reserved_ips
        top_reserved = self._top_reserved_ips
        project_net = netaddr.IPNetwork(network['cidr'])
        num_ips = len(project_net)
        for index in range(num_ips):
            address = str(project_net[index])
            # reserve the bottom addresses (network, gateway) and the top
            # ones (broadcast, vpn).  The last address has
            # num_ips - index == 1, so the comparison must be inclusive or
            # the broadcast address would never be marked reserved.
            if index < bottom_reserved or num_ips - index <= top_reserved:
                reserved = True
            else:
                reserved = False
            self.db.fixed_ip_create(context, {'network_id': network_id,
                                              'address': address,
                                              'reserved': reserved})

    def _allocate_fixed_ips(self, context, instance_id, host, networks,
                            **kwargs):
        """Calls allocate_fixed_ip once for each network."""
        raise NotImplementedError()

    def _setup_network(self, context, network_ref):
        """Sets up network on this host."""
        raise NotImplementedError()

    def validate_networks(self, context, networks):
        """check if the networks exists and host
        is set to each network.

        :param networks: list of (network_uuid, fixed_ip) tuples; fixed_ip
                         may be None.
        """
        if networks is None or len(networks) == 0:
            return

        network_uuids = [uuid for (uuid, fixed_ip) in networks]

        self._get_networks_by_uuids(context, network_uuids)

        for network_uuid, address in networks:
            # check if the fixed IP address is valid and
            # it actually belongs to the network
            if address is not None:
                if not utils.is_valid_ipv4(address):
                    raise exception.FixedIpInvalid(address=address)

                fixed_ip_ref = self.db.fixed_ip_get_by_address(context,
                                                               address)
                if fixed_ip_ref['network']['uuid'] != network_uuid:
                    raise exception.FixedIpNotFoundForNetwork(address=address,
                                            network_uuid=network_uuid)
                if fixed_ip_ref['instance'] is not None:
                    raise exception.FixedIpAlreadyInUse(address=address)

    def _get_networks_by_uuids(self, context, network_uuids):
        """Look up networks by uuid; project-unaware base implementation."""
        return self.db.network_get_all_by_uuids(context, network_uuids)
class FlatManager(NetworkManager):
    """Basic network where no vlans are used.

    FlatManager performs no bridge or vlan creation of its own.  The user
    is responsible for setting up whatever bridges are specified when
    creating networks through nova-manage, and those bridges must exist on
    every compute host.

    The intended use is a single network for the host created with a
    command like: nova-manage network create 192.168.0.0/24 1 256.
    Multiple networks per manager are not currently supported, but could
    be added by changing allocate_fixed_ip and get_network to look up a
    network with new logic rather than network_get_by_bridge.  Arbitrary
    address lists within one network can be achieved by manual db editing.

    If flat_injected is True, the compute host will try to inject network
    config into the guest by rewriting /etc/network/interfaces, which
    currently only works on debian based systems.  Supporting other OSes
    would need some other channel (e.g. an attached config disk or serial
    device) to tell the guest which ip to use.

    Metadata forwarding must be handled by the gateway; nova does no
    setup in this mode, so forwarding of requests to 169.254.169.254
    port 80 to the api server must be configured manually.
    """

    # fixed ips are never disassociated in flat mode, so don't time them out
    timeout_fixed_ips = False

    def _allocate_fixed_ips(self, context, instance_id, host, networks,
                            **kwargs):
        """Calls allocate_fixed_ip once for each network."""
        requested_networks = kwargs.get('requested_networks')
        for network in networks:
            # honor a caller-requested fixed ip for this network, if any
            address = None
            if requested_networks is not None:
                for net_uuid, fixed_ip in requested_networks:
                    if net_uuid == network['uuid']:
                        address = fixed_ip
                        break
            self.allocate_fixed_ip(context, instance_id,
                                   network, address=address)

    def deallocate_fixed_ip(self, context, address, **kwargs):
        """Returns a fixed ip to the pool."""
        super(FlatManager, self).deallocate_fixed_ip(context, address,
                                                     **kwargs)
        self.db.fixed_ip_disassociate(context, address)

    def _setup_network(self, context, network_ref):
        """Setup Network on this host."""
        # flat networking only records whether config injection is on
        self.db.network_update(context, network_ref['id'],
                               {'injected': FLAGS.flat_injected})
class FlatDHCPManager(FloatingIP, RPCAllocateFixedIP, NetworkManager):
    """Flat networking with dhcp.
    FlatDHCPManager will start up one dhcp server to give out addresses.
    It never injects network settings into the guest. It also manages bridges.
    Otherwise it behaves like FlatManager.
    """
    # compute hosts must create a bridge for VIFs under this manager
    SHOULD_CREATE_BRIDGE = True
    def init_host(self):
        """Do any initialization that needs to be run if this is a
        standalone service.
        """
        self.driver.init_host()
        self.driver.ensure_metadata_ip()
        # run NetworkManager.init_host (sets up this host's networks)
        super(FlatDHCPManager, self).init_host()
        self.init_host_floating_ips()
        self.driver.metadata_forward()
    def _setup_network(self, context, network_ref):
        """Sets up network on this host: plugs the gateway device and
        (re)starts dnsmasq / radvd for the network.
        """
        network_ref['dhcp_server'] = self._get_dhcp_ip(context, network_ref)
        mac_address = self.generate_mac_address()
        dev = self.driver.plug(network_ref, mac_address)
        self.driver.initialize_gateway_device(dev, network_ref)
        if not FLAGS.fake_network:
            self.driver.update_dhcp(context, dev, network_ref)
            if(FLAGS.use_ipv6):
                self.driver.update_ra(context, dev, network_ref)
                # record the link-local address as the v6 gateway
                gateway = utils.get_my_linklocal(dev)
                self.db.network_update(context, network_ref['id'],
                                       {'gateway_v6': gateway})
class VlanManager(RPCAllocateFixedIP, FloatingIP, NetworkManager):
    """Vlan network with dhcp.
    VlanManager is the most complicated. It will create a host-managed
    vlan for each project. Each project gets its own subnet. The networks
    and associated subnets are created with nova-manage using a command like:
    nova-manage network create 10.0.0.0/8 3 16. This will create 3 networks
    of 16 addresses from the beginning of the 10.0.0.0 range.
    A dhcp server is run for each subnet, so each project will have its own.
    For this mode to be useful, each project will need a vpn to access the
    instances in its subnet.
    """
    # compute hosts must create both a bridge and a vlan tag for VIFs
    SHOULD_CREATE_BRIDGE = True
    SHOULD_CREATE_VLAN = True
    def init_host(self):
        """Do any initialization that needs to be run if this is a
        standalone service.
        """
        self.driver.init_host()
        self.driver.ensure_metadata_ip()
        # NOTE: calls NetworkManager.init_host directly, bypassing the
        # RPCAllocateFixedIP/FloatingIP mixins in the MRO
        NetworkManager.init_host(self)
        self.init_host_floating_ips()
        self.driver.metadata_forward()
    def allocate_fixed_ip(self, context, instance_id, network, **kwargs):
        """Gets a fixed ip from the pool.
        For vpn instances the network's reserved vpn_private_address is
        used; otherwise the requested address (or one from the pool).
        """
        if kwargs.get('vpn', None):
            address = network['vpn_private_address']
            self.db.fixed_ip_associate(context,
                                       address,
                                       instance_id,
                                       reserved=True)
        else:
            address = kwargs.get('address', None)
            if address:
                address = self.db.fixed_ip_associate(context, address,
                                                     instance_id,
                                                     network['id'])
            else:
                address = self.db.fixed_ip_associate_pool(context,
                                                          network['id'],
                                                          instance_id)
        self._do_trigger_security_group_members_refresh_for_instance(
                                                                   instance_id)
        vif = self.db.virtual_interface_get_by_instance_and_network(context,
                                                                instance_id,
                                                                network['id'])
        values = {'allocated': True,
                  'virtual_interface_id': vif['id']}
        self.db.fixed_ip_update(context, address, values)
        self._setup_network(context, network)
        return address
    def add_network_to_project(self, context, project_id):
        """Force adds another network to a project."""
        self.db.network_associate(context, project_id, force=True)
    def _get_networks_for_instance(self, context, instance_id, project_id,
                                   requested_networks=None):
        """Determine which networks an instance should connect to."""
        # get networks associated with project
        if requested_networks is not None and len(requested_networks) != 0:
            network_uuids = [uuid for (uuid, fixed_ip) in requested_networks]
            networks = self.db.network_get_all_by_uuids(context,
                                                        network_uuids,
                                                        project_id)
        else:
            networks = self.db.project_get_networks(context, project_id)
        return networks
    def create_networks(self, context, **kwargs):
        """Create networks based on parameters.
        Validates vlan range and overall network size before delegating to
        NetworkManager.create_networks with vpn=True.
        """
        # Check that num_networks + vlan_start is not > 4094, fixes lp708025
        if kwargs['num_networks'] + kwargs['vlan_start'] > 4094:
            raise ValueError(_('The sum between the number of networks and'
                               ' the vlan start cannot be greater'
                               ' than 4094'))
        # check that num networks and network size fits in fixed_net
        fixed_net = netaddr.IPNetwork(kwargs['cidr'])
        if len(fixed_net) < kwargs['num_networks'] * kwargs['network_size']:
            raise ValueError(_('The network range is not big enough to fit '
                  '%(num_networks)s. Network size is %(network_size)s') %
                  kwargs)
        NetworkManager.create_networks(self, context, vpn=True, **kwargs)
    def _setup_network(self, context, network_ref):
        """Sets up network on this host: gateway device, vpn forwarding
        and dhcp/radvd for the network's vlan.
        """
        if not network_ref['vpn_public_address']:
            # first setup on this host: record our ip as the vpn endpoint
            net = {}
            address = FLAGS.vpn_ip
            net['vpn_public_address'] = address
            network_ref = db.network_update(context, network_ref['id'], net)
        else:
            address = network_ref['vpn_public_address']
        network_ref['dhcp_server'] = self._get_dhcp_ip(context, network_ref)
        mac_address = self.generate_mac_address()
        dev = self.driver.plug(network_ref, mac_address)
        self.driver.initialize_gateway_device(dev, network_ref)
        # NOTE(vish): only ensure this forward if the address hasn't been set
        #             manually.
        if address == FLAGS.vpn_ip and hasattr(self.driver,
                                               "ensure_vpn_forward"):
            self.driver.ensure_vpn_forward(FLAGS.vpn_ip,
                                            network_ref['vpn_public_port'],
                                            network_ref['vpn_private_address'])
        if not FLAGS.fake_network:
            self.driver.update_dhcp(context, dev, network_ref)
            if(FLAGS.use_ipv6):
                self.driver.update_ra(context, dev, network_ref)
                # record the link-local address as the v6 gateway
                gateway = utils.get_my_linklocal(dev)
                self.db.network_update(context, network_ref['id'],
                                       {'gateway_v6': gateway})
    def _get_networks_by_uuids(self, context, network_uuids):
        """Look up networks by uuid, scoped to the context's project."""
        return self.db.network_get_all_by_uuids(context, network_uuids,
                                                     context.project_id)
    @property
    def _bottom_reserved_ips(self):
        """Number of reserved ips at the bottom of the range."""
        return super(VlanManager, self)._bottom_reserved_ips + 1  # vpn server
    @property
    def _top_reserved_ips(self):
        """Number of reserved ips at the top of the range."""
        parent_reserved = super(VlanManager, self)._top_reserved_ips
        return parent_reserved + FLAGS.cnt_vpn_clients
| xushiwei/nova | nova/network/manager.py | Python | apache-2.0 | 51,086 |
<?xml version="1.0" encoding="utf-8"?>
<!DOCTYPE TS>
<TS version="2.1" language="fr_FR">
<context>
<name>IotikActivator</name>
<message>
<location filename="../../../../plugins/robots/generators/iotik/iotikRuCGeneratorLibrary/dialogs/iotikActivator.ui" line="37"/>
<source>Activation</source>
<translation type="unfinished"></translation>
</message>
<message>
<location filename="../../../../plugins/robots/generators/iotik/iotikRuCGeneratorLibrary/dialogs/iotikActivator.ui" line="60"/>
<source>Key:</source>
<translation>La clé:</translation>
</message>
<message>
<location filename="../../../../plugins/robots/generators/iotik/iotikRuCGeneratorLibrary/dialogs/iotikActivator.ui" line="90"/>
<source>Activate</source>
<translation>Activer</translation>
</message>
</context>
<context>
<name>iotik::IotikActivator</name>
<message>
<source>Activator is not released yet</source>
<translation type="vanished">Activator n'est pas encore sorti</translation>
</message>
</context>
<context>
<name>iotik::ruc::IotikRuCGeneratorPluginBase</name>
<message>
<location filename="../../../../plugins/robots/generators/iotik/iotikRuCGeneratorLibrary/src/iotikRuCGeneratorPluginBase.cpp" line="64"/>
<source>Generate RuC code</source>
        <translation>Générer le code RuC</translation>
</message>
<message>
<location filename="../../../../plugins/robots/generators/iotik/iotikRuCGeneratorLibrary/src/iotikRuCGeneratorPluginBase.cpp" line="70"/>
<source>Upload program by USB</source>
        <translation>Téléverser le programme par USB</translation>
</message>
<message>
<location filename="../../../../plugins/robots/generators/iotik/iotikRuCGeneratorLibrary/src/iotikRuCGeneratorPluginBase.cpp" line="76"/>
<source>Upload program by Wi-Fi</source>
        <translation>Téléverser le programme par Wi-Fi</translation>
</message>
<message>
<location filename="../../../../plugins/robots/generators/iotik/iotikRuCGeneratorLibrary/src/iotikRuCGeneratorPluginBase.cpp" line="85"/>
<source>Аctivate IoTik 32 v2.0</source>
<translation>Аctiver IoTik 32 v2.0</translation>
</message>
<message>
<source>Аctivate IoTik v2.0</source>
<translation type="vanished">Аctiver IoTik v2.0</translation>
</message>
<message>
<source>Аctivate IoTik v1.0</source>
<translation type="vanished">Аctiver IoTik v1.0</translation>
</message>
<message>
<location filename="../../../../plugins/robots/generators/iotik/iotikRuCGeneratorLibrary/src/iotikRuCGeneratorPluginBase.cpp" line="100"/>
<source>Generate RuC Code</source>
        <translation>Générer le code RuC</translation>
</message>
<message>
<location filename="../../../../plugins/robots/generators/iotik/iotikRuCGeneratorLibrary/src/iotikRuCGeneratorPluginBase.cpp" line="103"/>
<source>Upload RuC Program by USB</source>
        <translation>Téléverser le programme RuC par USB</translation>
</message>
<message>
<location filename="../../../../plugins/robots/generators/iotik/iotikRuCGeneratorLibrary/src/iotikRuCGeneratorPluginBase.cpp" line="106"/>
<source>Upload RuC Program by Wi-Fi</source>
        <translation>Téléverser le programme RuC par Wi-Fi</translation>
</message>
<message>
<source>Activator is not released yet</source>
<translation type="vanished">Activator n'est pas encore sorti</translation>
</message>
<message>
<source>Wi-Fi is not released yet</source>
<translation type="vanished">Le Wi-Fi n'est pas encore disponible</translation>
</message>
<message>
<location filename="../../../../plugins/robots/generators/iotik/iotikRuCGeneratorLibrary/src/iotikRuCGeneratorPluginBase.cpp" line="206"/>
<source>Code compiling failed, aborting</source>
        <translation>La compilation du code a échoué, abandon</translation>
</message>
</context>
</TS>
| Victor-Y-Fadeev/qreal | qrtranslations/fr/plugins/robots/iotikRuCGeneratorLibrary_fr.ts | TypeScript | apache-2.0 | 4,171 |
package ru.qatools.school.twister.web.elements;
import org.openqa.selenium.support.FindBy;
import ru.yandex.qatools.htmlelements.element.Button;
import ru.yandex.qatools.htmlelements.element.HtmlElement;
import ru.yandex.qatools.htmlelements.element.TextBlock;
import ru.yandex.qatools.htmlelements.element.TextInput;
/**
* Created by dima on 25.01.15.
*/
/**
 * Profile form on the user page; exposes the block that displays the
 * logged-in user's name (located by the {@code username} element id).
 */
public class ProfileForm extends HtmlElement {

    @FindBy(id = "username")
    private TextBlock userNameBlock;

    /** Returns the text block holding the profile's user name. */
    public TextBlock getUserName() {
        return userNameBlock;
    }
}
| autoschool/twister | src/test/java/ru/qatools/school/twister/web/elements/ProfileForm.java | Java | apache-2.0 | 540 |
/*
* Copyright © 2009 HotPads (admin@hotpads.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.datarouter.batchsizeoptimizer.job;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.inject.Inject;
import io.datarouter.batchsizeoptimizer.storage.performancerecord.DatarouterOpPerformanceRecordDao;
import io.datarouter.batchsizeoptimizer.storage.performancerecord.OpPerformanceRecord;
import io.datarouter.batchsizeoptimizer.storage.performancerecord.OpPerformanceRecordKey;
import io.datarouter.instrumentation.task.TaskTracker;
import io.datarouter.job.BaseJob;
import io.datarouter.scanner.Scanner;
public class OpPerformanceRecordAggregationJob extends BaseJob{

	/** Max number of raw records deleted per deleteMulti call. */
	private static final int BATCH_SIZE = 1000;

	@Inject
	private DatarouterOpPerformanceRecordDao opPerformanceRecordDao;

	/**
	 * Scans all raw OpPerformanceRecords (keyed by opName first, so records for one op are contiguous),
	 * sums timeSpent and rowCount per (opName, batchSize) pair, writes one aggregated record per pair,
	 * and deletes the raw records in batches of {@link #BATCH_SIZE}.
	 */
	@Override
	public void run(TaskTracker tracker){
		String currentOpName = null;
		List<OpPerformanceRecordKey> recordsToDelete = new ArrayList<>(BATCH_SIZE);
		Map<Integer,AggregatedRecord> aggregatedRecordsByBatchSize = new HashMap<>();
		for(OpPerformanceRecord record : opPerformanceRecordDao.scan().iterable()){
			// an opName change means the previous op's group is complete; flush its aggregates
			if(currentOpName != null && !currentOpName.equals(record.getKey().getOpName())){
				saveAggregatedRecord(aggregatedRecordsByBatchSize);
				aggregatedRecordsByBatchSize.clear();
			}
			currentOpName = record.getKey().getOpName();
			aggregatedRecordsByBatchSize.computeIfAbsent(
					record.getBatchSize(),
					$ -> new AggregatedRecord(record.getKey().getOpName(), record.getBatchSize())).addRecord(record);
			recordsToDelete.add(record.getKey());
			// flush once a full batch has accumulated (was ">", which let batches grow to BATCH_SIZE + 1)
			if(recordsToDelete.size() >= BATCH_SIZE){
				opPerformanceRecordDao.deleteMulti(new ArrayList<>(recordsToDelete));
				recordsToDelete.clear();
			}
		}
		// flush the final op's aggregates and any remaining deletes
		saveAggregatedRecord(aggregatedRecordsByBatchSize);
		if(!recordsToDelete.isEmpty()){
			opPerformanceRecordDao.deleteMulti(recordsToDelete);
		}
	}

	/** Persists one OpPerformanceRecord per accumulated (opName, batchSize) aggregate. */
	private void saveAggregatedRecord(Map<Integer,AggregatedRecord> aggregatedRecordsByBatchSize){
		Scanner.of(aggregatedRecordsByBatchSize.values())
				.map(AggregatedRecord::buildOpPerformanceRecord)
				.flush(opPerformanceRecordDao::putMulti);
	}

	/** Mutable accumulator of timeSpent/rowCount for one (opName, batchSize) pair. */
	private static class AggregatedRecord{

		private final String opName;
		private final Integer batchSize;
		private long timeSpent;
		private long rowCount;

		private AggregatedRecord(String opName, Integer batchSize){
			this.opName = opName;
			this.batchSize = batchSize;
			this.timeSpent = 0;
			this.rowCount = 0;
		}

		private void addRecord(OpPerformanceRecord record){
			timeSpent += record.getTimeSpent();
			rowCount += record.getRowCount();
		}

		private OpPerformanceRecord buildOpPerformanceRecord(){
			return new OpPerformanceRecord(opName, batchSize, rowCount, timeSpent);
		}

	}

}
| hotpads/datarouter | datarouter-batch-size-optimizer/src/main/java/io/datarouter/batchsizeoptimizer/job/OpPerformanceRecordAggregationJob.java | Java | apache-2.0 | 3,269 |
using AugmentedSzczecin.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Windows.UI.Xaml;
using Windows.UI.Xaml.Data;
namespace AugmentedSzczecin.Converters
{
public class OpeningToVisibilityConverter : IValueConverter
{
public object Convert(object value, Type targetType, object parameter, string language)
{
Opening [] information = (Opening [])value;
if(information == null)
{
return Visibility.Collapsed;
}
return Visibility.Visible;
}
public object ConvertBack(object value, Type targetType, object parameter, string language)
{
throw new NotImplementedException();
}
}
}
| blstream/AugmentedSzczecin_WP | AugmentedSzczecin/AugmentedSzczecin/Converters/OpeningToVisibilityConverter.cs | C# | apache-2.0 | 813 |