code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 3 942 | language stringclasses 30
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
package org.hyperimage.connector.fedora3.ws;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlElementDecl;
import javax.xml.bind.annotation.XmlRegistry;
import javax.xml.namespace.QName;
/**
* This object contains factory methods for each
* Java content interface and Java element interface
* generated in the org.hyperimage.connector.fedora3.ws package.
* <p>An ObjectFactory allows you to programmatically
* construct new instances of the Java representation
* for XML content. The Java representation of XML
* content can consist of schema derived interfaces
* and classes representing the binding of schema
* type definitions, element declarations and model
* groups. Factory methods for each of these are
* provided in this class.
*
*/
@XmlRegistry
public class ObjectFactory {

    // QNames for the two namespaces used by this service: generic connector
    // elements (connector.ws.hyperimage.org) and the Fedora-3 specific
    // operations/faults (fedora3.connector.hyperimage.org).
    private final static QName _AssetURN_QNAME = new QName("http://connector.ws.hyperimage.org/", "assetURN");
    private final static QName _Token_QNAME = new QName("http://connector.ws.hyperimage.org/", "token");
    private final static QName _GetAssetPreviewDataResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getAssetPreviewDataResponse");
    private final static QName _ParentURN_QNAME = new QName("http://connector.ws.hyperimage.org/", "parentURN");
    private final static QName _Username_QNAME = new QName("http://connector.ws.hyperimage.org/", "username");
    private final static QName _GetAssetData_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getAssetData");
    private final static QName _GetAssetPreviewData_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getAssetPreviewData");
    private final static QName _GetHierarchyLevelResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getHierarchyLevelResponse");
    private final static QName _Authenticate_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "authenticate");
    private final static QName _HIWSLoggedException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSLoggedException");
    private final static QName _GetMetadataRecord_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getMetadataRecord");
    private final static QName _HIWSNotBinaryException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSNotBinaryException");
    private final static QName _Session_QNAME = new QName("http://connector.ws.hyperimage.org/", "session");
    private final static QName _HIWSDCMetadataException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSDCMetadataException");
    private final static QName _HIWSAuthException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSAuthException");
    private final static QName _HIWSAssetNotFoundException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSAssetNotFoundException");
    private final static QName _GetWSVersion_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getWSVersion");
    private final static QName _GetMetadataRecordResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getMetadataRecordResponse");
    private final static QName _HIWSUTF8EncodingException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSUTF8EncodingException");
    private final static QName _GetWSVersionResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getWSVersionResponse");
    private final static QName _GetReposInfo_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getReposInfo");
    private final static QName _HIWSXMLParserException_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "HIWSXMLParserException");
    private final static QName _AuthenticateResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "authenticateResponse");
    private final static QName _GetAssetDataResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getAssetDataResponse");
    private final static QName _GetHierarchyLevel_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getHierarchyLevel");
    private final static QName _GetReposInfoResponse_QNAME = new QName("http://fedora3.connector.hyperimage.org/", "getReposInfoResponse");
    // Unqualified local element "return" scoped to GetAssetPreviewDataResponse.
    private final static QName _GetAssetPreviewDataResponseReturn_QNAME = new QName("", "return");

    /**
     * Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: org.hyperimage.connector.fedora3.ws
     *
     */
    public ObjectFactory() {
    }

    /**
     * Create an instance of {@link HIWSDCMetadataException }
     *
     */
    public HIWSDCMetadataException createHIWSDCMetadataException() {
        return new HIWSDCMetadataException();
    }

    /**
     * Create an instance of {@link GetAssetDataResponse }
     *
     */
    public GetAssetDataResponse createGetAssetDataResponse() {
        return new GetAssetDataResponse();
    }

    /**
     * Create an instance of {@link HIWSAuthException }
     *
     */
    public HIWSAuthException createHIWSAuthException() {
        return new HIWSAuthException();
    }

    /**
     * Create an instance of {@link HIWSAssetNotFoundException }
     *
     */
    public HIWSAssetNotFoundException createHIWSAssetNotFoundException() {
        return new HIWSAssetNotFoundException();
    }

    /**
     * Create an instance of {@link HIWSNotBinaryException }
     *
     */
    public HIWSNotBinaryException createHIWSNotBinaryException() {
        return new HIWSNotBinaryException();
    }

    /**
     * Create an instance of {@link GetHierarchyLevelResponse }
     *
     */
    public GetHierarchyLevelResponse createGetHierarchyLevelResponse() {
        return new GetHierarchyLevelResponse();
    }

    /**
     * Create an instance of {@link Authenticate }
     *
     */
    public Authenticate createAuthenticate() {
        return new Authenticate();
    }

    /**
     * Create an instance of {@link HiHierarchyLevel }
     *
     */
    public HiHierarchyLevel createHiHierarchyLevel() {
        return new HiHierarchyLevel();
    }

    /**
     * Create an instance of {@link HIWSLoggedException }
     *
     */
    public HIWSLoggedException createHIWSLoggedException() {
        return new HIWSLoggedException();
    }

    /**
     * Create an instance of {@link GetHierarchyLevel }
     *
     */
    public GetHierarchyLevel createGetHierarchyLevel() {
        return new GetHierarchyLevel();
    }

    /**
     * Create an instance of {@link AuthenticateResponse }
     *
     */
    public AuthenticateResponse createAuthenticateResponse() {
        return new AuthenticateResponse();
    }

    /**
     * Create an instance of {@link GetReposInfoResponse }
     *
     */
    public GetReposInfoResponse createGetReposInfoResponse() {
        return new GetReposInfoResponse();
    }

    /**
     * Create an instance of {@link GetAssetPreviewDataResponse }
     *
     */
    public GetAssetPreviewDataResponse createGetAssetPreviewDataResponse() {
        return new GetAssetPreviewDataResponse();
    }

    /**
     * Create an instance of {@link GetWSVersion }
     *
     */
    public GetWSVersion createGetWSVersion() {
        return new GetWSVersion();
    }

    /**
     * Create an instance of {@link GetMetadataRecordResponse }
     *
     */
    public GetMetadataRecordResponse createGetMetadataRecordResponse() {
        return new GetMetadataRecordResponse();
    }

    /**
     * Create an instance of {@link HiMetadataRecord }
     *
     */
    public HiMetadataRecord createHiMetadataRecord() {
        return new HiMetadataRecord();
    }

    /**
     * Create an instance of {@link HiTypedDatastream }
     *
     */
    public HiTypedDatastream createHiTypedDatastream() {
        return new HiTypedDatastream();
    }

    /**
     * Create an instance of {@link HIWSXMLParserException }
     *
     */
    public HIWSXMLParserException createHIWSXMLParserException() {
        return new HIWSXMLParserException();
    }

    /**
     * Create an instance of {@link GetMetadataRecord }
     *
     */
    public GetMetadataRecord createGetMetadataRecord() {
        return new GetMetadataRecord();
    }

    /**
     * Create an instance of {@link GetAssetPreviewData }
     *
     */
    public GetAssetPreviewData createGetAssetPreviewData() {
        return new GetAssetPreviewData();
    }

    /**
     * Create an instance of {@link HIWSUTF8EncodingException }
     *
     */
    public HIWSUTF8EncodingException createHIWSUTF8EncodingException() {
        return new HIWSUTF8EncodingException();
    }

    /**
     * Create an instance of {@link GetReposInfo }
     *
     */
    public GetReposInfo createGetReposInfo() {
        return new GetReposInfo();
    }

    /**
     * Create an instance of {@link GetWSVersionResponse }
     *
     */
    public GetWSVersionResponse createGetWSVersionResponse() {
        return new GetWSVersionResponse();
    }

    /**
     * Create an instance of {@link GetAssetData }
     *
     */
    public GetAssetData createGetAssetData() {
        return new GetAssetData();
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "assetURN")
    public JAXBElement<String> createAssetURN(String value) {
        return new JAXBElement<String>(_AssetURN_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "token")
    public JAXBElement<String> createToken(String value) {
        return new JAXBElement<String>(_Token_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetAssetPreviewDataResponse }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getAssetPreviewDataResponse")
    public JAXBElement<GetAssetPreviewDataResponse> createGetAssetPreviewDataResponse(GetAssetPreviewDataResponse value) {
        return new JAXBElement<GetAssetPreviewDataResponse>(_GetAssetPreviewDataResponse_QNAME, GetAssetPreviewDataResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "parentURN")
    public JAXBElement<String> createParentURN(String value) {
        return new JAXBElement<String>(_ParentURN_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "username")
    public JAXBElement<String> createUsername(String value) {
        return new JAXBElement<String>(_Username_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetAssetData }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getAssetData")
    public JAXBElement<GetAssetData> createGetAssetData(GetAssetData value) {
        return new JAXBElement<GetAssetData>(_GetAssetData_QNAME, GetAssetData.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetAssetPreviewData }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getAssetPreviewData")
    public JAXBElement<GetAssetPreviewData> createGetAssetPreviewData(GetAssetPreviewData value) {
        return new JAXBElement<GetAssetPreviewData>(_GetAssetPreviewData_QNAME, GetAssetPreviewData.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetHierarchyLevelResponse }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getHierarchyLevelResponse")
    public JAXBElement<GetHierarchyLevelResponse> createGetHierarchyLevelResponse(GetHierarchyLevelResponse value) {
        return new JAXBElement<GetHierarchyLevelResponse>(_GetHierarchyLevelResponse_QNAME, GetHierarchyLevelResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link Authenticate }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "authenticate")
    public JAXBElement<Authenticate> createAuthenticate(Authenticate value) {
        return new JAXBElement<Authenticate>(_Authenticate_QNAME, Authenticate.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSLoggedException }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSLoggedException")
    public JAXBElement<HIWSLoggedException> createHIWSLoggedException(HIWSLoggedException value) {
        return new JAXBElement<HIWSLoggedException>(_HIWSLoggedException_QNAME, HIWSLoggedException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetMetadataRecord }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getMetadataRecord")
    public JAXBElement<GetMetadataRecord> createGetMetadataRecord(GetMetadataRecord value) {
        return new JAXBElement<GetMetadataRecord>(_GetMetadataRecord_QNAME, GetMetadataRecord.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSNotBinaryException }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSNotBinaryException")
    public JAXBElement<HIWSNotBinaryException> createHIWSNotBinaryException(HIWSNotBinaryException value) {
        return new JAXBElement<HIWSNotBinaryException>(_HIWSNotBinaryException_QNAME, HIWSNotBinaryException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link String }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://connector.ws.hyperimage.org/", name = "session")
    public JAXBElement<String> createSession(String value) {
        return new JAXBElement<String>(_Session_QNAME, String.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSDCMetadataException }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSDCMetadataException")
    public JAXBElement<HIWSDCMetadataException> createHIWSDCMetadataException(HIWSDCMetadataException value) {
        return new JAXBElement<HIWSDCMetadataException>(_HIWSDCMetadataException_QNAME, HIWSDCMetadataException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSAuthException }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSAuthException")
    public JAXBElement<HIWSAuthException> createHIWSAuthException(HIWSAuthException value) {
        return new JAXBElement<HIWSAuthException>(_HIWSAuthException_QNAME, HIWSAuthException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSAssetNotFoundException }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSAssetNotFoundException")
    public JAXBElement<HIWSAssetNotFoundException> createHIWSAssetNotFoundException(HIWSAssetNotFoundException value) {
        return new JAXBElement<HIWSAssetNotFoundException>(_HIWSAssetNotFoundException_QNAME, HIWSAssetNotFoundException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetWSVersion }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getWSVersion")
    public JAXBElement<GetWSVersion> createGetWSVersion(GetWSVersion value) {
        return new JAXBElement<GetWSVersion>(_GetWSVersion_QNAME, GetWSVersion.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetMetadataRecordResponse }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getMetadataRecordResponse")
    public JAXBElement<GetMetadataRecordResponse> createGetMetadataRecordResponse(GetMetadataRecordResponse value) {
        return new JAXBElement<GetMetadataRecordResponse>(_GetMetadataRecordResponse_QNAME, GetMetadataRecordResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSUTF8EncodingException }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSUTF8EncodingException")
    public JAXBElement<HIWSUTF8EncodingException> createHIWSUTF8EncodingException(HIWSUTF8EncodingException value) {
        return new JAXBElement<HIWSUTF8EncodingException>(_HIWSUTF8EncodingException_QNAME, HIWSUTF8EncodingException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetWSVersionResponse }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getWSVersionResponse")
    public JAXBElement<GetWSVersionResponse> createGetWSVersionResponse(GetWSVersionResponse value) {
        return new JAXBElement<GetWSVersionResponse>(_GetWSVersionResponse_QNAME, GetWSVersionResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetReposInfo }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getReposInfo")
    public JAXBElement<GetReposInfo> createGetReposInfo(GetReposInfo value) {
        return new JAXBElement<GetReposInfo>(_GetReposInfo_QNAME, GetReposInfo.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link HIWSXMLParserException }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "HIWSXMLParserException")
    public JAXBElement<HIWSXMLParserException> createHIWSXMLParserException(HIWSXMLParserException value) {
        return new JAXBElement<HIWSXMLParserException>(_HIWSXMLParserException_QNAME, HIWSXMLParserException.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link AuthenticateResponse }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "authenticateResponse")
    public JAXBElement<AuthenticateResponse> createAuthenticateResponse(AuthenticateResponse value) {
        return new JAXBElement<AuthenticateResponse>(_AuthenticateResponse_QNAME, AuthenticateResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetAssetDataResponse }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getAssetDataResponse")
    public JAXBElement<GetAssetDataResponse> createGetAssetDataResponse(GetAssetDataResponse value) {
        return new JAXBElement<GetAssetDataResponse>(_GetAssetDataResponse_QNAME, GetAssetDataResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetHierarchyLevel }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getHierarchyLevel")
    public JAXBElement<GetHierarchyLevel> createGetHierarchyLevel(GetHierarchyLevel value) {
        return new JAXBElement<GetHierarchyLevel>(_GetHierarchyLevel_QNAME, GetHierarchyLevel.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link GetReposInfoResponse }{@code >}
     *
     */
    @XmlElementDecl(namespace = "http://fedora3.connector.hyperimage.org/", name = "getReposInfoResponse")
    public JAXBElement<GetReposInfoResponse> createGetReposInfoResponse(GetReposInfoResponse value) {
        return new JAXBElement<GetReposInfoResponse>(_GetReposInfoResponse_QNAME, GetReposInfoResponse.class, null, value);
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link byte[]}{@code >}
     *
     */
    @XmlElementDecl(namespace = "", name = "return", scope = GetAssetPreviewDataResponse.class)
    public JAXBElement<byte[]> createGetAssetPreviewDataResponseReturn(byte[] value) {
        // The cast previously applied here was redundant: value is already byte[].
        return new JAXBElement<byte[]>(_GetAssetPreviewDataResponseReturn_QNAME, byte[].class, GetAssetPreviewDataResponse.class, value);
    }

}
| bitgilde/HyperImage3 | hi3-editor/src/org/hyperimage/connector/fedora3/ws/ObjectFactory.java | Java | apache-2.0 | 21,077 |
#
# Cookbook Name:: nxlog
# Recipe:: default
#
# Copyright (C) 2014 Simon Detheridge
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Test-fixture recipe: declares one nxlog_destination resource per supported
# nxlog output module so the integration suite can verify the rendered config.
include_recipe 'nxlog::default'

# om_file: write events to a local log file (default output_module).
nxlog_destination 'test_om_file' do
  file '/var/log/test.log'
end

# om_blocker: discard/block events; used for flow-control testing.
nxlog_destination 'test_om_blocker' do
  output_module 'om_blocker'
end

# om_dbi: insert events into a database via libdbi.
nxlog_destination 'test_om_dbi' do
  output_module 'om_dbi'
  driver 'mysql'
  sql 'INSERT INTO log VALUES ($SyslogFacility, $SyslogSeverity, $Message)'
  options ['host 127.0.0.1', 'username foo', 'password bar', 'dbname nxlog']
end

# om_exec: pipe events to an external command with arguments.
nxlog_destination 'test_om_exec' do
  output_module 'om_exec'
  command '/usr/bin/foo'
  args %w(bar baz)
end

# om_http over TLS: client cert/key plus CA bundle, untrusted peers rejected.
nxlog_destination 'test_om_https' do
  output_module 'om_http'
  url 'https://example.org/foo'
  https_cert_file '%CERTDIR%/client-cert.pem'
  https_cert_key_file '%CERTDIR%/client-key.pem'
  https_ca_file '%CERTDIR%/ca.pem'
  https_allow_untrusted false
end

# om_http: plain (non-TLS) HTTP delivery.
nxlog_destination 'test_om_http' do
  output_module 'om_http'
  url 'http://example.org/bar'
end

# om_null: silently drop everything.
nxlog_destination 'test_om_null' do
  output_module 'om_null'
end

# om_ssl: raw TLS socket output with binary framing.
nxlog_destination 'test_om_ssl' do
  output_module 'om_ssl'
  port 1234
  host 'foo.example.org'
  cert_file '%CERTDIR%/client-cert.pem'
  cert_key_file '%CERTDIR%/client-key.pem'
  ca_file '%CERTDIR%/ca.pem'
  allow_untrusted false
  output_type 'Binary'
end

# om_tcp: plain TCP socket output.
nxlog_destination 'test_om_tcp' do
  output_module 'om_tcp'
  port 1234
  host 'foo.example.org'
end

# om_udp: UDP datagram output (fire-and-forget).
nxlog_destination 'test_om_udp' do
  output_module 'om_udp'
  port 1234
  host 'foo.example.org'
end

# om_uds: Unix domain socket output (e.g. the local /dev/log syslog socket),
# re-emitting events in BSD syslog format.
nxlog_destination 'test_om_uds' do
  output_module 'om_uds'
  exec 'parse_syslog_bsd(); to_syslog_bsd();'
  uds '/dev/log'
end
| widgit-cookbooks/nxlog | test/fixtures/cookbooks/test/recipes/output_resources.rb | Ruby | apache-2.0 | 2,171 |
/* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.hbase;
import java.io.IOException;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.List;
import java.util.ListIterator;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableRecordReader;
import org.apache.hadoop.hbase.mapreduce.TableSplit;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.InputSplit;
/**
 * A {@link TableInputFormat} that additionally supports row-key range bounds
 * (gt/gte/lt/lte) and a per-reader row limit. Regions that cannot contain any
 * matching row are pruned from the split list, and progress is estimated by
 * interpreting row keys as big-endian integers.
 */
public class HBaseTableInputFormat extends TableInputFormat {

    private static final Log LOG = LogFactory.getLog(HBaseTableInputFormat.class);

    // Row-key bounds: gt/gte are lower bounds (exclusive/inclusive), lt/lte are
    // upper bounds (exclusive/inclusive). null means the bound is not set.
    protected final byte[] gt_;
    protected final byte[] gte_;
    protected final byte[] lt_;
    protected final byte[] lte_;

    /** No limit, no bounds. */
    public HBaseTableInputFormat() {
        this(-1, null, null, null, null);
    }

    /**
     * @param limit maximum rows each record reader will emit; <= 0 means unlimited
     * @param gt    exclusive lower bound on row key, or null
     * @param gte   inclusive lower bound on row key, or null
     * @param lt    exclusive upper bound on row key, or null
     * @param lte   inclusive upper bound on row key, or null
     */
    protected HBaseTableInputFormat(long limit, byte[] gt, byte[] gte, byte[] lt, byte[] lte) {
        super();
        setTableRecordReader(new HBaseTableRecordReader(limit));
        gt_ = gt;
        gte_ = gte;
        lt_ = lt;
        lte_ = lte;
    }

    /** Fluent builder for HBaseTableInputFormat instances. */
    public static class HBaseTableIFBuilder {
        protected byte[] gt_;
        protected byte[] gte_;
        protected byte[] lt_;
        protected byte[] lte_;
        protected long limit_;
        protected Configuration conf_;

        public HBaseTableIFBuilder withGt(byte[] gt) { gt_ = gt; return this; }
        public HBaseTableIFBuilder withGte(byte[] gte) { gte_ = gte; return this; }
        public HBaseTableIFBuilder withLt(byte[] lt) { lt_ = lt; return this; }
        public HBaseTableIFBuilder withLte(byte[] lte) { lte_ = lte; return this; }
        public HBaseTableIFBuilder withLimit(long limit) { limit_ = limit; return this; }
        public HBaseTableIFBuilder withConf(Configuration conf) { conf_ = conf; return this; }

        public HBaseTableInputFormat build() {
            HBaseTableInputFormat inputFormat = new HBaseTableInputFormat(limit_, gt_, gte_, lt_, lte_);
            if (conf_ != null) inputFormat.setConf(conf_);
            return inputFormat;
        }
    }

    /**
     * Delegates to the superclass for the region-based splits, then removes
     * every split whose [startRow, endRow) range lies entirely outside the
     * configured key bounds.
     */
    @Override
    public List<InputSplit> getSplits(org.apache.hadoop.mapreduce.JobContext context)
    throws IOException {
        List<InputSplit> splits = super.getSplits(context);
        ListIterator<InputSplit> splitIter = splits.listIterator();
        while (splitIter.hasNext()) {
            TableSplit split = (TableSplit) splitIter.next();
            byte[] startKey = split.getStartRow();
            byte[] endKey = split.getEndRow();
            // Skip if the region doesn't satisfy configured options.
            if ((skipRegion(CompareOp.LESS, startKey, lt_)) ||
                    (skipRegion(CompareOp.GREATER, endKey, gt_)) ||
                    (skipRegion(CompareOp.GREATER, endKey, gte_)) ||
                    (skipRegion(CompareOp.LESS_OR_EQUAL, startKey, lte_)) ) {
                splitIter.remove();
            }
        }
        return splits;
    }

    /**
     * Returns true if a region boundary key fails the given comparison against
     * {@code option}, i.e. the region cannot contain qualifying rows.
     * Empty boundary keys (first/last region) and unset options never skip.
     * Uses RowFilter semantics: filterRowKey == true means "exclude".
     */
    private boolean skipRegion(CompareOp op, byte[] key, byte[] option ) throws IOException {
        if (key.length == 0 || option == null)
            return false;
        BinaryComparator comp = new BinaryComparator(option);
        RowFilter rowFilter = new RowFilter(op, comp);
        return rowFilter.filterRowKey(key, 0, key.length);
    }

    /**
     * TableRecordReader that enforces the row limit and reports progress by
     * treating the scan's start/stop row keys as big-endian unsigned integers
     * and interpolating the current row key between them.
     */
    protected class HBaseTableRecordReader extends TableRecordReader {

        private long recordsSeen = 0;
        private final long limit_;
        private byte[] startRow_;
        private byte[] endRow_;
        private transient byte[] currRow_;

        // Length of the longer of start/end key; shorter keys are zero-padded
        // to this length so they compare as integers of equal magnitude.
        private int maxRowLength;
        private BigInteger bigStart_;
        private BigInteger bigEnd_;
        private BigDecimal bigRange_;

        private transient float progressSoFar_ = 0;

        public HBaseTableRecordReader(long limit) {
            limit_ = limit;
        }

        @Override
        public void setScan(Scan scan) {
            super.setScan(scan);

            startRow_ = scan.getStartRow();
            endRow_ = scan.getStopRow();
            byte[] startPadded;
            byte[] endPadded;
            // Pad the shorter key with trailing zeros so both have equal length.
            if (startRow_.length < endRow_.length) {
                startPadded = Bytes.padTail(startRow_, endRow_.length - startRow_.length);
                endPadded = endRow_;
            } else if (endRow_.length < startRow_.length) {
                startPadded = startRow_;
                endPadded = Bytes.padTail(endRow_, startRow_.length - endRow_.length);
            } else {
                startPadded = startRow_;
                endPadded = endRow_;
            }
            currRow_ = startRow_;
            // Prepend {1, 0} so the BigInteger is always positive regardless of
            // the key's high bit, keeping start <= end arithmetic well-defined.
            byte [] prependHeader = {1, 0};
            bigStart_ = new BigInteger(Bytes.add(prependHeader, startPadded));
            bigEnd_ = new BigInteger(Bytes.add(prependHeader, endPadded));
            bigRange_ = new BigDecimal(bigEnd_.subtract(bigStart_));
            maxRowLength = endRow_.length > startRow_.length ? endRow_.length : startRow_.length;
            LOG.info("setScan with ranges: " + bigStart_ + " - " + bigEnd_ + " ( " + bigRange_ + ")");
        }

        /**
         * Advances to the next row; stops early once {@code limit_} rows have
         * been produced (when limit_ > 0). Tracks the current row key so
         * getProgress() can interpolate.
         */
        @Override
        public boolean nextKeyValue() throws IOException, InterruptedException {
            if (limit_ > 0 && ++recordsSeen > limit_) {
                return false;
            }
            boolean hasMore = super.nextKeyValue();
            if (hasMore) {
                currRow_ = getCurrentKey().get();
            }
            return hasMore;
        }

        /**
         * Estimated fraction of the key range consumed so far, in [0, 1].
         * Returns 0 when the scan is unbounded (empty stop row) or has not
         * started; clamps to the last reported value if the current key
         * overshoots the stop row.
         */
        @Override
        public float getProgress() {
            if (currRow_ == null || currRow_.length == 0 || endRow_.length == 0 || endRow_ == HConstants.LAST_ROW) {
                return 0;
            }
            byte[] lastPadded = currRow_;
            if(maxRowLength > currRow_.length) {
                lastPadded = Bytes.padTail(currRow_, maxRowLength - currRow_.length);
            }
            byte [] prependHeader = {1, 0};
            BigInteger bigLastRow = new BigInteger(Bytes.add(prependHeader, lastPadded));
            if (bigLastRow.compareTo(bigEnd_) > 0) {
                return progressSoFar_;
            }
            BigDecimal processed = new BigDecimal(bigLastRow.subtract(bigStart_));
            try {
                BigDecimal progress = processed.setScale(3).divide(bigRange_, BigDecimal.ROUND_HALF_DOWN);
                progressSoFar_ = progress.floatValue();
                return progressSoFar_;
            } catch (java.lang.ArithmeticException e) {
                // Division can fail when the range is zero (start == stop row).
                return 0;
            }
        }

    }
}
| apache/pig | src/org/apache/pig/backend/hadoop/hbase/HBaseTableInputFormat.java | Java | apache-2.0 | 7,754 |
// Copyright (c) 2015 Alachisoft
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
namespace Alachisoft.NCache.Web.Command
{
    /// <summary>
    /// Holder for string constants shared by the remote-client command layer.
    /// Sealed with a private constructor: never instantiated, constants only.
    /// </summary>
    internal sealed class CommandOptions
    {
        // Prevent instantiation; this type exists only for its constants.
        private CommandOptions() { }

        /// <summary>
        /// Marker token used in the command protocol to flag an exception payload.
        /// </summary>
        internal const string EXC_INITIAL = "EXCEPTION";
    }
}
| modulexcite/NCache | Src/NCWebCache/Web/RemoteClient/Command/CommandOptions.cs | C# | apache-2.0 | 771 |
package app
import (
"net/http"
"time"
"golang.org/x/net/context"
"github.com/weaveworks/scope/probe/host"
"github.com/weaveworks/scope/report"
)
// makeRawReportHandler builds the handler for the raw-report endpoint: it
// fetches the current merged report from rep and serializes it to the client,
// or responds with 500 if report generation fails.
func makeRawReportHandler(rep Reporter) CtxHandlerFunc {
	return func(ctx context.Context, w http.ResponseWriter, r *http.Request) {
		rpt, err := rep.Report(ctx, time.Now())
		if err != nil {
			respondWith(w, http.StatusInternalServerError, err)
			return
		}
		respondWith(w, http.StatusOK, rpt)
	}
}
// probeDesc is the JSON shape returned by the probe-listing endpoint:
// one entry per probe known from the latest report.
type probeDesc struct {
	ID       string    `json:"id"`       // control probe ID (report.ControlProbeID)
	Hostname string    `json:"hostname"` // probe host name
	Version  string    `json:"version"`  // scope version the probe reported
	LastSeen time.Time `json:"lastSeen"` // timestamp of the probe's last report entry
}
// makeProbeHandler builds the handler for the probe-listing endpoint.
//
// With the "sparse" query parameter present it responds with a single boolean
// ("are any probes connected?"); otherwise it responds with a []probeDesc
// derived from the host nodes of the latest report.
func makeProbeHandler(rep Reporter) CtxHandlerFunc {
	return func(ctx context.Context, w http.ResponseWriter, r *http.Request) {
		// Best-effort parse: an unparsable query simply means no "sparse" flag.
		r.ParseForm()
		if _, sparse := r.Form["sparse"]; sparse {
			// if we have reports, we must have connected probes
			hasProbes, err := rep.HasReports(ctx, time.Now())
			if err != nil {
				respondWith(w, http.StatusInternalServerError, err)
				// Bug fix: must return here; previously execution fell through
				// and also wrote a 200 response with a zero-value payload on
				// top of the 500 (a superfluous second WriteHeader).
				return
			}
			respondWith(w, http.StatusOK, hasProbes)
			return
		}
		rpt, err := rep.Report(ctx, time.Now())
		if err != nil {
			respondWith(w, http.StatusInternalServerError, err)
			return
		}
		result := []probeDesc{}
		for _, n := range rpt.Host.Nodes {
			id, _ := n.Latest.Lookup(report.ControlProbeID)
			hostname, _ := n.Latest.Lookup(host.HostName)
			version, dt, _ := n.Latest.LookupEntry(host.ScopeVersion)
			result = append(result, probeDesc{
				ID:       id,
				Hostname: hostname,
				Version:  version,
				LastSeen: dt,
			})
		}
		respondWith(w, http.StatusOK, result)
	}
}
| kinvolk/scope | app/api_report.go | GO | apache-2.0 | 1,643 |
/**
* Copyright (c) 2016-present, RxJava Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
* the License for the specific language governing permissions and limitations under the License.
*/
package io.reactivex.internal.operators.maybe;
import io.reactivex.*;
import io.reactivex.disposables.Disposable;
import io.reactivex.internal.disposables.DisposableHelper;
import io.reactivex.internal.fuseable.HasUpstreamMaybeSource;
import java.util.concurrent.atomic.AtomicReference;
/**
* Subscribes to the other source if the main source is empty.
*
* @param <T> the value type
*/
public final class MaybeSwitchIfEmptySingle<T> extends Single<T> implements HasUpstreamMaybeSource<T> {

    /** The main (possibly empty) source. */
    final MaybeSource<T> source;

    /** The fallback subscribed to only when {@code source} completes empty. */
    final SingleSource<? extends T> other;

    public MaybeSwitchIfEmptySingle(MaybeSource<T> source, SingleSource<? extends T> other) {
        this.source = source;
        this.other = other;
    }

    @Override
    public MaybeSource<T> source() {
        return source;
    }

    @Override
    protected void subscribeActual(SingleObserver<? super T> observer) {
        source.subscribe(new SwitchIfEmptyMaybeObserver<T>(observer, other));
    }

    /**
     * Observes the main Maybe; relays success/error directly, and on empty
     * completion switches the subscription over to {@code other}.
     * The AtomicReference it extends holds the currently active upstream
     * Disposable so dispose() cancels whichever source is live.
     */
    static final class SwitchIfEmptyMaybeObserver<T>
    extends AtomicReference<Disposable>
    implements MaybeObserver<T>, Disposable {

        private static final long serialVersionUID = 4603919676453758899L;

        final SingleObserver<? super T> downstream;

        final SingleSource<? extends T> other;

        SwitchIfEmptyMaybeObserver(SingleObserver<? super T> actual, SingleSource<? extends T> other) {
            this.downstream = actual;
            this.other = other;
        }

        @Override
        public void dispose() {
            DisposableHelper.dispose(this);
        }

        @Override
        public boolean isDisposed() {
            return DisposableHelper.isDisposed(get());
        }

        @Override
        public void onSubscribe(Disposable d) {
            if (DisposableHelper.setOnce(this, d)) {
                downstream.onSubscribe(this);
            }
        }

        @Override
        public void onSuccess(T value) {
            downstream.onSuccess(value);
        }

        @Override
        public void onError(Throwable e) {
            downstream.onError(e);
        }

        @Override
        public void onComplete() {
            // Main source was empty: switch to the fallback Single, unless we
            // were disposed meanwhile. The CAS(d -> null) loses the race to a
            // concurrent dispose(), guaranteeing `other` is subscribed at most
            // once and never after disposal.
            Disposable d = get();
            if (d != DisposableHelper.DISPOSED) {
                if (compareAndSet(d, null)) {
                    other.subscribe(new OtherSingleObserver<T>(downstream, this));
                }
            }
        }

        /**
         * Relays the fallback Single's signals to the downstream observer,
         * storing the fallback's Disposable back into the shared parent
         * reference so the outer dispose() still reaches it.
         */
        static final class OtherSingleObserver<T> implements SingleObserver<T> {

            final SingleObserver<? super T> downstream;

            final AtomicReference<Disposable> parent;

            OtherSingleObserver(SingleObserver<? super T> actual, AtomicReference<Disposable> parent) {
                this.downstream = actual;
                this.parent = parent;
            }

            @Override
            public void onSubscribe(Disposable d) {
                DisposableHelper.setOnce(parent, d);
            }

            @Override
            public void onSuccess(T value) {
                downstream.onSuccess(value);
            }

            @Override
            public void onError(Throwable e) {
                downstream.onError(e);
            }

        }

    }
}
} | NiteshKant/RxJava | src/main/java/io/reactivex/internal/operators/maybe/MaybeSwitchIfEmptySingle.java | Java | apache-2.0 | 3,840 |
/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.plugin.configrepo.contract.material;
import com.google.gson.JsonObject;
import com.thoughtworks.go.plugin.configrepo.contract.AbstractCRTest;
import org.junit.jupiter.api.Test;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
public class CRConfigMaterialTest extends AbstractCRTest<CRConfigMaterial> {

    private final CRConfigMaterial named;
    private final CRConfigMaterial namedDest;
    private final CRConfigMaterial materialWithIgnores;
    private final CRConfigMaterial invalidList;

    public CRConfigMaterialTest() {
        named = new CRConfigMaterial("primary", null, null);
        namedDest = new CRConfigMaterial("primary", "folder", null);

        List<String> ignoredPatterns = new ArrayList<>();
        ignoredPatterns.add("externals");
        ignoredPatterns.add("tools");
        materialWithIgnores = new CRConfigMaterial("primary", "folder", new CRFilter(ignoredPatterns, false));

        // A filter carrying both ignores and includes is invalid by contract.
        CRFilter badFilter = new CRFilter(ignoredPatterns, false);
        badFilter.setIncludesNoCheck(ignoredPatterns);
        invalidList = new CRConfigMaterial("primary", "folder", badFilter);
    }

    @Override
    public void addGoodExamples(Map<String, CRConfigMaterial> examples) {
        examples.put("namedExample", named);
        examples.put("namedDest", namedDest);
        examples.put("ignoreFilter", materialWithIgnores);
    }

    @Override
    public void addBadExamples(Map<String, CRConfigMaterial> examples) {
        examples.put("invalidList", invalidList);
    }

    @Test
    public void shouldAppendTypeFieldWhenSerializingMaterials() {
        // Serialization must tag the material with its polymorphic type name.
        JsonObject serialized = (JsonObject) gson.toJsonTree(named);
        assertThat(serialized.get("type").getAsString(), is(CRConfigMaterial.TYPE_NAME));
    }

    @Test
    public void shouldHandlePolymorphismWhenDeserializing() {
        // A round trip through the CRMaterial base type must restore the subtype.
        String json = gson.toJson(named);
        CRConfigMaterial deserialized = (CRConfigMaterial) gson.fromJson(json, CRMaterial.class);
        assertThat("Deserialized value should equal to value before serialization",
                deserialized, is(named));
    }
}
| gocd/gocd | plugin-infra/go-plugin-config-repo/src/test/java/com/thoughtworks/go/plugin/configrepo/contract/material/CRConfigMaterialTest.java | Java | apache-2.0 | 2,880 |
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
# Autogenerated By : src/main/python/generator/generator.py
# Autogenerated From : scripts/builtin/garch.dml
from typing import Dict, Iterable
from systemds.operator import OperationNode, Matrix, Frame, List, MultiReturn, Scalar
from systemds.script_building.dag import OutputType
from systemds.utils.consts import VALID_INPUT_TYPES
def garch(X: Matrix,
          kmax: int,
          momentum: float,
          start_stepsize: float,
          end_stepsize: float,
          start_vicinity: float,
          end_vicinity: float,
          sim_seed: int,
          verbose: bool):
    """
    Fit a GARCH(1,1) model and simulate a process from the fitted coefficients
    (thin wrapper around the ``scripts/builtin/garch.dml`` builtin).

    :param X: The input Matrix to apply Arima on.
    :param kmax: Number of iterations
    :param momentum: Momentum for momentum-gradient descent (set to 0 to deactivate)
    :param start_stepsize: Initial gradient-descent stepsize
    :param end_stepsize: gradient-descent stepsize at end (linear descent)
    :param start_vicinity: proportion of randomness of restart-location for gradient descent at beginning
    :param end_vicinity: same at end (linear decay)
    :param sim_seed: seed for simulation of process on fitted coefficients
    :param verbose: verbosity, comments during fitting
    :return: 'OperationNode' containing simulated garch(1,1) process on fitted coefficients & variances of simulated fitted process & constant term of fitted process & 1-st arch-coefficient of fitted process & 1-st garch-coefficient of fitted process & drawbacks: slow convergence of optimization (sort of simulated annealing/gradient descent)
    """
    params_dict = {
        'X': X,
        'kmax': kmax,
        'momentum': momentum,
        'start_stepsize': start_stepsize,
        'end_stepsize': end_stepsize,
        'start_vicinity': start_vicinity,
        'end_vicinity': end_vicinity,
        'sim_seed': sim_seed,
        'verbose': verbose,
    }

    # The DML builtin returns five values: two matrices followed by three scalars.
    output_nodes = [node_type(X.sds_context, '')
                    for node_type in (Matrix, Matrix, Scalar, Scalar, Scalar)]

    op = MultiReturn(X.sds_context, 'garch', output_nodes, named_input_nodes=params_dict)
    # Wire every output node back to the MultiReturn op that produces it.
    for node in output_nodes:
        node._unnamed_input_nodes = [op]
    return op
| apache/incubator-systemml | src/main/python/systemds/operator/algorithm/builtin/garch.py | Python | apache-2.0 | 3,230 |
+++
Title = "Cancellation"
Type = "event"
Description = "Statement on what's next"
+++
With a heavy heart, the organizers of Eindhoven would like to inform you that we have - at this time - decided to cancel devopsdays Eindhoven for 2020. We did not take this decision lightly, and we took a large number of variables into account. However, ultimately, the health and well-being of all of our attendees, speakers and organizers are our top priority.
### What does this mean for you?
For our attendees who have already bought tickets for the events, we will - of course - be fully refunding your purchase. We will be issuing a full refund of the ticket purchase to the form of payment you used when you initially purchased the ticket. Our Event Management Solution, Eventbrite, will send you a confirmation email of the cancellation & refund. Please note that the refund may take up to 14 days to process.
### What if the situation changes?
When the current NL and global situation improves for the better in the months ahead, we will reassess the case, and if possible, put on an event later in the year.
### What's next
Although this means no first devopsdays this year for Eindhoven, the organizers of Amsterdam and Eindhoven are busy thinking of ways to bend with the times and create an informative, vibrant, and exciting event online. | gomex/devopsdays-web | content/events/2020-eindhoven/cancellation.md | Markdown | apache-2.0 | 1,351 |
package org.hl7.fhir.dstu3.model.codesystems;
/*
Copyright (c) 2011+, HL7, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of HL7 nor the names of its contributors may be used to
endorse or promote products derived from this software without specific
prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
*/
// Generated on Tue, Dec 6, 2016 09:42-0500 for FHIR v1.8.0
import org.hl7.fhir.dstu3.model.EnumFactory;
public class ObservationStatusEnumFactory implements EnumFactory<ObservationStatus> {

  /**
   * Resolves a code string to its {@link ObservationStatus} constant.
   * Returns null for a null/empty input, and throws for an unknown code.
   */
  public ObservationStatus fromCode(String codeString) throws IllegalArgumentException {
    if (codeString == null || "".equals(codeString))
      return null;
    switch (codeString) {
      case "registered":
        return ObservationStatus.REGISTERED;
      case "preliminary":
        return ObservationStatus.PRELIMINARY;
      case "final":
        return ObservationStatus.FINAL;
      case "amended":
        return ObservationStatus.AMENDED;
      case "cancelled":
        return ObservationStatus.CANCELLED;
      case "entered-in-error":
        return ObservationStatus.ENTEREDINERROR;
      case "unknown":
        return ObservationStatus.UNKNOWN;
      default:
        throw new IllegalArgumentException("Unknown ObservationStatus code '"+codeString+"'");
    }
  }

  /**
   * Renders an {@link ObservationStatus} constant as its code string,
   * or "?" when the value is null or unrecognized.
   */
  public String toCode(ObservationStatus code) {
    if (code == null)
      return "?"; // original fell through every comparison for null
    switch (code) {
      case REGISTERED:
        return "registered";
      case PRELIMINARY:
        return "preliminary";
      case FINAL:
        return "final";
      case AMENDED:
        return "amended";
      case CANCELLED:
        return "cancelled";
      case ENTEREDINERROR:
        return "entered-in-error";
      case UNKNOWN:
        return "unknown";
      default:
        return "?";
    }
  }

  public String toSystem(ObservationStatus code) {
    return code.getSystem();
  }
}
| Gaduo/hapi-fhir | hapi-fhir-structures-dstu3/src/main/java/org/hl7/fhir/dstu3/model/codesystems/ObservationStatusEnumFactory.java | Java | apache-2.0 | 3,374 |
/*
* Copyright 2013 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.examples.nqueens.app;
import java.io.File;
import org.junit.Test;
import org.optaplanner.benchmark.api.PlannerBenchmarkException;
import org.optaplanner.benchmark.api.PlannerBenchmarkFactory;
import org.optaplanner.benchmark.config.PlannerBenchmarkConfig;
import org.optaplanner.examples.common.app.PlannerBenchmarkTest;
public class BrokenNQueensBenchmarkTest extends PlannerBenchmarkTest {

    @Override
    protected String createBenchmarkConfigResource() {
        return "org/optaplanner/examples/nqueens/benchmark/nqueensBenchmarkConfig.xml";
    }

    @Override
    protected PlannerBenchmarkFactory buildPlannerBenchmarkFactory(File unsolvedDataFile) {
        PlannerBenchmarkFactory factory = super.buildPlannerBenchmarkFactory(unsolvedDataFile);
        PlannerBenchmarkConfig config = factory.getPlannerBenchmarkConfig();
        // No warm-up: the intentionally broken run should fail fast.
        config.setWarmUpSecondsSpentLimit(0L);
        // A negative step count limit intentionally crashes the solver,
        // which is exactly what this test expects.
        config.getInheritedSolverBenchmarkConfig().getSolverConfig().getTerminationConfig()
                .setStepCountLimit(-100);
        return factory;
    }

    // ************************************************************************
    // Tests
    // ************************************************************************

    @Test(timeout = 100000, expected = PlannerBenchmarkException.class)
    public void benchmarkBroken8queens() {
        runBenchmarkTest(new File("data/nqueens/unsolved/8queens.xml"));
    }
}
| gsheldon/optaplanner | optaplanner-examples/src/test/java/org/optaplanner/examples/nqueens/app/BrokenNQueensBenchmarkTest.java | Java | apache-2.0 | 2,156 |
# raptor-sdk
The Raptor Java SDK is part of the raptor platform and used extensively in the codebase.
It can be reused as a standalone library in an external application for direct integration with the exposed API
- [Requirements](#requirements)
- [Setup](#Setup)
- [Authentication](#authentication)
- [Inventory](#inventory)
- [List devices](#list-devices)
- [Create a device](#create-a-device)
- [Update a device](#update-a-device)
- [Load a device](#load-a-device)
- [Search for devices](#search-for-devices)
  - [Event notifications](#event-notifications)
- [Watch device events](#watch-device-events)
- [Watch data events](#watch-data-events)
    - [Watch action events](#watch-action-events)
- [Stream](#stream)
- [Push data](#push-data)
- [Pull data](#pull-data)
- [Last update](#last-update)
- [Drop data](#drop-data)
- [Search for data](#search-for-data)
- [Search by time](#search-by-time)
- [Search by numeric range](#search-by-numeric-range)
- [Search by distance](#search-by-distance)
- [Search by bounding box](#search-by-bounding-box)
- [Action](#action)
- [Set status](#set-status)
- [Get status](#get-status)
- [Invoke an action](#invoke-an-action)
- [Profile](#profile)
- [Set a value](#set-a-value)
- [Get a value](#get-a-value)
- [Get all values](#get-all-values)
- [Tree](#tree)
- [Create a node](#create-a-node)
- [Create a device node](#create-a-device-node)
- [List trees](#list-trees)
- [Delete a node](#delete-a-node)
- [Admin](#admin)
## Requirements
- Java 8 or higher
- Maven
## Setup
Import in your project the `raptor-sdk` maven package.
## Authentication
Let's start by initializing a raptor client instance
```java
// login with username and password
Raptor raptor = new Raptor("http://raptor.local", "admin", "admin")
// alternatively, login with a token
// Raptor raptor = new Raptor("http://raptor.local", "..token..")
// login and retrieve a token
AuthClient.LoginState loginInfo = raptor.Auth().login();
log.debug("Welcome {} (token: {})", loginInfo.user.getUsername(), loginInfo.token);
// close the session and drop the login token
raptor.Auth().logout();
```
## Inventory
The inventory API stores device definitions
### List devices
List devices owned by a user
```java
List<Device> list = raptor.Inventory().list();
log.debug("found {} devices", list.size());
```
### Create a device
Create a new device definition
```java
Device dev = new Device();
dev.name("test device")
.description("info about");
dev.properties().put("active", true);
dev.properties().put("cost", 15000L);
dev.properties().put("version", "4.0.0");
dev.validate();
raptor.Inventory().create(dev);
log.debug("Device created {}", dev.id());
```
### Update a device
Update a device definition
```java
// Create a data stream named ambient with a channel temperature of type number
Stream s = dev.addStream("ambient", "temperature", "number");
//Add further channels of different types
s.addChannel("info", "text")
s.addChannel("alarm", "boolean")
// add an action
Action a = dev.addAction("light-control");
raptor.Inventory().update(dev);
log.debug("Device updated: \n {}", dev.toJSON());
```
### Load a device
Load a device definition
```java
Device dev1 = raptor.Inventory().load(dev.id());
log.debug("Device loaded: \n {}", dev.toJSON());
```
### Search for devices
Search for device definitions
```java
DeviceQuery q = new DeviceQuery();
// all devices which name contains `test`
q.name.contains("test");
// and properties.version equals to 4.0.0
q.properties.has("version", "4.0.0");
log.debug("Searching for {}", q.toJSON().toString());
List<Device> results = raptor.Inventory().search(q);
log.debug("Results found {}", results.stream().map(d -> d.name()).collect(Collectors.toList()));
```
### Event notifications
When a device receive data, an action is triggered or the definition changes events are emitted over an asynchronous MQTT channel.
#### Watch device events
Device events are notified when a device definition changes
```java
raptor.Inventory().subscribe(dev, new DeviceCallback() {
@Override
public void callback(Device obj, DevicePayload message) {
log.debug("Device event received {}", message.toString());
}
});
```
#### Watch data events
Data events are generated when a stream is updated
```java
raptor.Inventory().subscribe(dev, new DataCallback() {
@Override
public void callback(Stream stream, RecordSet record) {
log.debug("dev: Data received {}", record.toJson());
}
});
```
#### Watch action events
Action events are generated when an action is triggered or the status changes
```java
raptor.Inventory().subscribe(dev, new ActionCallback() {
@Override
public void callback(Action action, ActionPayload payload) {
log.debug("dev: Data received for {}: {}",
payload.actionId,
payload.data
);
}
});
```
## Stream
The Stream API handles data push and retrieval
### Push data
Send data based on a stream definition
```java
Stream stream = dev.getStream("ambient")
RecordSet record = new RecordSet(stream)
.channel("temperature", 5)
.channel("info", "cold")
.channel("alarm", true)
.location(new GeoJsonPoint(11, 45))
.timestamp(Instant.now())
;
raptor.Stream().push(record)
```
### Pull data
Retrieve data
```java
// return 100 records from 10
int from = 10,
size = 100;
ResultSet results = raptor.Stream().pull(stream, from, size);
```
### Last update
Retrieve the last record sent based on the timestamp
```java
ResultSet results = raptor.Stream().lastUpdate(stream);
```
### Drop data
Remove the data stored in a stream
```java
raptor.Stream().delete(stream);
```
### Search for data
#### Search by time
Search for a range in the data timestamp
```java
Instant i = Instant.now()
DataQuery q = new DataQuery()
.timeRange(
i.plus(500, ChronoUnit.MILLIS),
i.plus(2500, ChronoUnit.MILLIS)
);
log.debug("Searching {}", q.toJSON().toString());
ResultSet results = raptor.Stream().search(stream, q);
```
#### Search by numeric range
Search for a range in a numeric field
```java
DataQuery q = new DataQuery()
.range("temperature", -10, 10);
log.debug("Searching {}", q.toJSON().toString());
ResultSet results = raptor.Stream().search(stream, q);
```
#### Search by distance
Search data by distance using the `location` field
```java
DataQuery q = new DataQuery()
.distance(new GeoJsonPoint(11.45, 45.11), 10000, Metrics.KILOMETERS);
log.debug("Searching {}", q.toJSON().toString());
ResultSet results = raptor.Stream().search(stream, q);
```
#### Search by bounding box
Search data within an area using the `location` field
```java
DataQuery q = new DataQuery()
  .boundingBox(new GeoJsonPoint(12, 45), new GeoJsonPoint(10, 44));
log.debug("Searching {}", q.toJSON().toString());
ResultSet results = raptor.Stream().search(stream, q);
```
## Action
The Action API handles status and triggering of device defined actions
### Set status
Store the current status of an action
```java
Action a = dev.action("light-control");
ActionStatus status = raptor.Action()
.setStatus(a, a.getStatus().status("on"));
```
### Get status
Get the current stored status of an action
```java
Action a = dev.action("light-control");
ActionStatus status = raptor.Action()
.getStatus(a);
```
### Invoke an action
Trigger an action on the remote device
```java
Action a = dev.action("light-control");
ActionStatus status = raptor.Action()
.invoke(a);
```
Set the status of an action
## Profile
The Profile API handles a per-user key-value local store
### Set a value
Set a value by key
```java
ObjectNode json = r.Profile().newObjectNode();
json.put("test", "foo");
json.put("size", 1000L);
json.put("valid", true);
r.Profile().set("test1", json);
```
### Get a value
Get a value by key
```java
JsonNode response = r.Profile().get("test1");
```
### Get all values
Get all values
```java
JsonNode response = r.Profile().get();
```
## Tree
The Tree API handles hierarchical data structures
### Create a node
Create a generic (type `group`) node tree
```java
TreeNode node1 = TreeNode.create("Root1");
raptor.Tree().create(node1);
TreeNode child1 = TreeNode.create("child1");
TreeNode child2 = TreeNode.create("child2");
raptor.Tree().add(node1, Arrays.asList(child1));
raptor.Tree().add(child1, Arrays.asList(child2));
```
### Create a device node
Create a device references inside the tree. Events from that device will be propagated to the parent nodes up to the root
```java
raptor.Tree().add(child2, dev);
```
### List trees
List all the trees available
```java
List<TreeNode> list = raptor.Tree().list();
```
### Delete a node
Delete a node, causing all the leaves to point to the parent. In the case of `device` node references, this will not remove the device
```java
// drop child1 from previous example, child2 will be now direct children of Root1
raptor.Tree().remove(
node1 //Root1
.children().get(0) // child1
);
```
## Admin
Admin APIs allow the management of users, tokens and permissions
For an up to date reference see the [tests](https://github.com/raptorbox/raptor/tree/master/raptor-sdk/src/test/java/org/createnet/raptor/sdk/admin)
## License
Apache2
```
Copyright FBK/CREATE-NET
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
```
| raptorbox/raptor | raptor-sdk/README.md | Markdown | apache-2.0 | 9,938 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.accumulo.monitor.util.celltypes;
import java.io.Serializable;
import java.util.Comparator;
/**
 * Base class for monitor table cell renderers: a cell type knows how to
 * format a value for display, how the cell should be aligned, and how two
 * values compare for column sorting (via {@link Comparator}).
 *
 * @param <T> the value type this cell compares for sorting
 */
public abstract class CellType<T> implements Comparator<T>, Serializable {
  private static final long serialVersionUID = 1L;
  // Whether the column backed by this cell type may be sorted; on by default.
  private boolean sortable = true;

  /**
   * @return the alignment keyword for this cell — presumably an HTML/CSS
   *         align value such as "left" or "right"; confirm against callers.
   */
  public abstract String alignment();

  /** Formats the given raw value for display. */
  public abstract String format(Object obj);

  public final void setSortable(boolean sortable) {
    this.sortable = sortable;
  }

  public final boolean isSortable() {
    return sortable;
  }
}
| adamjshook/accumulo | server/monitor/src/main/java/org/apache/accumulo/monitor/util/celltypes/CellType.java | Java | apache-2.0 | 1,314 |
package io.cattle.platform.process.dao.impl;
import static io.cattle.platform.core.model.tables.AccountTable.*;
import io.cattle.platform.core.model.Account;
import io.cattle.platform.db.jooq.dao.impl.AbstractJooqDao;
import io.cattle.platform.process.dao.AccountDao;
public class AccountDaoImpl extends AbstractJooqDao implements AccountDao {

    /**
     * Looks up the account whose UUID column matches the given value.
     *
     * @param uuid the account UUID to match exactly
     * @return the matching account, or {@code null} when none exists
     */
    @Override
    public Account findByUuid(String uuid) {
        return create().selectFrom(ACCOUNT).where(ACCOUNT.UUID.eq(uuid)).fetchOne();
    }
}
| cloudnautique/cloud-cattle | code/iaas/logic/src/main/java/io/cattle/platform/process/dao/impl/AccountDaoImpl.java | Java | apache-2.0 | 551 |
package net.ros.client.render;
import com.google.common.collect.ImmutableList;
import net.minecraft.block.state.IBlockState;
import net.minecraft.client.Minecraft;
import net.minecraft.client.renderer.block.model.BakedQuad;
import net.minecraft.client.renderer.block.model.IBakedModel;
import net.minecraft.client.renderer.block.model.ItemCameraTransforms;
import net.minecraft.client.renderer.block.model.ItemOverrideList;
import net.minecraft.client.renderer.texture.TextureAtlasSprite;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.item.ItemStack;
import net.minecraft.util.EnumFacing;
import net.minecraft.world.World;
import net.ros.client.render.model.ModelCacheManager;
import net.ros.client.render.model.obj.PipeOBJStates;
import net.ros.client.render.model.obj.ROSOBJState;
import net.ros.common.block.BlockPipeBase;
import org.apache.commons.lang3.tuple.Pair;
import javax.annotation.Nonnull;
import javax.vecmath.Matrix4f;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ModelPipeInventory implements IBakedModel
{
private final Map<ROSOBJState, CompositeBakedModel> CACHE = new HashMap<>();
private final BlockPipeBase pipeBlock;
public ModelPipeInventory(BlockPipeBase pipeBlock)
{
this.pipeBlock = pipeBlock;
}
@Nonnull
@Override
public List<BakedQuad> getQuads(IBlockState state, EnumFacing face, long rand)
{
return Collections.emptyList();
}
private CompositeBakedModel getModel(ROSOBJState pipeState)
{
if (CACHE.containsKey(pipeState))
return CACHE.get(pipeState);
else
{
CompositeBakedModel model = new CompositeBakedModel(ModelCacheManager.getPipeQuads(pipeBlock, pipeState),
Minecraft.getMinecraft().getBlockRendererDispatcher()
.getModelForState(pipeBlock.getDefaultState()));
CACHE.put(pipeState, model);
return model;
}
}
@Nonnull
@Override
public ItemOverrideList getOverrides()
{
return itemHandler;
}
@Override
public boolean isAmbientOcclusion()
{
return false;
}
@Override
public boolean isGui3d()
{
return true;
}
@Override
public boolean isBuiltInRenderer()
{
return false;
}
@Nonnull
@Override
public TextureAtlasSprite getParticleTexture()
{
return Minecraft.getMinecraft().getTextureMapBlocks().getAtlasSprite("minecraft:blocks/dirt");
}
@Nonnull
@Override
public ItemCameraTransforms getItemCameraTransforms()
{
return ItemCameraTransforms.DEFAULT;
}
private static class CompositeBakedModel implements IBakedModel
{
private IBakedModel pipeModel;
private final List<BakedQuad> genQuads;
CompositeBakedModel(List<BakedQuad> pipeQuads, IBakedModel pipeModel)
{
this.pipeModel = pipeModel;
ImmutableList.Builder<BakedQuad> genBuilder = ImmutableList.builder();
genBuilder.addAll(pipeQuads);
genQuads = genBuilder.build();
}
@Nonnull
@Override
public List<BakedQuad> getQuads(IBlockState state, EnumFacing face, long rand)
{
return face == null ? genQuads : Collections.emptyList();
}
@Override
public boolean isAmbientOcclusion()
{
return pipeModel.isAmbientOcclusion();
}
@Override
public boolean isGui3d()
{
return pipeModel.isGui3d();
}
@Override
public boolean isBuiltInRenderer()
{
return pipeModel.isBuiltInRenderer();
}
@Nonnull
@Override
public TextureAtlasSprite getParticleTexture()
{
return pipeModel.getParticleTexture();
}
@Nonnull
@Override
public ItemOverrideList getOverrides()
{
return ItemOverrideList.NONE;
}
@Override
public Pair<? extends IBakedModel, Matrix4f> handlePerspective(ItemCameraTransforms.TransformType
cameraTransformType)
{
return Pair.of(this, pipeModel.handlePerspective(cameraTransformType).getRight());
}
}
private final ItemOverrideList itemHandler = new ItemOverrideList(ImmutableList.of())
{
@Nonnull
@Override
public IBakedModel handleItemState(@Nonnull IBakedModel model, ItemStack stack, World world,
EntityLivingBase entity)
{
return ModelPipeInventory.this.getModel(PipeOBJStates.getVisibilityState(
pipeBlock.getPipeType().getSize(), EnumFacing.WEST, EnumFacing.EAST));
}
};
}
| mantal/Qbar | content/logistic/src/main/java/net/ros/client/render/ModelPipeInventory.java | Java | apache-2.0 | 4,971 |
package hulk.http.response
import akka.http.scaladsl.model.HttpEntity.Strict
import akka.http.scaladsl.model.{ContentType, ResponseEntity}
import akka.util.ByteString
/**
* Created by reweber on 24/12/2015
*/
/**
 * Immutable HTTP response payload: a media type plus the raw bytes to send.
 *
 * @param contentType media type describing how `data` should be interpreted
 * @param data        the response payload bytes
 */
case class HttpResponseBody(contentType: ContentType, data: ByteString)
object HttpResponseBody {
  /**
   * Converts a [[HttpResponseBody]] into Akka HTTP's strict (fully buffered)
   * response entity; `private[hulk]` keeps the implicit from leaking to clients.
   */
  implicit private[hulk] def toResponseEntity(httpResponseBody: HttpResponseBody): ResponseEntity = {
    Strict(httpResponseBody.contentType, httpResponseBody.data)
  }
} | reneweb/hulk | framework/src/main/scala/hulk/http/response/HttpResponseBody.scala | Scala | apache-2.0 | 485
<?php
declare(strict_types=1);
namespace App\Radio;
use App\Entity;
use App\Environment;
use App\Exception\Supervisor\AlreadyRunningException;
use App\Exception\Supervisor\BadNameException;
use App\Exception\Supervisor\NotRunningException;
use App\Exception\SupervisorException;
use Doctrine\ORM\EntityManagerInterface;
use Psr\EventDispatcher\EventDispatcherInterface;
use Psr\Log\LoggerInterface;
use Supervisor\Exception\Fault;
use Supervisor\Exception\SupervisorException as SupervisorLibException;
use Supervisor\Process;
use Supervisor\Supervisor;
abstract class AbstractAdapter
{
    /**
     * Uses PHP 8 constructor property promotion; every collaborator is stored
     * as a protected property for use by the concrete radio adapters.
     */
    public function __construct(
        protected Environment $environment,
        protected EntityManagerInterface $em,
        protected Supervisor $supervisor,
        protected EventDispatcherInterface $dispatcher,
        protected LoggerInterface $logger
    ) {
    }
/**
* Write configuration from Station object to the external service.
*
* @param Entity\Station $station
*
* @return bool Whether the newly written configuration differs from what was already on disk.
*/
public function write(Entity\Station $station): bool
{
$configPath = $this->getConfigurationPath($station);
if (null === $configPath) {
return false;
}
$currentConfig = (is_file($configPath))
? file_get_contents($configPath)
: null;
$newConfig = $this->getCurrentConfiguration($station);
file_put_contents($configPath, $newConfig);
return 0 !== strcmp($currentConfig ?: '', $newConfig ?: '');
}
    /**
     * Generate the configuration for this adapter as it would exist with current database settings.
     *
     * Base implementation returns null (no configuration); file-configured
     * adapters override this.
     *
     * @param Entity\Station $station
     *
     * @return string|null the full configuration contents, or null when not applicable
     */
    public function getCurrentConfiguration(Entity\Station $station): ?string
    {
        return null;
    }
/**
* Returns the main path where configuration data is stored for this adapter.
*
*/
public function getConfigurationPath(Entity\Station $station): ?string
{
return null;
}
/**
* Indicate if the adapter in question is installed on the server.
*/
public function isInstalled(): bool
{
return (null !== $this->getBinary());
}
/**
* Return the binary executable location for this item.
*
* @return string|null Returns either the path to the binary if it exists or null for no binary.
*/
public function getBinary(): ?string
{
return null;
}
/**
* Check if the service is running.
*
* @param Entity\Station $station
*/
public function isRunning(Entity\Station $station): bool
{
if (!$this->hasCommand($station)) {
return true;
}
$program_name = $this->getProgramName($station);
try {
$process = $this->supervisor->getProcess($program_name);
return $process instanceof Process && $process->isRunning();
} catch (Fault\BadNameException) {
return false;
}
}
/**
* Return a boolean indicating whether the adapter has an executable command associated with it.
*
* @param Entity\Station $station
*/
public function hasCommand(Entity\Station $station): bool
{
if ($this->environment->isTesting() || !$station->getIsEnabled()) {
return false;
}
return ($this->getCommand($station) !== null);
}
/**
* Return the shell command required to run the program.
*
* @param Entity\Station $station
*/
public function getCommand(Entity\Station $station): ?string
{
return null;
}
/**
* Return the program's fully qualified supervisord name.
*
* @param Entity\Station $station
*/
abstract public function getProgramName(Entity\Station $station): string;
/**
* Restart the executable service.
*
* @param Entity\Station $station
*/
public function restart(Entity\Station $station): void
{
$this->stop($station);
$this->start($station);
}
/**
* @return bool Whether this adapter supports a non-destructive reload.
*/
public function supportsReload(): bool
{
return false;
}
/**
* Execute a non-destructive reload if the adapter supports it.
*
* @param Entity\Station $station
*/
public function reload(Entity\Station $station): void
{
$this->restart($station);
}
/**
* Stop the executable service.
*
* @param Entity\Station $station
*
* @throws SupervisorException
* @throws NotRunningException
*/
public function stop(Entity\Station $station): void
{
if ($this->hasCommand($station)) {
$program_name = $this->getProgramName($station);
try {
$this->supervisor->stopProcess($program_name);
$this->logger->info(
'Adapter "' . static::class . '" stopped.',
['station_id' => $station->getId(), 'station_name' => $station->getName()]
);
} catch (SupervisorLibException $e) {
$this->handleSupervisorException($e, $program_name, $station);
}
}
}
/**
* Start the executable service.
*
* @param Entity\Station $station
*
* @throws SupervisorException
* @throws AlreadyRunningException
*/
public function start(Entity\Station $station): void
{
if ($this->hasCommand($station)) {
$program_name = $this->getProgramName($station);
try {
$this->supervisor->startProcess($program_name);
$this->logger->info(
'Adapter "' . static::class . '" started.',
['station_id' => $station->getId(), 'station_name' => $station->getName()]
);
} catch (SupervisorLibException $e) {
$this->handleSupervisorException($e, $program_name, $station);
}
}
}
/**
* Internal handling of any Supervisor-related exception, to add richer data to it.
*
* @param SupervisorLibException $e
* @param string $program_name
* @param Entity\Station $station
*
* @throws AlreadyRunningException
* @throws BadNameException
* @throws NotRunningException
* @throws SupervisorException
*/
protected function handleSupervisorException(
SupervisorLibException $e,
string $program_name,
Entity\Station $station
): void {
$class_parts = explode('\\', static::class);
$class_name = array_pop($class_parts);
if ($e instanceof Fault\BadNameException) {
$e_headline = __('%s is not recognized as a service.', $class_name);
$e_body = __('It may not be registered with Supervisor yet. Restarting broadcasting may help.');
$app_e = new BadNameException(
$e_headline . '; ' . $e_body,
$e->getCode(),
$e
);
} elseif ($e instanceof Fault\AlreadyStartedException) {
$e_headline = __('%s cannot start', $class_name);
$e_body = __('It is already running.');
$app_e = new AlreadyRunningException(
$e_headline . '; ' . $e_body,
$e->getCode(),
$e
);
} elseif ($e instanceof Fault\NotRunningException) {
$e_headline = __('%s cannot stop', $class_name);
$e_body = __('It is not running.');
$app_e = new NotRunningException(
$e_headline . '; ' . $e_body,
$e->getCode(),
$e
);
} else {
$e_headline = __('%s encountered an error', $class_name);
// Get more detailed information for more significant errors.
$process_log = $this->supervisor->tailProcessStdoutLog($program_name, 0, 500);
$process_log = array_values(array_filter(explode("\n", $process_log[0])));
$process_log = array_slice($process_log, -6);
$e_body = (!empty($process_log))
? implode('<br>', $process_log)
: __('Check the log for details.');
$app_e = new SupervisorException($e_headline, $e->getCode(), $e);
$app_e->addExtraData('supervisor_log', $process_log);
$app_e->addExtraData('supervisor_process_info', $this->supervisor->getProcessInfo($program_name));
}
$app_e->setFormattedMessage('<b>' . $e_headline . '</b><br>' . $e_body);
$app_e->addLoggingContext('station_id', $station->getId());
$app_e->addLoggingContext('station_name', $station->getName());
throw $app_e;
}
/**
* Return the path where logs are written to.
*
* @param Entity\Station $station
*/
public function getLogPath(Entity\Station $station): string
{
$config_dir = $station->getRadioConfigDir();
$class_parts = explode('\\', static::class);
$class_name = array_pop($class_parts);
return $config_dir . '/' . strtolower($class_name) . '.log';
}
}
| AzuraCast/AzuraCast | src/Radio/AbstractAdapter.php | PHP | apache-2.0 | 9,344 |
---
datafolder: engine-cli
datafile: docker_run
title: docker run
---
<!--
Sorry, but the contents of this page are automatically generated from
Docker's source code. If you want to suggest a change to the text that appears
here, you'll need to find the string by searching this repo:
https://www.github.com/docker/docker
-->
{% include cli.md %}
| BSWANG/denverdino.github.io | engine/reference/commandline/run.md | Markdown | apache-2.0 | 348 |
import React, { Component, Fragment } from 'react';
import { navigate } from '@reach/router';
import PropTypes from 'prop-types';
import { Dropdown, DropdownToggle, DropdownMenu, DropdownItem } from 'reactstrap';
import { siteRoot, gettext, orgID } from '../../utils/constants';
import { seafileAPI } from '../../utils/seafile-api';
import { Utils } from '../../utils/utils';
import toaster from '../../components/toast';
import OrgGroupInfo from '../../models/org-group';
import MainPanelTopbar from './main-panel-topbar';
class Search extends React.Component {
constructor(props) {
super(props);
this.state = {
value: ''
};
}
handleInputChange = (e) => {
this.setState({
value: e.target.value
});
}
handleKeyPress = (e) => {
if (e.key == 'Enter') {
e.preventDefault();
this.handleSubmit();
}
}
handleSubmit = () => {
const value = this.state.value.trim();
if (!value) {
return false;
}
this.props.submit(value);
}
render() {
return (
<div className="input-icon">
<i className="d-flex input-icon-addon fas fa-search"></i>
<input
type="text"
className="form-control search-input h-6 mr-1"
style={{width: '15rem'}}
placeholder={this.props.placeholder}
value={this.state.value}
onChange={this.handleInputChange}
onKeyPress={this.handleKeyPress}
autoComplete="off"
/>
</div>
);
}
}
class OrgGroups extends Component {
constructor(props) {
super(props);
this.state = {
page: 1,
pageNext: false,
orgGroups: [],
isItemFreezed: false
};
}
componentDidMount() {
let page = this.state.page;
this.initData(page);
}
initData = (page) => {
seafileAPI.orgAdminListOrgGroups(orgID, page).then(res => {
let orgGroups = res.data.groups.map(item => {
return new OrgGroupInfo(item);
});
this.setState({
orgGroups: orgGroups,
pageNext: res.data.page_next,
page: res.data.page,
});
}).catch(error => {
let errMessage = Utils.getErrorMsg(error);
toaster.danger(errMessage);
});
}
onChangePageNum = (e, num) => {
e.preventDefault();
let page = this.state.page;
if (num == 1) {
page = page + 1;
} else {
page = page - 1;
}
this.initData(page);
}
onFreezedItem = () => {
this.setState({isItemFreezed: true});
}
onUnfreezedItem = () => {
this.setState({isItemFreezed: false});
}
deleteGroupItem = (group) => {
seafileAPI.orgAdminDeleteOrgGroup(orgID, group.id).then(res => {
this.setState({
orgGroups: this.state.orgGroups.filter(item => item.id != group.id)
});
let msg = gettext('Successfully deleted {name}');
msg = msg.replace('{name}', group.groupName);
toaster.success(msg);
}).catch(error => {
let errMessage = Utils.getErrorMsg(error);
toaster.danger(errMessage);
});
}
searchItems = (keyword) => {
navigate(`${siteRoot}org/groupadmin/search-groups/?query=${encodeURIComponent(keyword)}`);
}
getSearch = () => {
return <Search
placeholder={gettext('Search groups by name')}
submit={this.searchItems}
/>;
}
render() {
let groups = this.state.orgGroups;
return (
<Fragment>
<MainPanelTopbar search={this.getSearch()}/>
<div className="main-panel-center flex-row">
<div className="cur-view-container">
<div className="cur-view-path">
<h3 className="sf-heading">{gettext('All Groups')}</h3>
</div>
<div className="cur-view-content">
<table>
<thead>
<tr>
<th width="30%">{gettext('Name')}</th>
<th width="35%">{gettext('Creator')}</th>
<th width="23%">{gettext('Created At')}</th>
<th width="12%" className="text-center">{gettext('Operations')}</th>
</tr>
</thead>
<tbody>
{groups.map(item => {
return (
<GroupItem
key={item.id}
group={item}
isItemFreezed={this.state.isItemFreezed}
onFreezedItem={this.onFreezedItem}
onUnfreezedItem={this.onUnfreezedItem}
deleteGroupItem={this.deleteGroupItem}
/>
);
})}
</tbody>
</table>
<div className="paginator">
{this.state.page != 1 && <a href="#" onClick={(e) => this.onChangePageNum(e, -1)}>{gettext('Previous')}</a>}
{(this.state.page != 1 && this.state.pageNext) && <span> | </span>}
{this.state.pageNext && <a href="#" onClick={(e) => this.onChangePageNum(e, 1)}>{gettext('Next')}</a>}
</div>
</div>
</div>
</div>
</Fragment>
);
}
}
const GroupItemPropTypes = {
group: PropTypes.object.isRequired,
isItemFreezed: PropTypes.bool.isRequired,
onFreezedItem: PropTypes.func.isRequired,
onUnfreezedItem: PropTypes.func.isRequired,
deleteGroupItem: PropTypes.func.isRequired,
};
class GroupItem extends React.Component {
constructor(props) {
super(props);
this.state = {
highlight: false,
showMenu: false,
isItemMenuShow: false
};
}
onMouseEnter = () => {
if (!this.props.isItemFreezed) {
this.setState({
showMenu: true,
highlight: true,
});
}
}
onMouseLeave = () => {
if (!this.props.isItemFreezed) {
this.setState({
showMenu: false,
highlight: false
});
}
}
onDropdownToggleClick = (e) => {
e.preventDefault();
this.toggleOperationMenu(e);
}
toggleOperationMenu = (e) => {
e.stopPropagation();
this.setState(
{isItemMenuShow: !this.state.isItemMenuShow }, () => {
if (this.state.isItemMenuShow) {
this.props.onFreezedItem();
} else {
this.setState({
highlight: false,
showMenu: false,
});
this.props.onUnfreezedItem();
}
}
);
}
toggleDelete = () => {
this.props.deleteGroupItem(this.props.group);
}
renderGroupHref = (group) => {
let groupInfoHref;
if (group.creatorName == 'system admin') {
groupInfoHref = siteRoot + 'org/departmentadmin/groups/' + group.id + '/';
} else {
groupInfoHref = siteRoot + 'org/groupadmin/' + group.id + '/';
}
return groupInfoHref;
}
renderGroupCreator = (group) => {
let userInfoHref = siteRoot + 'org/useradmin/info/' + group.creatorEmail + '/';
if (group.creatorName == 'system admin') {
return (
<td> -- </td>
);
} else {
return(
<td>
<a href={userInfoHref} className="font-weight-normal">{group.creatorName}</a>
</td>
);
}
}
render() {
let { group } = this.props;
let isOperationMenuShow = (group.creatorName != 'system admin') && this.state.showMenu;
return (
<tr className={this.state.highlight ? 'tr-highlight' : ''} onMouseEnter={this.onMouseEnter} onMouseLeave={this.onMouseLeave}>
<td>
<a href={this.renderGroupHref(group)} className="font-weight-normal">{group.groupName}</a>
</td>
{this.renderGroupCreator(group)}
<td>{group.ctime}</td>
<td className="text-center cursor-pointer">
{isOperationMenuShow &&
<Dropdown isOpen={this.state.isItemMenuShow} toggle={this.toggleOperationMenu}>
<DropdownToggle
tag="a"
className="attr-action-icon fas fa-ellipsis-v"
title={gettext('More Operations')}
data-toggle="dropdown"
aria-expanded={this.state.isItemMenuShow}
onClick={this.onDropdownToggleClick}
/>
<DropdownMenu>
<DropdownItem onClick={this.toggleDelete}>{gettext('Delete')}</DropdownItem>
</DropdownMenu>
</Dropdown>
}
</td>
</tr>
);
}
}
GroupItem.propTypes = GroupItemPropTypes;
export default OrgGroups;
| miurahr/seahub | frontend/src/pages/org-admin/org-groups.js | JavaScript | apache-2.0 | 8,470 |
/*
* Copyright 2010-2020 JetBrains s.r.o. and Kotlin Programming Language contributors.
* Use of this source code is governed by the Apache 2.0 license that can be found in the license/LICENSE.txt file.
*/
package org.jetbrains.kotlin.idea.fir.low.level.api.sessions;
import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.test.JUnit3RunnerWithInners;
import org.jetbrains.kotlin.test.KotlinTestUtils;
import org.jetbrains.kotlin.test.TestMetadata;
import org.junit.runner.RunWith;
import java.io.File;
import java.util.regex.Pattern;
/** This class is generated by {@link org.jetbrains.kotlin.generators.tests.TestsPackage}. DO NOT MODIFY MANUALLY */
@SuppressWarnings("all")
@TestMetadata("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation")
@TestDataPath("$PROJECT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
public class SessionsInvalidationTestGenerated extends AbstractSessionsInvalidationTest {
private void runTest(String testDataFilePath) throws Exception {
KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
}
public void testAllFilesPresentInSessionInvalidation() throws Exception {
KotlinTestUtils.assertAllTestsPresentByMetadataWithExcluded(this.getClass(), new File("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation"), Pattern.compile("^([^\\.]+)$"), null, false);
}
@TestMetadata("binaryTree")
public void testBinaryTree() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/binaryTree/");
}
@TestMetadata("binaryTreeNoInvalidated")
public void testBinaryTreeNoInvalidated() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/binaryTreeNoInvalidated/");
}
@TestMetadata("binaryTreeWithAdditionalEdge")
public void testBinaryTreeWithAdditionalEdge() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/binaryTreeWithAdditionalEdge/");
}
@TestMetadata("binaryTreeWithInvalidInRoot")
public void testBinaryTreeWithInvalidInRoot() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/binaryTreeWithInvalidInRoot/");
}
@TestMetadata("linear")
public void testLinear() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/linear/");
}
@TestMetadata("rhombus")
public void testRhombus() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/rhombus/");
}
@TestMetadata("rhombusWithTwoInvalid")
public void testRhombusWithTwoInvalid() throws Exception {
runTest("idea/idea-frontend-fir/idea-fir-low-level-api/testdata/sessionInvalidation/rhombusWithTwoInvalid/");
}
}
| mdaniel/intellij-community | plugins/kotlin/fir-low-level-api/test/org/jetbrains/kotlin/idea/fir/low/level/api/sessions/SessionsInvalidationTestGenerated.java | Java | apache-2.0 | 2,951 |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package io.github.jass2125.locadora.jpa;
import javax.persistence.EntityManager;
import javax.persistence.Persistence;
/**
*
* @author Anderson Souza
* @email jair_anderson_bs@hotmail.com
* @since 2015, Feb 9, 2016
*/
public class EntityManagerJPA {
private static EntityManager em;
private EntityManagerJPA() {
}
public static EntityManager getEntityManager(){
if(em == null) {
em = Persistence.createEntityManagerFactory("default").createEntityManager();
}
return em;
}
}
| ifpb-disciplinas-2015-2/locadora-jpa-web | src/main/java/io/github/jass2125/locadora/jpa/EntityManagerJPA.java | Java | apache-2.0 | 740 |
import {boolean, number, object, text, withKnobs} from '@storybook/addon-knobs';
import {
BentoAccordion,
BentoAccordionContent,
BentoAccordionHeader,
BentoAccordionSection,
} from '#bento/components/bento-accordion/1.0/component';
import {BentoVideo} from '#bento/components/bento-video/1.0/component';
import * as Preact from '#preact';
import '#bento/components/bento-video/1.0/component.jss';
export default {
title: 'Video',
component: BentoVideo,
decorators: [withKnobs],
};
const VideoTagPlayer = ({i}) => {
const group = `Player ${i + 1}`;
const width = text('width', '640px', group);
const height = text('height', '360px', group);
const ariaLabel = text('aria-label', 'Video Player', group);
const autoplay = boolean('autoplay', true, group);
const controls = boolean('controls', true, group);
const mediasession = boolean('mediasession', true, group);
const noaudio = boolean('noaudio', false, group);
const loop = boolean('loop', false, group);
const poster = text(
'poster',
'https://amp.dev/static/inline-examples/images/kitten-playing.png',
group
);
const artist = text('artist', '', group);
const album = text('album', '', group);
const artwork = text('artwork', '', group);
const title = text('title', '', group);
const sources = object(
'sources',
[
{
src: 'https://amp.dev/static/inline-examples/videos/kitten-playing.webm',
type: 'video/webm',
},
{
src: 'https://amp.dev/static/inline-examples/videos/kitten-playing.mp4',
type: 'video/mp4',
},
],
group
);
return (
<BentoVideo
component="video"
aria-label={ariaLabel}
autoplay={autoplay}
controls={controls}
mediasession={mediasession}
noaudio={noaudio}
loop={loop}
poster={poster}
artist={artist}
album={album}
artwork={artwork}
title={title}
style={{width, height}}
sources={sources.map((props) => (
<source {...props}></source>
))}
/>
);
};
const Spacer = ({height}) => {
return (
<div
style={{
height,
background: `linear-gradient(to bottom, #bbb, #bbb 10%, #fff 10%, #fff)`,
backgroundSize: '100% 10px',
}}
></div>
);
};
export const Default = () => {
const amount = number('Amount', 1, {}, 'Page');
const spacerHeight = text('Space', '80vh', 'Page');
const spaceAbove = boolean('Space above', false, 'Page');
const spaceBelow = boolean('Space below', false, 'Page');
const players = [];
for (let i = 0; i < amount; i++) {
players.push(<VideoTagPlayer key={i} i={i} />);
if (i < amount - 1) {
players.push(<Spacer height={spacerHeight} />);
}
}
return (
<>
{spaceAbove && <Spacer height={spacerHeight} />}
{players}
{spaceBelow && <Spacer height={spacerHeight} />}
</>
);
};
export const InsideAccordion = () => {
const width = text('width', '320px');
const height = text('height', '180px');
return (
<BentoAccordion expandSingleSection>
<BentoAccordionSection key={1} expanded>
<BentoAccordionHeader>
<h2>Controls</h2>
</BentoAccordionHeader>
<BentoAccordionContent>
<BentoVideo
component="video"
controls={true}
loop={true}
style={{width, height}}
src="https://amp.dev/static/inline-examples/videos/kitten-playing.mp4"
poster="https://amp.dev/static/inline-examples/images/kitten-playing.png"
/>
</BentoAccordionContent>
</BentoAccordionSection>
<BentoAccordionSection key={2}>
<BentoAccordionHeader>
<h2>Autoplay</h2>
</BentoAccordionHeader>
<BentoAccordionContent>
<BentoVideo
component="video"
autoplay={true}
loop={true}
style={{width, height}}
src="https://amp.dev/static/inline-examples/videos/kitten-playing.mp4"
poster="https://amp.dev/static/inline-examples/images/kitten-playing.png"
sources={[
<source
type="video/mp4"
src="https://amp.dev/static/inline-examples/videos/kitten-playing.mp4"
/>,
]}
/>
</BentoAccordionContent>
</BentoAccordionSection>
</BentoAccordion>
);
};
| ampproject/amphtml | extensions/amp-video/1.0/storybook/Basic.js | JavaScript | apache-2.0 | 4,420 |
/*******************************************************************************
* Copyright 2012 Apigee Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.usergrid.persistence.query.tree;
import org.antlr.runtime.Token;
import org.usergrid.persistence.exceptions.PersistenceException;
/**
* @author tnine
*
*/
public class ContainsOperand extends Operand {
/**
* @param property
* @param literal
*/
public ContainsOperand(Token t) {
super(t);
}
/* (non-Javadoc)
* @see org.usergrid.persistence.query.tree.Operand#visit(org.usergrid.persistence.query.tree.QueryVisitor)
*/
@Override
public void visit(QueryVisitor visitor) throws PersistenceException {
visitor.visit(this);
}
public void setProperty(String name){
setChild(0, new Property(name));
}
public void setValue(String value){
setChild(1, new StringLiteral(value));
}
public Property getProperty(){
return (Property) this.children.get(0);
}
public StringLiteral getString(){
return (StringLiteral) this.children.get(1);
}
}
| futur/usergrid-stack | core/src/main/java/org/usergrid/persistence/query/tree/ContainsOperand.java | Java | apache-2.0 | 1,697 |
# Updating charts and values.yaml
The charts in the `manifests` directory are used in istioctl to generate an installation manifest. The configuration
settings contained in values.yaml files and passed through the CLI are validated against a
[schema](../../operator/pkg/apis/istio/v1alpha1/values_types.proto).
Whenever making changes in the charts, it's important to follow the below steps.
## Step 0. Check that any schema change really belongs in values.yaml
Is this a new parameter being added? If not, go to the next step.
Dynamic, runtime config that is used to configure Istio components should go into the
[MeshConfig API](https://github.com/istio/api/blob/master/mesh/v1alpha1/config.proto). Values.yaml is being deprecated and adding
to it is discouraged. MeshConfig is the official API which follows API management practices and is dynamic
(does not require component restarts).
Exceptions to this rule are configuration items that affect K8s level settings (resources, mounts etc.)
## Step 1. Make changes in charts and values.yaml in `manifests` directory
## Step 2. Make corresponding values changes in [../profiles/default.yaml](../profiles/default.yaml)
The values.yaml in `manifests` are only used for direct Helm based installations, which is being deprecated.
If any values.yaml changes are being made, the same changes must be made in the `manifests/profiles/default.yaml`
file, which must be in sync with the Helm values in `manifests`.
## Step 3. Update the validation schema
Istioctl uses a [schema](../../operator/pkg/apis/istio/v1alpha1/values_types.proto) to validate the values. Any changes to
the schema must be added here, otherwise istioctl users will see errors.
Once the schema file is updated, run:
```bash
$ make operator-proto
```
This will regenerate the Go structs used for schema validation.
## Step 4. Update the generated manifests
Tests of istioctl use the auto-generated manifests to ensure that the istioctl binary has the correct version of the charts.
These manifests can be found in [gen-istio.yaml](../charts/istio-control/istio-discovery/files/gen-istio.yaml).
To regenerate the manifests, run:
```bash
$ make gen
```
## Step 5. Update golden files
The new charts/values will likely produce different installation manifests. Unit tests that expect a certain command
output will fail for this reason. To update the golden output files, run:
```bash
$ make refresh-goldens
```
This will generate git diffs in the golden output files. Check that the changes are what you expect.
## Step 6. Create a PR using outputs from Steps 1 to 5
Your PR should pass all the checks if you followed these steps.
| smawson/istio | manifests/charts/UPDATING-CHARTS.md | Markdown | apache-2.0 | 2,661 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.server.core.api.subtree;
import org.apache.directory.api.ldap.model.entry.Entry;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.api.ldap.model.subtree.SubtreeSpecification;
import org.apache.directory.server.core.api.event.Evaluator;
import org.apache.directory.server.core.api.event.ExpressionEvaluator;
/**
* An evaluator used to determine if an entry is included in the collection
* represented by a subtree specification.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class SubtreeEvaluator
{
/** A refinement filter evaluator */
private final Evaluator evaluator;
/**
* Creates a subtreeSpecification evaluatior which can be used to determine
* if an entry is included within the collection of a subtree.
*
* @param schemaManager The server schemaManager
*/
public SubtreeEvaluator( SchemaManager schemaManager )
{
evaluator = new ExpressionEvaluator( schemaManager );
}
/**
* Determines if an entry is selected by a subtree specification.
*
* @param subtree the subtree specification
* @param apDn the distinguished name of the administrative point containing the subentry
* @param entryDn the distinguished name of the candidate entry
* @param entry The entry to evaluate
* @return true if the entry is selected by the specification, false if it is not
* @throws LdapException if errors are encountered while evaluating selection
*/
public boolean evaluate( SubtreeSpecification subtree, Dn apDn, Dn entryDn, Entry entry )
throws LdapException
{
/* =====================================================================
* NOTE: Regarding the overall approach, we try to narrow down the
* possibilities by slowly pruning relative names off of the entryDn.
* For example we check first if the entry is a descendant of the AP.
* If so we use the relative name thereafter to calculate if it is
* a descendant of the base. This means shorter names to compare and
* less work to do while we continue to deduce inclusion by the subtree
* specification.
* =====================================================================
*/
// First construct the subtree base, which is the concatenation of the
// AP Dn and the subentry base
Dn subentryBaseDn = apDn;
subentryBaseDn = subentryBaseDn.add( subtree.getBase() );
if ( !entryDn.isDescendantOf( subentryBaseDn ) )
{
// The entry Dn is not part of the subtree specification, get out
return false;
}
/*
* Evaluate based on minimum and maximum chop values. Here we simply
* need to compare the distances respectively with the size of the
* baseRelativeRdn. For the max distance entries with a baseRelativeRdn
* size greater than the max distance are rejected. For the min distance
* entries with a baseRelativeRdn size less than the minimum distance
* are rejected.
*/
int entryRelativeDnSize = entryDn.size() - subentryBaseDn.size();
if ( ( subtree.getMaxBaseDistance() != SubtreeSpecification.UNBOUNDED_MAX )
&& ( entryRelativeDnSize > subtree.getMaxBaseDistance() ) )
{
return false;
}
if ( ( subtree.getMinBaseDistance() > 0 ) && ( entryRelativeDnSize < subtree.getMinBaseDistance() ) )
{
return false;
}
/*
* For specific exclusions we must iterate through the set and check
* if the baseRelativeRdn is a descendant of the exclusion. The
* isDescendant() function will return true if the compared names
* are equal so for chopAfter exclusions we must check for equality
* as well and reject if the relative names are equal.
*/
// Now, get the entry's relative part
if ( !subtree.getChopBeforeExclusions().isEmpty() || !subtree.getChopAfterExclusions().isEmpty() )
{
Dn entryRelativeDn = entryDn.getDescendantOf( apDn ).getDescendantOf( subtree.getBase() );
for ( Dn chopBeforeDn : subtree.getChopBeforeExclusions() )
{
if ( entryRelativeDn.isDescendantOf( chopBeforeDn ) )
{
return false;
}
}
for ( Dn chopAfterDn : subtree.getChopAfterExclusions() )
{
if ( entryRelativeDn.isDescendantOf( chopAfterDn ) && !chopAfterDn.equals( entryRelativeDn ) )
{
return false;
}
}
}
/*
* The last remaining step is to check and see if the refinement filter
* selects the entry candidate based on objectClass attribute values.
* To do this we invoke the refinement evaluator members evaluate() method.
*/
if ( subtree.getRefinement() != null )
{
return evaluator.evaluate( subtree.getRefinement(), entryDn, entry );
}
/*
* If nothing has rejected the candidate entry and there is no refinement
* filter then the entry is included in the collection represented by the
* subtree specification so we return true.
*/
return true;
}
}
| apache/directory-server | core-api/src/main/java/org/apache/directory/server/core/api/subtree/SubtreeEvaluator.java | Java | apache-2.0 | 6,471 |
namespace SmartyStreets
{
public class RequestEntityTooLargeException : SmartyException
{
public RequestEntityTooLargeException()
{
}
public RequestEntityTooLargeException(string message)
: base(message)
{
}
}
} | smartystreets/smartystreets-csharp-sdk | src/sdk/Exceptions/RequestEntityTooLargeException.cs | C# | apache-2.0 | 233 |
## array.jl: Dense arrays
typealias Vector{T} Array{T,1}
typealias Matrix{T} Array{T,2}
typealias VecOrMat{T} Union(Vector{T}, Matrix{T})
typealias DenseVector{T} DenseArray{T,1}
typealias DenseMatrix{T} DenseArray{T,2}
typealias DenseVecOrMat{T} Union(DenseVector{T}, DenseMatrix{T})
typealias StridedArray{T,N,A<:DenseArray} Union(DenseArray{T,N}, SubArray{T,N,A})
typealias StridedVector{T,A<:DenseArray} Union(DenseArray{T,1}, SubArray{T,1,A})
typealias StridedMatrix{T,A<:DenseArray} Union(DenseArray{T,2}, SubArray{T,2,A})
typealias StridedVecOrMat{T} Union(StridedVector{T}, StridedMatrix{T})
## Basic functions ##
size(a::Array) = arraysize(a)
size(a::Array, d) = arraysize(a, d)
size(a::Matrix) = (arraysize(a,1), arraysize(a,2))
length(a::Array) = arraylen(a)
elsize{T}(a::Array{T}) = isbits(T) ? sizeof(T) : sizeof(Ptr)
sizeof(a::Array) = elsize(a) * length(a)
strides{T}(a::Array{T,1}) = (1,)
strides{T}(a::Array{T,2}) = (1, size(a,1))
strides{T}(a::Array{T,3}) = (1, size(a,1), size(a,1)*size(a,2))
isassigned(a::Array, i::Int...) = isdefined(a, i...)
## copy ##
function unsafe_copy!{T}(dest::Ptr{T}, src::Ptr{T}, N)
ccall(:memmove, Ptr{Void}, (Ptr{Void}, Ptr{Void}, Uint),
dest, src, N*sizeof(T))
return dest
end
function unsafe_copy!{T}(dest::Array{T}, dsto, src::Array{T}, so, N)
if isbits(T)
unsafe_copy!(pointer(dest, dsto), pointer(src, so), N)
else
for i=0:N-1
@inbounds arrayset(dest, src[i+so], i+dsto)
end
end
return dest
end
function copy!{T}(dest::Array{T}, dsto::Integer, src::Array{T}, so::Integer, N::Integer)
if so+N-1 > length(src) || dsto+N-1 > length(dest) || dsto < 1 || so < 1
throw(BoundsError())
end
unsafe_copy!(dest, dsto, src, so, N)
end
copy!{T}(dest::Array{T}, src::Array{T}) = copy!(dest, 1, src, 1, length(src))
function reinterpret{T,S}(::Type{T}, a::Array{S,1})
nel = int(div(length(a)*sizeof(S),sizeof(T)))
# TODO: maybe check that remainder is zero?
return reinterpret(T, a, (nel,))
end
function reinterpret{T,S}(::Type{T}, a::Array{S})
if sizeof(S) != sizeof(T)
error("result shape not specified")
end
reinterpret(T, a, size(a))
end
function reinterpret{T,S,N}(::Type{T}, a::Array{S}, dims::NTuple{N,Int})
if !isbits(T)
error("cannot reinterpret to type ", T)
end
if !isbits(S)
error("cannot reinterpret Array of type ", S)
end
nel = div(length(a)*sizeof(S),sizeof(T))
if prod(dims) != nel
throw(DimensionMismatch("new dimensions $(dims) must be consistent with array size $(nel)"))
end
ccall(:jl_reshape_array, Array{T,N}, (Any, Any, Any), Array{T,N}, a, dims)
end
# reshaping to same # of dimensions
function reshape{T,N}(a::Array{T,N}, dims::NTuple{N,Int})
if prod(dims) != length(a)
throw(DimensionMismatch("new dimensions $(dims) must be consistent with array size $(length(a))"))
end
if dims == size(a)
return a
end
ccall(:jl_reshape_array, Array{T,N}, (Any, Any, Any), Array{T,N}, a, dims)
end
# reshaping to different # of dimensions
function reshape{T,N}(a::Array{T}, dims::NTuple{N,Int})
if prod(dims) != length(a)
throw(DimensionMismatch("new dimensions $(dims) must be consistent with array size $(length(a))"))
end
ccall(:jl_reshape_array, Array{T,N}, (Any, Any, Any), Array{T,N}, a, dims)
end
## Constructors ##
similar(a::Array, T, dims::Dims) = Array(T, dims)
similar{T}(a::Array{T,1}) = Array(T, size(a,1))
similar{T}(a::Array{T,2}) = Array(T, size(a,1), size(a,2))
similar{T}(a::Array{T,1}, dims::Dims) = Array(T, dims)
similar{T}(a::Array{T,1}, m::Int) = Array(T, m)
similar{T}(a::Array{T,1}, S) = Array(S, size(a,1))
similar{T}(a::Array{T,2}, dims::Dims) = Array(T, dims)
similar{T}(a::Array{T,2}, m::Int) = Array(T, m)
similar{T}(a::Array{T,2}, S) = Array(S, size(a,1), size(a,2))
# T[x...] constructs Array{T,1}
function getindex(T::NonTupleType, vals...)
a = Array(T,length(vals))
for i = 1:length(vals)
a[i] = vals[i]
end
return a
end
getindex(T::(Type...)) = Array(T,0)
# T[a:b] and T[a:s:b] also contruct typed ranges
function getindex{T<:Number}(::Type{T}, r::Range)
copy!(Array(T,length(r)), r)
end
function getindex{T<:Number}(::Type{T}, r1::Range, rs::Range...)
a = Array(T,length(r1)+sum(length,rs))
o = 1
copy!(a, o, r1)
o += length(r1)
for r in rs
copy!(a, o, r)
o += length(r)
end
return a
end
function fill!{T<:Union(Int8,Uint8)}(a::Array{T}, x::Integer)
ccall(:memset, Ptr{Void}, (Ptr{Void}, Int32, Csize_t), a, x, length(a))
return a
end
function fill!{T<:Union(Integer,FloatingPoint)}(a::Array{T}, x)
# note: preserve -0.0 for floats
if isbits(T) && T<:Integer && convert(T,x) == 0
ccall(:memset, Ptr{Void}, (Ptr{Void}, Int32, Csize_t), a,0,length(a)*sizeof(T))
else
for i = 1:length(a)
@inbounds a[i] = x
end
end
return a
end
fill(v, dims::Dims) = fill!(Array(typeof(v), dims), v)
fill(v, dims::Integer...) = fill!(Array(typeof(v), dims...), v)
cell(dims::Integer...) = Array(Any, dims...)
cell(dims::(Integer...)) = Array(Any, convert((Int...), dims))
for (fname, felt) in ((:zeros,:zero), (:ones,:one))
@eval begin
($fname)(T::Type, dims...) = fill!(Array(T, dims...), ($felt)(T))
($fname)(dims...) = fill!(Array(Float64, dims...), ($felt)(Float64))
($fname){T}(A::AbstractArray{T}) = fill!(similar(A), ($felt)(T))
end
end
# Return an `m`-by-`n` matrix of element type `T` with ones on the main
# diagonal and zeros everywhere else.
function eye(T::Type, m::Integer, n::Integer)
    M = zeros(T, m, n)
    k = min(m, n)
    d = 1
    while d <= k
        M[d,d] = one(T)
        d += 1
    end
    return M
end
eye(m::Integer, n::Integer) = eye(Float64, m, n)
eye(T::Type, n::Integer) = eye(T, n, n)
eye(n::Integer) = eye(Float64, n)
eye{T}(x::AbstractMatrix{T}) = eye(T, size(x, 1), size(x, 2))
function one{T}(x::AbstractMatrix{T})
m,n = size(x)
m==n || throw(DimensionMismatch("multiplicative identity defined only for square matrices"))
eye(T, m)
end
linspace(start::Integer, stop::Integer, n::Integer) =
linspace(float(start), float(stop), n)
function linspace(start::Real, stop::Real, n::Integer)
(start, stop) = promote(start, stop)
T = typeof(start)
a = Array(T, int(n))
if n == 1
a[1] = start
return a
end
n -= 1
S = promote_type(T, Float64)
for i=0:n
a[i+1] = start*(convert(S, (n-i))/n) + stop*(convert(S, i)/n)
end
a
end
linspace(start::Real, stop::Real) = linspace(start, stop, 100)
logspace(start::Real, stop::Real, n::Integer) = 10.^linspace(start, stop, n)
logspace(start::Real, stop::Real) = logspace(start, stop, 50)
## Conversions ##
convert{T,n}(::Type{Array{T}}, x::Array{T,n}) = x
convert{T,n}(::Type{Array{T,n}}, x::Array{T,n}) = x
convert{T,n,S}(::Type{Array{T}}, x::Array{S,n}) = convert(Array{T,n}, x)
convert{T,n,S}(::Type{Array{T,n}}, x::Array{S,n}) = copy!(similar(x,T), x)
function collect(T::Type, itr)
if applicable(length, itr)
# when length() isn't defined this branch might pollute the
# type of the other.
a = Array(T,length(itr)::Integer)
i = 0
for x in itr
a[i+=1] = x
end
else
a = Array(T,0)
for x in itr
push!(a,x)
end
end
return a
end
collect(itr) = collect(eltype(itr), itr)
## Indexing: getindex ##
getindex(a::Array) = arrayref(a,1)
getindex(A::Array, i0::Real) = arrayref(A,to_index(i0))
getindex(A::Array, i0::Real, i1::Real) = arrayref(A,to_index(i0),to_index(i1))
getindex(A::Array, i0::Real, i1::Real, i2::Real) =
arrayref(A,to_index(i0),to_index(i1),to_index(i2))
getindex(A::Array, i0::Real, i1::Real, i2::Real, i3::Real) =
arrayref(A,to_index(i0),to_index(i1),to_index(i2),to_index(i3))
getindex(A::Array, i0::Real, i1::Real, i2::Real, i3::Real, i4::Real) =
arrayref(A,to_index(i0),to_index(i1),to_index(i2),to_index(i3),to_index(i4))
getindex(A::Array, i0::Real, i1::Real, i2::Real, i3::Real, i4::Real, i5::Real) =
arrayref(A,to_index(i0),to_index(i1),to_index(i2),to_index(i3),to_index(i4),to_index(i5))
getindex(A::Array, i0::Real, i1::Real, i2::Real, i3::Real, i4::Real, i5::Real, I::Real...) =
arrayref(A,to_index(i0),to_index(i1),to_index(i2),to_index(i3),to_index(i4),to_index(i5),to_index(I)...)
# Fast copy using copy! for UnitRange
function getindex(A::Array, I::UnitRange{Int})
lI = length(I)
X = similar(A, lI)
if lI > 0
copy!(X, 1, A, first(I), lI)
end
return X
end
function getindex{T<:Real}(A::Array, I::AbstractVector{T})
return [ A[i] for i in to_index(I) ]
end
function getindex{T<:Real}(A::Range, I::AbstractVector{T})
return [ A[i] for i in to_index(I) ]
end
function getindex(A::Range, I::AbstractVector{Bool})
checkbounds(A, I)
return [ A[i] for i in to_index(I) ]
end
# logical indexing
function getindex_bool_1d(A::Array, I::AbstractArray{Bool})
checkbounds(A, I)
n = sum(I)
out = similar(A, n)
c = 1
for i = 1:length(I)
if I[i]
out[c] = A[i]
c += 1
end
end
out
end
getindex(A::Vector, I::AbstractVector{Bool}) = getindex_bool_1d(A, I)
getindex(A::Vector, I::AbstractArray{Bool}) = getindex_bool_1d(A, I)
getindex(A::Array, I::AbstractVector{Bool}) = getindex_bool_1d(A, I)
getindex(A::Array, I::AbstractArray{Bool}) = getindex_bool_1d(A, I)
## Indexing: setindex! ##
setindex!{T}(A::Array{T}, x) = arrayset(A, convert(T,x), 1)
setindex!{T}(A::Array{T}, x, i0::Real) = arrayset(A, convert(T,x), to_index(i0))
setindex!{T}(A::Array{T}, x, i0::Real, i1::Real) =
arrayset(A, convert(T,x), to_index(i0), to_index(i1))
setindex!{T}(A::Array{T}, x, i0::Real, i1::Real, i2::Real) =
arrayset(A, convert(T,x), to_index(i0), to_index(i1), to_index(i2))
setindex!{T}(A::Array{T}, x, i0::Real, i1::Real, i2::Real, i3::Real) =
arrayset(A, convert(T,x), to_index(i0), to_index(i1), to_index(i2), to_index(i3))
setindex!{T}(A::Array{T}, x, i0::Real, i1::Real, i2::Real, i3::Real, i4::Real) =
arrayset(A, convert(T,x), to_index(i0), to_index(i1), to_index(i2), to_index(i3), to_index(i4))
setindex!{T}(A::Array{T}, x, i0::Real, i1::Real, i2::Real, i3::Real, i4::Real, i5::Real) =
arrayset(A, convert(T,x), to_index(i0), to_index(i1), to_index(i2), to_index(i3), to_index(i4), to_index(i5))
setindex!{T}(A::Array{T}, x, i0::Real, i1::Real, i2::Real, i3::Real, i4::Real, i5::Real, I::Real...) =
arrayset(A, convert(T,x), to_index(i0), to_index(i1), to_index(i2), to_index(i3), to_index(i4), to_index(i5), to_index(I)...)
function setindex!{T<:Real}(A::Array, x, I::AbstractVector{T})
for i in I
A[i] = x
end
return A
end
function setindex!{T}(A::Array{T}, X::Array{T}, I::UnitRange{Int})
if length(X) != length(I)
throw_setindex_mismatch(X, (I,))
end
copy!(A, first(I), X, 1, length(I))
return A
end
function setindex!{T<:Real}(A::Array, X::AbstractArray, I::AbstractVector{T})
if length(X) != length(I)
throw_setindex_mismatch(X, (I,))
end
count = 1
if is(X,A)
X = copy(X)
end
for i in I
A[i] = X[count]
count += 1
end
return A
end
# logical indexing
function assign_bool_scalar_1d!(A::Array, x, I::AbstractArray{Bool})
checkbounds(A, I)
for i = 1:length(I)
if I[i]
A[i] = x
end
end
A
end
function assign_bool_vector_1d!(A::Array, X::AbstractArray, I::AbstractArray{Bool})
checkbounds(A, I)
c = 1
for i = 1:length(I)
if I[i]
A[i] = X[c]
c += 1
end
end
if length(X) != c-1
throw(DimensionMismatch("assigned $(length(X)) elements to length $(c-1) destination"))
end
A
end
setindex!(A::Array, X::AbstractArray, I::AbstractVector{Bool}) = assign_bool_vector_1d!(A, X, I)
setindex!(A::Array, X::AbstractArray, I::AbstractArray{Bool}) = assign_bool_vector_1d!(A, X, I)
setindex!(A::Array, x, I::AbstractVector{Bool}) = assign_bool_scalar_1d!(A, x, I)
setindex!(A::Array, x, I::AbstractArray{Bool}) = assign_bool_scalar_1d!(A, x, I)
# efficiently grow an array
function _growat!(a::Vector, i::Integer, delta::Integer)
n = length(a)
if i < div(n,2)
_growat_beg!(a, i, delta)
else
_growat_end!(a, i, delta)
end
return a
end
function _growat_beg!(a::Vector, i::Integer, delta::Integer)
ccall(:jl_array_grow_beg, Void, (Any, Uint), a, delta)
if i > 1
ccall(:memmove, Ptr{Void}, (Ptr{Void}, Ptr{Void}, Csize_t),
pointer(a, 1), pointer(a, 1+delta), (i-1)*elsize(a))
end
return a
end
function _growat_end!(a::Vector, i::Integer, delta::Integer)
ccall(:jl_array_grow_end, Void, (Any, Uint), a, delta)
n = length(a)
if n >= i+delta
ccall(:memmove, Ptr{Void}, (Ptr{Void}, Ptr{Void}, Csize_t),
pointer(a, i+delta), pointer(a, i), (n-i-delta+1)*elsize(a))
end
return a
end
# efficiently delete part of an array
function _deleteat!(a::Vector, i::Integer, delta::Integer)
n = length(a)
last = i+delta-1
if i-1 < n-last
_deleteat_beg!(a, i, delta)
else
_deleteat_end!(a, i, delta)
end
return a
end
function _deleteat_beg!(a::Vector, i::Integer, delta::Integer)
if i > 1
ccall(:memmove, Ptr{Void}, (Ptr{Void}, Ptr{Void}, Csize_t),
pointer(a, 1+delta), pointer(a, 1), (i-1)*elsize(a))
end
ccall(:jl_array_del_beg, Void, (Any, Uint), a, delta)
return a
end
function _deleteat_end!(a::Vector, i::Integer, delta::Integer)
n = length(a)
if n >= i+delta
ccall(:memmove, Ptr{Void}, (Ptr{Void}, Ptr{Void}, Csize_t),
pointer(a, i), pointer(a, i+delta), (n-i-delta+1)*elsize(a))
end
ccall(:jl_array_del_end, Void, (Any, Uint), a, delta)
return a
end
## Dequeue functionality ##
const _grow_none_errmsg =
"[] cannot grow. Instead, initialize the array with \"T[]\", where T is the desired element type."
function push!{T}(a::Array{T,1}, item)
if is(T,None)
error(_grow_none_errmsg)
end
# convert first so we don't grow the array if the assignment won't work
item = convert(T, item)
ccall(:jl_array_grow_end, Void, (Any, Uint), a, 1)
a[end] = item
return a
end
function push!(a::Array{Any,1}, item::ANY)
ccall(:jl_array_grow_end, Void, (Any, Uint), a, 1)
arrayset(a, item, length(a))
return a
end
function append!{T}(a::Array{T,1}, items::AbstractVector)
if is(T,None)
error(_grow_none_errmsg)
end
n = length(items)
ccall(:jl_array_grow_end, Void, (Any, Uint), a, n)
copy!(a, length(a)-n+1, items, 1, n)
return a
end
function prepend!{T}(a::Array{T,1}, items::AbstractVector)
if is(T,None)
error(_grow_none_errmsg)
end
n = length(items)
ccall(:jl_array_grow_beg, Void, (Any, Uint), a, n)
if a === items
copy!(a, 1, items, n+1, n)
else
copy!(a, 1, items, 1, n)
end
return a
end
function resize!(a::Vector, nl::Integer)
l = length(a)
if nl > l
ccall(:jl_array_grow_end, Void, (Any, Uint), a, nl-l)
else
if nl < 0
throw(BoundsError())
end
ccall(:jl_array_del_end, Void, (Any, Uint), a, l-nl)
end
return a
end
function sizehint(a::Vector, sz::Integer)
ccall(:jl_array_sizehint, Void, (Any, Uint), a, sz)
a
end
# Remove the last element of `a` (shrinking it in place) and return that
# element.  Throws an error when `a` is empty.
function pop!(a::Vector)
    if isempty(a)
        error("array must be non-empty")
    end
    item = a[end]
    # Shrink the array by one element from the end via the runtime.
    ccall(:jl_array_del_end, Void, (Any, Uint), a, 1)
    return item
end
function unshift!{T}(a::Array{T,1}, item)
if is(T,None)
error(_grow_none_errmsg)
end
item = convert(T, item)
ccall(:jl_array_grow_beg, Void, (Any, Uint), a, 1)
a[1] = item
return a
end
# Remove the first element of `a` (shrinking it in place) and return that
# element.  Throws an error when `a` is empty.
function shift!(a::Vector)
    if isempty(a)
        error("array must be non-empty")
    end
    item = a[1]
    # Shrink the array by one element from the front via the runtime.
    ccall(:jl_array_del_beg, Void, (Any, Uint), a, 1)
    return item
end
function insert!{T}(a::Array{T,1}, i::Integer, item)
1 <= i <= length(a)+1 || throw(BoundsError())
i == length(a)+1 && return push!(a, item)
item = convert(T, item)
_growat!(a, i, 1)
a[i] = item
return a
end
function deleteat!(a::Vector, i::Integer)
if !(1 <= i <= length(a))
throw(BoundsError())
end
return _deleteat!(a, i, 1)
end
function deleteat!{T<:Integer}(a::Vector, r::UnitRange{T})
n = length(a)
f = first(r)
l = last(r)
if !(1 <= f && l <= n)
throw(BoundsError())
end
return _deleteat!(a, f, length(r))
end
function deleteat!(a::Vector, inds)
n = length(a)
s = start(inds)
done(inds, s) && return a
(p, s) = next(inds, s)
q = p+1
while !done(inds, s)
(i,s) = next(inds, s)
if !(q <= i <= n)
i < q && error("indices must be unique and sorted")
throw(BoundsError())
end
while q < i
@inbounds a[p] = a[q]
p += 1; q += 1
end
q = i+1
end
while q <= n
@inbounds a[p] = a[q]
p += 1; q += 1
end
ccall(:jl_array_del_end, Void, (Any, Uint), a, n-p+1)
return a
end
const _default_splice = []
function splice!(a::Vector, i::Integer, ins::AbstractArray=_default_splice)
v = a[i]
m = length(ins)
if m == 0
_deleteat!(a, i, 1)
elseif m == 1
a[i] = ins[1]
else
_growat!(a, i, m-1)
for k = 1:m
a[i+k-1] = ins[k]
end
end
return v
end
function splice!{T<:Integer}(a::Vector, r::UnitRange{T}, ins::AbstractArray=_default_splice)
v = a[r]
m = length(ins)
if m == 0
deleteat!(a, r)
return v
end
n = length(a)
f = first(r)
l = last(r)
d = length(r)
if m < d
delta = d - m
if f-1 < n-l
_deleteat_beg!(a, f, delta)
else
_deleteat_end!(a, l-delta+1, delta)
end
elseif m > d
delta = m - d
if f-1 < n-l
_growat_beg!(a, f, delta)
else
_growat_end!(a, l+1, delta)
end
end
for k = 1:m
a[f+k-1] = ins[k]
end
return v
end
function empty!(a::Vector)
ccall(:jl_array_del_end, Void, (Any, Uint), a, length(a))
return a
end
## Unary operators ##
function conj!{T<:Number}(A::AbstractArray{T})
for i=1:length(A)
A[i] = conj(A[i])
end
return A
end
for f in (:-, :~, :conj, :sign)
@eval begin
function ($f)(A::StridedArray)
F = similar(A)
for i=1:length(A)
F[i] = ($f)(A[i])
end
return F
end
end
end
(-)(A::StridedArray{Bool}) = reshape([ -A[i] for i=1:length(A) ], size(A))
real(A::StridedArray) = reshape([ real(x) for x in A ], size(A))
imag(A::StridedArray) = reshape([ imag(x) for x in A ], size(A))
real{T<:Real}(x::StridedArray{T}) = x
imag{T<:Real}(x::StridedArray{T}) = zero(x)
function !(A::StridedArray{Bool})
F = similar(A)
for i=1:length(A)
F[i] = !A[i]
end
return F
end
## Binary arithmetic operators ##
promote_array_type{Scalar, Arry}(::Type{Scalar}, ::Type{Arry}) = promote_type(Scalar, Arry)
promote_array_type{S<:Real, A<:FloatingPoint}(::Type{S}, ::Type{A}) = A
promote_array_type{S<:Union(Complex, Real), AT<:FloatingPoint}(::Type{S}, ::Type{Complex{AT}}) = Complex{AT}
promote_array_type{S<:Integer, A<:Integer}(::Type{S}, ::Type{A}) = A
promote_array_type{S<:Integer}(::Type{S}, ::Type{Bool}) = S
./{T<:Integer}(x::Integer, y::StridedArray{T}) =
reshape([ x ./ y[i] for i=1:length(y) ], size(y))
./{T<:Integer}(x::StridedArray{T}, y::Integer) =
reshape([ x[i] ./ y for i=1:length(x) ], size(x))
./{T<:Integer}(x::Integer, y::StridedArray{Complex{T}}) =
reshape([ x ./ y[i] for i=1:length(y) ], size(y))
./{T<:Integer}(x::StridedArray{Complex{T}}, y::Integer) =
reshape([ x[i] ./ y for i=1:length(x) ], size(x))
./{S<:Integer,T<:Integer}(x::Complex{S}, y::StridedArray{T}) =
reshape([ x ./ y[i] for i=1:length(y) ], size(y))
./{S<:Integer,T<:Integer}(x::StridedArray{S}, y::Complex{T}) =
reshape([ x[i] ./ y for i=1:length(x) ], size(x))
# ^ is difficult, since negative exponents give a different type
.^(x::Number, y::StridedArray) =
reshape([ x ^ y[i] for i=1:length(y) ], size(y))
.^(x::StridedArray, y::Number ) =
reshape([ x[i] ^ y for i=1:length(x) ], size(x))
for f in (:+, :-, :div, :mod, :&, :|, :$)
@eval begin
function ($f){S,T}(A::StridedArray{S}, B::StridedArray{T})
F = similar(A, promote_type(S,T), promote_shape(size(A),size(B)))
for i=1:length(A)
@inbounds F[i] = ($f)(A[i], B[i])
end
return F
end
# interaction with Ranges
function ($f){S,T<:Real}(A::StridedArray{S}, B::Range{T})
F = similar(A, promote_type(S,T), promote_shape(size(A),size(B)))
i = 1
for b in B
@inbounds F[i] = ($f)(A[i], b)
i += 1
end
return F
end
function ($f){S<:Real,T}(A::Range{S}, B::StridedArray{T})
F = similar(B, promote_type(S,T), promote_shape(size(A),size(B)))
i = 1
for a in A
@inbounds F[i] = ($f)(a, B[i])
i += 1
end
return F
end
end
end
for f in (:.+, :.-, :.*, :./, :.\, :.%, :div, :mod, :rem, :&, :|, :$)
@eval begin
function ($f){T}(A::Number, B::StridedArray{T})
F = similar(B, promote_array_type(typeof(A),T))
for i=1:length(B)
@inbounds F[i] = ($f)(A, B[i])
end
return F
end
function ($f){T}(A::StridedArray{T}, B::Number)
F = similar(A, promote_array_type(typeof(B),T))
for i=1:length(A)
@inbounds F[i] = ($f)(A[i], B)
end
return F
end
end
end
# familiar aliases for broadcasting operations of array ± scalar (#7226):
(+)(A::AbstractArray{Bool},x::Bool) = A .+ x
(+)(x::Bool,A::AbstractArray{Bool}) = x .+ A
(-)(A::AbstractArray{Bool},x::Bool) = A .- x
(-)(x::Bool,A::AbstractArray{Bool}) = x .- A
(+)(A::AbstractArray,x::Number) = A .+ x
(+)(x::Number,A::AbstractArray) = x .+ A
(-)(A::AbstractArray,x::Number) = A .- x
(-)(x::Number,A::AbstractArray) = x .- A
# functions that should give an Int result for Bool arrays
for f in (:.+, :.-)
@eval begin
function ($f)(A::Bool, B::StridedArray{Bool})
F = similar(B, Int, size(B))
for i=1:length(B)
@inbounds F[i] = ($f)(A, B[i])
end
return F
end
function ($f)(A::StridedArray{Bool}, B::Bool)
F = similar(A, Int, size(A))
for i=1:length(A)
@inbounds F[i] = ($f)(A[i], B)
end
return F
end
end
end
for f in (:+, :-)
@eval begin
function ($f)(A::StridedArray{Bool}, B::StridedArray{Bool})
F = similar(A, Int, promote_shape(size(A), size(B)))
for i=1:length(A)
@inbounds F[i] = ($f)(A[i], B[i])
end
return F
end
end
end
## promotion to complex ##
function complex{S<:Real,T<:Real}(A::Array{S}, B::Array{T})
if size(A) != size(B); throw(DimensionMismatch("")); end
F = similar(A, typeof(complex(zero(S),zero(T))))
for i=1:length(A)
@inbounds F[i] = complex(A[i], B[i])
end
return F
end
function complex{T<:Real}(A::Real, B::Array{T})
F = similar(B, typeof(complex(A,zero(T))))
for i=1:length(B)
@inbounds F[i] = complex(A, B[i])
end
return F
end
function complex{T<:Real}(A::Array{T}, B::Real)
F = similar(A, typeof(complex(zero(T),B)))
for i=1:length(A)
@inbounds F[i] = complex(A[i], B)
end
return F
end
# use memcmp for lexcmp on byte arrays
function lexcmp(a::Array{Uint8,1}, b::Array{Uint8,1})
c = ccall(:memcmp, Int32, (Ptr{Uint8}, Ptr{Uint8}, Uint),
a, b, min(length(a),length(b)))
c < 0 ? -1 : c > 0 ? +1 : cmp(length(a),length(b))
end
## data movement ##
function slicedim(A::Array, d::Integer, i::Integer)
d_in = size(A)
leading = d_in[1:(d-1)]
d_out = tuple(leading..., 1, d_in[(d+1):end]...)
M = prod(leading)
N = length(A)
stride = M * d_in[d]
B = similar(A, d_out)
index_offset = 1 + (i-1)*M
l = 1
if M==1
for j=0:stride:(N-stride)
B[l] = A[j + index_offset]
l += 1
end
else
for j=0:stride:(N-stride)
offs = j + index_offset
for k=0:(M-1)
B[l] = A[offs + k]
l += 1
end
end
end
return B
end
function flipdim{T}(A::Array{T}, d::Integer)
nd = ndims(A)
sd = d > nd ? 1 : size(A, d)
if sd == 1 || isempty(A)
return copy(A)
end
B = similar(A)
nnd = 0
for i = 1:nd
nnd += int(size(A,i)==1 || i==d)
end
if nnd==nd
# flip along the only non-singleton dimension
for i = 1:sd
B[i] = A[sd+1-i]
end
return B
end
d_in = size(A)
leading = d_in[1:(d-1)]
M = prod(leading)
N = length(A)
stride = M * sd
if M==1
for j = 0:stride:(N-stride)
for i = 1:sd
ri = sd+1-i
B[j + ri] = A[j + i]
end
end
else
if isbits(T) && M>200
for i = 1:sd
ri = sd+1-i
for j=0:stride:(N-stride)
offs = j + 1 + (i-1)*M
boffs = j + 1 + (ri-1)*M
copy!(B, boffs, A, offs, M)
end
end
else
for i = 1:sd
ri = sd+1-i
for j=0:stride:(N-stride)
offs = j + 1 + (i-1)*M
boffs = j + 1 + (ri-1)*M
for k=0:(M-1)
B[boffs + k] = A[offs + k]
end
end
end
end
end
return B
end
function rotl90(A::StridedMatrix)
m,n = size(A)
B = similar(A,(n,m))
for i=1:m, j=1:n
B[n-j+1,i] = A[i,j]
end
return B
end
function rotr90(A::StridedMatrix)
m,n = size(A)
B = similar(A,(n,m))
for i=1:m, j=1:n
B[j,m-i+1] = A[i,j]
end
return B
end
function rot180(A::StridedMatrix)
m,n = size(A)
B = similar(A)
for i=1:m, j=1:n
B[m-i+1,n-j+1] = A[i,j]
end
return B
end
function rotl90(A::AbstractMatrix, k::Integer)
k = mod(k, 4)
k == 1 ? rotl90(A) :
k == 2 ? rot180(A) :
k == 3 ? rotr90(A) : copy(A)
end
rotr90(A::AbstractMatrix, k::Integer) = rotl90(A,-k)
rot180(A::AbstractMatrix, k::Integer) = mod(k, 2) == 1 ? rot180(A) : copy(A)
# note: probably should be StridedVector or AbstractVector
function reverse(A::AbstractVector, s=1, n=length(A))
B = similar(A)
for i = 1:s-1
B[i] = A[i]
end
for i = s:n
B[i] = A[n+s-i]
end
for i = n+1:length(A)
B[i] = A[i]
end
B
end
reverse(v::StridedVector) = (n=length(v); [ v[n-i+1] for i=1:n ])
reverse(v::StridedVector, s, n=length(v)) = reverse!(copy(v), s, n)
function reverse!(v::StridedVector, s=1, n=length(v))
r = n
for i=s:div(s+n-1,2)
v[i], v[r] = v[r], v[i]
r -= 1
end
v
end
function vcat{T}(arrays::Array{T,1}...)
n = 0
for a in arrays
n += length(a)
end
arr = Array(T, n)
ptr = pointer(arr)
offset = 0
if isbits(T)
elsz = sizeof(T)
else
elsz = div(WORD_SIZE,8)
end
for a in arrays
nba = length(a)*elsz
ccall(:memcpy, Ptr{Void}, (Ptr{Void}, Ptr{Void}, Uint),
ptr+offset, a, nba)
offset += nba
end
return arr
end
## find ##
# returns the index of the next non-zero element, or 0 if all zeros
# Return the index of the first non-zero element of `A` at or after `start`,
# or 0 when every remaining element is zero.
function findnext(A, start::Integer)
    idx = start
    while idx <= length(A)
        A[idx] != 0 && return idx
        idx += 1
    end
    return 0
end
findfirst(A) = findnext(A,1)
# returns the index of the next matching element
# Return the index of the first element of `A` equal to `v` at or after
# `start`, or 0 when no remaining element matches.
function findnext(A, v, start::Integer)
    idx = start
    while idx <= length(A)
        A[idx] == v && return idx
        idx += 1
    end
    return 0
end
findfirst(A,v) = findnext(A,v,1)
# returns the index of the next element for which the function returns true
# Return the index of the first element of `A` at or after `start` for which
# `testf` returns true, or 0 when no remaining element satisfies it.
function findnext(testf::Function, A, start::Integer)
    idx = start
    while idx <= length(A)
        testf(A[idx]) && return idx
        idx += 1
    end
    return 0
end
findfirst(testf::Function, A) = findnext(testf, A, 1)
function find(testf::Function, A::StridedArray)
# use a dynamic-length array to store the indexes, then copy to a non-padded
# array for the return
tmpI = Array(Int, 0)
for i = 1:length(A)
if testf(A[i])
push!(tmpI, i)
end
end
I = similar(A, Int, length(tmpI))
copy!(I, tmpI)
I
end
function find(A::StridedArray)
nnzA = countnz(A)
I = similar(A, Int, nnzA)
count = 1
for i=1:length(A)
if A[i] != 0
I[count] = i
count += 1
end
end
return I
end
find(x::Number) = x == 0 ? Array(Int,0) : [1]
find(testf::Function, x) = find(testf(x))
findn(A::AbstractVector) = find(A)
function findn(A::StridedMatrix)
nnzA = countnz(A)
I = similar(A, Int, nnzA)
J = similar(A, Int, nnzA)
count = 1
for j=1:size(A,2), i=1:size(A,1)
if A[i,j] != 0
I[count] = i
J[count] = j
count += 1
end
end
return (I, J)
end
function findnz{T}(A::StridedMatrix{T})
nnzA = countnz(A)
I = zeros(Int, nnzA)
J = zeros(Int, nnzA)
NZs = zeros(T, nnzA)
count = 1
if nnzA > 0
for j=1:size(A,2), i=1:size(A,1)
Aij = A[i,j]
if Aij != 0
I[count] = i
J[count] = j
NZs[count] = Aij
count += 1
end
end
end
return (I, J, NZs)
end
# Return `(m, mi)` where `m` is the maximum value of `a` and `mi` is the
# index of its first occurrence.  Throws an error for an empty collection.
#
# NaN handling: `ai > m` is always false when `ai` is NaN, so NaN elements
# never become the running maximum; the `m != m` clause is true only when
# the running maximum itself is NaN (i.e. the first element was NaN), in
# which case it is replaced by the next element unconditionally.
function findmax(a)
    if isempty(a)
        error("array must be non-empty")
    end
    m = a[1]        # running maximum
    mi = 1          # index of running maximum
    for i=2:length(a)
        ai = a[i]
        if ai > m || m!=m
            m = ai
            mi = i
        end
    end
    return (m, mi)
end
# Return `(m, mi)` where `m` is the minimum value of `a` and `mi` is the
# index of its first occurrence.  Throws an error for an empty collection.
#
# NaN handling mirrors `findmax`: `ai < m` is false for NaN `ai`, so NaN
# elements are skipped; `m != m` recovers only when the running minimum
# itself is NaN (first element was NaN).
function findmin(a)
    if isempty(a)
        error("array must be non-empty")
    end
    m = a[1]        # running minimum
    mi = 1          # index of running minimum
    for i=2:length(a)
        ai = a[i]
        if ai < m || m!=m
            m = ai
            mi = i
        end
    end
    return (m, mi)
end
indmax(a) = findmax(a)[2]
indmin(a) = findmin(a)[2]
# similar to Matlab's ismember
# returns a vector containing the highest index in b for each value in a that is a member of b
function indexin(a::AbstractArray, b::AbstractArray)
bdict = Dict(b, 1:length(b))
[get(bdict, i, 0) for i in a]
end
# findin (the index of intersection)
function findin(a, b::UnitRange)
ind = Array(Int, 0)
f = first(b)
l = last(b)
for i = 1:length(a)
if f <= a[i] <= l
push!(ind, i)
end
end
ind
end
function findin(a, b)
ind = Array(Int, 0)
bset = union!(Set(), b)
for i = 1:length(a)
if in(a[i], bset)
push!(ind, i)
end
end
ind
end
# Copying subregions
function indcopy(sz::Dims, I::Vector)
n = length(I)
s = sz[n]
for i = n+1:length(sz)
s *= sz[i]
end
dst = eltype(I)[findin(I[i], i < n ? (1:sz[i]) : (1:s)) for i = 1:n]
src = eltype(I)[I[i][findin(I[i], i < n ? (1:sz[i]) : (1:s))] for i = 1:n]
dst, src
end
function indcopy(sz::Dims, I::(RangeIndex...))
n = length(I)
s = sz[n]
for i = n+1:length(sz)
s *= sz[i]
end
dst::typeof(I) = ntuple(n, i-> findin(I[i], i < n ? (1:sz[i]) : (1:s)))::typeof(I)
src::typeof(I) = ntuple(n, i-> I[i][findin(I[i], i < n ? (1:sz[i]) : (1:s))])::typeof(I)
dst, src
end
## Filter ##
# given a function returning a boolean and an array, return matching elements
filter(f::Function, As::AbstractArray) = As[map(f, As)::AbstractArray{Bool}]
# In-place filter: keep only the elements of `a` for which `f` returns true,
# preserving their relative order, and shrink `a` to the surviving length.
function filter!(f::Function, a::Vector)
    insrt = 1                       # next write position for a kept element
    for curr = 1:length(a)
        if f(a[curr])
            a[insrt] = a[curr]      # compact kept elements toward the front
            insrt += 1
        end
    end
    # Drop the stale tail left behind by the compaction.
    deleteat!(a, insrt:length(a))
    return a
end
# Return a new vector containing the elements of `a` for which `f` returns
# true, in their original order.  `a` is left unmodified.
function filter(f::Function, a::Vector)
    kept = Array(eltype(a), 0)
    for x in a
        if f(x)
            push!(kept, x)
        end
    end
    return kept
end
## Transpose ##
const transposebaselength=64
function transpose!(B::StridedMatrix,A::StridedMatrix)
m, n = size(A)
size(B) == (n,m) || throw(DimensionMismatch("transpose"))
if m*n<=4*transposebaselength
@inbounds begin
for j = 1:n
for i = 1:m
B[j,i] = transpose(A[i,j])
end
end
end
else
transposeblock!(B,A,m,n,0,0)
end
return B
end
function transposeblock!(B::StridedMatrix,A::StridedMatrix,m::Int,n::Int,offseti::Int,offsetj::Int)
if m*n<=transposebaselength
@inbounds begin
for j = offsetj+(1:n)
for i = offseti+(1:m)
B[j,i] = transpose(A[i,j])
end
end
end
elseif m>n
newm=m>>1
transposeblock!(B,A,newm,n,offseti,offsetj)
transposeblock!(B,A,m-newm,n,offseti+newm,offsetj)
else
newn=n>>1
transposeblock!(B,A,m,newn,offseti,offsetj)
transposeblock!(B,A,m,n-newn,offseti,offsetj+newn)
end
return B
end
function ctranspose!(B::StridedMatrix,A::StridedMatrix)
m, n = size(A)
size(B) == (n,m) || throw(DimensionMismatch("transpose"))
if m*n<=4*transposebaselength
@inbounds begin
for j = 1:n
for i = 1:m
B[j,i] = ctranspose(A[i,j])
end
end
end
else
ctransposeblock!(B,A,m,n,0,0)
end
return B
end
function ctransposeblock!(B::StridedMatrix,A::StridedMatrix,m::Int,n::Int,offseti::Int,offsetj::Int)
if m*n<=transposebaselength
@inbounds begin
for j = offsetj+(1:n)
for i = offseti+(1:m)
B[j,i] = ctranspose(A[i,j])
end
end
end
elseif m>n
newm=m>>1
ctransposeblock!(B,A,newm,n,offseti,offsetj)
ctransposeblock!(B,A,m-newm,n,offseti+newm,offsetj)
else
newn=n>>1
ctransposeblock!(B,A,m,newn,offseti,offsetj)
ctransposeblock!(B,A,m,n-newn,offseti,offsetj+newn)
end
return B
end
function transpose(A::StridedMatrix)
B = similar(A, size(A, 2), size(A, 1))
transpose!(B, A)
end
function ctranspose(A::StridedMatrix)
B = similar(A, size(A, 2), size(A, 1))
ctranspose!(B, A)
end
ctranspose{T<:Real}(A::StridedVecOrMat{T}) = transpose(A)
transpose(x::StridedVector) = [ transpose(x[j]) for i=1, j=1:size(x,1) ]
ctranspose{T}(x::StridedVector{T}) = T[ ctranspose(x[j]) for i=1, j=1:size(x,1) ]
# set-like operators for vectors
# These are moderately efficient, preserve order, and remove dupes.
# Return the elements of `v1` (order and duplicates preserved) that are also
# present in every one of the remaining collections `vs`.
function intersect(v1, vs...)
    ret = Array(eltype(v1), 0)
    for x in v1
        everywhere = true
        for other in vs
            if !in(x, other)
                everywhere = false
                break
            end
        end
        everywhere && push!(ret, x)
    end
    return ret
end
function union(vs...)
ret = Array(promote_eltype(vs...),0)
seen = Set()
for v in vs
for v_elem in v
if !in(v_elem, seen)
push!(ret, v_elem)
push!(seen, v_elem)
end
end
end
ret
end
# setdiff only accepts two args
# Return the elements of `a` that do not appear in `b`, preserving the order
# of `a` and emitting each distinct value at most once.  The result's element
# type is the promotion of the two inputs' element types.
function setdiff(a, b)
    args_type = promote_type(eltype(a), eltype(b))
    bset = Set(b)            # fast membership test against `b`
    ret = Array(args_type,0)
    seen = Set()             # dedupe: track values already emitted
    for a_elem in a
        if !in(a_elem, seen) && !in(a_elem, bset)
            push!(ret, a_elem)
            push!(seen, a_elem)
        end
    end
    ret
end
# symdiff is associative, so a relatively clean
# way to implement this is by using setdiff and union, and
# recursing. Has the advantage of keeping order, too, but
# not as fast as other methods that make a single pass and
# store counts with a Dict.
symdiff(a) = a
symdiff(a, b) = union(setdiff(a,b), setdiff(b,a))
symdiff(a, b, rest...) = symdiff(a, symdiff(b, rest...))
_cumsum_type{T<:Number}(v::AbstractArray{T}) = typeof(+zero(T))
_cumsum_type(v) = typeof(v[1]+v[1])
for (f, fp, op) = ((:cumsum, :cumsum_pairwise, :+),
(:cumprod, :cumprod_pairwise, :*) )
# in-place cumsum of c = s+v(i1:n), using pairwise summation as for sum
@eval function ($fp)(v::AbstractVector, c::AbstractVector, s, i1, n)
if n < 128
@inbounds c[i1] = ($op)(s, v[i1])
for i = i1+1:i1+n-1
@inbounds c[i] = $(op)(c[i-1], v[i])
end
else
n2 = div(n,2)
($fp)(v, c, s, i1, n2)
($fp)(v, c, c[(i1+n2)-1], i1+n2, n-n2)
end
end
@eval function ($f)(v::AbstractVector)
n = length(v)
c = $(op===:+ ? (:(similar(v,_cumsum_type(v)))) :
(:(similar(v))))
if n == 0; return c; end
($fp)(v, c, $(op==:+ ? :(zero(v[1])) : :(one(v[1]))), 1, n)
return c
end
@eval ($f)(A::AbstractArray) = ($f)(A, 1)
end
for (f, op) = ((:cummin, :min), (:cummax, :max))
@eval function ($f)(v::AbstractVector)
n = length(v)
cur_val = v[1]
res = similar(v, n)
res[1] = cur_val
for i in 2:n
cur_val = ($op)(v[i], cur_val)
res[i] = cur_val
end
return res
end
@eval function ($f)(A::StridedArray, axis::Integer)
dimsA = size(A)
ndimsA = ndims(A)
axis_size = dimsA[axis]
axis_stride = 1
for i = 1:(axis-1)
axis_stride *= size(A,i)
end
if axis_size < 1
return A
end
B = similar(A)
for i = 1:length(A)
if div(i-1, axis_stride) % axis_size == 0
B[i] = A[i]
else
B[i] = ($op)(A[i], B[i-axis_stride])
end
end
return B
end
@eval ($f)(A::AbstractArray) = ($f)(A, 1)
end
| shubhamg31/columbus_julia | lib/julia/base/array.jl | Julia | apache-2.0 | 38,770 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache license, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the license for the specific language governing permissions and
* limitations under the license.
*/
package org.apache.logging.log4j.core.appender;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class SocketAppenderBuilderTest {

    /**
     * Verifies the fix for LOG4J2-1620: a freshly created builder must default
     * {@code immediateFlush} to {@code true}.
     *
     * @see <a href="https://issues.apache.org/jira/browse/LOG4J2-1620">LOG4J2-1620</a>
     */
    @Test
    public void testDefaultImmediateFlush() {
        final boolean immediateFlush = SocketAppender.newBuilder().isImmediateFlush();
        assertTrue(immediateFlush,
                "Regression of LOG4J2-1620: default value for immediateFlush should be true");
    }
}
| apache/logging-log4j2 | log4j-core/src/test/java/org/apache/logging/log4j/core/appender/SocketAppenderBuilderTest.java | Java | apache-2.0 | 1,293 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.marshaller.jdk;
import java.io.IOException;
import java.io.InputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.OutputStream;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.internal.util.io.GridByteArrayInputStream;
import org.apache.ignite.internal.util.io.GridByteArrayOutputStream;
import org.apache.ignite.internal.util.typedef.internal.S;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.marshaller.AbstractNodeNameAwareMarshaller;
import org.jetbrains.annotations.Nullable;
/**
* Implementation of {@link org.apache.ignite.marshaller.Marshaller} based on JDK serialization mechanism.
* <p>
* <h1 class="header">Configuration</h1>
* <h2 class="header">Mandatory</h2>
* This marshaller has no mandatory configuration parameters.
* <h2 class="header">Java Example</h2>
* {@code JdkMarshaller} needs to be explicitly configured to override default {@link org.apache.ignite.marshaller.optimized.OptimizedMarshaller}.
* <pre name="code" class="java">
* JdkMarshaller marshaller = new JdkMarshaller();
*
* IgniteConfiguration cfg = new IgniteConfiguration();
*
* // Override default marshaller.
* cfg.setMarshaller(marshaller);
*
* // Starts grid.
* G.start(cfg);
* </pre>
* <h2 class="header">Spring Example</h2>
* JdkMarshaller can be configured from Spring XML configuration file:
* <pre name="code" class="xml">
* <bean id="grid.custom.cfg" class="org.apache.ignite.configuration.IgniteConfiguration" singleton="true">
* ...
* <property name="marshaller">
* <bean class="org.apache.ignite.marshaller.jdk.JdkMarshaller"/>
* </property>
* ...
* </bean>
* </pre>
* <p>
* <img src="http://ignite.apache.org/images/spring-small.png">
* <br>
* For information about Spring framework visit <a href="http://www.springframework.org/">www.springframework.org</a>
*/
public class JdkMarshaller extends AbstractNodeNameAwareMarshaller {
    /** {@inheritDoc} */
    @Override protected void marshal0(@Nullable Object obj, OutputStream out) throws IgniteCheckedException {
        assert out != null;

        ObjectOutputStream objOut = null;

        try {
            // Wrap the raw stream in the marshaller-specific object stream;
            // the wrapper presumably shields the caller's stream from being
            // closed directly — TODO confirm against JdkMarshallerOutputStreamWrapper.
            objOut = new JdkMarshallerObjectOutputStream(new JdkMarshallerOutputStreamWrapper(out));

            // Make sure that we serialize only task, without class loader.
            objOut.writeObject(obj);

            objOut.flush();
        }
        catch (Exception e) {
            // Wrap any serialization failure, preserving the cause.
            throw new IgniteCheckedException("Failed to serialize object: " + obj, e);
        }
        finally{
            // Quiet close: a failure while closing must not mask the result above.
            U.closeQuiet(objOut);
        }
    }

    /** {@inheritDoc} */
    @Override protected byte[] marshal0(@Nullable Object obj) throws IgniteCheckedException {
        GridByteArrayOutputStream out = null;

        try {
            // Delegate to the stream-based overload and snapshot the buffer.
            out = new GridByteArrayOutputStream(DFLT_BUFFER_SIZE);

            marshal(obj, out);

            return out.toByteArray();
        }
        finally {
            U.close(out, null);
        }
    }

    /** {@inheritDoc} */
    @SuppressWarnings({"unchecked"})
    @Override protected <T> T unmarshal0(InputStream in, @Nullable ClassLoader clsLdr) throws IgniteCheckedException {
        assert in != null;

        // Fall back to this class' loader when the caller provides none.
        if (clsLdr == null)
            clsLdr = getClass().getClassLoader();

        ObjectInputStream objIn = null;

        try {
            objIn = new JdkMarshallerObjectInputStream(new JdkMarshallerInputStreamWrapper(in), clsLdr);

            return (T)objIn.readObject();
        }
        catch (ClassNotFoundException e) {
            // Distinguish a missing class (common deployment problem) from
            // other deserialization failures so the message is actionable.
            throw new IgniteCheckedException("Failed to find class with given class loader for unmarshalling " +
                "(make sure same versions of all classes are available on all nodes or enable peer-class-loading): " +
                clsLdr, e);
        }
        catch (Exception e) {
            throw new IgniteCheckedException("Failed to deserialize object with given class loader: " + clsLdr, e);
        }
        finally{
            U.closeQuiet(objIn);
        }
    }

    /** {@inheritDoc} */
    @Override protected <T> T unmarshal0(byte[] arr, @Nullable ClassLoader clsLdr) throws IgniteCheckedException {
        GridByteArrayInputStream in = null;

        try {
            // View the byte array as a stream and reuse the stream-based path.
            in = new GridByteArrayInputStream(arr, 0, arr.length);

            return unmarshal(in, clsLdr);
        }
        finally {
            U.close(in, null);
        }
    }

    /** {@inheritDoc} */
    @Override public void onUndeploy(ClassLoader ldr) {
        // No-op.
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(JdkMarshaller.class, this);
    }
}
| leveyj/ignite | modules/core/src/main/java/org/apache/ignite/marshaller/jdk/JdkMarshaller.java | Java | apache-2.0 | 5,576 |
"""
Tests for login and logout.
"""
import datetime
from unittest.mock import patch
import responses
import quilt3
from .utils import QuiltTestCase
class TestSession(QuiltTestCase):
    """Tests for quilt3 login/logout session handling.

    Note: mock parameters are injected bottom-up relative to the @patch
    decorators, so decorator order and parameter order must stay in sync.
    """

    @patch('quilt3.session.open_url')
    @patch('quilt3.session.input', return_value='123456')
    @patch('quilt3.session.login_with_token')
    def test_login(self, mock_login_with_token, mock_input, mock_open_url):
        """login() opens the registry login page and forwards the entered code."""
        quilt3.login()

        url = quilt3.session.get_registry_url()
        mock_open_url.assert_called_with(f'{url}/login')
        mock_login_with_token.assert_called_with('123456')

    @patch('quilt3.session._save_auth')
    @patch('quilt3.session._save_credentials')
    def test_login_with_token(self, mock_save_credentials, mock_save_auth):
        """login_with_token() exchanges the code for tokens and persists both
        the auth payload and the derived AWS credentials."""
        url = quilt3.session.get_registry_url()
        mock_auth = dict(
            refresh_token='refresh-token',
            access_token='access-token',
            expires_at=123456789
        )

        # Token exchange endpoint returns the auth payload.
        self.requests_mock.add(
            responses.POST,
            f'{url}/api/token',
            json=mock_auth,
            status=200
        )
        # Credentials endpoint returns AWS-style keys for the session.
        self.requests_mock.add(
            responses.GET,
            f'{url}/api/auth/get_credentials',
            json=dict(
                AccessKeyId='access-key',
                SecretAccessKey='secret-key',
                SessionToken='session-token',
                Expiration="2019-05-28T23:58:07+00:00"
            ),
            status=200
        )

        quilt3.session.login_with_token('123456')

        # Auth is stored keyed by registry URL; credentials are normalized
        # to the botocore field names.
        mock_save_auth.assert_called_with({url: mock_auth})
        mock_save_credentials.assert_called_with(dict(
            access_key='access-key',
            secret_key='secret-key',
            token='session-token',
            expiry_time="2019-05-28T23:58:07+00:00"
        ))

    @patch('quilt3.session._save_credentials')
    @patch('quilt3.session._load_credentials')
    def test_create_botocore_session(self, mock_load_credentials, mock_save_credentials):
        """create_botocore_session() uses cached credentials while valid and
        refreshes (and re-saves) them once they have expired."""
        def format_date(date):
            # Session code expects an ISO-8601 UTC timestamp without microseconds.
            return date.replace(tzinfo=datetime.timezone.utc, microsecond=0).isoformat()

        # Test good credentials.
        future_date = datetime.datetime.utcnow() + datetime.timedelta(hours=1)

        mock_load_credentials.return_value = dict(
            access_key='access-key',
            secret_key='secret-key',
            token='session-token',
            expiry_time=format_date(future_date)
        )

        session = quilt3.session.create_botocore_session()
        credentials = session.get_credentials()

        assert credentials.access_key == 'access-key'
        assert credentials.secret_key == 'secret-key'
        assert credentials.token == 'session-token'

        # Valid credentials must not trigger a save (no refresh happened).
        mock_save_credentials.assert_not_called()

        # Test expired credentials.
        past_date = datetime.datetime.utcnow() - datetime.timedelta(minutes=5)

        mock_load_credentials.return_value = dict(
            access_key='access-key',
            secret_key='secret-key',
            token='session-token',
            expiry_time=format_date(past_date)
        )

        url = quilt3.session.get_registry_url()
        # Refresh endpoint hands out a second, distinct credential set so we
        # can tell the refreshed values apart from the stale ones.
        self.requests_mock.add(
            responses.GET,
            f'{url}/api/auth/get_credentials',
            json=dict(
                AccessKeyId='access-key2',
                SecretAccessKey='secret-key2',
                SessionToken='session-token2',
                Expiration=format_date(future_date)
            ),
            status=200
        )

        session = quilt3.session.create_botocore_session()
        credentials = session.get_credentials()

        assert credentials.access_key == 'access-key2'
        assert credentials.secret_key == 'secret-key2'
        assert credentials.token == 'session-token2'

        # Refreshed credentials must be persisted.
        mock_save_credentials.assert_called()

    def test_logged_in(self):
        """logged_in() reports the catalog URL only for the configured registry."""
        registry_url = quilt3.session.get_registry_url()
        other_registry_url = registry_url + 'other'
        mock_auth = dict(
            refresh_token='refresh-token',
            access_token='access-token',
            expires_at=123456789,
        )

        with patch('quilt3.session._load_auth', return_value={registry_url: mock_auth}) as mocked_load_auth:
            # NOTE(review): the expected literal presumably matches the
            # navigator/catalog URL configured by QuiltTestCase — confirm it
            # stays in sync with the test fixture's registry configuration.
            assert quilt3.logged_in() == 'https://example.com'
            mocked_load_auth.assert_called_once()

        # Auth stored for a *different* registry must not count as logged in.
        with patch('quilt3.session._load_auth', return_value={other_registry_url: mock_auth}) as mocked_load_auth:
            assert quilt3.logged_in() is None
            mocked_load_auth.assert_called_once()
| quiltdata/quilt-compiler | api/python/tests/test_session.py | Python | apache-2.0 | 4,576 |
package io.cattle.platform.configitem.server.model.impl;
import java.io.IOException;
import io.cattle.platform.configitem.server.model.RefreshableConfigItem;
import io.cattle.platform.configitem.server.resource.ResourceRoot;
import io.cattle.platform.configitem.version.ConfigItemStatusManager;
/**
 * Base class for config items whose content is backed by a {@link ResourceRoot}.
 * Refreshing the item re-scans the resource root; the item's source revision is
 * delegated to the root as well.
 */
public abstract class AbstractResourceRootConfigItem extends AbstractConfigItem implements RefreshableConfigItem {

    ResourceRoot resourceRoot;

    public AbstractResourceRootConfigItem(String name, ConfigItemStatusManager versionManager, ResourceRoot resourceRoot) {
        super(name, versionManager);
        this.resourceRoot = resourceRoot;
    }

    /** Exposes the backing resource root to subclasses and callers. */
    public ResourceRoot getResourceRoot() {
        return this.resourceRoot;
    }

    /** The revision reported is whatever the backing resource root reports. */
    @Override
    public String getSourceRevision() {
        return this.resourceRoot.getSourceRevision();
    }

    /** Re-scans the backing resource root to pick up changed resources. */
    @Override
    public void refresh() throws IOException {
        this.resourceRoot.scan();
    }
}
| alena1108/cattle | code/iaas/config-item/server/src/main/java/io/cattle/platform/configitem/server/model/impl/AbstractResourceRootConfigItem.java | Java | apache-2.0 | 945 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.util.json;
import java.io.IOException;
import java.io.StringWriter;
import java.io.Writer;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.Map;
/**
* JsonArray is a common non-thread safe data format for a collection of data.
* The contents of a JsonArray are only validated as JSON values on
* serialization.
*
* @see Jsoner
* @since 2.0.0
*/
public class JsonArray extends ArrayList<Object> implements Jsonable {
    /**
     * The serialization version this class is compatible with. This value
     * doesn't need to be incremented if and only if the only changes to occur
     * were updating comments, updating javadocs, adding new fields to the
     * class, changing the fields from static to non-static, or changing the
     * fields from transient to non transient. All other changes require this
     * number be incremented.
     */
    private static final long serialVersionUID = 1L;

    /** Instantiates an empty JsonArray. */
    public JsonArray() {
    }

    /**
     * Instantiate a new JsonArray using ArrayList's constructor of the same
     * type.
     *
     * @param collection represents the elements to produce the JsonArray with.
     */
    public JsonArray(final Collection<?> collection) {
        super(collection);
    }

    /**
     * A convenience method that assumes every element of the JsonArray is
     * castable to T before adding it to a collection of Ts.
     *
     * @param <T> represents the type that all of the elements of the JsonArray
     *        should be cast to and the type the collection will contain.
     * @param destination represents where all of the elements of the JsonArray
     *        are added to after being cast to the generic type provided.
     * @throws ClassCastException if the unchecked cast of an element to T
     *         fails.
     */
    @SuppressWarnings("unchecked")
    public <T> void asCollection(final Collection<T> destination) {
        for (final Object o : this) {
            destination.add((T)o);
        }
    }

    /**
     * Normalizes the element at the given index to a {@link Number}. A String
     * element is converted through the BigDecimal(String) constructor; null is
     * passed through unchanged. Shared by all of the numeric getters below.
     *
     * @param index represents where the value is expected to be at.
     * @return the element as a Number, or null if the element was null.
     * @throws ClassCastException if the element is neither null, a Number, nor
     *         a String.
     * @throws NumberFormatException if a String element isn't a valid
     *         representation of a BigDecimal.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     */
    private Number getNumber(final int index) {
        Object value = this.get(index);
        if (value instanceof String) {
            /* A String can be used to construct a BigDecimal. */
            value = new BigDecimal((String)value);
        }
        return (Number)value;
    }

    /**
     * A convenience method that assumes there is a BigDecimal, Number, or
     * String at the given index. If a Number or String is there it is used to
     * construct a new BigDecimal.
     *
     * @param index representing where the value is expected to be at.
     * @return the value at the index provided as a BigDecimal, or null if the
     *         element was null.
     * @throws ClassCastException if there was a value but didn't match the
     *         assumed return types.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     * @throws NumberFormatException if a String isn't a valid representation of
     *         a BigDecimal.
     * @see BigDecimal
     * @see Number#doubleValue()
     */
    public BigDecimal getBigDecimal(final int index) {
        Object returnable = this.get(index);
        if (returnable instanceof BigDecimal) {
            /* Success there was a BigDecimal. */
        } else if (returnable instanceof Number) {
            /* A number can be used to construct a BigDecimal. */
            returnable = new BigDecimal(returnable.toString());
        } else if (returnable instanceof String) {
            /* A String can be used to construct a BigDecimal. */
            returnable = new BigDecimal((String)returnable);
        }
        return (BigDecimal)returnable;
    }

    /**
     * A convenience method that assumes there is a Boolean or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a boolean.
     * @throws ClassCastException if there was a value but didn't match the
     *         assumed return type.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     */
    public Boolean getBoolean(final int index) {
        Object returnable = this.get(index);
        if (returnable instanceof String) {
            returnable = Boolean.valueOf((String)returnable);
        }
        return (Boolean)returnable;
    }

    /**
     * A convenience method that assumes there is a Number or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a byte, or null if the
     *         element was null.
     * @throws ClassCastException if there was a value but didn't match the
     *         assumed return type.
     * @throws NumberFormatException if a String isn't a valid representation of
     *         a BigDecimal or if the Number represents the double or float
     *         Infinity or NaN.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     * @see Number
     */
    public Byte getByte(final int index) {
        final Number value = getNumber(index);
        return (value == null) ? null : value.byteValue();
    }

    /**
     * A convenience method that assumes there is a Collection value at the
     * given index.
     *
     * @param <T> the kind of collection to expect at the index. Note unless
     *        manually added, collection values will be a JsonArray.
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a Collection.
     * @throws ClassCastException if there was a value but didn't match the
     *         assumed return type.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     * @see Collection
     */
    @SuppressWarnings("unchecked")
    public <T extends Collection<?>> T getCollection(final int index) {
        /*
         * The unchecked warning is suppressed because there is no way of
         * guaranteeing at compile time the cast will work.
         */
        return (T)this.get(index);
    }

    /**
     * A convenience method that assumes there is a Number or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a double, or null if the
     *         element was null.
     * @throws ClassCastException if there was a value but didn't match the
     *         assumed return type.
     * @throws NumberFormatException if a String isn't a valid representation of
     *         a BigDecimal or if the Number represents the double or float
     *         Infinity or NaN.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     * @see Number
     */
    public Double getDouble(final int index) {
        final Number value = getNumber(index);
        return (value == null) ? null : value.doubleValue();
    }

    /**
     * A convenience method that assumes there is a String value at the given
     * index representing a fully qualified name in dot notation of an enum.
     *
     * @param index representing where the value is expected to be at.
     * @param <T> the Enum type the value at the index is expected to belong to.
     * @return the enum based on the string found at the index, or null if the
     *         value at the index was null.
     * @throws ClassNotFoundException if the element was a String but the
     *         declaring enum type couldn't be determined with it.
     * @throws ClassCastException if the element at the index was not a String
     *         or if the fully qualified enum name is of the wrong type.
     * @throws IllegalArgumentException if an enum type was dynamically
     *         determined but it doesn't define an enum with the dynamically
     *         determined name.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     * @see Enum#valueOf(Class, String)
     */
    @SuppressWarnings("unchecked")
    public <T extends Enum<T>> T getEnum(final int index) throws ClassNotFoundException {
        /*
         * Supressing the unchecked warning because the returnType is
         * dynamically identified and could lead to a ClassCastException when
         * returnType is cast to Class<T>, which is expected by the method's
         * contract.
         */
        T returnable;
        final String element;
        final String[] splitValues;
        final int numberOfValues;
        final StringBuilder returnTypeName;
        final StringBuilder enumName;
        final Class<T> returnType;
        /* Make sure the element at the index is a String. */
        element = this.getString(index);
        if (element == null) {
            return null;
        }
        /* Get the package, class, and enum names. */
        splitValues = element.split("\\.");
        numberOfValues = splitValues.length;
        returnTypeName = new StringBuilder();
        enumName = new StringBuilder();
        for (int i = 0; i < numberOfValues; i++) {
            if (i == (numberOfValues - 1)) {
                /*
                 * If it is the last split value then it should be the name of
                 * the Enum since dots are not allowed in enum names.
                 */
                enumName.append(splitValues[i]);
            } else if (i == (numberOfValues - 2)) {
                /*
                 * If it is the penultimate split value then it should be the
                 * end of the package/enum type and not need a dot appended to
                 * it.
                 */
                returnTypeName.append(splitValues[i]);
            } else {
                /*
                 * Must be part of the package/enum type and will need a dot
                 * appended to it since they got removed in the split.
                 */
                returnTypeName.append(splitValues[i]);
                returnTypeName.append(".");
            }
        }
        /* Use the package/class and enum names to get the Enum<T>. */
        returnType = (Class<T>)Class.forName(returnTypeName.toString());
        returnable = Enum.valueOf(returnType, enumName.toString());
        return returnable;
    }

    /**
     * A convenience method that assumes there is a Number or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a float, or null if the
     *         element was null.
     * @throws ClassCastException if there was a value but didn't match the
     *         assumed return type.
     * @throws NumberFormatException if a String isn't a valid representation of
     *         a BigDecimal or if the Number represents the double or float
     *         Infinity or NaN.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     * @see Number
     */
    public Float getFloat(final int index) {
        final Number value = getNumber(index);
        return (value == null) ? null : value.floatValue();
    }

    /**
     * A convenience method that assumes there is a Number or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a int, or null if the
     *         element was null.
     * @throws ClassCastException if there was a value but didn't match the
     *         assumed return type.
     * @throws NumberFormatException if a String isn't a valid representation of
     *         a BigDecimal or if the Number represents the double or float
     *         Infinity or NaN.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     * @see Number
     */
    public Integer getInteger(final int index) {
        final Number value = getNumber(index);
        return (value == null) ? null : value.intValue();
    }

    /**
     * A convenience method that assumes there is a Number or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a long, or null if the
     *         element was null.
     * @throws ClassCastException if there was a value but didn't match the
     *         assumed return type.
     * @throws NumberFormatException if a String isn't a valid representation of
     *         a BigDecimal or if the Number represents the double or float
     *         Infinity or NaN.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     * @see Number
     */
    public Long getLong(final int index) {
        final Number value = getNumber(index);
        return (value == null) ? null : value.longValue();
    }

    /**
     * A convenience method that assumes there is a Map value at the given
     * index.
     *
     * @param <T> the kind of map to expect at the index. Note unless manually
     *        added, Map values will be a JsonObject.
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a Map.
     * @throws ClassCastException if there was a value but didn't match the
     *         assumed return type.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     * @see Map
     */
    @SuppressWarnings("unchecked")
    public <T extends Map<?, ?>> T getMap(final int index) {
        /*
         * The unchecked warning is suppressed because there is no way of
         * guaranteeing at compile time the cast will work.
         */
        return (T)this.get(index);
    }

    /**
     * A convenience method that assumes there is a Number or String value at
     * the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a short, or null if the
     *         element was null.
     * @throws ClassCastException if there was a value but didn't match the
     *         assumed return type.
     * @throws NumberFormatException if a String isn't a valid representation of
     *         a BigDecimal or if the Number represents the double or float
     *         Infinity or NaN.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     * @see Number
     */
    public Short getShort(final int index) {
        final Number value = getNumber(index);
        return (value == null) ? null : value.shortValue();
    }

    /**
     * A convenience method that assumes there is a Boolean, Number, or String
     * value at the given index.
     *
     * @param index represents where the value is expected to be at.
     * @return the value at the index provided cast to a String.
     * @throws ClassCastException if there was a value but didn't match the
     *         assumed return type.
     * @throws IndexOutOfBoundsException if the index is outside of the range of
     *         element indexes in the JsonArray.
     */
    public String getString(final int index) {
        Object returnable = this.get(index);
        if (returnable instanceof Boolean) {
            returnable = returnable.toString();
        } else if (returnable instanceof Number) {
            returnable = returnable.toString();
        }
        return (String)returnable;
    }

    /*
     * (non-Javadoc)
     * @see org.apache.camel.util.json.Jsonable#asJsonString()
     */
    @Override
    public String toJson() {
        final StringWriter writable = new StringWriter();
        try {
            this.toJson(writable);
        } catch (final IOException caught) {
            /* See java.io.StringWriter: its write methods never throw. */
        }
        return writable.toString();
    }

    /*
     * (non-Javadoc)
     * @see org.apache.camel.util.json.Jsonable#toJsonString(java.io.Writer)
     */
    @Override
    public void toJson(final Writer writable) throws IOException {
        boolean isFirstElement = true;
        final Iterator<Object> elements = this.iterator();
        writable.write('[');
        while (elements.hasNext()) {
            if (isFirstElement) {
                isFirstElement = false;
            } else {
                /* Separate elements with commas; none before the first one. */
                writable.write(',');
            }
            writable.write(Jsoner.serialize(elements.next()));
        }
        writable.write(']');
    }
}
| objectiser/camel | tooling/camel-util-json/src/main/java/org/apache/camel/util/json/JsonArray.java | Java | apache-2.0 | 19,042 |
/*
* Licensed to Apereo under one or more contributor license
* agreements. See the NOTICE file distributed with this work
* for additional information regarding copyright ownership.
* Apereo licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a
* copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.jasig.portlet.notice.util;
import javax.portlet.PortletRequest;
import javax.servlet.http.HttpServletRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.security.core.Authentication;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.stereotype.Component;
@Component("usernameFinder")
public final class UsernameFinder {

    @Value("${UsernameFinder.unauthenticatedUsername}")
    private String unauthenticatedUsername = "guest";

    private Logger logger = LoggerFactory.getLogger(getClass());

    /**
     * Resolves the username from the Portlet API, falling back to the
     * configured unauthenticated (guest) username when no remote user is set.
     *
     * @deprecated Prefer interactions that are not based on the Portlet API
     */
    @Deprecated
    public String findUsername(PortletRequest req) {
        return req.getRemoteUser() != null
                ? req.getRemoteUser()
                : unauthenticatedUsername;
    }

    /**
     * Resolves the username of the current user from the Spring Security
     * context. Identification based on Spring Security is required to access
     * Servlet-based APIs.
     *
     * @throws SecurityException if no authenticated principal is available
     * @since 4.0
     */
    public String findUsername(HttpServletRequest request) {
        final Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
        logger.trace("Processing the following Authentication object: {}", authentication);

        /*
         * Guard before dereferencing: the original code called getPrincipal()
         * on a possibly-null Authentication, producing an NPE instead of the
         * intended SecurityException when no user was identified.
         */
        if (authentication == null || authentication.getPrincipal() == null) {
            throw new SecurityException("User not identified");
        }

        final String rslt = (String) authentication.getPrincipal();
        logger.debug("Found username '{}' based on the contents of the SecurityContextHolder", rslt);
        return rslt;
    }

    /**
     * Indicates whether the portlet request carries a user other than the
     * configured unauthenticated (guest) username.
     *
     * @deprecated Prefer interactions that are not based on the Portlet API
     */
    @Deprecated
    public boolean isAuthenticated(PortletRequest req) {
        return !findUsername(req).equalsIgnoreCase(unauthenticatedUsername);
    }

    /** Indicates whether the Spring Security context holds an authenticated user. */
    public boolean isAuthenticated(HttpServletRequest request) {
        final Authentication authentication = SecurityContextHolder.getContext().getAuthentication();
        logger.trace("Processing the following Authentication object: {}", authentication);
        return authentication != null && authentication.isAuthenticated();
    }
}
| Jasig/NotificationPortlet | notification-portlet-webapp/src/main/java/org/jasig/portlet/notice/util/UsernameFinder.java | Java | apache-2.0 | 3,039 |
import { registerBidder } from '../src/adapters/bidderFactory.js';
import { BANNER, NATIVE, VIDEO } from '../src/mediaTypes.js';
import * as utils from '../src/utils.js';
import { config } from '../src/config.js';
const BIDDER_CODE = 'gothamads';
// Macro in the endpoint URL that gets substituted with the publisher's account id.
const ACCOUNTID_MACROS = '[account_id]';
const URL_ENDPOINT = `https://us-e-node1.gothamads.com/bid?pass=${ACCOUNTID_MACROS}&integration=prebidjs`;
// Maps OpenRTB native asset ids back to the prebid native asset names used in responses.
const NATIVE_ASSET_IDS = {
  0: 'title',
  2: 'icon',
  3: 'image',
  5: 'sponsoredBy',
  4: 'body',
  1: 'cta'
};
// Prebid native asset name -> OpenRTB asset descriptor (id plus img/data sub-type).
const NATIVE_PARAMS = {
  title: {
    id: 0,
    name: 'title'
  },
  icon: {
    id: 2,
    type: 1,
    name: 'img'
  },
  image: {
    id: 3,
    type: 3,
    name: 'img'
  },
  sponsoredBy: {
    id: 5,
    name: 'data',
    type: 1
  },
  body: {
    id: 4,
    name: 'data',
    type: 2
  },
  cta: {
    id: 1,
    type: 12,
    name: 'data'
  }
};
// OpenRTB Native Ads spec version requested.
const NATIVE_VERSION = '1.2';
export const spec = {
  code: BIDDER_CODE,
  supportedMediaTypes: [BANNER, VIDEO, NATIVE],

  /**
   * Determines whether or not the given bid request is valid.
   *
   * @param {object} bid The bid to validate.
   * @return boolean True if this is a valid bid, and false otherwise.
   */
  isBidRequestValid: (bid) => {
    return Boolean(bid.params.accountId) && Boolean(bid.params.placementId);
  },

  /**
   * Make a server request from the list of BidRequests.
   *
   * @param {BidRequest[]} validBidRequests A non-empty list of valid bid requests that should be sent to the Server.
   * @param {BidderRequest} bidderRequest Metadata about the auction (referer, consent, ...).
   * @return ServerRequest Info describing the request to the server.
   */
  buildRequests: (validBidRequests, bidderRequest) => {
    // Bail out on a missing OR empty list. The original guard
    // (`validBidRequests && validBidRequests.length === 0`) let `undefined`
    // fall through and crash on `validBidRequests[0]` below.
    if (!validBidRequests || validBidRequests.length === 0) return [];

    let accountId = validBidRequests[0].params.accountId;
    const endpointURL = URL_ENDPOINT.replace(ACCOUNTID_MACROS, accountId);
    let winTop = window;
    let location;
    try {
      location = new URL(bidderRequest.refererInfo.referer);
      winTop = window.top;
    } catch (e) {
      // Cross-origin frame or unparsable referer: fall back to our own window.
      location = winTop.location;
      utils.logMessage(e);
    };
    let bids = [];
    for (let bidRequest of validBidRequests) {
      let impObject = prepareImpObject(bidRequest);
      let data = {
        id: bidRequest.bidId,
        test: config.getConfig('debug') ? 1 : 0,
        cur: ['USD'],
        device: {
          w: winTop.screen.width,
          h: winTop.screen.height,
          // Strip the region suffix from BCP-47 tags ('en-US' -> 'en').
          language: (navigator && navigator.language) ? navigator.language.indexOf('-') !== -1 ? navigator.language.split('-')[0] : navigator.language : '',
        },
        site: {
          page: location.pathname,
          host: location.host
        },
        source: {
          tid: bidRequest.transactionId
        },
        regs: {
          coppa: config.getConfig('coppa') === true ? 1 : 0,
          ext: {}
        },
        tmax: bidRequest.timeout,
        imp: [impObject],
      };
      // NOTE(review): consent is read from the individual bid request here;
      // confirm whether it should come from `bidderRequest` instead.
      if (bidRequest.gdprConsent && bidRequest.gdprConsent.gdprApplies) {
        utils.deepSetValue(data, 'regs.ext.gdpr', bidRequest.gdprConsent.gdprApplies ? 1 : 0);
        utils.deepSetValue(data, 'user.ext.consent', bidRequest.gdprConsent.consentString);
      }
      if (bidRequest.uspConsent !== undefined) {
        utils.deepSetValue(data, 'regs.ext.us_privacy', bidRequest.uspConsent);
      }
      bids.push(data);
    }
    return {
      method: 'POST',
      url: endpointURL,
      data: bids
    };
  },

  /**
   * Unpack the response from the server into a list of bids.
   *
   * @param {*} serverResponse A successful response from the server.
   * @return {Bid[]} An array of bids which were nested inside the server.
   */
  interpretResponse: (serverResponse) => {
    if (!serverResponse || !serverResponse.body) return [];
    let GothamAdsResponse = serverResponse.body;

    let bids = [];
    for (let response of GothamAdsResponse) {
      // Only the first bid of the first seat is consumed per response.
      const bidData = response.seatbid[0].bid[0];
      let mediaType = bidData.ext && bidData.ext.mediaType ? bidData.ext.mediaType : BANNER;
      let bid = {
        requestId: response.id,
        cpm: bidData.price,
        width: bidData.w,
        height: bidData.h,
        ttl: response.ttl || 1200,
        currency: response.cur || 'USD',
        netRevenue: true,
        creativeId: bidData.crid,
        dealId: bidData.dealid,
        mediaType: mediaType
      };

      bid.meta = {};
      if (bidData.adomain && bidData.adomain.length > 0) {
        bid.meta.advertiserDomains = bidData.adomain;
      }

      // Attach the creative payload in the shape prebid expects per media type.
      switch (mediaType) {
        case VIDEO:
          bid.vastXml = bidData.adm;
          bid.vastUrl = bidData.ext.vastUrl;
          break;
        case NATIVE:
          bid.native = parseNative(bidData.adm);
          break;
        default:
          bid.ad = bidData.adm;
      }

      bids.push(bid);
    }

    return bids;
  },
};
/**
 * Check whether a bid request declares the given media type.
 *
 * @param bidRequest the bid request to inspect
 * @param type the media type key to look for under `mediaTypes`
 * @returns {boolean} true when `mediaTypes.<type>` is present
 */
const checkRequestType = (bidRequest, type) =>
  typeof utils.deepAccess(bidRequest, `mediaTypes.${type}`) !== 'undefined';
/**
 * Convert an OpenRTB native `adm` object into Prebid's native bid format.
 *
 * @param admObject parsed native response (object carrying a `native` property)
 * @returns {Object} Prebid-native payload with click/impression trackers and assets
 */
const parseNative = admObject => {
  const { assets, link, imptrackers, jstracker } = admObject.native;
  const result = {
    clickUrl: link.url,
    clickTrackers: link.clicktrackers || undefined,
    impressionTrackers: imptrackers || undefined,
    javascriptTrackers: jstracker ? [jstracker] : undefined
  };
  for (const asset of assets) {
    // Map the numeric OpenRTB asset id back to Prebid's native key.
    const kind = NATIVE_ASSET_IDS[asset.id];
    if (!kind) continue;
    const content = asset[NATIVE_PARAMS[kind].name];
    if (content) {
      // Text/data assets carry `text`/`value`; image assets carry url + size.
      result[kind] = content.text || content.value || {
        url: content.url,
        width: content.w,
        height: content.h
      };
    }
  }
  return result;
}
/**
 * Build the OpenRTB `imp` object for a single bid request, attaching one
 * sub-object per declared media type (banner / video / native).
 *
 * @param bidRequest the bid request being translated
 * @returns {Object} an imp object keyed by transaction id
 */
const prepareImpObject = (bidRequest) => {
  const impObject = {
    id: bidRequest.transactionId,
    secure: 1,
    ext: {
      placementId: bidRequest.params.placementId
    }
  };
  if (checkRequestType(bidRequest, BANNER)) {
    impObject.banner = addBannerParameters(bidRequest);
  }
  if (checkRequestType(bidRequest, VIDEO)) {
    impObject.video = addVideoParameters(bidRequest);
  }
  if (checkRequestType(bidRequest, NATIVE)) {
    impObject.native = {
      ver: NATIVE_VERSION,
      request: addNativeParameters(bidRequest)
    };
  }
  return impObject;
};
/**
 * Build the OpenRTB Native request object for a bid request, translating
 * Prebid's `mediaTypes.native` params into OpenRTB native asset objects.
 *
 * @param bidRequest the bid request carrying `mediaTypes.native`
 * @returns {Object} the native request object with an `assets` array
 */
const addNativeParameters = bidRequest => {
  let impObject = {
    id: bidRequest.transactionId,
    ver: NATIVE_VERSION,
  };
  const assets = utils._map(bidRequest.mediaTypes.native, (bidParams, key) => {
    // Look up the OpenRTB asset description for this Prebid native key;
    // unknown keys yield `undefined` and are dropped by filter(Boolean) below.
    const props = NATIVE_PARAMS[key];
    const asset = {
      // Coerce `required` to a strict 0/1 flag.
      required: bidParams.required & 1,
    };
    if (props) {
      asset.id = props.id;
      let wmin, hmin;
      let aRatios = bidParams.aspect_ratios;
      if (aRatios && aRatios[0]) {
        // Only the first declared aspect ratio is honoured; the minimum
        // height is derived from the ratio ("| 0" truncates to an int).
        aRatios = aRatios[0];
        wmin = aRatios.min_width || 0;
        hmin = aRatios.ratio_height * wmin / aRatios.ratio_width | 0;
      }
      if (bidParams.sizes) {
        // Explicit sizes take precedence over aspect-ratio-derived minimums.
        const sizes = flatten(bidParams.sizes);
        wmin = sizes[0];
        hmin = sizes[1];
      }
      asset[props.name] = {}
      if (bidParams.len) asset[props.name]['len'] = bidParams.len;
      if (props.type) asset[props.name]['type'] = props.type;
      if (wmin) asset[props.name]['wmin'] = wmin;
      if (hmin) asset[props.name]['hmin'] = hmin;
      return asset;
    }
  }).filter(Boolean);
  impObject.assets = assets;
  return impObject
}
/**
 * Build the OpenRTB banner object (width/height) for a bid request.
 *
 * @param bidRequest the bid request carrying banner sizes
 * @returns {Object} banner object with `w` and `h`
 */
const addBannerParameters = (bidRequest) => {
  const [w, h] = parseSizes(bidRequest, 'banner');
  return { w, h };
};
/**
 * Resolve the requested creative size for a bid.
 *
 * For video: prefer explicit `mediaTypes.video.w/h`, then a single
 * `playerSize` entry, then the first entry of `bid.sizes`.
 * For banner: prefer the first entry of `mediaTypes.banner.sizes`, then
 * `bid.sizes`; logs a warning when nothing usable is found.
 *
 * @param bid the bid request
 * @param mediaType either 'video' or any other value (treated as banner)
 * @returns {Array} `[width, height]` (possibly empty when nothing is configured)
 */
const parseSizes = (bid, mediaType) => {
  let mediaTypes = bid.mediaTypes;
  if (mediaType === 'video') {
    let size = [];
    if (mediaTypes.video && mediaTypes.video.w && mediaTypes.video.h) {
      size = [
        mediaTypes.video.w,
        mediaTypes.video.h
      ];
    } else if (Array.isArray(utils.deepAccess(bid, 'mediaTypes.video.playerSize')) && bid.mediaTypes.video.playerSize.length === 1) {
      size = bid.mediaTypes.video.playerSize[0];
    } else if (Array.isArray(bid.sizes) && bid.sizes.length > 0 && Array.isArray(bid.sizes[0]) && bid.sizes[0].length > 1) {
      size = bid.sizes[0];
    }
    return size;
  }
  let sizes = [];
  // Guard `mediaTypes.banner` existence: the previous code dereferenced
  // `mediaTypes.banner.sizes` unconditionally and threw when banner was absent.
  if (mediaTypes.banner && Array.isArray(mediaTypes.banner.sizes)) {
    sizes = mediaTypes.banner.sizes[0];
  } else if (Array.isArray(bid.sizes) && bid.sizes.length > 0) {
    sizes = bid.sizes
  } else {
    utils.logWarn('no sizes are setup or found');
  }
  return sizes
}
/**
 * Copy the supported OpenRTB video attributes from the bid request and
 * attach the resolved player size.
 *
 * @param bidRequest the bid request carrying `mediaTypes.video`
 * @returns {Object} the OpenRTB video object
 */
const addVideoParameters = (bidRequest) => {
  const SUPPORTED_PARAMS = ['mimes', 'minduration', 'maxduration', 'protocols', 'startdelay', 'placement', 'skip', 'skipafter', 'minbitrate', 'maxbitrate', 'delivery', 'playbackmethod', 'api', 'linearity'];
  const video = bidRequest.mediaTypes.video;
  const videoObj = {};
  SUPPORTED_PARAMS.forEach((param) => {
    if (video[param] !== undefined) {
      videoObj[param] = video[param];
    }
  });
  const size = parseSizes(bidRequest, 'video');
  videoObj.w = size[0];
  videoObj.h = size[1];
  return videoObj;
}
/**
 * Flatten an array one level deep (e.g. [[w, h], [w, h]] -> [w, h, w, h]).
 *
 * @param arr array whose elements may themselves be arrays
 * @returns {Array} a new single-level array
 */
const flatten = arr => arr.reduce((acc, item) => acc.concat(item), []);
registerBidder(spec);
| tchibirev/Prebid.js | modules/gothamadsBidAdapter.js | JavaScript | apache-2.0 | 9,383 |
<?php
/**
* @category SchumacherFM
* @package SchumacherFM_FastIndexer
* @copyright Copyright (c) http://www.schumacher.fm
* @license see LICENSE.md file
* @author Cyrill at Schumacher dot fm @SchumacherFM
*/
class SchumacherFM_FastIndexer_Model_Lock_Session
extends SchumacherFM_FastIndexer_Model_Lock_Abstract
implements SchumacherFM_FastIndexer_Model_Lock_LockInterface
{
const SESS_PREFIX = 'fastindexer_';
/**
* @var Mage_Core_Model_Resource_Session
*/
protected $_session = null;
/**
* @return Mage_Core_Model_Resource_Session
*/
public function getSession()
{
if (null !== $this->_session) {
return $this->_session;
}
$this->_session = Mage::getResourceSingleton('core/session');
return $this->_session;
}
    /**
     * Lock process without blocking.
     * Writes the current microtime under this indexer's session key; used to
     * protect against multiple processes running and for fast lock validation.
     *
     * @return void
     */
    public function lock()
    {
        $this->getSession()->write(self::SESS_PREFIX . $this->getIndexerCode(), microtime(true));
    }
    /**
     * Lock and block process.
     * If a new instance of the process tries to validate the locking state the
     * script should wait until the process is unlocked.
     *
     * Note: this session-based implementation cannot actually block, so it
     * simply delegates to the non-blocking lock().
     *
     * @return void
     */
    public function lockAndBlock()
    {
        $this->lock();
    }
    /**
     * Unlock process by destroying this indexer's session entry.
     *
     * Note: the original doc block claimed a Mage_Index_Model_Process return
     * value, but this method returns nothing.
     *
     * @return void
     */
    public function unlock()
    {
        $this->getSession()->destroy(self::SESS_PREFIX . $this->getIndexerCode());
    }
/**
* Check if process is locked
*
* @return bool
*/
public function isLocked()
{
$startTime = (double)$this->getSession()->read(self::SESS_PREFIX . $this->getIndexerCode());
if ($startTime < 0.0001) {
return false;
}
return $this->_isLockedByTtl($startTime);
}
} | mssyogi/Magento-FastIndexer | src/app/code/community/SchumacherFM/FastIndexer/Model/Lock/Session.php | PHP | apache-2.0 | 1,917 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.input;
import com.fasterxml.jackson.annotation.JacksonInject;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonInclude.Include;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.Iterators;
import org.apache.druid.client.coordinator.CoordinatorClient;
import org.apache.druid.data.input.AbstractInputSource;
import org.apache.druid.data.input.InputFileAttribute;
import org.apache.druid.data.input.InputFormat;
import org.apache.druid.data.input.InputRowSchema;
import org.apache.druid.data.input.InputSourceReader;
import org.apache.druid.data.input.InputSplit;
import org.apache.druid.data.input.MaxSizeSplitHintSpec;
import org.apache.druid.data.input.SegmentsSplitHintSpec;
import org.apache.druid.data.input.SplitHintSpec;
import org.apache.druid.data.input.impl.InputEntityIteratingReader;
import org.apache.druid.data.input.impl.SplittableInputSource;
import org.apache.druid.indexing.common.ReingestionTimelineUtils;
import org.apache.druid.indexing.common.RetryPolicy;
import org.apache.druid.indexing.common.RetryPolicyFactory;
import org.apache.druid.indexing.common.SegmentLoaderFactory;
import org.apache.druid.indexing.firehose.WindowedSegmentId;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.guava.Comparators;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.query.filter.DimFilter;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.loading.SegmentLoader;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.TimelineObjectHolder;
import org.apache.druid.timeline.VersionedIntervalTimeline;
import org.apache.druid.timeline.partition.PartitionChunk;
import org.apache.druid.timeline.partition.PartitionHolder;
import org.apache.druid.utils.Streams;
import org.joda.time.Duration;
import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ThreadLocalRandom;
import java.util.stream.Stream;
/**
 * An {@link AbstractInputSource} that reads rows back out of existing Druid segments, either for a
 * whole interval of a datasource or for an explicit list of segments. Used by re-ingestion
 * (compaction / re-indexing) tasks; splittable so that parallel sub-tasks can each handle a subset
 * of segments.
 */
public class DruidInputSource extends AbstractInputSource implements SplittableInputSource<List<WindowedSegmentId>>
{
  private static final Logger LOG = new Logger(DruidInputSource.class);

  /**
   * A Comparator that orders {@link WindowedSegmentId} mainly by segmentId (which is important), and then by intervals
   * (which is arbitrary, and only here for totality of ordering).
   */
  private static final Comparator<WindowedSegmentId> WINDOWED_SEGMENT_ID_COMPARATOR =
      Comparator.comparing(WindowedSegmentId::getSegmentId)
                .thenComparing(windowedSegmentId -> windowedSegmentId.getIntervals().size())
                .thenComparing(
                    (WindowedSegmentId a, WindowedSegmentId b) -> {
                      // Same segmentId, same intervals list size. Compare each interval.
                      int cmp = 0;
                      for (int i = 0; i < a.getIntervals().size(); i++) {
                        cmp = Comparators.intervalsByStartThenEnd()
                                         .compare(a.getIntervals().get(i), b.getIntervals().get(i));
                        if (cmp != 0) {
                          return cmp;
                        }
                      }
                      return cmp;
                    }
                );

  private final String dataSource;

  // Exactly one of interval and segmentIds should be non-null. Typically 'interval' is specified directly
  // by the user creating this firehose and 'segmentIds' is used for sub-tasks if it is split for parallel
  // batch ingestion.
  @Nullable
  private final Interval interval;
  @Nullable
  private final List<WindowedSegmentId> segmentIds;

  private final DimFilter dimFilter;
  private final List<String> dimensions;
  private final List<String> metrics;
  private final IndexIO indexIO;
  private final CoordinatorClient coordinatorClient;
  private final SegmentLoaderFactory segmentLoaderFactory;
  private final RetryPolicyFactory retryPolicyFactory;

  @JsonCreator
  public DruidInputSource(
      @JsonProperty("dataSource") final String dataSource,
      @JsonProperty("interval") @Nullable Interval interval,
      // Specifying "segments" is intended only for when this FirehoseFactory has split itself,
      // not for direct end user use.
      @JsonProperty("segments") @Nullable List<WindowedSegmentId> segmentIds,
      @JsonProperty("filter") DimFilter dimFilter,
      @Nullable @JsonProperty("dimensions") List<String> dimensions,
      @Nullable @JsonProperty("metrics") List<String> metrics,
      @JacksonInject IndexIO indexIO,
      @JacksonInject CoordinatorClient coordinatorClient,
      @JacksonInject SegmentLoaderFactory segmentLoaderFactory,
      @JacksonInject RetryPolicyFactory retryPolicyFactory
  )
  {
    Preconditions.checkNotNull(dataSource, "dataSource");
    if ((interval == null && segmentIds == null) || (interval != null && segmentIds != null)) {
      throw new IAE("Specify exactly one of 'interval' and 'segments'");
    }
    this.dataSource = dataSource;
    this.interval = interval;
    this.segmentIds = segmentIds;
    this.dimFilter = dimFilter;
    this.dimensions = dimensions;
    this.metrics = metrics;
    this.indexIO = Preconditions.checkNotNull(indexIO, "null IndexIO");
    this.coordinatorClient = Preconditions.checkNotNull(coordinatorClient, "null CoordinatorClient");
    this.segmentLoaderFactory = Preconditions.checkNotNull(segmentLoaderFactory, "null SegmentLoaderFactory");
    this.retryPolicyFactory = Preconditions.checkNotNull(retryPolicyFactory, "null RetryPolicyFactory");
  }

  @JsonProperty
  public String getDataSource()
  {
    return dataSource;
  }

  @Nullable
  @JsonProperty
  public Interval getInterval()
  {
    return interval;
  }

  @Nullable
  @JsonProperty("segments")
  @JsonInclude(Include.NON_NULL)
  public List<WindowedSegmentId> getSegmentIds()
  {
    return segmentIds;
  }

  @JsonProperty("filter")
  public DimFilter getDimFilter()
  {
    return dimFilter;
  }

  @JsonProperty
  public List<String> getDimensions()
  {
    return dimensions;
  }

  @JsonProperty
  public List<String> getMetrics()
  {
    return metrics;
  }

  /**
   * Creates a reader that iterates over all segments selected by this input source, applying
   * {@code dimFilter} and projecting the effective dimensions and metrics.
   */
  @Override
  protected InputSourceReader fixedFormatReader(InputRowSchema inputRowSchema, @Nullable File temporaryDirectory)
  {
    final SegmentLoader segmentLoader = segmentLoaderFactory.manufacturate(temporaryDirectory);
    final List<TimelineObjectHolder<String, DataSegment>> timeline = createTimeline();

    final Iterator<DruidSegmentInputEntity> entityIterator = FluentIterable
        .from(timeline)
        .transformAndConcat(holder -> {
          //noinspection ConstantConditions
          final PartitionHolder<DataSegment> partitionHolder = holder.getObject();
          //noinspection ConstantConditions
          return FluentIterable
              .from(partitionHolder)
              .transform(chunk -> new DruidSegmentInputEntity(segmentLoader, chunk.getObject(), holder.getInterval()));
        }).iterator();

    final List<String> effectiveDimensions = ReingestionTimelineUtils.getDimensionsToReingest(
        dimensions,
        inputRowSchema.getDimensionsSpec(),
        timeline
    );

    List<String> effectiveMetrics;
    if (metrics == null) {
      // No explicit metric list: reingest every metric present in the timeline.
      effectiveMetrics = ReingestionTimelineUtils.getUniqueMetrics(timeline);
    } else {
      effectiveMetrics = metrics;
    }

    final DruidSegmentInputFormat inputFormat = new DruidSegmentInputFormat(
        indexIO,
        dimFilter,
        effectiveDimensions,
        effectiveMetrics
    );

    return new InputEntityIteratingReader(
        inputRowSchema,
        inputFormat,
        entityIterator,
        temporaryDirectory
    );
  }

  private List<TimelineObjectHolder<String, DataSegment>> createTimeline()
  {
    if (interval == null) {
      return getTimelineForSegmentIds(coordinatorClient, dataSource, segmentIds);
    } else {
      return getTimelineForInterval(coordinatorClient, retryPolicyFactory, dataSource, interval);
    }
  }

  @Override
  public Stream<InputSplit<List<WindowedSegmentId>>> createSplits(
      InputFormat inputFormat,
      @Nullable SplitHintSpec splitHintSpec
  )
  {
    // segmentIds is supposed to be specified by the supervisor task during the parallel indexing.
    // If it's not null, segments are already split by the supervisor task and further split won't happen.
    if (segmentIds == null) {
      return Streams.sequentialStreamFrom(
          createSplits(
              coordinatorClient,
              retryPolicyFactory,
              dataSource,
              interval,
              splitHintSpec == null ? SplittableInputSource.DEFAULT_SPLIT_HINT_SPEC : splitHintSpec
          )
      );
    } else {
      return Stream.of(new InputSplit<>(segmentIds));
    }
  }

  @Override
  public int estimateNumSplits(InputFormat inputFormat, @Nullable SplitHintSpec splitHintSpec)
  {
    // segmentIds is supposed to be specified by the supervisor task during the parallel indexing.
    // If it's not null, segments are already split by the supervisor task and further split won't happen.
    if (segmentIds == null) {
      return Iterators.size(
          createSplits(
              coordinatorClient,
              retryPolicyFactory,
              dataSource,
              interval,
              splitHintSpec == null ? SplittableInputSource.DEFAULT_SPLIT_HINT_SPEC : splitHintSpec
          )
      );
    } else {
      return 1;
    }
  }

  @Override
  public SplittableInputSource<List<WindowedSegmentId>> withSplit(InputSplit<List<WindowedSegmentId>> split)
  {
    return new DruidInputSource(
        dataSource,
        null,
        split.get(),
        dimFilter,
        dimensions,
        metrics,
        indexIO,
        coordinatorClient,
        segmentLoaderFactory,
        retryPolicyFactory
    );
  }

  @Override
  public boolean needsFormat()
  {
    return false;
  }

  /**
   * Computes the input splits for re-ingesting {@code interval} of {@code dataSource}, grouping
   * segments according to {@code splitHintSpec}. A {@link SegmentsSplitHintSpec} is converted to
   * the equivalent {@link MaxSizeSplitHintSpec}.
   */
  public static Iterator<InputSplit<List<WindowedSegmentId>>> createSplits(
      CoordinatorClient coordinatorClient,
      RetryPolicyFactory retryPolicyFactory,
      String dataSource,
      Interval interval,
      SplitHintSpec splitHintSpec
  )
  {
    final SplitHintSpec convertedSplitHintSpec;
    if (splitHintSpec instanceof SegmentsSplitHintSpec) {
      final SegmentsSplitHintSpec segmentsSplitHintSpec = (SegmentsSplitHintSpec) splitHintSpec;
      convertedSplitHintSpec = new MaxSizeSplitHintSpec(
          segmentsSplitHintSpec.getMaxInputSegmentBytesPerTask(),
          segmentsSplitHintSpec.getMaxNumSegments()
      );
    } else {
      convertedSplitHintSpec = splitHintSpec;
    }

    final List<TimelineObjectHolder<String, DataSegment>> timelineSegments = getTimelineForInterval(
        coordinatorClient,
        retryPolicyFactory,
        dataSource,
        interval
    );
    final Map<WindowedSegmentId, Long> segmentIdToSize = createWindowedSegmentIdFromTimeline(timelineSegments);
    //noinspection ConstantConditions
    return Iterators.transform(
        convertedSplitHintSpec.split(
            // segmentIdToSize is sorted by segment ID; useful for grouping up segments from the same time chunk into
            // the same input split.
            segmentIdToSize.keySet().iterator(),
            segmentId -> new InputFileAttribute(
                Preconditions.checkNotNull(segmentIdToSize.get(segmentId), "segment size for [%s]", segmentId)
            )
        ),
        InputSplit::new
    );
  }

  /**
   * Returns a map of {@link WindowedSegmentId} to size, sorted by {@link WindowedSegmentId#getSegmentId()}.
   */
  private static SortedMap<WindowedSegmentId, Long> createWindowedSegmentIdFromTimeline(
      List<TimelineObjectHolder<String, DataSegment>> timelineHolders
  )
  {
    Map<DataSegment, WindowedSegmentId> windowedSegmentIds = new HashMap<>();
    for (TimelineObjectHolder<String, DataSegment> holder : timelineHolders) {
      for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
        windowedSegmentIds.computeIfAbsent(
            chunk.getObject(),
            segment -> new WindowedSegmentId(segment.getId().toString(), new ArrayList<>())
        ).addInterval(holder.getInterval());
      }
    }
    // It is important to create this map after windowedSegmentIds is completely filled, because WindowedSegmentIds
    // can be updated while being constructed. (Intervals are added.)
    SortedMap<WindowedSegmentId, Long> segmentSizeMap = new TreeMap<>(WINDOWED_SEGMENT_ID_COMPARATOR);
    windowedSegmentIds.forEach((segment, segmentId) -> segmentSizeMap.put(segmentId, segment.getSize()));
    return segmentSizeMap;
  }

  /**
   * Fetches the timeline of used segments overlapping {@code interval}, retrying failed
   * coordinator calls according to {@code retryPolicyFactory}.
   */
  public static List<TimelineObjectHolder<String, DataSegment>> getTimelineForInterval(
      CoordinatorClient coordinatorClient,
      RetryPolicyFactory retryPolicyFactory,
      String dataSource,
      Interval interval
  )
  {
    Preconditions.checkNotNull(interval);

    // This call used to use the TaskActionClient, so for compatibility we use the same retry configuration
    // as TaskActionClient.
    final RetryPolicy retryPolicy = retryPolicyFactory.makeRetryPolicy();
    Collection<DataSegment> usedSegments;
    while (true) {
      try {
        usedSegments = coordinatorClient.fetchUsedSegmentsInDataSourceForIntervals(
            dataSource,
            Collections.singletonList(interval)
        );
        break;
      }
      catch (Throwable e) {
        LOG.warn(e, "Exception getting database segments");
        final Duration delay = retryPolicy.getAndIncrementRetryDelay();
        if (delay == null) {
          throw e;
        } else {
          final long sleepTime = jitter(delay.getMillis());
          LOG.info("Will try again in [%s].", new Duration(sleepTime).toString());
          try {
            Thread.sleep(sleepTime);
          }
          catch (InterruptedException e2) {
            // Restore the interrupt status so callers up the stack can still observe the
            // interruption; wrapping in RuntimeException alone would swallow it.
            Thread.currentThread().interrupt();
            throw new RuntimeException(e2);
          }
        }
      }
    }

    return VersionedIntervalTimeline.forSegments(usedSegments).lookup(interval);
  }

  /**
   * Builds a timeline from an explicit list of {@link WindowedSegmentId}s, validating that all
   * segments sharing an interval have the same version and that distinct intervals do not overlap.
   */
  public static List<TimelineObjectHolder<String, DataSegment>> getTimelineForSegmentIds(
      CoordinatorClient coordinatorClient,
      String dataSource,
      List<WindowedSegmentId> segmentIds
  )
  {
    final SortedMap<Interval, TimelineObjectHolder<String, DataSegment>> timeline = new TreeMap<>(
        Comparators.intervalsByStartThenEnd()
    );
    for (WindowedSegmentId windowedSegmentId : Preconditions.checkNotNull(segmentIds, "segmentIds")) {
      final DataSegment segment = coordinatorClient.fetchUsedSegment(
          dataSource,
          windowedSegmentId.getSegmentId()
      );
      for (Interval interval : windowedSegmentId.getIntervals()) {
        final TimelineObjectHolder<String, DataSegment> existingHolder = timeline.get(interval);
        if (existingHolder != null) {
          if (!existingHolder.getVersion().equals(segment.getVersion())) {
            throw new ISE("Timeline segments with the same interval should have the same version: " +
                          "existing version[%s] vs new segment[%s]", existingHolder.getVersion(), segment);
          }
          existingHolder.getObject().add(segment.getShardSpec().createChunk(segment));
        } else {
          timeline.put(
              interval,
              new TimelineObjectHolder<>(
                  interval,
                  segment.getInterval(),
                  segment.getVersion(),
                  new PartitionHolder<>(segment.getShardSpec().createChunk(segment))
              )
          );
        }
      }
    }

    // Validate that none of the given windows overlaps (except for when multiple segments share exactly the
    // same interval).
    Interval lastInterval = null;
    for (Interval interval : timeline.keySet()) {
      if (lastInterval != null && interval.overlaps(lastInterval)) {
        throw new IAE(
            "Distinct intervals in input segments may not overlap: [%s] vs [%s]",
            lastInterval,
            interval
        );
      }
      lastInterval = interval;
    }

    return new ArrayList<>(timeline.values());
  }

  /**
   * Applies Gaussian jitter (sigma = input/4) to a sleep duration, clamped at zero.
   */
  private static long jitter(long input)
  {
    final double jitter = ThreadLocalRandom.current().nextGaussian() * input / 4.0;
    long retval = input + (long) jitter;
    return retval < 0 ? 0 : retval;
  }
}
| gianm/druid | indexing-service/src/main/java/org/apache/druid/indexing/input/DruidInputSource.java | Java | apache-2.0 | 17,743 |
//
// immer: immutable data structures for C++
// Copyright (C) 2016, 2017, 2018 Juan Pedro Bolivar Puente
//
// This software is distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE or copy at http://boost.org/LICENSE_1_0.txt
//
#include "fuzzer_gc_guard.hpp"
#include "fuzzer_input.hpp"
#include <immer/heap/gc_heap.hpp>
#include <immer/refcount/no_refcount_policy.hpp>
#include <immer/set.hpp>
#include <immer/algorithm.hpp>
#include <array>
// Memory policy for the immer containers under test: allocations come from
// the GC heap and reference counting is disabled (the collector reclaims
// nodes instead).
using gc_memory = immer::memory_policy<immer::heap_policy<immer::gc_heap>,
                                       immer::no_refcount_policy,
                                       immer::default_lock_policy,
                                       immer::gc_transience_policy,
                                       false>;
// Intentionally coarse hash function: every block of 16 consecutive keys
// maps to the same value, forcing heavy bucket collisions in the fuzzed set.
struct colliding_hash_t
{
    std::size_t operator()(std::size_t x) const
    {
        // Drop the low four bits; equivalent to `x & ~15` for unsigned x.
        return x - (x % 16);
    }
};
// libFuzzer entry point: interprets the fuzz input as a tiny program over
// four immer::set variables. Each step reads a source var index, a target
// var index, and an opcode, then applies the operation. NOTE: the three
// read<>() calls consume bytes from `in` in order, so their sequence must
// not be changed.
extern "C" int LLVMFuzzerTestOneInput(const std::uint8_t* data,
                                      std::size_t size)
{
    // Keeps the GC initialized/rooted for the duration of this run.
    auto guard = fuzzer_gc_guard{};

    constexpr auto var_count = 4;

    using set_t =
        immer::set<size_t, colliding_hash_t, std::equal_to<>, gc_memory>;

    auto vars = std::array<set_t, var_count>{};

    // Validates the var indices decoded from the fuzz input.
    auto is_valid_var = [&](auto idx) { return idx >= 0 && idx < var_count; };

    return fuzzer_input{data, size}.run([&](auto& in) {
        enum ops
        {
            op_insert,
            op_erase,
            op_insert_move,
            op_erase_move,
            op_iterate
        };
        auto src = read<char>(in, is_valid_var);
        auto dst = read<char>(in, is_valid_var);
        switch (read<char>(in)) {
        case op_insert: {
            auto value = read<size_t>(in);
            vars[dst] = vars[src].insert(value);
            break;
        }
        case op_erase: {
            auto value = read<size_t>(in);
            vars[dst] = vars[src].erase(value);
            break;
        }
        // The *_move variants exercise the rvalue overloads (transient
        // in-place updates) of insert/erase.
        case op_insert_move: {
            auto value = read<size_t>(in);
            vars[dst] = std::move(vars[src]).insert(value);
            break;
        }
        case op_erase_move: {
            auto value = read<size_t>(in);
            vars[dst] = std::move(vars[src]).erase(value);
            break;
        }
        case op_iterate: {
            // Copy src first, then merge all its elements into dst; also
            // exercises iteration via immer::for_each.
            auto srcv = vars[src];
            immer::for_each(srcv, [&](auto&& v) {
                vars[dst] = std::move(vars[dst]).insert(v);
            });
            break;
        }
        default:
            break;
        };
        return true;
    });
}
| arangodb/arangodb | 3rdParty/immer/v0.7.0/extra/fuzzer/set-gc.cpp | C++ | apache-2.0 | 2,591 |
#!/bin/sh
# CI entry point: delegates the build to the shared SciJava helper script.
# Fetch the helper (-f fails on HTTP errors, -O keeps the remote filename).
curl -fsLO https://raw.githubusercontent.com/scijava/scijava-scripts/master/travis-build.sh
# Run the main build; the encrypted key/iv pair is used by the helper to
# decrypt signing credentials. The trailing && chains into the if below, so
# integration tests only run when the build itself succeeded.
bash travis-build.sh $encrypted_f76761764219_key $encrypted_f76761764219_iv &&
# release.properties is produced by maven-release-plugin; its absence means
# this is a regular (non-release) build.
if [ ! -f release.properties ]
then
  echo
  echo '== No release.properties; running integration tests =='
  # Not a release -- also perform integration tests.
  mvn -Dinvoker.debug=true -Prun-its
fi
| heathn/nar-maven-plugin | .travis/build.sh | Shell | apache-2.0 | 382 |
package pod
import (
"fmt"
"strings"
lru "github.com/hashicorp/golang-lru"
"github.com/rancher/norman/api/access"
"github.com/rancher/norman/types"
"github.com/rancher/norman/types/values"
"github.com/rancher/rancher/pkg/controllers/managementagent/workload"
"github.com/rancher/rancher/pkg/ref"
schema "github.com/rancher/rancher/pkg/schemas/project.cattle.io/v3"
"github.com/sirupsen/logrus"
)
var (
	// ownerCache memoizes controller-owner lookups so repeated pod listings
	// don't re-query the API; bounded LRU so it cannot grow without limit.
	ownerCache, _ = lru.New(100000)
)

// key identifies one cached owner lookup: the API subcontext (project or
// cluster schema) plus the namespaced object whose owner was resolved.
type key struct {
	SubContext string
	Namespace  string
	Kind       string
	Name       string
}

// value is the cached lookup result: the owning workload's kind and name
// (both empty when the owner is not a known workload kind).
type value struct {
	Kind string
	Name string
}
// getOwnerWithKind resolves the controller owner (workload kind and name) of
// the given namespaced object, consulting the LRU cache before falling back
// to an API lookup. Only kinds present in workload.WorkloadKinds are
// returned; any other owner is cached and returned as empty strings.
func getOwnerWithKind(apiContext *types.APIContext, namespace, ownerKind, name string) (string, string, error) {
	// The cache key is scoped by subcontext so project and cluster schemas
	// don't collide.
	subContext := apiContext.SubContext["/v3/schemas/project"]
	if subContext == "" {
		subContext = apiContext.SubContext["/v3/schemas/cluster"]
	}
	if subContext == "" {
		logrus.Warnf("failed to find subcontext to lookup replicaSet owner")
		return "", "", nil
	}
	key := key{
		SubContext: subContext,
		Namespace: namespace,
		Kind: strings.ToLower(ownerKind),
		Name: name,
	}
	val, ok := ownerCache.Get(key)
	if ok {
		value, _ := val.(value)
		return value.Kind, value.Name, nil
	}
	// Cache miss: fetch the object itself and read its ownerReferences.
	data := map[string]interface{}{}
	if err := access.ByID(apiContext, &schema.Version, ownerKind, ref.FromStrings(namespace, name), &data); err != nil {
		return "", "", err
	}
	kind, name := getOwner(data)
	if !workload.WorkloadKinds[kind] {
		// Non-workload owners are cached as empty so we don't re-query them.
		kind = ""
		name = ""
	}
	ownerCache.Add(key, value{
		Kind: kind,
		Name: name,
	})
	return kind, name, nil
}
// getOwner returns the kind and name of the first owner reference flagged as
// the controller in the object's metadata, or empty strings when none exists.
func getOwner(data map[string]interface{}) (string, string) {
	refs, ok := values.GetSlice(data, "ownerReferences")
	if !ok {
		return "", ""
	}
	for _, ownerRef := range refs {
		if isController, _ := ownerRef["controller"].(bool); isController {
			kind, _ := ownerRef["kind"].(string)
			name, _ := ownerRef["name"].(string)
			return kind, name
		}
	}
	return "", ""
}
// SaveOwner records the controller owner of the given object in the LRU
// cache so later lookups via getOwnerWithKind can skip the API round trip.
func SaveOwner(apiContext *types.APIContext, kind, name string, data map[string]interface{}) {
	subContext := apiContext.SubContext["/v3/schemas/project"]
	if subContext == "" {
		subContext = apiContext.SubContext["/v3/schemas/cluster"]
	}
	if subContext == "" {
		// Without a project/cluster subcontext the cache key is ambiguous.
		return
	}
	parentKind, parentName := getOwner(data)
	namespace, _ := data["namespaceId"].(string)
	cacheKey := key{
		SubContext: subContext,
		Namespace:  namespace,
		Kind:       strings.ToLower(kind),
		Name:       name,
	}
	ownerCache.Add(cacheKey, value{
		Kind: parentKind,
		Name: parentName,
	})
}
// resolveWorkloadID maps an object's controller owner to a lowercase
// workload id of the form "kind:namespace:name". ReplicaSet and Job owners
// are resolved one level further up to their own controller owner when one
// exists. Returns "" when the owner is not a known workload kind or the
// upstream lookup fails.
func resolveWorkloadID(apiContext *types.APIContext, data map[string]interface{}) string {
	kind, name := getOwner(data)
	if kind == "" || !workload.WorkloadKinds[kind] {
		return ""
	}
	namespace, _ := data["namespaceId"].(string)
	ownerKind := strings.ToLower(kind)
	if ownerKind == workload.ReplicaSetType || ownerKind == workload.JobType {
		parentKind, parentName, err := getOwnerWithKind(apiContext, namespace, ownerKind, name)
		if err != nil {
			return ""
		}
		if parentKind != "" {
			kind, name = parentKind, parentName
		}
	}
	return strings.ToLower(fmt.Sprintf("%s:%s:%s", kind, namespace, name))
}
| rancher/rancher | pkg/api/norman/store/pod/owner.go | GO | apache-2.0 | 3,140 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (9-Debian) on Thu Sep 28 23:13:23 GMT 2017 -->
<title>Uses of Interface dollar.internal.runtime.script.api.HasKeyword (dollar-script 0.4.5195 API)</title>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="date" content="2017-09-28">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
<link rel="stylesheet" type="text/css" href="../../../../../../jquery/jquery-ui.css" title="Style">
<script type="text/javascript" src="../../../../../../script.js"></script>
<script type="text/javascript" src="../../../../../../jquery/jszip/dist/jszip.min.js"></script>
<script type="text/javascript" src="../../../../../../jquery/jszip-utils/dist/jszip-utils.min.js"></script>
<!--[if IE]>
<script type="text/javascript" src="../../../../../../jquery/jszip-utils/dist/jszip-utils-ie.min.js"></script>
<![endif]-->
<script type="text/javascript" src="../../../../../../jquery/jquery-1.10.2.js"></script>
<script type="text/javascript" src="../../../../../../jquery/jquery-ui.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Interface dollar.internal.runtime.script.api.HasKeyword (dollar-script 0.4.5195 API)";
}
}
catch(err) {
}
//-->
var pathtoroot = "../../../../../../";loadScripts(document, 'script');</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<div class="fixedNav">
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../dollar/internal/runtime/script/api/HasKeyword.html" title="interface in dollar.internal.runtime.script.api">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?dollar/internal/runtime/script/api/class-use/HasKeyword.html" target="_top">Frames</a></li>
<li><a href="HasKeyword.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<ul class="navListSearch">
<li><span>SEARCH: </span>
<input type="text" id="search" value=" " disabled="disabled">
<input type="reset" id="reset" value=" " disabled="disabled">
</li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
</div>
<div class="navPadding"> </div>
<script type="text/javascript"><!--
$('.navPadding').css('padding-top', $('.fixedNav').css("height"));
//-->
</script>
<div class="header">
<h2 title="Uses of Interface dollar.internal.runtime.script.api.HasKeyword" class="title">Uses of Interface<br>dollar.internal.runtime.script.api.HasKeyword</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table class="useSummary" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../dollar/internal/runtime/script/api/HasKeyword.html" title="interface in dollar.internal.runtime.script.api">HasKeyword</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<th class="colFirst" scope="row"><a href="#dollar.internal.runtime.script.parser">dollar.internal.runtime.script.parser</a></th>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="dollar.internal.runtime.script.parser">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../dollar/internal/runtime/script/api/HasKeyword.html" title="interface in dollar.internal.runtime.script.api">HasKeyword</a> in <a href="../../../../../../dollar/internal/runtime/script/parser/package-summary.html">dollar.internal.runtime.script.parser</a></h3>
<table class="useSummary" summary="Use table, listing classes, and an explanation">
<caption><span>Classes in <a href="../../../../../../dollar/internal/runtime/script/parser/package-summary.html">dollar.internal.runtime.script.parser</a> that implement <a href="../../../../../../dollar/internal/runtime/script/api/HasKeyword.html" title="interface in dollar.internal.runtime.script.api">HasKeyword</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colSecond" scope="col">Class</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>class </code></td>
<th class="colSecond" scope="row"><code><span class="memberNameLink"><a href="../../../../../../dollar/internal/runtime/script/parser/KeywordDef.html" title="class in dollar.internal.runtime.script.parser">KeywordDef</a></span></code></th>
<td class="colLast"> </td>
</tr>
<tr class="rowColor">
<td class="colFirst"><code>class </code></td>
<th class="colSecond" scope="row"><code><span class="memberNameLink"><a href="../../../../../../dollar/internal/runtime/script/parser/Op.html" title="class in dollar.internal.runtime.script.parser">Op</a></span></code></th>
<td class="colLast"> </td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../dollar/internal/runtime/script/api/HasKeyword.html" title="interface in dollar.internal.runtime.script.api">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?dollar/internal/runtime/script/api/class-use/HasKeyword.html" target="_top">Frames</a></li>
<li><a href="HasKeyword.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2017. All rights reserved.</small></p>
</body>
</html>
| neilellis/dollar | docs/dev/dollar-script/apidocs/dollar/internal/runtime/script/api/class-use/HasKeyword.html | HTML | apache-2.0 | 8,370 |
import java.io.File;
import java.io.FilenameFilter;
// NOTE(review): golden-output fixture for an IDE code-completion test — the
// <selection>…</selection> markers are part of the expected output that the
// harness compares byte-for-byte, so the code below is intentionally left intact.
class A {
    {
        new java.io.File("aaa").list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                <selection>return false; //To change body of implemented methods use File | Settings | File Templates.</selection>
            }
        });
    }
}
//
//  YWUserTool.h
//  YiWobao
//
//  Created by 刘毕涛 on 16/5/6.
//  Copyright © 2016年 浙江蚁窝投资管理有限公司. All rights reserved.
//

#import <Foundation/Foundation.h>

@class YWUser;

/// Persistence helper for the signed-in user's account record.
/// NOTE(review): the storage mechanism is not visible from this header —
/// confirm details (NSUserDefaults, keychain, file, …) against YWUserTool.m.
@interface YWUserTool : NSObject

/// Persists the given account so it can be restored on a later launch.
+ (void)saveAccount:(YWUser *)account;

/// Returns the previously saved account; presumably nil when none has been
/// saved — TODO confirm against the implementation.
+ (YWUser *)account;

/// Presumably discards the stored account on sign-out — TODO confirm.
+ (void)quit;

@end
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_23) on Fri Nov 23 14:03:50 GMT 2012 -->
<TITLE>
Uses of Class org.apache.nutch.crawl.CrawlDbMerger (apache-nutch 1.6 API)
</TITLE>
<META NAME="date" CONTENT="2012-11-23">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.nutch.crawl.CrawlDbMerger (apache-nutch 1.6 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/nutch/crawl/CrawlDbMerger.html" title="class in org.apache.nutch.crawl"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/nutch/crawl/class-use/CrawlDbMerger.html" target="_top"><B>FRAMES</B></A>    
<A HREF="CrawlDbMerger.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.nutch.crawl.CrawlDbMerger</B></H2>
</CENTER>
No usage of org.apache.nutch.crawl.CrawlDbMerger
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../org/apache/nutch/crawl/CrawlDbMerger.html" title="class in org.apache.nutch.crawl"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../index.html?org/apache/nutch/crawl/class-use/CrawlDbMerger.html" target="_top"><B>FRAMES</B></A>    
<A HREF="CrawlDbMerger.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2012 The Apache Software Foundation
</BODY>
</HTML>
| yahoo/anthelion | docs/api/org/apache/nutch/crawl/class-use/CrawlDbMerger.html | HTML | apache-2.0 | 5,982 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor.interceptor;
import org.apache.camel.CamelExecutionException;
import org.apache.camel.ContextTestSupport;
import org.apache.camel.builder.AdviceWithRouteBuilder;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.model.RouteDefinition;
import org.junit.jupiter.api.Test;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
/**
 * Tests for the lambda flavour of Camel's advice-with API: an existing route
 * is intercepted, re-woven or extended at test time via
 * {@link AdviceWithRouteBuilder#adviceWith} without modifying the original
 * {@link RouteBuilder}, addressed by id, index, {@link RouteDefinition} or
 * {@code null} (meaning the single/first route).
 */
public class AdviceWithLambdaTest extends ContextTestSupport {

    /** Baseline: with no advice applied, messages reach both mock endpoints. */
    @Test
    public void testNoAdvised() throws Exception {
        getMockEndpoint("mock:foo").expectedMessageCount(1);
        getMockEndpoint("mock:result").expectedMessageCount(1);

        template.sendBody("direct:start", "Hello World");

        assertMockEndpointsSatisfied();
    }

    /**
     * Advising with a {@code null} route reference: the interceptor detours
     * traffic for mock:foo to mock:advised and skips the original endpoint.
     */
    @Test
    public void testAdvised() throws Exception {
        AdviceWithRouteBuilder.adviceWith(context, null, a -> {
            a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised");
        });

        // The original endpoint must see nothing once the detour is in place.
        getMockEndpoint("mock:foo").expectedMessageCount(0);
        getMockEndpoint("mock:advised").expectedMessageCount(1);
        getMockEndpoint("mock:result").expectedMessageCount(1);

        template.sendBody("direct:start", "Hello World");

        assertMockEndpointsSatisfied();
    }
    // END SNIPPET: e1

    /**
     * Weaving: removes the mock:result node and appends a constant transform.
     * NOTE(review): the extra {@code false} argument presumably disables
     * logging of the advised route — confirm against AdviceWithRouteBuilder.
     */
    @Test
    public void testAdvisedNoLog() throws Exception {
        AdviceWithRouteBuilder.adviceWith(context, null, false, a -> {
            a.weaveByToUri("mock:result").remove();
            a.weaveAddLast().transform().constant("Bye World");
        });

        getMockEndpoint("mock:foo").expectedMessageCount(1);
        // mock:result was removed from the route, so it must receive nothing.
        getMockEndpoint("mock:result").expectedMessageCount(0);

        Object out = template.requestBody("direct:start", "Hello World");
        assertEquals("Bye World", out);

        assertMockEndpointsSatisfied();
    }

    /**
     * Defining a brand-new route (from direct:bar) inside advice is rejected
     * with an IllegalArgumentException.
     */
    @Test
    public void testAdvisedNoNewRoutesAllowed() throws Exception {
        try {
            AdviceWithRouteBuilder.adviceWith(context, 0, a -> {
                a.from("direct:bar").to("mock:bar");
                a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised");
            });
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /**
     * Advising by route id ("myRoute"): the interceptor throws, so the
     * exchange fails after mock:advised and never reaches foo/result.
     */
    @Test
    public void testAdvisedThrowException() throws Exception {
        AdviceWithRouteBuilder.adviceWith(context, "myRoute", a -> {
            a.interceptSendToEndpoint("mock:foo").to("mock:advised").throwException(new IllegalArgumentException("Damn"));
        });

        getMockEndpoint("mock:foo").expectedMessageCount(0);
        getMockEndpoint("mock:advised").expectedMessageCount(1);
        getMockEndpoint("mock:result").expectedMessageCount(0);

        try {
            template.sendBody("direct:start", "Hello World");
            fail("Should have thrown exception");
        } catch (CamelExecutionException e) {
            // Camel wraps the interceptor's exception; the cause carries it.
            assertIsInstanceOf(IllegalArgumentException.class, e.getCause());
            assertEquals("Damn", e.getCause().getMessage());
        }

        assertMockEndpointsSatisfied();
    }

    /** Advising via an explicit RouteDefinition instance works the same way. */
    @Test
    public void testAdvisedRouteDefinition() throws Exception {
        AdviceWithRouteBuilder.adviceWith(context, context.getRouteDefinitions().get(0), a -> {
            a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised");
        });

        getMockEndpoint("mock:foo").expectedMessageCount(0);
        getMockEndpoint("mock:advised").expectedMessageCount(1);
        getMockEndpoint("mock:result").expectedMessageCount(1);

        template.sendBody("direct:start", "Hello World");

        assertMockEndpointsSatisfied();
    }

    /** An empty (unregistered) RouteDefinition must be rejected. */
    @Test
    public void testAdvisedEmptyRouteDefinition() throws Exception {
        try {
            AdviceWithRouteBuilder.adviceWith(context, new RouteDefinition(), a -> {
                a.interceptSendToEndpoint("mock:foo").skipSendToOriginalEndpoint().to("log:foo").to("mock:advised");
            });
            fail("Should throw exception");
        } catch (IllegalArgumentException e) {
            // expected
        }
    }

    /** The route under test: direct:start -> mock:foo -> mock:result, id "myRoute". */
    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                from("direct:start").id("myRoute").to("mock:foo").to("mock:result");
            }
        };
    }
}
| adessaigne/camel | core/camel-core/src/test/java/org/apache/camel/processor/interceptor/AdviceWithLambdaTest.java | Java | apache-2.0 | 5,390 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (1.8.0_121) on Sun Jul 02 12:07:17 PDT 2017 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class org.apache.guacamole.net.event.AuthenticationSuccessEvent (guacamole-ext 0.9.13-incubating API)</title>
<meta name="date" content="2017-07-02">
<link rel="stylesheet" type="text/css" href="../../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../../script.js"></script>
</head>
<body>
<script type="text/javascript"><!--
try {
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.guacamole.net.event.AuthenticationSuccessEvent (guacamole-ext 0.9.13-incubating API)";
}
}
catch(err) {
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar.top">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.top" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.top.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../org/apache/guacamole/net/event/AuthenticationSuccessEvent.html" title="class in org.apache.guacamole.net.event">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/apache/guacamole/net/event/class-use/AuthenticationSuccessEvent.html" target="_top">Frames</a></li>
<li><a href="AuthenticationSuccessEvent.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class org.apache.guacamole.net.event.AuthenticationSuccessEvent" class="title">Uses of Class<br>org.apache.guacamole.net.event.AuthenticationSuccessEvent</h2>
</div>
<div class="classUseContainer">
<ul class="blockList">
<li class="blockList">
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing packages, and an explanation">
<caption><span>Packages that use <a href="../../../../../../org/apache/guacamole/net/event/AuthenticationSuccessEvent.html" title="class in org.apache.guacamole.net.event">AuthenticationSuccessEvent</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Package</th>
<th class="colLast" scope="col">Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><a href="#org.apache.guacamole.net.event.listener">org.apache.guacamole.net.event.listener</a></td>
<td class="colLast">
<div class="block">Provides classes for hooking into various events that take place as
users log into and use the Guacamole web application.</div>
</td>
</tr>
</tbody>
</table>
</li>
<li class="blockList">
<ul class="blockList">
<li class="blockList"><a name="org.apache.guacamole.net.event.listener">
<!-- -->
</a>
<h3>Uses of <a href="../../../../../../org/apache/guacamole/net/event/AuthenticationSuccessEvent.html" title="class in org.apache.guacamole.net.event">AuthenticationSuccessEvent</a> in <a href="../../../../../../org/apache/guacamole/net/event/listener/package-summary.html">org.apache.guacamole.net.event.listener</a></h3>
<table class="useSummary" border="0" cellpadding="3" cellspacing="0" summary="Use table, listing methods, and an explanation">
<caption><span>Methods in <a href="../../../../../../org/apache/guacamole/net/event/listener/package-summary.html">org.apache.guacamole.net.event.listener</a> with parameters of type <a href="../../../../../../org/apache/guacamole/net/event/AuthenticationSuccessEvent.html" title="class in org.apache.guacamole.net.event">AuthenticationSuccessEvent</a></span><span class="tabEnd"> </span></caption>
<tr>
<th class="colFirst" scope="col">Modifier and Type</th>
<th class="colLast" scope="col">Method and Description</th>
</tr>
<tbody>
<tr class="altColor">
<td class="colFirst"><code>boolean</code></td>
<td class="colLast"><span class="typeNameLabel">AuthenticationSuccessListener.</span><code><span class="memberNameLink"><a href="../../../../../../org/apache/guacamole/net/event/listener/AuthenticationSuccessListener.html#authenticationSucceeded-org.apache.guacamole.net.event.AuthenticationSuccessEvent-">authenticationSucceeded</a></span>(<a href="../../../../../../org/apache/guacamole/net/event/AuthenticationSuccessEvent.html" title="class in org.apache.guacamole.net.event">AuthenticationSuccessEvent</a> e)</code>
<div class="block">Event hook which fires immediately after a user's authentication attempt
succeeds.</div>
</td>
</tr>
</tbody>
</table>
</li>
</ul>
</li>
</ul>
</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar.bottom">
<!-- -->
</a>
<div class="skipNav"><a href="#skip.navbar.bottom" title="Skip navigation links">Skip navigation links</a></div>
<a name="navbar.bottom.firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../../org/apache/guacamole/net/event/AuthenticationSuccessEvent.html" title="class in org.apache.guacamole.net.event">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../../index.html?org/apache/guacamole/net/event/class-use/AuthenticationSuccessEvent.html" target="_top">Frames</a></li>
<li><a href="AuthenticationSuccessEvent.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip.navbar.bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2017. All rights reserved.</small></p>
<!-- Google Analytics -->
<script type="text/javascript">
(function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
(i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
m=s.getElementsByTagName(o)[0];a.async=1;a.src=g;m.parentNode.insertBefore(a,m)
})(window,document,'script','//www.google-analytics.com/analytics.js','ga');
ga('create', 'UA-75289145-1', 'auto');
ga('send', 'pageview');
</script>
</body>
</html>
| mike-jumper/incubator-guacamole-website | doc/0.9.13-incubating/guacamole-ext/org/apache/guacamole/net/event/class-use/AuthenticationSuccessEvent.html | HTML | apache-2.0 | 8,097 |
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.launcher3;
import android.content.ContentValues;
import android.content.Context;
import com.android.launcher3.compat.UserHandleCompat;
import java.util.ArrayList;
/**
 * Model object for a folder of shortcuts or apps on the workspace.
 *
 * All mutations should go through {@link #add}, {@link #remove} and
 * {@link #setTitle}, which fan the change out to every registered
 * {@link FolderListener}.
 */
public class FolderInfo extends ItemInfo {

    public static final int NO_FLAGS = 0x00000000;

    /** The folder is locked in sorted mode. */
    public static final int FLAG_ITEMS_SORTED = 0x00000001;

    /** It is a work folder. */
    public static final int FLAG_WORK_FOLDER = 0x00000002;

    /** The multi-page animation has run for this folder. */
    public static final int FLAG_MULTI_PAGE_ANIMATION = 0x00000004;

    /** Whether this folder has been opened. */
    public boolean opened;

    // Bit set combining the FLAG_* values above; persisted via onAddToDatabase.
    public int options;

    /** The apps and shortcuts contained in this folder. */
    public ArrayList<ShortcutInfo> contents = new ArrayList<>();

    ArrayList<FolderListener> listeners = new ArrayList<>();

    public FolderInfo() {
        itemType = LauncherSettings.Favorites.ITEM_TYPE_FOLDER;
        user = UserHandleCompat.myUserHandle();
    }

    /**
     * Adds an app or shortcut to the folder and notifies all listeners.
     *
     * @param item    the entry to add
     * @param animate whether listeners should animate the change
     */
    public void add(ShortcutInfo item, boolean animate) {
        contents.add(item);
        // Index-based loop avoids ConcurrentModificationException should a
        // callback mutate the listener list while being notified.
        for (int index = 0; index < listeners.size(); index++) {
            listeners.get(index).onAdd(item);
        }
        itemsChanged(animate);
    }

    /**
     * Removes an app or shortcut from the folder and notifies all listeners.
     * Does not change the DB.
     *
     * @param item    the entry to remove
     * @param animate whether listeners should animate the change
     */
    public void remove(ShortcutInfo item, boolean animate) {
        contents.remove(item);
        for (int index = 0; index < listeners.size(); index++) {
            listeners.get(index).onRemove(item);
        }
        itemsChanged(animate);
    }

    /** Updates the folder title and notifies all listeners. */
    public void setTitle(CharSequence title) {
        this.title = title;
        for (int index = 0; index < listeners.size(); index++) {
            listeners.get(index).onTitleChanged(title);
        }
    }

    @Override
    void onAddToDatabase(Context context, ContentValues values) {
        super.onAddToDatabase(context, values);
        // Persist the display title and the FLAG_* bit set alongside the base columns.
        values.put(LauncherSettings.Favorites.TITLE, title.toString());
        values.put(LauncherSettings.Favorites.OPTIONS, options);
    }

    /** Registers a listener for content/title changes. */
    public void addListener(FolderListener listener) {
        listeners.add(listener);
    }

    /** Unregisters a previously added listener. */
    public void removeListener(FolderListener listener) {
        listeners.remove(listener);
    }

    /** Broadcasts a bulk "items changed" notification to all listeners. */
    public void itemsChanged(boolean animate) {
        for (int index = 0; index < listeners.size(); index++) {
            listeners.get(index).onItemsChanged(animate);
        }
    }

    /** Callback interface observing mutations of this folder. */
    public interface FolderListener {
        public void onAdd(ShortcutInfo item);
        public void onRemove(ShortcutInfo item);
        public void onTitleChanged(CharSequence title);
        public void onItemsChanged(boolean animate);
    }

    /** Returns true when the given FLAG_* bit is currently set. */
    public boolean hasOption(int optionFlag) {
        return (options & optionFlag) != NO_FLAGS;
    }

    /**
     * @param option    flag to set or clear
     * @param isEnabled whether to set or clear the flag
     * @param context   if not null, save changes to the db.
     */
    public void setOption(int option, boolean isEnabled, Context context) {
        final int previousOptions = options;
        options = isEnabled ? (options | option) : (options & ~option);
        // Only hit the database on an actual transition, and only when asked to.
        if (context != null && previousOptions != options) {
            LauncherModel.updateItemInDatabase(context, this);
        }
    }
}
| YAJATapps/FlickLauncher | src/com/android/launcher3/FolderInfo.java | Java | apache-2.0 | 4,146 |
/*
* Copyright 2014 BrightTag, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.brighttag.agathon.dao;
import javax.annotation.Nullable;
import com.google.common.collect.ImmutableSet;
import com.brighttag.agathon.model.CassandraInstance;
/**
 * DAO for Cassandra Instances, keyed by ring name and instance id.
 *
 * NOTE(review): no thread-safety or atomicity guarantees are stated here —
 * confirm with the concrete implementations before relying on either.
 *
 * @author codyaray
 * @since 5/12/2012
 */
public interface CassandraInstanceDao {

  /**
   * Returns the set of Cassandra instances in a ring. An unknown ring is
   * presumably reported as an empty set rather than an error — confirm
   * against the implementations.
   *
   * @param ring name of the Cassandra ring
   * @return set of Cassandra instances in the ring
   * @throws BackingStoreException if there was a problem communicating with the backing store.
   */
  ImmutableSet<CassandraInstance> findAll(String ring) throws BackingStoreException;

  /**
   * Returns the Cassandra instance with the given {@code id} or {@code null} if not found.
   *
   * @param ring name of the Cassandra ring
   * @param id the Cassandra instance ID
   * @return the Cassandra instance or {@code null} if not found
   * @throws BackingStoreException if there was a problem communicating with the backing store.
   */
  @Nullable CassandraInstance findById(String ring, int id) throws BackingStoreException;

  /**
   * Saves the Cassandra {@code instance}. Presumably an upsert (creates or
   * overwrites) — confirm against the implementations.
   *
   * @param ring name of the Cassandra ring
   * @param instance the Cassandra instance to save
   */
  void save(String ring, CassandraInstance instance);

  /**
   * Deletes the Cassandra {@code instance} from the ring.
   *
   * @param ring name of the Cassandra ring
   * @param instance the Cassandra instance to delete
   */
  void delete(String ring, CassandraInstance instance);

}
| BrightTag/agathon | agathon-manager/src/main/java/com/brighttag/agathon/dao/CassandraInstanceDao.java | Java | apache-2.0 | 2,133 |
/*
* MainActivity.java
*
* Copyright (C) 2013 6 Wunderkinder GmbH.
*
* @author Jose L Ugia - @Jl_Ugia
* @author Antonio Consuegra - @aconsuegra
* @author Cesar Valiente - @CesarValiente
* @author Benedikt Lehnert - @blehnert
* @author Timothy Achumba - @iam_timm
* @version 1.0
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.wunderlist.slidinglayersample;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.content.SharedPreferences;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.KeyEvent;
import android.view.MenuItem;
import android.view.View;
import android.widget.RelativeLayout.LayoutParams;
import android.widget.TextView;
import com.wunderlist.slidinglayer.LayerTransformer;
import com.wunderlist.slidinglayer.SlidingLayer;
import com.wunderlist.slidinglayer.transformer.AlphaTransformer;
import com.wunderlist.slidinglayer.transformer.RotationTransformer;
import com.wunderlist.slidinglayer.transformer.SlideJoyTransformer;
public class MainActivity extends Activity {
private SlidingLayer mSlidingLayer;
private TextView swipeText;
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    // Bind the views first so initState() can configure them from preferences.
    bindViews();
    initState();
}
@SuppressLint("NewApi")
@Override
protected void onResume() {
    super.onResume();
    // getActionBar() exists only on API 11+ (Honeycomb); the runtime version
    // check makes the call safe, hence the NewApi lint suppression.
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        getActionBar().setDisplayHomeAsUpEnabled(true);
    }
}
/**
 * Looks up and caches the views this activity manipulates
 * (the sliding layer and its swipe-hint label).
 */
private void bindViews() {
    mSlidingLayer = (SlidingLayer) findViewById(R.id.slidingLayer1);
    swipeText = (TextView) findViewById(R.id.swipeText);
}
/**
 * Initializes the origin state of the layer from the default shared
 * preferences. Each setup step reads one preference key and falls back to
 * the given default when the key is unset.
 */
private void initState() {
    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);

    setupSlidingLayerPosition(prefs.getString("layer_location", "right"));
    setupSlidingLayerTransform(prefs.getString("layer_transform", "none"));

    setupShadow(prefs.getBoolean("layer_has_shadow", false));
    setupLayerOffset(prefs.getBoolean("layer_has_offset", false));
    setupPreviewMode(prefs.getBoolean("preview_mode_enabled", false));
}
private void setupSlidingLayerPosition(String layerPosition) {
LayoutParams rlp = (LayoutParams) mSlidingLayer.getLayoutParams();
int textResource;
Drawable d;
switch (layerPosition) {
case "right":
textResource = R.string.swipe_right_label;
d = getResources().getDrawable(R.drawable.container_rocket_right);
mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_RIGHT);
break;
case "left":
textResource = R.string.swipe_left_label;
d = getResources().getDrawable(R.drawable.container_rocket_left);
mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_LEFT);
break;
case "top":
textResource = R.string.swipe_up_label;
d = getResources().getDrawable(R.drawable.container_rocket);
mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_TOP);
rlp.width = LayoutParams.MATCH_PARENT;
rlp.height = getResources().getDimensionPixelSize(R.dimen.layer_size);
break;
default:
textResource = R.string.swipe_down_label;
d = getResources().getDrawable(R.drawable.container_rocket);
mSlidingLayer.setStickTo(SlidingLayer.STICK_TO_BOTTOM);
rlp.width = LayoutParams.MATCH_PARENT;
rlp.height = getResources().getDimensionPixelSize(R.dimen.layer_size);
}
d.setBounds(0, 0, d.getIntrinsicWidth(), d.getIntrinsicHeight());
swipeText.setCompoundDrawables(null, d, null, null);
swipeText.setText(getResources().getString(textResource));
mSlidingLayer.setLayoutParams(rlp);
}
private void setupSlidingLayerTransform(String layerTransform) {
LayerTransformer transformer;
switch (layerTransform) {
case "alpha":
transformer = new AlphaTransformer();
break;
case "rotation":
transformer = new RotationTransformer();
break;
case "slide":
transformer = new SlideJoyTransformer();
break;
default:
return;
}
mSlidingLayer.setLayerTransformer(transformer);
}
private void setupShadow(boolean enabled) {
if (enabled) {
mSlidingLayer.setShadowSizeRes(R.dimen.shadow_size);
mSlidingLayer.setShadowDrawable(R.drawable.sidebar_shadow);
} else {
mSlidingLayer.setShadowSize(0);
mSlidingLayer.setShadowDrawable(null);
}
}
private void setupLayerOffset(boolean enabled) {
int offsetDistance = enabled ? getResources().getDimensionPixelOffset(R.dimen.offset_distance) : 0;
mSlidingLayer.setOffsetDistance(offsetDistance);
}
private void setupPreviewMode(boolean enabled) {
int previewOffset = enabled ? getResources().getDimensionPixelOffset(R.dimen.preview_offset_distance) : -1;
mSlidingLayer.setPreviewOffsetDistance(previewOffset);
}
public void buttonClicked(View v) {
switch (v.getId()) {
case R.id.buttonOpen:
mSlidingLayer.openLayer(true);
break;
case R.id.buttonClose:
mSlidingLayer.closeLayer(true);
break;
}
}
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyEvent.KEYCODE_BACK:
if (mSlidingLayer.isOpened()) {
mSlidingLayer.closeLayer(true);
return true;
}
default:
return super.onKeyDown(keyCode, event);
}
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
finish();
return true;
}
}
| yadihaoku/android-sliding-layer-lib | SlidingLayerSample/src/main/java/com/wunderlist/slidinglayersample/MainActivity.java | Java | apache-2.0 | 6,768 |
\begin{ManPage}{\label{man-condor-cold-start}\Condor{cold\_start}}{1}
{install and start Condor on this machine}
\Synopsis \SynProg{\Condor{cold\_start}}
\Opt{-help}
\SynProg{\Condor{cold\_start}}
\oOptArg{-basedir}{directory}
\oOpt{-force}
%\oOpt{-dyn}
\oOpt{\Opt{-setuponly} \Bar \Opt{-runonly}}
\oOptArg{-arch}{architecture}
\oOptArg{-site}{repository}
\oOptArg{-localdir}{directory}
\oOptArg{-runlocalconfig}{file}
\oOptArg{-logarchive}{archive}
\oOptArg{-spoolarchive}{archive}
\oOptArg{-execarchive}{archive}
\oOpt{-filelock}
\oOpt{-pid}
\oOptArg{-artifact}{filename}
\oOpt{-wget}
\oOptArg{-globuslocation}{directory}
\OptArg{-configfile}{file}
\index{Condor commands!condor\_cold\_start}
\index{Deployment commands!condor\_cold\_start}
\index{condor\_cold\_start}
\Description
\Condor{cold\_start} installs and starts Condor on this machine,
setting up or using a predefined configuration.
In addition, it has the functionality
to determine the local architecture if one is not specified.
Additionally, this program can install pre-made \File{log},
\File{execute}, and/or
\File{spool} directories by specifying the archived versions.
\begin{Options}
\OptItem{\OptArg{-arch}{architecturestr}}{
Use the given \Arg{architecturestr} to fetch the installation
package. The string is in the format:
\Sinful{condor\_version}-\Sinful{machine\_arch}-\Sinful{os\_name}-\Sinful{os\_version}
(for example 6.6.7-i686-Linux-2.4).
The portion of this string
\Sinful{condor\_version}
may be replaced with the string "latest"
(for example, latest-i686-Linux-2.4)
to substitute the most recent version of Condor.
}
\OptItem{\OptArg{-artifact}{filename}}{
Use \Arg{filename} for the name of the artifact file used to
determine whether the \Condor{master} daemon is still alive.
}
\OptItem{\OptArg{-basedir}{directory}}{
The directory to install or find the Condor executables and
libraries. When not specified, the current working directory
is assumed.
}
% \OptItem{\Opt{-dyn}}{
% Use dynamic names for the log, spool, and execute directories, as
% well as the binding configuration file. This option can be used
% to run multiple instances of condor in the same local directory.
% This option cannot be used with \Opt{-*archive} options. The
% dynamic names are created by appending the IP address and process
% id of the master to the file names.
% }
\OptItem{\OptArg{-execarchive}{archive}}{
Create the Condor \File{execute} directory from the given
\Arg{archive} file.
}
\OptItem{\Opt{-filelock}}{
Specifies that this program should use a POSIX file lock midwife
program to create an artifact of the birth of a \Condor{master} daemon.
A file lock undertaker can later be used to determine whether the
\Condor{master} daemon has exited.
This is the preferred option when the user wants
to check the status of the \Condor{master} daemon from another machine that
shares a distributed file system that supports POSIX file locking,
for example, AFS.
}
\OptItem{\Opt{-force}}{
Overwrite previously installed files, if necessary.
}
\OptItem{\OptArg{-globuslocation}{directory}}{
The location of the globus installation on this machine.
When not specified \File{/opt/globus} is the directory used.
This option is only necessary when other options of the
form \Opt{-*archive} are specified.
}
\OptItem{\Opt{-help}}{
Display brief usage information and exit.
}
\OptItem{\OptArg{-localdir}{directory}}{
The directory where the Condor \File{log}, \File{spool},
and \File{execute} directories
will be installed. Each running instance of Condor must have its
own local directory.
% or the dynamic naming option must be enabled.
}
\OptItem{\OptArg{-logarchive}{archive}}{
Create the Condor log directory from the given \Arg{archive} file.
}
\OptItem{\Opt{-pid}}{
This program is to use a unique process id midwife
program to create an artifact of the birth of a \Condor{master} daemon.
A unique pid undertaker can later be used to determine whether the
\Condor{master} daemon has exited.
This is the default option and the preferred method
to check the status of the \Condor{master} daemon from
the same machine it was started on.
}
\OptItem{\OptArg{-runlocalconfig}{file}}{
A special local configuration file bound into the Condor
configuration at runtime. This file only affects the instance
of Condor started by this command. No other Condor instance
sharing the same global configuration file will be affected.
}
\OptItem{\Opt{-runonly}}{
Run Condor from the specified installation directory without
installing it. It is possible to run several instantiations of
Condor from a single installation.
}
\OptItem{\Opt{-setuponly}}{
Install Condor without running it.
}
\OptItem{\OptArg{-site}{repository}}{
The ftp, http, gsiftp, or mounted file system directory where the
installation packages can be found (for example,
\File{www.cs.example.edu/packages/coldstart}).
}
\OptItem{\OptArg{-spoolarchive}{archive}}{
Create the Condor spool directory from the given \Arg{archive} file.
}
\OptItem{\Opt{-wget}}{
Use \Prog{wget} to fetch the \File{log}, \File{spool},
and \File{execute} directories,
if other options of the form \Opt{-*archive} are specified.
\Prog{wget} must be installed on the machine and in the user's path.
}
\OptItem{\OptArg{-configfile}{file}}{
A required option to specify the Condor configuration file to use for this
installation. This file can be located on an http, ftp, or gsiftp
site, or alternatively on a mounted file system.
}
\end{Options}
\ExitStatus
\Condor{cold\_start} will exit with a status value of 0 (zero) upon
success, and non-zero otherwise.
\Examples
To start a Condor installation on the current machine, using
\texttt{http://www.example.com/Condor/deployment} as the installation
site:
\footnotesize
\begin{verbatim}
% condor_cold_start \
-configfile http://www.example.com/Condor/deployment/condor_config.mobile \
-site http://www.example.com/Condor/deployment
\end{verbatim}
\normalsize
Optionally if this instance of Condor requires a local configuration
file \File{condor\_config.local}:
\footnotesize
\begin{verbatim}
% condor_cold_start \
-configfile http://www.example.com/Condor/deployment/condor_config.mobile \
-site http://www.example.com/Condor/deployment \
-runlocalconfig condor_config.local
\end{verbatim}
\normalsize
\SeeAlso
\Condor{cold\_stop} (on page~\pageref{man-condor-cold-stop}),
\Prog{filelock\_midwife} (on page~\pageref{man-filelock-midwife}),
\Prog{uniq\_pid\_midwife} (on page~\pageref{man-uniq-pid-midwife}).
\end{ManPage}
| bbockelm/condor-network-accounting | doc/man-pages/condor_cold_start.tex | TeX | apache-2.0 | 6,853 |
---
title: 'Webapp idea: #nowplaying radio'
category: Projects
tags:
- twitter
- programming
- Ruby
- Ruby on Rails
---
An opportunity to learn Ruby on Rails. Use a Twitter library to fetch tweets with the hashtag #nowplaying.
Present the user with an interface with exactly one button: Play
Parse Twitter's results and fetch songs from YouTube (embedded player, ajax/iframe)
User clicks play, the #nowplaying radio begins.
Ideas for later: add features such as result narrowing, sharing etc.
# Update:
I actually coded this during the day and got some insight into Rails. The coding part was actually quite easy, taking into account that I'd never met Ruby before, but the real obstacle was deployment.
Heroku is awesome, really, but the beginner - I - failed miserably with dependencies (twitter and youtube gems). Long story short, 2 hours of messing with git, Gemfile and bundle and I gave up. The app works, but only in localhost. Maybe I'll get help later.
 | anroots/sqroot.eu | _posts/2011/2011-02-14-webapp-idea-nowplaying-radio.markdown | Markdown | apache-2.0 | 1,064 |
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
/**
* @class
* Initializes a new instance of the USqlExternalDataSourceList class.
* @constructor
* A Data Lake Analytics catalog U-SQL external datasource item list.
*
*/
class USqlExternalDataSourceList extends Array {
constructor() {
super();
}
/**
* Defines the metadata of USqlExternalDataSourceList
*
* @returns {object} metadata of USqlExternalDataSourceList
*
*/
mapper() {
return {
required: false,
serializedName: 'USqlExternalDataSourceList',
type: {
name: 'Composite',
className: 'USqlExternalDataSourceList',
modelProperties: {
nextLink: {
required: false,
serializedName: 'nextLink',
type: {
name: 'String'
}
},
value: {
required: false,
readOnly: true,
serializedName: '',
type: {
name: 'Sequence',
element: {
required: false,
serializedName: 'USqlExternalDataSourceElementType',
type: {
name: 'Composite',
className: 'USqlExternalDataSource'
}
}
}
}
}
}
};
}
}
module.exports = USqlExternalDataSourceList;
| AuxMon/azure-sdk-for-node | lib/services/dataLake.Analytics/lib/catalog/models/uSqlExternalDataSourceList.js | JavaScript | apache-2.0 | 1,673 |
-- Template fragment: {base} is substituted by the ETL framework with the shared
-- CTEs (including Source_to_Standard) that this query builds on.
{base},
-- Keep only mappings from the 'jnj_tru_p_spclty' source vocabulary (presumably
-- Truven provider-specialty codes -- confirm against the ETL docs) whose target
-- is a valid, non-empty standard concept.
Standard as (
SELECT distinct SOURCE_CODE, TARGET_CONCEPT_ID, TARGET_DOMAIN_ID, SOURCE_VALID_START_DATE, SOURCE_VALID_END_DATE
FROM Source_to_Standard
WHERE lower(SOURCE_VOCABULARY_ID) IN ('jnj_tru_p_spclty')
AND (TARGET_STANDARD_CONCEPT IS NOT NULL or TARGET_STANDARD_CONCEPT != '')
AND (TARGET_INVALID_REASON IS NULL or TARGET_INVALID_REASON = '')
)
-- Final result: distinct specialty-code to standard-concept mappings.
select distinct Standard.*
from Standard
/**
* @license
* Copyright 2015 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*/
// FOAM view for a single log entry: renders the entry number plus the message
// wrapped in a <log>/<warn>/<error> element matching the entry's mode.
// NOTE: FOAM extracts the template/CSS bodies from the /* ... */ comments inside
// the functions below, so those comment bodies are behavior and must not change.
CLASS({
  name: 'LogEntryView',
  package: 'foam.flow',
  extends: 'foam.flow.Element',

  // Custom element name used when rendering this view.
  constants: { ELEMENT_NAME: 'log-entry' },

  properties: [
    {
      // The log entry to display (id, mode, contents).
      name: 'data',
      // type: 'foam.flow.LogEntry'
    }
  ],

  templates: [
    function toInnerHTML() {/*
      <num>{{this.data.id}}</num><{{{this.data.mode}}}>{{this.data.contents}}</{{{this.data.mode}}}>
    */},
    function CSS() {/*
      log-entry {
        display: flex;
      }
      log-entry > num {
        min-width: 35px;
        max-width: 35px;
        display: inline-block;
        text-align: right;
        padding-right: 13px;
        font-weight: bold;
        -webkit-touch-callout: none;
        -webkit-user-select: none;
        -khtml-user-select: none;
        -moz-user-select: none;
        -ms-user-select: none;
        user-select: none;
        background: #E0E0E0;
      }
      log-entry > log, log-entry > warn, log-entry > error {
        padding-left: 4px;
        white-space: pre-wrap;
      }
      log-entry > log {
        color: #333;
      }
      log-entry > warn {
        color: #CC9900;
      }
      log-entry > error {
        color: #C00;
      }
    */}
  ]
});
(function() {

  // Controller for the landing page: exposes the hero banner title.
  function LandingCtrl() {
    var vm = this;
    vm.heroTitle = "Turn the Music Up!";
  }

  angular
    .module('blocJams')
    .controller('LandingCtrl', LandingCtrl);

})();
| ganaraja/bloc-jams-angular | dist/scripts/controllers/LandingCtrl.js | JavaScript | apache-2.0 | 195 |
<include file="Index/header" />
<style type="text/css">
</style>
<!-- 右边内容 START -->
<!-- Main content: lists the user's WeChat official accounts. The {gr-...} tags
     are ThinkPHP template expressions resolved server-side. -->
<div class="col-xs-12 col-sm-12 col-md-12 col-lg-12 " style="padding:20px">
<!-- Show the "add account" button only while the user still has quota left. -->
<if condition="$userinfo['wechat_card_num'] - $thisUser['wechat_card_num'] gt 0 " >
<a class="btn btn-primary" onclick="location.href='{gr-:U('Index/add')}';">添加微信公众号</a>
<span class="text-info" >您还可以创建{gr-$userinfo['wechat_card_num'] - $thisUser['wechat_card_num']}个微信公众号</span>
<else />
</if>
<!-- 公众号列表 START-->
<div class=" " >
<table class="table table-condensed table-bordered table-striped" border="0" cellSpacing="0" cellPadding="0" width="100%">
<thead>
<tr>
<th>公众号名称</th>
<th style="text-align:center">用户组</th>
<th>到期时间</th>
<!-- <th>已定义/上限</th> -->
<!-- <th>请求数</th> -->
<th>操作</th>
</tr>
</thead>
<tbody>
<tr></tr>
<!-- One row per WeChat account, with manage/bind/edit/delete actions. -->
<volist name="info" id="vo">
<tr>
<td><p><a href="{gr-:U('Function/index',array('id'=>$vo['id'],'token'=>$vo['token']))}" title="点击进入功能管理"><img src="{gr-$vo.headerpic}" width="40" height="40"></a></p><p>{gr-$vo.wxname}</p></td>
<td align="center">{gr-$thisGroup.name}</td>
<td>{gr-$viptime|date="Y-m-d",###} <!-- <a href="###" onclick="alert('请联系我们,电话0575-89974522')" id="smemberss" class="btn btn-flat btn-link btn-sm text-info"><em>如何续费</em></a> --></td>
<td class="norightborder">
<a class="btn btn-lg btn-primary" href="{gr-:U('Function/index',array('id'=>$vo['id'],'token'=>$vo['token']))}" class="btn btn-primary" >进入管理</a>
<a target="_blank" href="{gr-:U('Home/Index/bind',array('token'=>$vo['token'],'encodingaeskey'=>$vo['encodingaeskey']))}" class="btn btn-primary btn-sm" >绑定公众号</a>
<a class="btn btn-warning btn-sm" href="{gr-:U('Index/edit',array('id'=>$vo['id']))}"><i class="mdi-editor-mode-edit"></i>编辑</a>
<a href="javascript:drop_confirm('您确定要删除吗?', '{gr-:U('Index/del',array('id'=>$vo['id']))}');" class="btn btn-danger btn-sm"><i class="mdi-action-delete"></i>删除</a>
</td>
</tr>
</volist>
</tbody>
</table>
</div>
<!-- 公众号列表 END-->
<!-- 分页 START -->
<div class="pageNavigator right">
<div class="pages"></div>
</div>
<!-- 分页 END-->
</div>
<include file="Public/footer"/>
#!/bin/bash
# This script runs in a loop (configurable with LOOP), checks for updates to the
# Hugo docs theme or to the docs on certain branches and rebuilds the public
# folder for them. It has be made more generalized, so that we don't have to
# hardcode versions.
# Warning - Changes should not be made on the server on which this script is running
# becauses this script does git checkout and merge.
set -e
GREEN='\033[32;1m'
RESET='\033[0m'
HOST="${HOST:-https://dgraph.io/docs/badger}"
# Name of output public directory
PUBLIC="${PUBLIC:-public}"
# LOOP true makes this script run in a loop to check for updates
LOOP="${LOOP:-true}"
# Binary of hugo command to run.
HUGO="${HUGO:-hugo}"
# TODO - Maybe get list of released versions from Github API and filter
# those which have docs.
# Place the latest version at the beginning so that version selector can
# append '(latest)' to the version string, followed by the master version,
# and then the older versions in descending order, such that the
# build script can place the artifact in an appropriate location.
VERSIONS_ARRAY=(
'master'
)
# Join VERSIONS_ARRAY into a single comma-separated string,
# e.g. (master v1.0) -> "master,v1.0".
joinVersions() {
    local IFS=','
    echo "${VERSIONS_ARRAY[*]}"
}
# Zero-pad each dotted version component into a fixed-width sortable key,
# e.g. "6.6.7" -> "006006007". Requires gawk. (Currently unused helper.)
function version { echo "$@" | gawk -F. '{ printf("%03d%03d%03d\n", $1,$2,$3); }'; }
# Regenerate the Hugo site for branch $1 / version label $2.
# Assumes the repo is already checked out on branch $1 (see branchUpdated).
rebuild() {
    echo -e "$(date) $GREEN Updating docs for branch: $1.$RESET"

    # The latest documentation is generated in the root of /public dir
    # Older documentations are generated in their respective `/public/vx.x.x` dirs
    dir=''
    if [[ $2 != "${VERSIONS_ARRAY[0]}" ]]; then
        dir=$2
    fi

    VERSION_STRING=$(joinVersions)
    # In Unix environments, env variables should also be exported to be seen by Hugo
    export CURRENT_BRANCH=${1}
    export CURRENT_VERSION=${2}
    export VERSIONS=${VERSION_STRING}

    # Invoke hugo with the same values also bound command-scope; output is
    # routed to the version's public folder and baseURL.
    HUGO_TITLE="Badger Doc ${2}"\
    VERSIONS=${VERSION_STRING}\
    CURRENT_BRANCH=${1}\
    CURRENT_VERSION=${2} ${HUGO} \
    --destination="${PUBLIC}"/"$dir"\
    --baseURL="$HOST"/"$dir" 1> /dev/null
}
# Check whether branch $1 has new upstream commits; if so, merge them in.
# Returns 0 (true) when an update was merged, 1 otherwise.
# Side effect: leaves the repository checked out on branch $1.
branchUpdated()
{
    local branch="$1"
    git checkout -q "$1"
    # Compare the local HEAD against its configured upstream.
    UPSTREAM=$(git rev-parse "@{u}")
    LOCAL=$(git rev-parse "@")
    if [ "$LOCAL" != "$UPSTREAM" ] ; then
        git merge -q origin/"$branch"
        return 0
    else
        return 1
    fi
}
# Echo the output folder for version $1: the newest version (first entry of
# VERSIONS_ARRAY) lives in the root of $PUBLIC, all others in $PUBLIC/<version>.
# Fix: removed the dead `dir=''` assignment that was never used.
publicFolder()
{
    if [[ $1 == "${VERSIONS_ARRAY[0]}" ]]; then
        echo "${PUBLIC}"
    else
        echo "${PUBLIC}/$1"
    fi
}
# Rebuild the docs for version $1 when (a) its branch received new commits,
# (b) this is the first loop iteration, (c) the theme was updated, or
# (d) its output folder is missing.
# Fixes: removed the redundant `git merge` (branchUpdated already merges on
# update) and collapsed the two rebuild call sites so an updated branch on the
# first run is no longer rebuilt twice.
checkAndUpdate()
{
    local version="$1"
    local branch="master"
    if [[ $version != "master" ]]; then
        branch="release/$version"
    fi

    local needRebuild=1
    # Side effect: checks out $branch and fast-forwards it when updated.
    if branchUpdated "$branch" ; then
        needRebuild=0
    fi

    folder=$(publicFolder "$version")
    # themeUpdated uses 0 to mean "updated" (shell-truth convention).
    if [ "$firstRun" = 1 ] || [ "$themeUpdated" = 0 ] || [ ! -d "$folder" ] ; then
        needRebuild=0
    fi

    if [ "$needRebuild" = 0 ] ; then
        rebuild "$branch" "$version"
    fi
}
# Main watch loop: on each pass, refresh the theme and every documented
# version, rebuilding whatever changed. Exits after one pass when LOOP=false.
firstRun=1
while true; do
    # Lets move to the docs directory.
    pushd "$(dirname "$0")/.." > /dev/null

    currentBranch=$(git rev-parse --abbrev-ref HEAD)

    # Lets check if the theme was updated.
    pushd themes/hugo-docs > /dev/null
    git remote update > /dev/null
    themeUpdated=1
    if branchUpdated "master" ; then
        echo -e "$(date) $GREEN Theme has been updated. Now will update the docs.$RESET"
        themeUpdated=0
    fi
    popd > /dev/null

    # Now check every documented branch/version for updates.
    echo -e "$(date) Starting to check branches."
    git remote update > /dev/null
    for version in "${VERSIONS_ARRAY[@]}"
    do
        checkAndUpdate "$version"
    done
    echo -e "$(date) Done checking branches.\n"

    # Restore the branch we started on before leaving the docs directory.
    git checkout -q "$currentBranch"
    popd > /dev/null
    firstRun=0
    if ! $LOOP; then
        exit
    fi
    sleep 60
done
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
* ClientInfo.java
*
* This file was auto-generated from WSDL
* by the Apache Axis2 version: #axisVersion# #today#
*/
package org.apache.axis2.databinding;
import org.apache.axiom.om.OMFactory;
import org.apache.axis2.databinding.utils.writer.MTOMAwareXMLStreamWriter;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLStreamException;
/**
 * ClientInfo bean class.
 * <p>
 * Axis2 ADB bean for the {@code ClientInfo} complex type
 * (namespace {@code http://www.wso2.com/types}) carrying a name and an SSN.
 * Generated code -- statement order matters for the pull-parser contract.
 */
public class ClientInfo
        implements org.apache.axis2.databinding.ADBBean {
    /* This type was generated from the piece of schema that had
    name = ClientInfo
    Namespace URI = http://www.wso2.com/types
    Namespace Prefix = ns1
    */

    // Convenience constructor setting both fields.
    public ClientInfo(String localName, String localSsn) {
        this.localName = localName;
        this.localSsn = localSsn;
    }

    // No-arg constructor required by the Factory.parse() deserializer.
    public ClientInfo() {
    }

    /** field for Name */
    protected java.lang.String localName;

    /**
     * Auto generated getter method
     *
     * @return java.lang.String
     */
    public java.lang.String getName() {
        return localName;
    }

    /**
     * Auto generated setter method
     *
     * @param param Name
     */
    public void setName(java.lang.String param) {
        this.localName = param;
    }

    /** field for Ssn */
    protected java.lang.String localSsn;

    /**
     * Auto generated getter method
     *
     * @return java.lang.String
     */
    public java.lang.String getSsn() {
        return localSsn;
    }

    /**
     * Auto generated setter method
     *
     * @param param Ssn
     */
    public void setSsn(java.lang.String param) {
        this.localSsn = param;
    }

    /** databinding method to get an XML representation of this object */
    public javax.xml.stream.XMLStreamReader getPullParser(javax.xml.namespace.QName qName) {
        // Elements are emitted as alternating (QName, value) pairs; no attributes.
        java.util.ArrayList elementList = new java.util.ArrayList();
        java.util.ArrayList attribList = new java.util.ArrayList();

        elementList.add(new javax.xml.namespace.QName("http://www.wso2.com/types",
                "name"));
        elementList
                .add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localName));

        elementList.add(new javax.xml.namespace.QName("http://www.wso2.com/types",
                "ssn"));
        elementList.add(org.apache.axis2.databinding.utils.ConverterUtil.convertToString(localSsn));

        return new org.apache.axis2.databinding.utils.reader.ADBXMLStreamReaderImpl
                (qName, elementList.toArray(), attribList.toArray());
    }

    // Delegates to the 4-arg overload with serializeType=false.
    public void serialize(final QName parentQName,
                          final OMFactory factory,
                          MTOMAwareXMLStreamWriter xmlWriter)
            throws XMLStreamException, ADBException {
        serialize(parentQName,factory,xmlWriter,false);
    }

    // Not implemented by this generated stub; always throws.
    public void serialize(final QName parentQName,
                          final OMFactory factory,
                          MTOMAwareXMLStreamWriter xmlWriter,
                          boolean serializeType)
            throws XMLStreamException, ADBException {
        throw new UnsupportedOperationException("Un implemented method");
    }

    /** Factory class that keeps the parse method */
    public static class Factory {
        /** static method to create the object */
        public static ClientInfo parse(javax.xml.stream.XMLStreamReader reader)
                throws java.lang.Exception {
            ClientInfo object = new ClientInfo();
            try {
                int event = reader.getEventType();
                int count = 0;
                // Exactly two child elements ("name" and "ssn") are expected.
                // NOTE(review): if either is absent the loop only terminates when
                // reader.next() throws at end-of-document -- confirm inputs.
                int argumentCount = 2;
                boolean done = false;
                //event better be a START_ELEMENT. if not we should go up to the start element here
                while (!reader.isStartElement()) {
                    event = reader.next();
                }

                while (!done) {
                    if (javax.xml.stream.XMLStreamConstants.START_ELEMENT == event) {
                        if ("name".equals(reader.getLocalName())) {
                            String content = reader.getElementText();
                            object.setName(
                                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                                            content));
                            count++;
                        }

                        if ("ssn".equals(reader.getLocalName())) {
                            String content = reader.getElementText();
                            object.setSsn(
                                    org.apache.axis2.databinding.utils.ConverterUtil.convertToString(
                                            content));
                            count++;
                        }
                    }
                    if (argumentCount == count) {
                        done = true;
                    }

                    if (!done) {
                        event = reader.next();
                    }
                }
            } catch (javax.xml.stream.XMLStreamException e) {
                throw new java.lang.Exception(e);
            }
            return object;
        }
    }//end of factory class
}
| intalio/axis2 | modules/adb/test/org/apache/axis2/databinding/ClientInfo.java | Java | apache-2.0 | 6,169 |
# Install server-side dependencies in the repository root.
npm install
# Install client dependencies; the subshell restores the cwd afterwards.
(cd ./client/ && npm install)
| tjdulka/tjd-watson-agent | install.sh | Shell | apache-2.0 | 42 |
<?php
// Example: generate a Qiniu upload token for a bucket.
require_once '../autoload.php';
use Qiniu\Auth;
// Placeholder credentials -- replace with a real Access Key / Secret Key pair.
$accessKey = 'Access_Key';
$secretKey = 'Secret_Key';
// Build the auth object used to sign the upload token.
$auth = new Auth($accessKey, $secretKey);
// Target bucket name (placeholder).
$bucket = 'Bucket_Name';
// uploadToken() signs a token granting upload access to $bucket.
$upToken = $auth->uploadToken($bucket);
echo $upToken;
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
package org.datavec.api.transform.transform.time;
import lombok.Data;
import lombok.EqualsAndHashCode;
import org.datavec.api.transform.ColumnType;
import org.datavec.api.transform.Transform;
import org.datavec.api.transform.metadata.ColumnMetaData;
import org.datavec.api.transform.metadata.IntegerMetaData;
import org.datavec.api.transform.metadata.StringMetaData;
import org.datavec.api.transform.metadata.TimeMetaData;
import org.datavec.api.transform.schema.Schema;
import org.datavec.api.util.jackson.DateTimeFieldTypeDeserializer;
import org.datavec.api.util.jackson.DateTimeFieldTypeSerializer;
import org.datavec.api.writable.IntWritable;
import org.datavec.api.writable.Text;
import org.datavec.api.writable.Writable;
import org.joda.time.DateTime;
import org.joda.time.DateTimeFieldType;
import org.joda.time.DateTimeZone;
import org.joda.time.format.DateTimeFormat;
import org.joda.time.format.DateTimeFormatter;
import org.nd4j.shade.jackson.annotation.JsonIgnore;
import org.nd4j.shade.jackson.annotation.JsonIgnoreProperties;
import org.nd4j.shade.jackson.annotation.JsonInclude;
import org.nd4j.shade.jackson.annotation.JsonProperty;
import org.nd4j.shade.jackson.databind.annotation.JsonDeserialize;
import org.nd4j.shade.jackson.databind.annotation.JsonSerialize;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
/**
* Create a number of new columns by deriving their values from a Time column.
* Can be used for example to create new columns with the year, month, day, hour, minute, second etc.
*
* @author Alex Black
*/
@JsonIgnoreProperties({"inputSchema", "insertAfterIdx", "deriveFromIdx"})
@EqualsAndHashCode(exclude = {"inputSchema", "insertAfterIdx", "deriveFromIdx"})
@Data
public class DeriveColumnsFromTimeTransform implements Transform {
private final String columnName;
private final String insertAfter;
private DateTimeZone inputTimeZone;
private final List<DerivedColumn> derivedColumns;
private Schema inputSchema;
private int insertAfterIdx = -1;
private int deriveFromIdx = -1;
    // Used by the fluent Builder; copies the configured column list verbatim.
    private DeriveColumnsFromTimeTransform(Builder builder) {
        this.derivedColumns = builder.derivedColumns;
        this.columnName = builder.columnName;
        this.insertAfter = builder.insertAfter;
    }

    /**
     * JSON deserialization constructor.
     *
     * @param columnName     name of the source time column to derive from
     * @param insertAfter    name of the column after which derived columns are inserted
     * @param inputTimeZone  time zone of the source column (normally re-read from the
     *                       input schema in setInputSchema)
     * @param derivedColumns specifications of the columns to derive
     */
    public DeriveColumnsFromTimeTransform(@JsonProperty("columnName") String columnName,
                    @JsonProperty("insertAfter") String insertAfter,
                    @JsonProperty("inputTimeZone") DateTimeZone inputTimeZone,
                    @JsonProperty("derivedColumns") List<DerivedColumn> derivedColumns) {
        this.columnName = columnName;
        this.insertAfter = insertAfter;
        this.inputTimeZone = inputTimeZone;
        this.derivedColumns = derivedColumns;
    }
    /**
     * Builds the output schema: a copy of the input schema with one new column
     * per derived column, inserted immediately after the 'insertAfter' column.
     * String-typed derivations become StringMetaData, Integer ones IntegerMetaData.
     */
    @Override
    public Schema transform(Schema inputSchema) {
        List<ColumnMetaData> oldMeta = inputSchema.getColumnMetaData();
        List<ColumnMetaData> newMeta = new ArrayList<>(oldMeta.size() + derivedColumns.size());
        List<String> oldNames = inputSchema.getColumnNames();
        for (int i = 0; i < oldMeta.size(); i++) {
            String current = oldNames.get(i);
            newMeta.add(oldMeta.get(i));
            if (insertAfter.equals(current)) {
                //Insert the derived columns here
                for (DerivedColumn d : derivedColumns) {
                    switch (d.columnType) {
                        case String:
                            newMeta.add(new StringMetaData(d.columnName));
                            break;
                        case Integer:
                            newMeta.add(new IntegerMetaData(d.columnName)); //TODO: ranges... if it's a day, we know it must be 1 to 31, etc...
                            break;
                        default:
                            throw new IllegalStateException("Unexpected column type: " + d.columnType);
                    }
                }
            }
        }
        return inputSchema.newSchema(newMeta);
    }
    /**
     * Caches the input schema and resolves the indices of the 'insertAfter' and
     * source time columns; also picks up the source column's time zone.
     *
     * @throws IllegalStateException if either column is missing or the source
     *                               column is not a time column
     */
    @Override
    public void setInputSchema(Schema inputSchema) {
        insertAfterIdx = inputSchema.getColumnNames().indexOf(insertAfter);
        if (insertAfterIdx == -1) {
            throw new IllegalStateException(
                            "Invalid schema/insert after column: input schema does not contain column \"" + insertAfter
                                            + "\"");
        }
        deriveFromIdx = inputSchema.getColumnNames().indexOf(columnName);
        if (deriveFromIdx == -1) {
            throw new IllegalStateException(
                            "Invalid source column: input schema does not contain column \"" + columnName + "\"");
        }
        this.inputSchema = inputSchema;
        if (!(inputSchema.getMetaData(columnName) instanceof TimeMetaData))
            throw new IllegalStateException("Invalid state: input column \"" + columnName
                            + "\" is not a time column. Is: " + inputSchema.getMetaData(columnName));
        // The authoritative time zone comes from the schema's time metadata.
        TimeMetaData meta = (TimeMetaData) inputSchema.getMetaData(columnName);
        inputTimeZone = meta.getTimeZone();
    }

    /** @return the input schema previously set via {@link #setInputSchema(Schema)} */
    @Override
    public Schema getInputSchema() {
        return inputSchema;
    }
    /**
     * Maps one record: copies every input writable and, right after the
     * 'insertAfter' column, appends one writable per derived column computed
     * from the source time column (epoch-millis long).
     *
     * @throws IllegalStateException if the record length does not match the schema
     */
    @Override
    public List<Writable> map(List<Writable> writables) {
        if (writables.size() != inputSchema.numColumns()) {
            throw new IllegalStateException("Cannot execute transform: input writables list length (" + writables.size()
                            + ") does not " + "match expected number of elements (schema: " + inputSchema.numColumns()
                            + "). Transform = " + toString());
        }
        int i = 0;
        Writable source = writables.get(deriveFromIdx);
        List<Writable> list = new ArrayList<>(writables.size() + derivedColumns.size());
        for (Writable w : writables) {
            list.add(w);
            if (i++ == insertAfterIdx) {
                for (DerivedColumn d : derivedColumns) {
                    switch (d.columnType) {
                        case String:
                            // Formatted via the derived column's DateTimeFormatter.
                            list.add(new Text(d.dateTimeFormatter.print(source.toLong())));
                            break;
                        case Integer:
                            // Extract the configured field (hour, day, ...) in the input zone.
                            DateTime dt = new DateTime(source.toLong(), inputTimeZone);
                            list.add(new IntWritable(dt.get(d.fieldType)));
                            break;
                        default:
                            throw new IllegalStateException("Unexpected column type: " + d.columnType);
                    }
                }
            }
        }
        return list;
    }
@Override
public List<List<Writable>> mapSequence(List<List<Writable>> sequence) {
List<List<Writable>> out = new ArrayList<>(sequence.size());
for (List<Writable> step : sequence) {
out.add(map(step));
}
return out;
}
    /**
     * Transform an object
     * in to another object
     *
     * @param input epoch-millis timestamp as a Long
     * @return a List with one derived value (String or Integer) per configured
     *         derived column, in configuration order
     */
    @Override
    public Object map(Object input) {
        List<Object> ret = new ArrayList<>();
        // Input is expected to be the raw time value in epoch milliseconds.
        Long l = (Long) input;
        for (DerivedColumn d : derivedColumns) {
            switch (d.columnType) {
                case String:
                    ret.add(d.dateTimeFormatter.print(l));
                    break;
                case Integer:
                    DateTime dt = new DateTime(l, inputTimeZone);
                    ret.add(dt.get(d.fieldType));
                    break;
                default:
                    throw new IllegalStateException("Unexpected column type: " + d.columnType);
            }
        }
        return ret;
    }

    /**
     * Transform a sequence
     *
     * @param sequence a List of epoch-millis Longs; each is mapped via {@link #map(Object)}
     */
    @Override
    public Object mapSequence(Object sequence) {
        List<Long> longs = (List<Long>) sequence;
        List<List<Object>> ret = new ArrayList<>();
        for (Long l : longs)
            ret.add((List<Object>) map(l));
        return ret;
    }
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("DeriveColumnsFromTimeTransform(timeColumn=\"").append(columnName).append("\",insertAfter=\"")
.append(insertAfter).append("\",derivedColumns=(");
boolean first = true;
for (DerivedColumn d : derivedColumns) {
if (!first)
sb.append(",");
sb.append(d);
first = false;
}
sb.append("))");
return sb.toString();
}
    /**
     * The output column name after the operation has been applied.
     * For this transform, this is the name of the first derived column.
     *
     * @return the output column name
     */
    @Override
    public String outputColumnName() {
        return outputColumnNames()[0];
    }
/**
* The output column names
* This will often be the same as the input
*
* @return the output column names
*/
@Override
public String[] outputColumnNames() {
String[] ret = new String[derivedColumns.size()];
for (int i = 0; i < ret.length; i++)
ret[i] = derivedColumns.get(i).columnName;
return ret;
}
    /**
     * Returns the column names this op is meant to run on.
     *
     * @return a single-element array containing the source time column's name
     */
    @Override
    public String[] columnNames() {
        return new String[] {columnName()};
    }
    /**
     * Returns the singular column name this op is meant to run on:
     * the source time column.
     *
     * @return the source time column name
     */
    @Override
    public String columnName() {
        return columnName;
    }
public static class Builder {
private final String columnName;
private String insertAfter;
private final List<DerivedColumn> derivedColumns = new ArrayList<>();
/**
* @param timeColumnName The name of the time column from which to derive the new values
*/
public Builder(String timeColumnName) {
this.columnName = timeColumnName;
this.insertAfter = timeColumnName;
}
/**
* Where should the new columns be inserted?
* By default, they will be inserted after the source column
*
* @param columnName Name of the column to insert the derived columns after
*/
public Builder insertAfter(String columnName) {
this.insertAfter = columnName;
return this;
}
/**
* Add a String column (for example, human readable format), derived from the time
*
* @param columnName Name of the new/derived column
* @param format Joda time format, as per <a href="http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html">http://www.joda.org/joda-time/apidocs/org/joda/time/format/DateTimeFormat.html</a>
* @param timeZone Timezone to use for formatting
*/
public Builder addStringDerivedColumn(String columnName, String format, DateTimeZone timeZone) {
derivedColumns.add(new DerivedColumn(columnName, ColumnType.String, format, timeZone, null));
return this;
}
/**
* Add an integer derived column - for example, the hour of day, etc. Uses timezone from the time column metadata
*
* @param columnName Name of the column
* @param type Type of field (for example, DateTimeFieldType.hourOfDay() etc)
*/
public Builder addIntegerDerivedColumn(String columnName, DateTimeFieldType type) {
derivedColumns.add(new DerivedColumn(columnName, ColumnType.Integer, null, null, type));
return this;
}
/**
* Create the transform instance
*/
public DeriveColumnsFromTimeTransform build() {
return new DeriveColumnsFromTimeTransform(this);
}
}
    /**
     * Specification of a single derived column: the new column's name, its output
     * type ({@code String} or {@code Integer}), and how to derive the value from
     * the source time - either a Joda-Time format pattern plus time zone (String
     * columns) or a {@code DateTimeFieldType} (Integer columns).
     */
    @JsonInclude(JsonInclude.Include.NON_NULL)
    @EqualsAndHashCode(exclude = "dateTimeFormatter")
    @Data
    @JsonIgnoreProperties({"dateTimeFormatter"})
    public static class DerivedColumn implements Serializable {
        private final String columnName;
        private final ColumnType columnType;
        //Joda format pattern; non-null only for String-typed derived columns
        private final String format;
        private final DateTimeZone dateTimeZone;
        @JsonSerialize(using = DateTimeFieldTypeSerializer.class)
        @JsonDeserialize(using = DateTimeFieldTypeDeserializer.class)
        private final DateTimeFieldType fieldType;
        //Transient: rebuilt from 'format' after deserialization (see readObject below)
        private transient DateTimeFormatter dateTimeFormatter;
        // public DerivedColumn(String columnName, ColumnType columnType, String format, DateTimeZone dateTimeZone, DateTimeFieldType fieldType) {
        /**
         * @param columnName   name of the derived column
         * @param columnType   output type of the derived column (String or Integer)
         * @param format       Joda format pattern (String columns only; may be null)
         * @param dateTimeZone time zone used with the format (may be null for Integer columns)
         * @param fieldType    field to extract (Integer columns only; may be null)
         */
        public DerivedColumn(@JsonProperty("columnName") String columnName,
                        @JsonProperty("columnType") ColumnType columnType, @JsonProperty("format") String format,
                        @JsonProperty("dateTimeZone") DateTimeZone dateTimeZone,
                        @JsonProperty("fieldType") DateTimeFieldType fieldType) {
            this.columnName = columnName;
            this.columnType = columnType;
            this.format = format;
            this.dateTimeZone = dateTimeZone;
            this.fieldType = fieldType;
            //Eagerly build the formatter when a format pattern is supplied
            if (format != null)
                dateTimeFormatter = DateTimeFormat.forPattern(this.format).withZone(dateTimeZone);
        }
        @Override
        public String toString() {
            return "(name=" + columnName + ",type=" + columnType + ",derived=" + (format != null ? format : fieldType)
                            + ")";
        }
        //Custom serialization methods, because Joda Time doesn't allow DateTimeFormatter objects to be serialized :(
        private void writeObject(ObjectOutputStream out) throws IOException {
            out.defaultWriteObject();
        }
        private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
            in.defaultReadObject();
            //Recreate the transient formatter from the serialized format pattern
            if (format != null)
                dateTimeFormatter = DateTimeFormat.forPattern(format).withZone(dateTimeZone);
        }
    }
}
| deeplearning4j/deeplearning4j | datavec/datavec-api/src/main/java/org/datavec/api/transform/transform/time/DeriveColumnsFromTimeTransform.java | Java | apache-2.0 | 15,108 |
package com.humbinal.ssm.test;
/**
 * Simple bean holding a user's id, name and age, exposed through
 * conventional getter/setter accessors.
 */
public class User {

    //Internal fields use camelCase; accessor names are kept as-is for compatibility
    private long userId;
    private String userName;
    private int userAge;

    /** No-argument constructor; fields start at their Java default values. */
    public User() {
    }

    /** @return the user's id */
    public long getUser_Id() {
        return userId;
    }

    /** @param user_Id the user id to set */
    public void setUser_Id(long user_Id) {
        this.userId = user_Id;
    }

    /** @return the user's name */
    public String getUser_name() {
        return userName;
    }

    /** @param user_name the user name to set */
    public void setUser_name(String user_name) {
        this.userName = user_name;
    }

    /** @return the user's age */
    public int getUser_age() {
        return userAge;
    }

    /** @param user_age the user age to set */
    public void setUser_age(int user_age) {
        this.userAge = user_age;
    }
}
| Humbinal/java-items | hum-web/hum-ssm/src/test/java/com/humbinal/ssm/test/User.java | Java | apache-2.0 | 616 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.5.0_10) on Mon Feb 26 03:04:16 CET 2007 -->
<TITLE>
S-Index (REPSI Tool)
</TITLE>
<LINK REL ="stylesheet" TYPE="text/css" HREF="../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
parent.document.title="S-Index (REPSI Tool)";
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Package</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../overview-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Index</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="index-11.html"><B>PREV LETTER</B></A>
<A HREF="index-13.html"><B>NEXT LETTER</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
  <A HREF="../index.html?index-files/index-12.html" target="_top"><B>FRAMES</B></A>    
<A HREF="index-12.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<A HREF="index-1.html">C</A> <A HREF="index-2.html">D</A> <A HREF="index-3.html">E</A> <A HREF="index-4.html">F</A> <A HREF="index-5.html">G</A> <A HREF="index-6.html">I</A> <A HREF="index-7.html">M</A> <A HREF="index-8.html">N</A> <A HREF="index-9.html">O</A> <A HREF="index-10.html">P</A> <A HREF="index-11.html">R</A> <A HREF="index-12.html">S</A> <A HREF="index-13.html">T</A> <HR>
<A NAME="_S_"><!-- --></A><H2>
<B>S</B></H2>
<DL>
<DT><A HREF="../edu/ou/weinmann/repsi/model/util/Global.html#SEPARATOR_COMMA_SPACE_SINGLE_QUOTE"><B>SEPARATOR_COMMA_SPACE_SINGLE_QUOTE</B></A> -
Static variable in interface edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Global.html" title="interface in edu.ou.weinmann.repsi.model.util">Global</A>
<DD>Separator - comma, space &amp; single quote.
<DT><A HREF="../edu/ou/weinmann/repsi/model/util/Global.html#SEPARATOR_SINGLE_QUOTE_COMMA_SPACE_SINGLE_QUOTE"><B>SEPARATOR_SINGLE_QUOTE_COMMA_SPACE_SINGLE_QUOTE</B></A> -
Static variable in interface edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Global.html" title="interface in edu.ou.weinmann.repsi.model.util">Global</A>
<DD>Separator - single quote, comma, space &amp; single quote.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setAppliedEndAction(java.util.Date, java.util.Date, long)"><B>setAppliedEndAction(Date, Date, long)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A>
<DD>Updates in the database the columns
<code>APPLIED_DURATION_MICRO_SECOND</code>,
<code>APPLIED_END_TIME</code>, and <code>APPLIED_START_TIME</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setAppliedEndActionError(java.lang.String)"><B>setAppliedEndActionError(String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A>
<DD>Updates in the database the columns <code>APPLIED_ERROR_MESSAGE</code>
and <code>APPLIED_STATUS</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setAppliedStartAction(java.lang.String, java.lang.String)"><B>setAppliedStartAction(String, String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A>
<DD>Updates in the database the columns
<code>APPLIED_PATTERN_SELECT_STMNT</code> and
<code>APPLIED_STATUS</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html#setComparison(java.lang.String, java.lang.String)"><B>setComparison(String, String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">CalibrationMapper</A>
<DD>Updates in the database the columns <code>COMPARISON_EQUALS</code> and
<code>COMPARISON_MESSAGE</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setComparison(java.lang.String, java.lang.String)"><B>setComparison(String, String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A>
<DD>Updates in the database the columns <code>COMPARISON_EQUALS</code> and
<code>COMPARISON_MESSAGE</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html#setDescription(java.lang.String)"><B>setDescription(String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">CalibrationMapper</A>
<DD>Updates in the database the columns <code>DESCRIPTION</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunMapper.html#setDescription(java.lang.String)"><B>setDescription(String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunMapper</A>
<DD>Updates in the database the columns <code>DESCRIPTION</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html#setDescription(java.lang.String, int)"><B>setDescription(String, int)</B></A> -
Method in class edu.ou.weinmann.repsi.model.trial.util.<A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html" title="class in edu.ou.weinmann.repsi.model.trial.util">ResultSetComparator</A>
<DD>Sets the description at the required position.
<DT><A HREF="../edu/ou/weinmann/repsi/model/util/DatabaseAccessor.html#setFetchSize(int)"><B>setFetchSize(int)</B></A> -
Method in class edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/DatabaseAccessor.html" title="class in edu.ou.weinmann.repsi.model.util">DatabaseAccessor</A>
<DD>Creates a <code>Statement</code> object associtated with this
<code>Connnection</code> object.
<DT><A HREF="../edu/ou/weinmann/repsi/model/calibration/Calibration.html#setObject(java.lang.String)"><B>setObject(String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.calibration.<A HREF="../edu/ou/weinmann/repsi/model/calibration/Calibration.html" title="class in edu.ou.weinmann.repsi.model.calibration">Calibration</A>
<DD>Sets the type of the <code>Calibration</code> object.
<DT><A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html#setOrderBy(java.lang.String, int)"><B>setOrderBy(String, int)</B></A> -
Method in class edu.ou.weinmann.repsi.model.trial.util.<A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html" title="class in edu.ou.weinmann.repsi.model.trial.util">ResultSetComparator</A>
<DD>Sets the order by clause at the required position.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html#setPatternSqlIdiomName(java.lang.String)"><B>setPatternSqlIdiomName(String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">CalibrationMapper</A>
<DD>Updates in the database the columns <code>PATTERN_SQL_IDIOM_NAME</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/util/Configurator.html#setProperty(java.lang.String, java.lang.String)"><B>setProperty(String, String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Configurator.html" title="class in edu.ou.weinmann.repsi.model.util">Configurator</A>
<DD>sets the value of a given property key.
<DT><A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html#setSelectStmnt(java.lang.String, int)"><B>setSelectStmnt(String, int)</B></A> -
Method in class edu.ou.weinmann.repsi.model.trial.util.<A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html" title="class in edu.ou.weinmann.repsi.model.trial.util">ResultSetComparator</A>
<DD>Sets the <code>SELECT</code> statement at the required position.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setSequenceNumberAction(long)"><B>setSequenceNumberAction(long)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A>
<DD>Sets the current action sequence number.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunProtocolMapper.html#setSequenceNumberAction(long)"><B>setSequenceNumberAction(long)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunProtocolMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunProtocolMapper</A>
<DD>Sets the current action sequence number.
<DT><A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html#setSqlSyntaxCode(java.lang.String)"><B>setSqlSyntaxCode(String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.trial.util.<A HREF="../edu/ou/weinmann/repsi/model/trial/util/ResultSetComparator.html" title="class in edu.ou.weinmann.repsi.model.trial.util">ResultSetComparator</A>
<DD>Sets the <code>SQL</code> syntax code.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html#setSqlSyntaxCodeTqp(java.lang.String)"><B>setSqlSyntaxCodeTqp(String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">CalibrationMapper</A>
<DD>Updates in the database the columns <code>SQL_SYNTAX_CODE_TTQP</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/database/Database.html#setSqlSyntaxSource(java.lang.String)"><B>setSqlSyntaxSource(String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.database.<A HREF="../edu/ou/weinmann/repsi/model/database/Database.html" title="class in edu.ou.weinmann.repsi.model.database">Database</A>
<DD>Sets the type of the SQL syntax version.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html#setStatus()"><B>setStatus()</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/CalibrationMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">CalibrationMapper</A>
<DD>Updates in the database the columns <code>END_TIME</code> and
<code>STATUS_CODE</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunMapper.html#setStatus(java.lang.String)"><B>setStatus(String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunMapper</A>
<DD>Updates in the database the columns <code>END_TIME</code> and
<code>STATUS_CODE</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setTableName(java.lang.String)"><B>setTableName(String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A>
<DD>Updates in the database the column <code>TABLE_NAME</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setUnappliedEndAction(java.util.Date, java.util.Date, long)"><B>setUnappliedEndAction(Date, Date, long)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A>
<DD>Updates in the database the columns
<code>UNAPPLIED_DURATION_MICRO_SECOND</code>,
<code>UNAPPLIED_END_TIME</code>, and <code>UNAPPLIED_START_TIME</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setUnappliedEndActionError(java.lang.String)"><B>setUnappliedEndActionError(String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A>
<DD>Updates in the database the columns <code>UNAPPLIED_ERROR_MESSAGE</code>
and <code>UNAPPLIED_STATUS</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html#setUnappliedStartAction(java.lang.String, java.lang.String)"><B>setUnappliedStartAction(String, String)</B></A> -
Method in class edu.ou.weinmann.repsi.model.mapper.<A HREF="../edu/ou/weinmann/repsi/model/mapper/TrialRunActionMapper.html" title="class in edu.ou.weinmann.repsi.model.mapper">TrialRunActionMapper</A>
<DD>Updates in the database the columns
<code>UNAPPLIED_PATTERN_SELECT_STMNT</code> and
<code>UNAPPLIED_STATUS</code>.
<DT><A HREF="../edu/ou/weinmann/repsi/model/trial/metadata/Columns.html#sizeColumns()"><B>sizeColumns()</B></A> -
Method in class edu.ou.weinmann.repsi.model.trial.metadata.<A HREF="../edu/ou/weinmann/repsi/model/trial/metadata/Columns.html" title="class in edu.ou.weinmann.repsi.model.trial.metadata">Columns</A>
<DD>Returns the number of columns in this database table.
<DT><A HREF="../edu/ou/weinmann/repsi/model/util/Global.html#SQL_COLUMN_TYPE_CHAR"><B>SQL_COLUMN_TYPE_CHAR</B></A> -
Static variable in interface edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Global.html" title="interface in edu.ou.weinmann.repsi.model.util">Global</A>
<DD>SQL column type - CHAR.
<DT><A HREF="../edu/ou/weinmann/repsi/model/util/Global.html#SQL_COLUMN_TYPE_VARCHAR2"><B>SQL_COLUMN_TYPE_VARCHAR2</B></A> -
Static variable in interface edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Global.html" title="interface in edu.ou.weinmann.repsi.model.util">Global</A>
<DD>SQL column type - VARCHAR2.
<DT><A HREF="../edu/ou/weinmann/repsi/model/util/Global.html#SQL_SYNTAX_CODE_ORACLE_10G"><B>SQL_SYNTAX_CODE_ORACLE_10G</B></A> -
Static variable in interface edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Global.html" title="interface in edu.ou.weinmann.repsi.model.util">Global</A>
<DD>SQL syntax code - Oracle 10g Release 2.
<DT><A HREF="../edu/ou/weinmann/repsi/model/util/Global.html#SQL_SYNTAX_CODE_SQL_99"><B>SQL_SYNTAX_CODE_SQL_99</B></A> -
Static variable in interface edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/Global.html" title="interface in edu.ou.weinmann.repsi.model.util">Global</A>
<DD>SQL syntax code - standard SQL:1999.
<DT><A HREF="../edu/ou/weinmann/repsi/model/util/SQLRewriter.html" title="class in edu.ou.weinmann.repsi.model.util"><B>SQLRewriter</B></A> - Class in <A HREF="../edu/ou/weinmann/repsi/model/util/package-summary.html">edu.ou.weinmann.repsi.model.util</A><DD>Adapts the syntactical variations of different SQL versions by rewriting the
SQL statements.<DT><A HREF="../edu/ou/weinmann/repsi/model/util/SQLRewriter.html#SQLRewriter()"><B>SQLRewriter()</B></A> -
Constructor for class edu.ou.weinmann.repsi.model.util.<A HREF="../edu/ou/weinmann/repsi/model/util/SQLRewriter.html" title="class in edu.ou.weinmann.repsi.model.util">SQLRewriter</A>
<DD>Constructs a <code>SQLRewriter</code> object.
<DT><A HREF="../edu/ou/weinmann/repsi/model/database/Database.html#startElement(java.lang.String, java.lang.String, java.lang.String, org.xml.sax.Attributes)"><B>startElement(String, String, String, Attributes)</B></A> -
Method in class edu.ou.weinmann.repsi.model.database.<A HREF="../edu/ou/weinmann/repsi/model/database/Database.html" title="class in edu.ou.weinmann.repsi.model.database">Database</A>
<DD>Receive notification of the start of an element.
</DL>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Package</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../overview-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Index</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="index-11.html"><B>PREV LETTER</B></A>
<A HREF="index-13.html"><B>NEXT LETTER</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
  <A HREF="../index.html?index-files/index-12.html" target="_top"><B>FRAMES</B></A>    
<A HREF="index-12.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<A HREF="index-1.html">C</A> <A HREF="index-2.html">D</A> <A HREF="index-3.html">E</A> <A HREF="index-4.html">F</A> <A HREF="index-5.html">G</A> <A HREF="index-6.html">I</A> <A HREF="index-7.html">M</A> <A HREF="index-8.html">N</A> <A HREF="index-9.html">O</A> <A HREF="index-10.html">P</A> <A HREF="index-11.html">R</A> <A HREF="index-12.html">S</A> <A HREF="index-13.html">T</A> <HR>
</BODY>
</HTML>
| walter-weinmann/repsi-tool | REPSI_Tool_02.00_Development/doc/api/index-files/index-12.html | HTML | apache-2.0 | 20,599 |
/**
Copyright (c) 2013 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
**/
require("../base/extension_registry.js");
require("./event.js");
require("./object_snapshot.js");
require("../base/range.js");
require("../base/sorted_array_utils.js");
'use strict';
/**
* @fileoverview Provides the ObjectSnapshot and ObjectHistory classes.
*/
global.tr.exportTo('tr.model', function() {
var ObjectSnapshot = tr.model.ObjectSnapshot;
/**
* An object with a specific id, whose state has been snapshotted several
* times.
*
* @constructor
*/
function ObjectInstance(
parent, id, category, name, creationTs, opt_baseTypeName) {
tr.model.Event.call(this);
this.parent = parent;
this.id = id;
this.category = category;
this.baseTypeName = opt_baseTypeName ? opt_baseTypeName : name;
this.name = name;
this.creationTs = creationTs;
this.creationTsWasExplicit = false;
this.deletionTs = Number.MAX_VALUE;
this.deletionTsWasExplicit = false;
this.colorId = 0;
this.bounds = new tr.b.Range();
this.snapshots = [];
this.hasImplicitSnapshots = false;
}
ObjectInstance.prototype = {
__proto__: tr.model.Event.prototype,
get typeName() {
return this.name;
},
addBoundsToRange: function(range) {
range.addRange(this.bounds);
},
addSnapshot: function(ts, args, opt_name, opt_baseTypeName) {
if (ts < this.creationTs)
throw new Error('Snapshots must be >= instance.creationTs');
if (ts >= this.deletionTs)
throw new Error('Snapshots cannot be added after ' +
'an objects deletion timestamp.');
var lastSnapshot;
if (this.snapshots.length > 0) {
lastSnapshot = this.snapshots[this.snapshots.length - 1];
if (lastSnapshot.ts == ts)
throw new Error('Snapshots already exists at this time!');
if (ts < lastSnapshot.ts) {
throw new Error(
'Snapshots must be added in increasing timestamp order');
}
}
// Update baseTypeName if needed.
if (opt_name &&
(this.name != opt_name)) {
if (!opt_baseTypeName)
throw new Error('Must provide base type name for name update');
if (this.baseTypeName != opt_baseTypeName)
throw new Error('Cannot update type name: base types dont match');
this.name = opt_name;
}
var snapshotConstructor =
tr.model.ObjectSnapshot.getConstructor(
this.category, this.name);
var snapshot = new snapshotConstructor(this, ts, args);
this.snapshots.push(snapshot);
return snapshot;
},
wasDeleted: function(ts) {
var lastSnapshot;
if (this.snapshots.length > 0) {
lastSnapshot = this.snapshots[this.snapshots.length - 1];
if (lastSnapshot.ts > ts)
throw new Error(
'Instance cannot be deleted at ts=' +
ts + '. A snapshot exists that is older.');
}
this.deletionTs = ts;
this.deletionTsWasExplicit = true;
},
/**
* See ObjectSnapshot constructor notes on object initialization.
*/
preInitialize: function() {
for (var i = 0; i < this.snapshots.length; i++)
this.snapshots[i].preInitialize();
},
/**
* See ObjectSnapshot constructor notes on object initialization.
*/
initialize: function() {
for (var i = 0; i < this.snapshots.length; i++)
this.snapshots[i].initialize();
},
getSnapshotAt: function(ts) {
if (ts < this.creationTs) {
if (this.creationTsWasExplicit)
throw new Error('ts must be within lifetime of this instance');
return this.snapshots[0];
}
if (ts > this.deletionTs)
throw new Error('ts must be within lifetime of this instance');
var snapshots = this.snapshots;
var i = tr.b.findIndexInSortedIntervals(
snapshots,
function(snapshot) { return snapshot.ts; },
function(snapshot, i) {
if (i == snapshots.length - 1)
return snapshots[i].objectInstance.deletionTs;
return snapshots[i + 1].ts - snapshots[i].ts;
},
ts);
if (i < 0) {
// Note, this is a little bit sketchy: this lets early ts point at the
// first snapshot, even before it is taken. We do this because raster
// tasks usually post before their tile snapshots are dumped. This may
// be a good line of code to re-visit if we start seeing strange and
// confusing object references showing up in the traces.
return this.snapshots[0];
}
if (i >= this.snapshots.length)
return this.snapshots[this.snapshots.length - 1];
return this.snapshots[i];
},
updateBounds: function() {
this.bounds.reset();
this.bounds.addValue(this.creationTs);
if (this.deletionTs != Number.MAX_VALUE)
this.bounds.addValue(this.deletionTs);
else if (this.snapshots.length > 0)
this.bounds.addValue(this.snapshots[this.snapshots.length - 1].ts);
},
shiftTimestampsForward: function(amount) {
this.creationTs += amount;
if (this.deletionTs != Number.MAX_VALUE)
this.deletionTs += amount;
this.snapshots.forEach(function(snapshot) {
snapshot.ts += amount;
});
},
get userFriendlyName() {
return this.typeName + ' object ' + this.id;
}
};
  // Register ObjectInstance as a model event type so the UI can locate the
  // analysis sub-views for single and multiple object selections.
  tr.model.EventRegistry.register(
      ObjectInstance,
      {
        name: 'objectInstance',
        pluralName: 'objectInstances',
        singleViewElementName: 'tr-ui-a-single-object-instance-sub-view',
        multiViewElementName: 'tr-ui-a-multi-object-sub-view'
      });
  // Make ObjectInstance a type-based extension registry: registered subclasses
  // must extend ObjectInstance, and unregistered types fall back to the plain
  // ObjectInstance constructor.
  var options = new tr.b.ExtensionRegistryOptions(
      tr.b.TYPE_BASED_REGISTRY_MODE);
  options.mandatoryBaseClass = ObjectInstance;
  options.defaultConstructor = ObjectInstance;
  tr.b.decorateExtensionRegistry(ObjectInstance, options);
  return {
    ObjectInstance: ObjectInstance
  };
});
| googlearchive/node-big-rig | lib/third_party/tracing/model/object_instance.js | JavaScript | apache-2.0 | 6,147 |
// Licensed to the Apache Software Foundation (ASF) under one or more
// contributor license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright ownership.
// The ASF licenses this file to You under the Apache License, Version 2.0
// (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package graphx
import (
"encoding/json"
"fmt"
"github.com/apache/beam/sdks/go/pkg/beam/core/graph/coder"
"github.com/apache/beam/sdks/go/pkg/beam/core/runtime/graphx/v1"
"github.com/apache/beam/sdks/go/pkg/beam/core/typex"
"github.com/apache/beam/sdks/go/pkg/beam/core/util/protox"
pb "github.com/apache/beam/sdks/go/pkg/beam/model/pipeline_v1"
"github.com/golang/protobuf/proto"
)
// URNs identifying the standard Beam model coders and the Go-SDK-specific
// coder representations used by the (un)marshallers below.
const (
 // Model constants
 urnBytesCoder = "beam:coder:bytes:v1"
 urnVarIntCoder = "beam:coder:varint:v1"
 urnLengthPrefixCoder = "beam:coder:length_prefix:v1"
 urnKVCoder = "beam:coder:kv:v1"
 urnIterableCoder = "beam:coder:iterable:v1"
 urnWindowedValueCoder = "beam:coder:windowed_value:v1"
 urnGlobalWindow = "beam:coder:global_window:v1"
 urnIntervalWindow = "beam:coder:interval_window:v1"
 // SDK constants
 urnCustomCoder = "beam:go:coder:custom:v1"
 urnCoGBKList = "beam:go:coder:cogbklist:v1" // CoGBK representation. Not a coder.
)
// MarshalCoders marshals a list of coders into model coders. It returns the
// ids of the given coders, plus the full id -> model coder mapping (which
// also contains any component coders).
func MarshalCoders(coders []*coder.Coder) ([]string, map[string]*pb.Coder) {
 m := NewCoderMarshaller()
 return m.AddMulti(coders), m.Build()
}
// UnmarshalCoders unmarshals the coders with the given ids from the supplied
// model coder mapping, in order.
func UnmarshalCoders(ids []string, m map[string]*pb.Coder) ([]*coder.Coder, error) {
 u := NewCoderUnmarshaller(m)
 var out []*coder.Coder
 for _, id := range ids {
  c, err := u.Coder(id)
  if err != nil {
   return nil, fmt.Errorf("failed to unmarshal coder %v: %v", id, err)
  }
  out = append(out, c)
 }
 return out, nil
}
// CoderUnmarshaller is an incremental unmarshaller of model coders. Identical
// coders are shared.
type CoderUnmarshaller struct {
 // models holds the raw model coders, keyed by id.
 models map[string]*pb.Coder
 // coders caches unmarshalled coders by id, so repeated lookups of the
 // same id return the same instance.
 coders map[string]*coder.Coder
 // windowCoders caches unmarshalled window coders by id.
 windowCoders map[string]*coder.WindowCoder
}
// NewCoderUnmarshaller returns a new CoderUnmarshaller over the given model
// coder mapping, with empty caches.
func NewCoderUnmarshaller(m map[string]*pb.Coder) *CoderUnmarshaller {
 u := &CoderUnmarshaller{
  models: m,
  coders: make(map[string]*coder.Coder),
  windowCoders: make(map[string]*coder.WindowCoder),
 }
 return u
}
// Coders unmarshals all coders with the given ids, preserving order.
func (b *CoderUnmarshaller) Coders(ids []string) ([]*coder.Coder, error) {
 result := make([]*coder.Coder, len(ids))
 for i := range ids {
  c, err := b.Coder(ids[i])
  if err != nil {
   return nil, err
  }
  result[i] = c
 }
 return result, nil
}
// Coder unmarshals a coder with the given id, consulting (and populating)
// the cache so identical ids share a single *coder.Coder.
func (b *CoderUnmarshaller) Coder(id string) (*coder.Coder, error) {
 if cached, ok := b.coders[id]; ok {
  return cached, nil
 }
 model, ok := b.models[id]
 if !ok {
  return nil, fmt.Errorf("coder with id %v not found", id)
 }
 decoded, err := b.makeCoder(model)
 if err != nil {
  return nil, fmt.Errorf("failed to unmarshal coder %v: %v", id, err)
 }
 b.coders[id] = decoded
 return decoded, nil
}
// WindowCoder unmarshals a window coder with the given id, caching the
// result so identical ids share a single *coder.WindowCoder.
func (b *CoderUnmarshaller) WindowCoder(id string) (*coder.WindowCoder, error) {
 if cached, ok := b.windowCoders[id]; ok {
  return cached, nil
 }
 model, err := b.peek(id)
 if err != nil {
  return nil, err
 }
 wc := urnToWindowCoder(model.GetSpec().GetSpec().GetUrn())
 b.windowCoders[id] = wc
 return wc, nil
}
// urnToWindowCoder translates a window coder URN into its SDK representation.
// It panics on any other URN, which indicates an unsupported pipeline.
func urnToWindowCoder(urn string) *coder.WindowCoder {
 if urn == urnGlobalWindow {
  return coder.NewGlobalWindow()
 }
 if urn == urnIntervalWindow {
  return coder.NewIntervalWindow()
 }
 panic(fmt.Sprintf("Unexpected window coder: %v", urn))
}
// makeCoder translates a single model coder into its SDK representation,
// dispatching on the coder URN and recursively resolving component coders.
func (b *CoderUnmarshaller) makeCoder(c *pb.Coder) (*coder.Coder, error) {
 urn := c.GetSpec().GetSpec().GetUrn()
 components := c.GetComponentCoderIds()
 switch urn {
 case urnBytesCoder:
  return coder.NewBytes(), nil
 case urnVarIntCoder:
  return coder.NewVarInt(), nil
 case urnKVCoder:
  if len(components) != 2 {
   return nil, fmt.Errorf("bad pair: %v", c)
  }
  key, err := b.Coder(components[0])
  if err != nil {
   return nil, err
  }
  id := components[1]
  kind := coder.KV
  root := typex.KVType
  // Peek at the value coder without unmarshalling it: an iterable value
  // means this KV is really the output of a (Co)GBK.
  elm, err := b.peek(id)
  if err != nil {
   return nil, err
  }
  isGBK := elm.GetSpec().GetSpec().GetUrn() == urnIterableCoder
  if isGBK {
   id = elm.GetComponentCoderIds()[0]
   kind = coder.CoGBK
   root = typex.CoGBKType
   // TODO(BEAM-490): If CoGBK with > 1 input, handle as special GBK. We expect
   // it to be encoded as CoGBK<K,LP<CoGBKList<V,W,..>>>. Remove this handling once
   // CoGBK has a first-class representation.
   if ids, ok := b.isCoGBKList(id); ok {
    // CoGBK<K,V,W,..>
    values, err := b.Coders(ids)
    if err != nil {
     return nil, err
    }
    t := typex.New(root, append([]typex.FullType{key.T}, coder.Types(values)...)...)
    return &coder.Coder{Kind: kind, T: t, Components: append([]*coder.Coder{key}, values...)}, nil
   }
  }
  value, err := b.Coder(id)
  if err != nil {
   return nil, err
  }
  t := typex.New(root, key.T, value.T)
  return &coder.Coder{Kind: kind, T: t, Components: []*coder.Coder{key, value}}, nil
 case urnLengthPrefixCoder:
  if len(components) != 1 {
   return nil, fmt.Errorf("bad length prefix: %v", c)
  }
  elm, err := b.peek(components[0])
  if err != nil {
   return nil, err
  }
  if elm.GetSpec().GetSpec().GetUrn() != urnCustomCoder {
   // TODO(herohde) 11/17/2017: revisit this restriction
   return nil, fmt.Errorf("expected length prefix of custom coder only: %v", elm)
  }
  // The payload is a base64-encoded, serialized v1.CustomCoder proto.
  var ref v1.CustomCoder
  if err := protox.DecodeBase64(string(elm.GetSpec().GetSpec().GetPayload()), &ref); err != nil {
   return nil, err
  }
  custom, err := decodeCustomCoder(&ref)
  if err != nil {
   return nil, err
  }
  t := typex.New(custom.Type)
  return &coder.Coder{Kind: coder.Custom, T: t, Custom: custom}, nil
 case urnWindowedValueCoder:
  if len(components) != 2 {
   return nil, fmt.Errorf("bad windowed value: %v", c)
  }
  elm, err := b.Coder(components[0])
  if err != nil {
   return nil, err
  }
  w, err := b.WindowCoder(components[1])
  if err != nil {
   return nil, err
  }
  t := typex.New(typex.WindowedValueType, elm.T)
  return &coder.Coder{Kind: coder.WindowedValue, T: t, Components: []*coder.Coder{elm}, Window: w}, nil
 case streamType:
  // A bare stream coder is only valid as the value side of a KV pair.
  return nil, fmt.Errorf("stream must be pair value: %v", c)
 case "":
  // TODO(herohde) 11/27/2017: we still see CoderRefs from Dataflow. Handle that
  // case here, for now, so that the harness can use this logic.
  payload := c.GetSpec().GetSpec().GetPayload()
  var ref CoderRef
  if err := json.Unmarshal(payload, &ref); err != nil {
   return nil, fmt.Errorf("failed to decode urn-less coder payload \"%v\": %v", string(payload), err)
  }
  c, err := DecodeCoderRef(&ref)
  if err != nil {
   return nil, fmt.Errorf("failed to translate coder \"%v\": %v", string(payload), err)
  }
  return c, nil
 default:
  return nil, fmt.Errorf("custom coders must be length prefixed: %v", c)
 }
}
// peek returns the raw model coder for the given id without unmarshalling
// or caching it.
func (b *CoderUnmarshaller) peek(id string) (*pb.Coder, error) {
 if c, ok := b.models[id]; ok {
  return c, nil
 }
 return nil, fmt.Errorf("coder with id %v not found", id)
}
// isCoGBKList reports whether the coder with the given id is the injected
// LP<CoGBKList<...>> union used to represent a CoGBK with multiple inputs.
// On success it also returns the component coder ids of the union.
func (b *CoderUnmarshaller) isCoGBKList(id string) ([]string, bool) {
 outer, err := b.peek(id)
 if err != nil || outer.GetSpec().GetSpec().GetUrn() != urnLengthPrefixCoder {
  return nil, false
 }
 inner, err := b.peek(outer.GetComponentCoderIds()[0])
 if err != nil || inner.GetSpec().GetSpec().GetUrn() != urnCoGBKList {
  return nil, false
 }
 return inner.GetComponentCoderIds(), true
}
// CoderMarshaller incrementally builds a compact model representation of a set
// of coders. Identical coders are shared.
type CoderMarshaller struct {
 // coders maps generated id -> model coder; returned by Build.
 coders map[string]*pb.Coder
 coder2id map[string]string // index of serialized coders to id to deduplicate
}
// NewCoderMarshaller returns a new, empty CoderMarshaller.
func NewCoderMarshaller() *CoderMarshaller {
 m := &CoderMarshaller{
  coders: make(map[string]*pb.Coder),
  coder2id: make(map[string]string),
 }
 return m
}
// Add adds the given coder to the set and returns its id. Idempotent.
func (b *CoderMarshaller) Add(c *coder.Coder) string {
 switch c.Kind {
 case coder.Custom:
  // Custom coders are serialized to a base64 payload and wrapped in a
  // length-prefix coder so runners can skip over data they can't decode.
  ref, err := encodeCustomCoder(c.Custom)
  if err != nil {
   panic(fmt.Sprintf("failed to encode custom coder: %v", err))
  }
  data, err := protox.EncodeBase64(ref)
  if err != nil {
   panic(fmt.Sprintf("failed to marshal custom coder: %v", err))
  }
  inner := b.internCoder(&pb.Coder{
   Spec: &pb.SdkFunctionSpec{
    Spec: &pb.FunctionSpec{
     Urn: urnCustomCoder,
     Payload: []byte(data),
    },
    // TODO(BEAM-3204): coders should not have environments.
   },
  })
  return b.internBuiltInCoder(urnLengthPrefixCoder, inner)
 case coder.KV:
  comp := b.AddMulti(c.Components)
  return b.internBuiltInCoder(urnKVCoder, comp...)
 case coder.CoGBK:
  comp := b.AddMulti(c.Components)
  value := comp[1]
  if len(comp) > 2 {
   // More than one value component: inject the CoGBKList union coder,
   // length-prefixed, as the single value.
   // TODO(BEAM-490): don't inject union coder for CoGBK.
   union := b.internBuiltInCoder(urnCoGBKList, comp[1:]...)
   value = b.internBuiltInCoder(urnLengthPrefixCoder, union)
  }
  // CoGBK is modeled as KV<K, Iterable<V>>.
  stream := b.internBuiltInCoder(urnIterableCoder, value)
  return b.internBuiltInCoder(urnKVCoder, comp[0], stream)
 case coder.WindowedValue:
  comp := b.AddMulti(c.Components)
  comp = append(comp, b.AddWindowCoder(c.Window))
  return b.internBuiltInCoder(urnWindowedValueCoder, comp...)
 case coder.Bytes:
  // TODO(herohde) 6/27/2017: add length-prefix and not assume nested by context?
  return b.internBuiltInCoder(urnBytesCoder)
 case coder.VarInt:
  return b.internBuiltInCoder(urnVarIntCoder)
 default:
  panic(fmt.Sprintf("Unexpected coder kind: %v", c.Kind))
 }
}
// AddMulti adds the given coders to the set and returns their ids, in the
// same order. Idempotent.
func (b *CoderMarshaller) AddMulti(list []*coder.Coder) []string {
 var ids []string
 for i := range list {
  ids = append(ids, b.Add(list[i]))
 }
 return ids
}
// AddWindowCoder adds a window coder and returns its id. Panics on window
// kinds other than global and interval.
func (b *CoderMarshaller) AddWindowCoder(w *coder.WindowCoder) string {
 if w.Kind == coder.GlobalWindow {
  return b.internBuiltInCoder(urnGlobalWindow)
 }
 if w.Kind == coder.IntervalWindow {
  return b.internBuiltInCoder(urnIntervalWindow)
 }
 panic(fmt.Sprintf("Unexpected window kind: %v", w.Kind))
}
// Build returns the set of model coders. Note that the map may be larger
// than the number of coders added, because component coders are included.
func (b *CoderMarshaller) Build() map[string]*pb.Coder {
 // Returns the internal map directly; callers should treat it as read-only.
 return b.coders
}
// internBuiltInCoder registers a model coder for the given built-in URN and
// component coder ids, returning its (deduplicated) id.
func (b *CoderMarshaller) internBuiltInCoder(urn string, components ...string) string {
 return b.internCoder(&pb.Coder{
  Spec: &pb.SdkFunctionSpec{
   Spec: &pb.FunctionSpec{
    Urn: urn,
   },
  },
  ComponentCoderIds: components,
 })
}
// internCoder registers the given model coder, deduplicating by its
// serialized text form, and returns its (possibly pre-existing) id.
func (b *CoderMarshaller) internCoder(coder *pb.Coder) string {
 // The textual proto encoding serves as a stable deduplication key.
 key := proto.MarshalTextString(coder)
 if id, exists := b.coder2id[key]; exists {
  return id
 }
 // Ids are "c0", "c1", ... in insertion order.
 id := fmt.Sprintf("c%v", len(b.coder2id))
 b.coder2id[key] = id
 b.coders[id] = coder
 return id
}
| tgroh/incubator-beam | sdks/go/pkg/beam/core/runtime/graphx/coder.go | GO | apache-2.0 | 11,649 |
// Copyright 2004 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry.vlib.ejb.impl;
import java.rmi.RemoteException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.ejb.CreateException;
import javax.ejb.FinderException;
import javax.ejb.RemoveException;
import javax.ejb.SessionBean;
import javax.ejb.SessionContext;
import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.rmi.PortableRemoteObject;
import javax.sql.DataSource;
import org.apache.tapestry.Tapestry;
import org.apache.tapestry.contrib.ejb.XCreateException;
import org.apache.tapestry.contrib.ejb.XEJBException;
import org.apache.tapestry.contrib.ejb.XRemoveException;
import org.apache.tapestry.contrib.jdbc.IStatement;
import org.apache.tapestry.contrib.jdbc.StatementAssembly;
import org.apache.tapestry.vlib.ejb.Book;
import org.apache.tapestry.vlib.ejb.BorrowException;
import org.apache.tapestry.vlib.ejb.IBook;
import org.apache.tapestry.vlib.ejb.IBookHome;
import org.apache.tapestry.vlib.ejb.IPerson;
import org.apache.tapestry.vlib.ejb.IPersonHome;
import org.apache.tapestry.vlib.ejb.IPublisher;
import org.apache.tapestry.vlib.ejb.IPublisherHome;
import org.apache.tapestry.vlib.ejb.LoginException;
import org.apache.tapestry.vlib.ejb.Person;
import org.apache.tapestry.vlib.ejb.Publisher;
import org.apache.tapestry.vlib.ejb.RegistrationException;
import org.apache.tapestry.vlib.ejb.SortColumn;
import org.apache.tapestry.vlib.ejb.SortOrdering;
/**
* Implementation of the {@link org.apache.tapestry.vlib.ejb.IOperations}
* stateless session bean.
*
 * <p>Implements a number of stateless operations for the front end.
*
* @version $Id$
* @author Howard Lewis Ship
*
**/
public class OperationsBean implements SessionBean
{
    // Session context supplied by the EJB container via setSessionContext().
    private SessionContext _context;
    // JNDI environment context ("java:comp/env"); resolved in ejbCreate().
    private transient Context _environment;
    // Lazily-resolved EJB home interfaces; see getBookHome() and friends.
    private transient IBookHome _bookHome;
    private transient IPersonHome _personHome;
    private transient IPublisherHome _publisherHome;
    /**
     * Data source, retrieved from the ENC property
     * "jdbc/dataSource".
     *
     **/
    private transient DataSource _dataSource;
/**
* Sets up the bean. Locates the {@link DataSource} for the bean
* as <code>jdbc/dataSource</code> within the ENC; this data source is
* later used by {@link #getConnection()}.
*
**/
public void ejbCreate()
{
Context initial;
try
{
initial = new InitialContext();
_environment = (Context) initial.lookup("java:comp/env");
}
catch (NamingException e)
{
throw new XEJBException("Could not lookup environment.", e);
}
try
{
_dataSource = (DataSource) _environment.lookup("jdbc/dataSource");
}
catch (NamingException e)
{
e.printStackTrace();
throw new XEJBException("Could not lookup data source.", e);
}
}
    /**
     * Does nothing; stateless session beans have no per-instance state to
     * release (EJB lifecycle callback).
     **/
    public void ejbRemove()
    {
    }
    /**
     * Does nothing, not invoked in stateless session beans
     * (EJB lifecycle callback).
     **/
    public void ejbPassivate()
    {
    }
    /**
     * EJB lifecycle callback; stores the container-provided session context.
     **/
    public void setSessionContext(SessionContext value)
    {
        _context = value;
    }
    /**
     * Does nothing, not invoked in stateless session beans
     * (EJB lifecycle callback).
     *
     **/
    public void ejbActivate()
    {
    }
    /**
     * Finds the book and borrower (by their primary keys) and updates the book.
     *
     * <p>The {@link Book} value object is returned.
     *
     * @param bookId primary key of the book to borrow
     * @param borrowerId primary key of the borrowing person
     * @throws BorrowException if the book is not lendable
     * @throws FinderException if the book, borrower or owner does not exist
     **/
    public Book borrowBook(Integer bookId, Integer borrowerId)
        throws FinderException, RemoteException, BorrowException
    {
        IBookHome bookHome = getBookHome();
        IPersonHome personHome = getPersonHome();
        IBook book = bookHome.findByPrimaryKey(bookId);
        if (!book.getLendable())
            throw new BorrowException("Book may not be borrowed.");
        // Verify that the borrower exists.
        personHome.findByPrimaryKey(borrowerId);
        // TBD: Check that borrower has authenticated
        // findByPrimaryKey() throws an exception if the EJB doesn't exist,
        // so we're safe.
        personHome.findByPrimaryKey(book.getOwnerId());
        // Here's the real work; just setting the holder of the book
        // to be the borrower.
        book.setHolderId(borrowerId);
        return getBook(bookId);
    }
    /**
     * Adds a new book, verifying that the publisher and holder actually exist.
     *
     * @param attributes entity attributes for the new book; a "dateAdded"
     * timestamp is set automatically before creation
     * @return the primary key of the newly created book
     **/
    public Integer addBook(Map attributes) throws CreateException, RemoteException
    {
        IBookHome home = getBookHome();
        attributes.put("dateAdded", new Timestamp(System.currentTimeMillis()));
        IBook book = home.create(attributes);
        return (Integer) book.getPrimaryKey();
    }
    /**
     * Adds a book, which will be owned and held by the specified owner.
     *
     * <p>The publisherName may either be the name of a known publisher, or
     * a new name. A new {@link IPublisher} will be created as necessary.
     *
     * @param attributes entity attributes for the new book
     * @param publisherName existing or new publisher name
     * @return the primary key of the newly created book
     * (note: not the book attributes themselves)
     **/
    public Integer addBook(Map attributes, String publisherName)
        throws CreateException, RemoteException
    {
        IPublisher publisher = null;
        IPublisherHome publisherHome = getPublisherHome();
        // Find or create the publisher.
        try
        {
            publisher = publisherHome.findByName(publisherName);
        }
        catch (FinderException e)
        {
            // Ignore, means that no publisher with the given name already exists.
        }
        if (publisher == null)
            publisher = publisherHome.create(publisherName);
        attributes.put("publisherId", publisher.getPrimaryKey());
        return addBook(attributes);
    }
    /**
     * Updates a book's entity attributes in place.
     *
     * @param bookId The primary key of the book to update.
     * @param attributes attributes to change
     * @throws FinderException if the book does not exist
     **/
    public void updateBook(Integer bookId, Map attributes) throws FinderException, RemoteException
    {
        IBookHome bookHome = getBookHome();
        IBook book = bookHome.findByPrimaryKey(bookId);
        book.updateEntityAttributes(attributes);
    }
    /**
     * Updates a book, adding a new Publisher at the same time.
     *
     *
     * @param bookId The primary key of the book to update.
     * @param attributes attributes to change
     * @param publisherName The name of the new publisher.
     * @throws FinderException if the book, holder or publisher can not be located.
     * @throws CreateException if the {@link IPublisher} can not be created.
     **/
    public void updateBook(Integer bookId, Map attributes, String publisherName)
        throws CreateException, FinderException, RemoteException
    {
        IPublisher publisher = null;
        IPublisherHome publisherHome = getPublisherHome();
        try
        {
            publisher = publisherHome.findByName(publisherName);
        }
        catch (FinderException e)
        {
            // Ignore, means we need to create the Publisher
        }
        if (publisher == null)
            publisher = publisherHome.create(publisherName);
        // Don't duplicate all that other code!
        attributes.put("publisherId", publisher.getPrimaryKey());
        updateBook(bookId, attributes);
    }
    /**
     * Updates the entity attributes of an existing person.
     *
     * @param personId primary key of the person to update
     * @param attributes attributes to change
     * @throws FinderException if the person does not exist
     **/
    public void updatePerson(Integer personId, Map attributes)
        throws FinderException, RemoteException
    {
        IPersonHome home = getPersonHome();
        IPerson person = home.findByPrimaryKey(personId);
        person.updateEntityAttributes(attributes);
    }
    /**
     * Fetches all publishers directly via JDBC, sorted by name.
     *
     * @return all publishers as {@link Publisher} value objects
     * @throws XEJBException if the database query fails
     **/
    public Publisher[] getPublishers()
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        List list = new ArrayList();
        try
        {
            connection = getConnection();
            StatementAssembly assembly = new StatementAssembly();
            assembly.newLine("SELECT PUBLISHER_ID, NAME");
            assembly.newLine("FROM PUBLISHER");
            assembly.newLine("ORDER BY NAME");
            statement = assembly.createStatement(connection);
            set = statement.executeQuery();
            while (set.next())
            {
                Integer primaryKey = (Integer) set.getObject(1);
                String name = set.getString(2);
                list.add(new Publisher(primaryKey, name));
            }
        }
        catch (SQLException ex)
        {
            ex.printStackTrace();
            throw new XEJBException("Could not fetch all Publishers.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }
        // Convert from List to Publisher[]
        return (Publisher[]) list.toArray(new Publisher[list.size()]);
    }
    /**
     * Fetches all persons in the database (via direct JDBC, not entity
     * beans) and converts the rows to {@link Person} value objects.
     *
     * @return the {@link Person}s sorted by last name, then first
     * @throws XEJBException if the database query fails
     **/
    public Person[] getPersons()
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        List list = new ArrayList();
        try
        {
            connection = getConnection();
            StatementAssembly assembly = buildBasePersonQuery();
            assembly.newLine("ORDER BY LAST_NAME, FIRST_NAME");
            statement = assembly.createStatement(connection);
            set = statement.executeQuery();
            // The columns scratch array is reused for each row.
            Object[] columns = new Object[Person.N_COLUMNS];
            while (set.next())
            {
                list.add(convertRowToPerson(set, columns));
            }
        }
        catch (SQLException ex)
        {
            throw new XEJBException("Could not fetch all Persons.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }
        return (Person[]) list.toArray(new Person[list.size()]);
    }
    /**
     * Gets the {@link Person} for primary key.
     *
     * @param personId primary key of the person to fetch
     * @throws FinderException if the Person does not exist.
     * @throws XEJBException if the database query fails
     **/
    public Person getPerson(Integer personId) throws FinderException
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        Person result = null;
        try
        {
            connection = getConnection();
            StatementAssembly assembly = buildBasePersonQuery();
            assembly.newLine("WHERE ");
            assembly.add("PERSON_ID = ");
            assembly.addParameter(personId);
            assembly.newLine("ORDER BY LAST_NAME, FIRST_NAME");
            statement = assembly.createStatement(connection);
            set = statement.executeQuery();
            if (!set.next())
                throw new FinderException("Person #" + personId + " does not exist.");
            Object[] columns = new Object[Person.N_COLUMNS];
            result = convertRowToPerson(set, columns);
        }
        catch (SQLException ex)
        {
            throw new XEJBException("Unable to perform database query.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }
        return result;
    }
    /**
     * Authenticates a user by e-mail address and password, updating the
     * person's last-access timestamp on success.
     *
     * @param email e-mail address identifying the person
     * @param password plaintext password to compare
     * @return the authenticated {@link Person}
     * @throws LoginException if the address is unknown, the password does
     * not match, or the person is locked out
     **/
    public Person login(String email, String password) throws RemoteException, LoginException
    {
        IPersonHome home = getPersonHome();
        IPerson person = null;
        Person result = null;
        try
        {
            person = home.findByEmail(email);
        }
        catch (FinderException ex)
        {
            throw new LoginException("Unknown e-mail address.", false);
        }
        // NOTE(review): plaintext password comparison; passwords appear to be
        // stored unhashed -- worth confirming/addressing outside this method.
        if (!person.getPassword().equals(password))
            throw new LoginException("Invalid password.", true);
        try
        {
            result = getPerson((Integer) person.getPrimaryKey());
        }
        catch (FinderException ex)
        {
            throw new LoginException("Could not read person.", false);
        }
        if (result.isLockedOut())
            throw new LoginException("You have been locked out of the Virtual Library.", false);
        // Set the last access time for any subsequent login.
        person.setLastAccess(new Timestamp(System.currentTimeMillis()));
        return result;
    }
    /**
     * Returns the raw entity attributes of the person with the given
     * primary key.
     *
     * @throws FinderException if the person does not exist
     **/
    public Map getPersonAttributes(Integer personId) throws FinderException, RemoteException
    {
        IPersonHome home = getPersonHome();
        IPerson person = home.findByPrimaryKey(personId);
        return person.getEntityAttributes();
    }
    /**
     * Retrieves a single {@link Book} by its primary key.
     *
     * @param bookId primary key of the book to fetch
     * @throws FinderException if the Book does not exist.
     * @throws XEJBException if the database query fails
     **/
    public Book getBook(Integer bookId) throws FinderException
    {
        Connection connection = null;
        IStatement statement = null;
        ResultSet set = null;
        Book result = null;
        try
        {
            connection = getConnection();
            StatementAssembly assembly = buildBaseBookQuery();
            assembly.addSep(" AND ");
            assembly.add("book.BOOK_ID = ");
            assembly.addParameter(bookId);
            statement = assembly.createStatement(connection);
            set = statement.executeQuery();
            if (!set.next())
                throw new FinderException("Book " + bookId + " does not exist.");
            Object[] columns = new Object[Book.N_COLUMNS];
            result = convertRowToBook(set, columns);
        }
        catch (SQLException ex)
        {
            throw new XEJBException("Unable to perform database query.", ex);
        }
        finally
        {
            close(connection, statement, set);
        }
        return result;
    }
    /**
     * Returns the raw entity attributes of the book with the given
     * primary key.
     *
     * @throws FinderException if the book does not exist
     **/
    public Map getBookAttributes(Integer bookId) throws FinderException, RemoteException
    {
        IBookHome home = getBookHome();
        IBook book = home.findByPrimaryKey(bookId);
        return book.getEntityAttributes();
    }
    /**
     * Attempts to register a new user, first checking that the
     * e-mail and names are unique. Returns the newly created
     * {@link Person}.
     *
     * @throws RegistrationException if the password is blank or the
     * name/e-mail is already in use (via validateUniquePerson)
     **/
    public Person registerNewUser(String firstName, String lastName, String email, String password)
        throws RegistrationException, CreateException, RemoteException
    {
        IPersonHome home;
        if (password == null || password.trim().length() == 0)
            throw new RegistrationException("Must specify a password.");
        validateUniquePerson(firstName, lastName, email);
        home = getPersonHome();
        Map attributes = new HashMap();
        attributes.put("lastName", lastName.trim());
        attributes.put("firstName", firstName.trim());
        attributes.put("email", email.trim());
        attributes.put("password", password.trim());
        attributes.put("lastAccess", new Timestamp(System.currentTimeMillis()));
        IPerson person = home.create(attributes);
        Integer personId = (Integer) person.getPrimaryKey();
        try
        {
            return getPerson(personId);
        }
        catch (FinderException ex)
        {
            throw new XCreateException("Unable to find newly created Person.", ex);
        }
    }
    /**
     * Deletes a book, returning its last state as a {@link Book} value
     * object (read before removal).
     *
     * @throws XRemoveException if the book can not be found
     **/
    public Book deleteBook(Integer bookId) throws RemoveException, RemoteException
    {
        IBookHome home = getBookHome();
        Book result = null;
        try
        {
            result = getBook(bookId);
        }
        catch (FinderException ex)
        {
            throw new XRemoveException(ex);
        }
        home.remove(bookId);
        return result;
    }
    /**
     * Transfers a number of books to a new owner.
     *
     * @param newOwnerId primary key of the person receiving the books
     * @param bookIds primary keys of the books to transfer
     * @throws FinderException if the new owner or any book does not exist
     **/
    public void transferBooks(Integer newOwnerId, Integer[] bookIds)
        throws FinderException, RemoteException
    {
        if (bookIds == null)
            throw new RemoteException("Must supply non-null list of books to transfer.");
        if (newOwnerId == null)
            throw new RemoteException("Must provide an owner for the books.");
        // Verify that the new owner exists.
        IPersonHome personHome = getPersonHome();
        personHome.findByPrimaryKey(newOwnerId);
        // Direct SQL would be more efficient, but this'll probably do.
        IBookHome home = getBookHome();
        for (int i = 0; i < bookIds.length; i++)
        {
            IBook book = home.findByPrimaryKey(bookIds[i]);
            book.setOwnerId(newOwnerId);
        }
    }
    /**
     * Applies a batch of publisher edits: renames the updated publishers
     * and removes the deleted ones. Either array may be null.
     *
     * @param updated publishers whose names should be written back
     * @param deleted primary keys of publishers to remove
     **/
    public void updatePublishers(Publisher[] updated, Integer[] deleted)
        throws FinderException, RemoveException, RemoteException
    {
        IPublisherHome home = getPublisherHome();
        if (updated != null)
        {
            for (int i = 0; i < updated.length; i++)
            {
                IPublisher publisher = home.findByPrimaryKey(updated[i].getId());
                publisher.setName(updated[i].getName());
            }
        }
        if (deleted != null)
        {
            for (int i = 0; i < deleted.length; i++)
            {
                home.remove(deleted[i]);
            }
        }
    }
    /**
     * Applies a batch of administrative person edits: updates admin and
     * locked-out flags, resets passwords, and deletes persons. Books held
     * by deleted persons are returned to their owners, and books owned by
     * deleted persons are transferred to the given administrator.
     *
     * @param updated persons whose admin/locked-out flags are written back
     * @param resetPassword primary keys of persons receiving newPassword
     * @param newPassword replacement password for the reset set
     * @param deleted primary keys of persons to remove
     * @param adminId administrator who inherits deleted persons' books
     **/
    public void updatePersons(
        Person[] updated,
        Integer[] resetPassword,
        String newPassword,
        Integer[] deleted,
        Integer adminId)
        throws FinderException, RemoveException, RemoteException
    {
        IPersonHome home = getPersonHome();
        int count = Tapestry.size(updated);
        for (int i = 0; i < count; i++)
        {
            Person u = updated[i];
            IPerson person = home.findByPrimaryKey(u.getId());
            person.setAdmin(u.isAdmin());
            person.setLockedOut(u.isLockedOut());
        }
        count = Tapestry.size(resetPassword);
        for (int i = 0; i < count; i++)
        {
            IPerson person = home.findByPrimaryKey(resetPassword[i]);
            person.setPassword(newPassword);
        }
        count = Tapestry.size(deleted);
        if (count > 0)
        {
            // Reassign books before removing the persons themselves.
            returnBooksFromDeletedPersons(deleted);
            moveBooksFromDeletedPersons(deleted, adminId);
        }
        for (int i = 0; i < count; i++)
            home.remove(deleted[i]);
    }
    /**
     * Invoked to update all books held by people about to be deleted, to
     * reassign the book's holder back to the owner (bulk SQL update on
     * HOLDER_ID).
     *
     **/
    private void returnBooksFromDeletedPersons(Integer deletedPersonIds[]) throws RemoveException
    {
        StatementAssembly assembly = new StatementAssembly();
        assembly.add("UPDATE BOOK");
        assembly.newLine("SET HOLDER_ID = OWNER_ID");
        assembly.newLine("WHERE HOLDER_ID IN (");
        assembly.addParameterList(deletedPersonIds, ", ");
        assembly.add(")");
        executeUpdate(assembly);
    }
    /**
     * Invoked to execute a bulk update that moves books owned by the
     * deleted persons to the new admin.
     *
     **/
    private void moveBooksFromDeletedPersons(Integer deletedPersonIds[], Integer adminId)
        throws RemoveException
    {
        StatementAssembly assembly = new StatementAssembly();
        assembly.add("UPDATE BOOK");
        assembly.newLine("SET OWNER_ID = ");
        assembly.addParameter(adminId);
        assembly.newLine("WHERE OWNER_ID IN (");
        assembly.addParameterList(deletedPersonIds, ", ");
        assembly.add(")");
        executeUpdate(assembly);
    }
    /**
     * Executes the assembled statement as a bulk update, wrapping any
     * {@link SQLException} in an {@link XRemoveException}. Resources are
     * released via {@link #close(Connection, IStatement, ResultSet)} if the
     * in-line close is skipped by an exception.
     **/
    private void executeUpdate(StatementAssembly assembly) throws XRemoveException
    {
        Connection connection = null;
        IStatement statement = null;
        try
        {
            connection = getConnection();
            statement = assembly.createStatement(connection);
            statement.executeUpdate();
            // Null after closing so the finally block doesn't close twice.
            statement.close();
            statement = null;
            connection.close();
            connection = null;
        }
        catch (SQLException ex)
        {
            throw new XRemoveException(
                "Unable to execute " + assembly + ": " + ex.getMessage(),
                ex);
        }
        finally
        {
            close(connection, statement, null);
        }
    }
    /**
     * Translates the next row from the result set into a {@link Book}.
     *
     * <p>This works with queries generated by {@link #buildBaseBookQuery()};
     * the column order here must match BOOK_SELECT_COLUMNS exactly.
     *
     * @param set result set positioned on the row to convert
     * @param columns scratch array of length {@link Book}.N_COLUMNS,
     * reusable across rows
     **/
    protected Book convertRowToBook(ResultSet set, Object[] columns) throws SQLException
    {
        int column = 1;
        columns[Book.ID_COLUMN] = set.getObject(column++);
        columns[Book.TITLE_COLUMN] = set.getString(column++);
        columns[Book.DESCRIPTION_COLUMN] = set.getString(column++);
        columns[Book.ISBN_COLUMN] = set.getString(column++);
        columns[Book.OWNER_ID_COLUMN] = set.getObject(column++);
        columns[Book.OWNER_NAME_COLUMN] =
            buildName(set.getString(column++), set.getString(column++));
        columns[Book.HOLDER_ID_COLUMN] = set.getObject(column++);
        columns[Book.HOLDER_NAME_COLUMN] =
            buildName(set.getString(column++), set.getString(column++));
        columns[Book.PUBLISHER_ID_COLUMN] = set.getObject(column++);
        columns[Book.PUBLISHER_NAME_COLUMN] = set.getString(column++);
        columns[Book.AUTHOR_COLUMN] = set.getString(column++);
        // getBoolean() is a helper defined elsewhere in this class
        // (not visible in this chunk).
        columns[Book.HIDDEN_COLUMN] = getBoolean(set, column++);
        columns[Book.LENDABLE_COLUMN] = getBoolean(set, column++);
        columns[Book.DATE_ADDED_COLUMN] = set.getTimestamp(column++);
        return new Book(columns);
    }
private String buildName(String firstName, String lastName)
{
if (firstName == null)
return lastName;
return firstName + " " + lastName;
}
    /**
     * All queries must use this exact set of select columns, so that
     * {@link #convertRowToBook(ResultSet, Object[])} can build
     * the correct {@link Book} from each row.
     *
     **/
    private static final String[] BOOK_SELECT_COLUMNS =
        {
            "book.BOOK_ID",
            "book.TITLE",
            "book.DESCRIPTION",
            "book.ISBN",
            "owner.PERSON_ID",
            "owner.FIRST_NAME",
            "owner.LAST_NAME",
            "holder.PERSON_ID",
            "holder.FIRST_NAME",
            "holder.LAST_NAME",
            "publisher.PUBLISHER_ID",
            "publisher.NAME",
            "book.AUTHOR",
            "book.HIDDEN",
            "book.LENDABLE",
            "book.DATE_ADDED" };
    // Table aliases used by the select columns and joins above/below.
    private static final String[] BOOK_ALIAS_COLUMNS =
        { "BOOK book", "PERSON owner", "PERSON holder", "PUBLISHER publisher" };
    // Join conditions tying books to their owner, holder and publisher.
    private static final String[] BOOK_JOINS =
        {
            "book.OWNER_ID = owner.PERSON_ID",
            "book.HOLDER_ID = holder.PERSON_ID",
            "book.PUBLISHER_ID = publisher.PUBLISHER_ID" };
    // Maps a SortColumn to the corresponding ORDER BY fragment, one map per
    // sort direction; consumed by addSortOrdering().
    private static final Map BOOK_SORT_ASCENDING = new HashMap();
    private static final Map BOOK_SORT_DESCENDING = new HashMap();
    static {
        BOOK_SORT_ASCENDING.put(SortColumn.TITLE, "book.TITLE");
        BOOK_SORT_ASCENDING.put(SortColumn.HOLDER, "holder.LAST_NAME, holder.FIRST_NAME");
        BOOK_SORT_ASCENDING.put(SortColumn.OWNER, "owner.FIRST_NAME, owner.LAST_NAME");
        BOOK_SORT_ASCENDING.put(SortColumn.PUBLISHER, "publisher.NAME");
        BOOK_SORT_ASCENDING.put(SortColumn.AUTHOR, "book.AUTHOR");
        BOOK_SORT_DESCENDING.put(SortColumn.TITLE, "book.TITLE DESC");
        BOOK_SORT_DESCENDING.put(
            SortColumn.HOLDER,
            "holder.LAST_NAME DESC, holder.FIRST_NAME DESC");
        BOOK_SORT_DESCENDING.put(SortColumn.OWNER, "owner.FIRST_NAME DESC, owner.LAST_NAME DESC");
        BOOK_SORT_DESCENDING.put(SortColumn.PUBLISHER, "publisher.NAME DESC");
        BOOK_SORT_DESCENDING.put(SortColumn.AUTHOR, "book.AUTHOR DESC");
    }
    /**
     * Builds the shared SELECT/FROM/WHERE skeleton for book queries, using
     * the column, alias and join constants above. Callers append further
     * conditions and ordering.
     **/
    protected StatementAssembly buildBaseBookQuery()
    {
        StatementAssembly result = new StatementAssembly();
        result.newLine("SELECT ");
        result.addList(BOOK_SELECT_COLUMNS, ", ");
        result.newLine("FROM ");
        result.addList(BOOK_ALIAS_COLUMNS, ", ");
        result.newLine("WHERE ");
        result.addList(BOOK_JOINS, " AND ");
        return result;
    }
    /**
     * Adds a sort ordering clause to the statement. If ordering is null,
     * orders by book title.
     *
     * @param assembly to update
     * @param ordering defines the column to sort on, and the order (ascending or descending)
     * @since 3.0
     *
     *
     **/
    protected void addSortOrdering(StatementAssembly assembly, SortOrdering ordering)
    {
        if (ordering == null)
        {
            assembly.newLine("ORDER BY book.TITLE");
            return;
        }
        // Look up the ORDER BY fragment in the direction-specific map.
        Map sorts = ordering.isDescending() ? BOOK_SORT_DESCENDING : BOOK_SORT_ASCENDING;
        String term = (String) sorts.get(ordering.getColumn());
        assembly.newLine("ORDER BY ");
        assembly.add(term);
    }
    /**
     * Appends a case-insensitive LIKE condition on the given column for the
     * (trimmed) value; a null or blank value adds nothing.
     **/
    protected void addSubstringSearch(StatementAssembly assembly, String column, String value)
    {
        if (value == null)
            return;
        String trimmed = value.trim();
        if (trimmed.length() == 0)
            return;
        // Here's the McKoi dependency: LOWER() is a database-specific
        // SQL function.
        assembly.addSep(" AND LOWER(");
        assembly.add(column);
        assembly.add(") LIKE");
        assembly.addParameter("%" + trimmed.toLowerCase() + "%");
    }
    /**
     * Closes the resultSet (if not null), then the statement (if not null),
     * then the Connection (if not null). Exceptions are written to System.out.
     *
     **/
    protected void close(Connection connection, IStatement statement, ResultSet resultSet)
    {
        if (resultSet != null)
        {
            try
            {
                resultSet.close();
            }
            catch (SQLException ex)
            {
                System.out.println("Exception closing result set.");
                ex.printStackTrace();
            }
        }
        if (statement != null)
        {
            try
            {
                statement.close();
            }
            catch (SQLException ex)
            {
                System.out.println("Exception closing statement.");
                ex.printStackTrace();
            }
        }
        if (connection != null)
        {
            try
            {
                connection.close();
            }
            catch (SQLException ex)
            {
                System.out.println("Exception closing connection.");
                ex.printStackTrace();
            }
        }
    }
/**
 * Lazily looks up and caches the Person home interface from JNDI
 * (binding {@code ejb/Person}).
 *
 * @throws XEJBException if the JNDI lookup fails
 **/
private IPersonHome getPersonHome() {
    if (_personHome == null) {
        try {
            Object located = _environment.lookup("ejb/Person");
            _personHome = (IPersonHome) PortableRemoteObject.narrow(located, IPersonHome.class);
        } catch (NamingException e) {
            throw new XEJBException("Could not lookup Person home interface.", e);
        }
    }

    return _personHome;
}
/**
 * Lazily looks up and caches the Publisher home interface from JNDI
 * (binding {@code ejb/Publisher}).
 *
 * @throws XEJBException if the JNDI lookup fails
 **/
private IPublisherHome getPublisherHome() {
    if (_publisherHome == null) {
        try {
            Object located = _environment.lookup("ejb/Publisher");
            _publisherHome =
                (IPublisherHome) PortableRemoteObject.narrow(located, IPublisherHome.class);
        } catch (NamingException e) {
            throw new XEJBException("Could not lookup Publisher home interface.", e);
        }
    }

    return _publisherHome;
}
/**
 * Lazily looks up and caches the Book home interface from JNDI
 * (binding {@code ejb/Book}).
 *
 * @throws XEJBException if the JNDI lookup fails
 **/
private IBookHome getBookHome() {
    if (_bookHome == null) {
        try {
            Object located = _environment.lookup("ejb/Book");
            _bookHome = (IBookHome) PortableRemoteObject.narrow(located, IBookHome.class);
        } catch (NamingException e) {
            throw new XEJBException("Could not lookup Book home interface.", e);
        }
    }

    return _bookHome;
}
/**
 * Gets a new connection from the data source.
 *
 * @return a fresh JDBC {@link Connection}; the caller is responsible for
 *         closing it (see {@code close})
 * @throws XEJBException if no connection can be obtained
 **/
protected Connection getConnection() {
    try {
        return _dataSource.getConnection();
    } catch (SQLException e) {
        throw new XEJBException("Unable to get database connection from pool.", e);
    }
}
/**
 * Assembles the base SELECT over the PERSON table used by person queries;
 * the column order must match {@code convertRowToPerson}.
 *
 * @return a new {@link StatementAssembly} ready for additional clauses
 **/
protected StatementAssembly buildBasePersonQuery() {
    StatementAssembly assembly = new StatementAssembly();

    assembly.newLine("SELECT PERSON_ID, FIRST_NAME, LAST_NAME, EMAIL, ");
    assembly.newLine(" LOCKED_OUT, ADMIN, LAST_ACCESS");
    assembly.newLine("FROM PERSON");

    return assembly;
}
/**
 * Translates the next row from the result set into a {@link Person}.
 *
 * <p>This works with queries generated by {@link #buildBasePersonQuery()};
 * the read order below must match that query's SELECT list.
 **/
protected Person convertRowToPerson(ResultSet set, Object[] columns) throws SQLException {
    int index = 1;

    columns[Person.ID_COLUMN] = set.getObject(index++);
    columns[Person.FIRST_NAME_COLUMN] = set.getString(index++);
    columns[Person.LAST_NAME_COLUMN] = set.getString(index++);
    columns[Person.EMAIL_COLUMN] = set.getString(index++);
    columns[Person.LOCKED_OUT_COLUMN] = getBoolean(set, index++);
    columns[Person.ADMIN_COLUMN] = getBoolean(set, index++);
    columns[Person.LAST_ACCESS_COLUMN] = set.getTimestamp(index++);

    return new Person(columns);
}
/**
 * Reads a boolean column as the canonical {@link Boolean} constant
 * ({@code Boolean.valueOf} returns the cached TRUE/FALSE instances).
 **/
private Boolean getBoolean(ResultSet set, int index) throws SQLException {
    return Boolean.valueOf(set.getBoolean(index));
}
/**
 * Verifies that no existing person already uses the given email address or
 * the same first/last name pair. Comparison is whitespace-trimmed and
 * case-insensitive (via LOWER() on both sides).
 *
 * @throws RegistrationException if either uniqueness check fails, or if the
 *         database cannot be accessed
 **/
private void validateUniquePerson(String firstName, String lastName, String email)
throws RegistrationException
{
    Connection connection = null;
    IStatement statement = null;
    ResultSet set = null;
    // Normalize inputs the same way the SQL does, so the comparison matches.
    String trimmedEmail = email.trim().toLowerCase();
    String trimmedLastName = lastName.trim().toLowerCase();
    String trimmedFirstName = firstName.trim().toLowerCase();
    try
    {
        connection = getConnection();
        // First check: the email address must be unique.
        StatementAssembly assembly = new StatementAssembly();
        assembly.newLine("SELECT PERSON_ID");
        assembly.newLine("FROM PERSON");
        assembly.newLine("WHERE ");
        assembly.add("LOWER(EMAIL) = ");
        assembly.addParameter(trimmedEmail);
        statement = assembly.createStatement(connection);
        set = statement.executeQuery();
        if (set.next())
            throw new RegistrationException("Email address is already in use by another user.");
        // Release the first statement/result set before the variables are
        // reused below; the finally block then only closes the second pair.
        close(null, statement, set);
        // Second check: the first/last name combination must be unique.
        assembly = new StatementAssembly();
        assembly.newLine("SELECT PERSON_ID");
        assembly.newLine("FROM PERSON");
        assembly.newLine("WHERE ");
        assembly.add("LOWER(FIRST_NAME) = ");
        assembly.addParameter(trimmedFirstName);
        assembly.addSep(" AND ");
        assembly.add("LOWER(LAST_NAME) = ");
        assembly.addParameter(trimmedLastName);
        statement = assembly.createStatement(connection);
        set = statement.executeQuery();
        if (set.next())
            throw new RegistrationException("Name provided is already in use by another user.");
    }
    catch (SQLException e)
    {
        throw new RegistrationException("Could not access database: " + e.getMessage(), e);
    }
    finally
    {
        // Always release JDBC resources, even when a check threw.
        close(connection, statement, set);
    }
}
/**
 * Returns a borrowed book: the owner becomes the current holder again.
 *
 * @param bookId primary key of the book being returned
 * @return the updated book data
 **/
public Book returnBook(Integer bookId) throws RemoteException, FinderException {
    IBookHome bookHome = getBookHome();
    IBook book = bookHome.findByPrimaryKey(bookId);

    // Hand the book back to its owner.
    book.setHolderId(book.getOwnerId());

    return getBook(bookId);
}
} | apache/tapestry3 | tapestry-examples/VlibBeans/src/org/apache/tapestry/vlib/ejb/impl/OperationsBean.java | Java | apache-2.0 | 33,019 |
/*
* Copyright © 2014 - 2018 Leipzig University (Database Research Group)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
 * Contains implementations of graph pattern matching on a single input graph.
*/
package org.gradoop.flink.model.impl.operators.matching.transactional.function;
| niklasteichmann/gradoop | gradoop-flink/src/main/java/org/gradoop/flink/model/impl/operators/matching/transactional/function/package-info.java | Java | apache-2.0 | 802 |
package org.sakaiproject.scorm.ui.player.behaviors;
import org.adl.api.ecmascript.SCORM13APIInterface;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.scorm.model.api.ScoBean;
import org.sakaiproject.scorm.model.api.SessionBean;
import org.sakaiproject.scorm.navigation.INavigable;
import org.sakaiproject.scorm.navigation.INavigationEvent;
import org.sakaiproject.scorm.service.api.ScormApplicationService;
import org.sakaiproject.scorm.service.api.ScormSequencingService;
public abstract class SCORM13API implements SCORM13APIInterface {

    private static final Log log = LogFactory.getLog(SCORM13API.class);

    // String value of FALSE for JavaScript returns.
    protected static final String STRING_FALSE = "false";

    // String value of TRUE for JavaScript returns.
    protected static final String STRING_TRUE = "true";

    /** Per-learner run state for the current attempt; may be null if no attempt is active. */
    public abstract SessionBean getSessionBean();

    /** Service implementing the SCORM runtime (CMI) calls. */
    public abstract ScormApplicationService getApplicationService();

    /** Service implementing SCORM sequencing/navigation. */
    public abstract ScormSequencingService getSequencingService();

    /** State for the SCO currently communicating with this API instance. */
    public abstract ScoBean getScoBean();

    /** Agent notified when navigation occurs. */
    public abstract INavigable getAgent();

    /** Opaque target handed through to the sequencing service on navigation. */
    public abstract Object getTarget();

    // Implementation of SCORM13APIInterface

    /**
     * Persists pending runtime data for the current SCO.
     *
     * @param parameter SCORM API argument (expected to be an empty string)
     * @return "true" on success, otherwise "false"
     */
    public String Commit(String parameter) {
        // TODO: Disable UI controls -- or throttle them on server -- don't mess with js

        // Assume failure
        String result = STRING_FALSE;

        if (null == getSessionBean()) {
            log.error("Null run state!");
            // Fix: previously execution fell through and invoked commit with a
            // null session bean. Fail fast instead, mirroring Terminate.
            return result;
        }

        if (getApplicationService().commit(parameter, getSessionBean(), getScoBean()))
            result = STRING_TRUE;

        // TODO: Enable UI controls

        return result;
    }

    /** Returns vendor-specific diagnostic text for the given error code. */
    public String GetDiagnostic(String errorCode) {
        return getApplicationService().getDiagnostic(errorCode, getSessionBean());
    }

    /** Returns the human-readable text for the given error code. */
    public String GetErrorString(String errorCode) {
        return getApplicationService().getErrorString(errorCode, getSessionBean());
    }

    /** Returns the code of the last error raised by the runtime. */
    public String GetLastError() {
        return getApplicationService().getLastError(getSessionBean());
    }

    /** Reads a CMI data model element value. */
    public String GetValue(String parameter) {
        return getApplicationService().getValue(parameter, getSessionBean(), getScoBean());
    }

    /**
     * Begins a communication session for the current SCO.
     *
     * @return "true" on success, otherwise "false"
     */
    public String Initialize(String parameter) {
        // Assume failure
        String result = STRING_FALSE;

        if (getApplicationService().initialize(parameter, getSessionBean(), getScoBean()))
            result = STRING_TRUE;

        return result;
    }

    /**
     * Writes a CMI data model element value.
     *
     * @return "true" on success, otherwise "false"
     */
    public String SetValue(String dataModelElement, String value) {
        // Assume failure
        String result = STRING_FALSE;

        if (getApplicationService().setValue(dataModelElement, value, getSessionBean(), getScoBean())) {
            result = STRING_TRUE;
        }

        return result;
    }

    /**
     * Ends the communication session and triggers any navigation request
     * recorded during termination (e.g. continue/previous or a choice).
     *
     * @return "true" on success, otherwise "false"
     */
    public String Terminate(String parameter) {
        // Assume failure
        String result = STRING_FALSE;

        if (null == getSessionBean()) {
            log.error("Null run state!");
            return result;
        }

        INavigationEvent navigationEvent = getApplicationService().newNavigationEvent();
        boolean isSuccessful = getApplicationService().terminate(parameter, navigationEvent,
                getSessionBean(), getScoBean());

        if (isSuccessful) {
            result = STRING_TRUE;

            if (navigationEvent.isChoiceEvent()) {
                getSequencingService().navigate(navigationEvent.getChoiceEvent(), getSessionBean(), getAgent(), getTarget());
            } else {
                getSequencingService().navigate(navigationEvent.getEvent(), getSessionBean(), getAgent(), getTarget());
            }
        }

        return result;
    }
}
| marktriggs/nyu-sakai-10.4 | scorm/scorm-tool/src/java/org/sakaiproject/scorm/ui/player/behaviors/SCORM13API.java | Java | apache-2.0 | 3,425 |
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
#pragma once
#include <aws/alexaforbusiness/AlexaForBusiness_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <utility>
namespace Aws
{
template<typename RESULT_TYPE>
class AmazonWebServiceResult;
namespace Utils
{
namespace Json
{
class JsonValue;
} // namespace Json
} // namespace Utils
namespace AlexaForBusiness
{
namespace Model
{
/**
 * Result of the AlexaForBusiness CreateAddressBook operation. Carries the
 * ARN assigned to the newly created address book.
 */
class AWS_ALEXAFORBUSINESS_API CreateAddressBookResult
{
public:
CreateAddressBookResult();
CreateAddressBookResult(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);
CreateAddressBookResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);
/**
 * <p>The ARN of the newly created address book.</p>
 */
inline const Aws::String& GetAddressBookArn() const{ return m_addressBookArn; }
/**
 * <p>The ARN of the newly created address book.</p>
 */
inline void SetAddressBookArn(const Aws::String& value) { m_addressBookArn = value; }
/**
 * <p>The ARN of the newly created address book.</p>
 */
inline void SetAddressBookArn(Aws::String&& value) { m_addressBookArn = std::move(value); }
/**
 * <p>The ARN of the newly created address book.</p>
 */
inline void SetAddressBookArn(const char* value) { m_addressBookArn.assign(value); }
/**
 * <p>The ARN of the newly created address book.</p>
 */
inline CreateAddressBookResult& WithAddressBookArn(const Aws::String& value) { SetAddressBookArn(value); return *this;}
/**
 * <p>The ARN of the newly created address book.</p>
 */
inline CreateAddressBookResult& WithAddressBookArn(Aws::String&& value) { SetAddressBookArn(std::move(value)); return *this;}
/**
 * <p>The ARN of the newly created address book.</p>
 */
inline CreateAddressBookResult& WithAddressBookArn(const char* value) { SetAddressBookArn(value); return *this;}
private:
// ARN of the created address book, deserialized from the service response.
Aws::String m_addressBookArn;
};
} // namespace Model
} // namespace AlexaForBusiness
} // namespace Aws
| cedral/aws-sdk-cpp | aws-cpp-sdk-alexaforbusiness/include/aws/alexaforbusiness/model/CreateAddressBookResult.h | C | apache-2.0 | 2,626 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.util;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import junit.framework.TestCase;
/**
 * Tests for {@code CaseInsensitiveMap}: case-agnostic lookup and removal,
 * preservation of the original key case when copying entries to other maps,
 * serialization, and concurrent bulk updates.
 *
 * @version
 */
public class CaseInsensitiveMapTest extends TestCase {

    /** Values stored under one casing must be retrievable under any casing. */
    public void testLookupCaseAgnostic() {
        Map<String, Object> map = new CaseInsensitiveMap();
        assertNull(map.get("foo"));
        map.put("foo", "cheese");
        assertEquals("cheese", map.get("foo"));
        assertEquals("cheese", map.get("Foo"));
        assertEquals("cheese", map.get("FOO"));
    }

    /** Adding a second key must not disturb case-agnostic lookup of the first. */
    public void testLookupCaseAgnosticAddHeader() {
        Map<String, Object> map = new CaseInsensitiveMap();
        assertNull(map.get("foo"));
        map.put("foo", "cheese");
        assertEquals("cheese", map.get("foo"));
        assertEquals("cheese", map.get("Foo"));
        assertEquals("cheese", map.get("FOO"));
        assertNull(map.get("unknown"));
        map.put("bar", "beer");
        assertEquals("beer", map.get("bar"));
        assertEquals("beer", map.get("Bar"));
        assertEquals("beer", map.get("BAR"));
        assertNull(map.get("unknown"));
    }

    /** As above, but querying the upper-cased variant first. */
    public void testLookupCaseAgnosticAddHeader2() {
        Map<String, Object> map = new CaseInsensitiveMap();
        assertNull(map.get("foo"));
        map.put("foo", "cheese");
        assertEquals("cheese", map.get("FOO"));
        assertEquals("cheese", map.get("foo"));
        assertEquals("cheese", map.get("Foo"));
        assertNull(map.get("unknown"));
        map.put("bar", "beer");
        assertEquals("beer", map.get("BAR"));
        assertEquals("beer", map.get("bar"));
        assertEquals("beer", map.get("Bar"));
        assertNull(map.get("unknown"));
    }

    /** Removal must also be case-agnostic. */
    public void testLookupCaseAgnosticAddHeaderRemoveHeader() {
        Map<String, Object> map = new CaseInsensitiveMap();
        assertNull(map.get("foo"));
        map.put("foo", "cheese");
        assertEquals("cheese", map.get("foo"));
        assertEquals("cheese", map.get("Foo"));
        assertEquals("cheese", map.get("FOO"));
        assertNull(map.get("unknown"));
        map.put("bar", "beer");
        assertEquals("beer", map.get("bar"));
        assertEquals("beer", map.get("Bar"));
        assertEquals("beer", map.get("BAR"));
        assertNull(map.get("unknown"));
        map.remove("bar");
        assertNull(map.get("bar"));
        assertNull(map.get("unknown"));
    }

    /** Putting under a different casing overwrites the existing entry. */
    public void testSetWithDifferentCase() {
        Map<String, Object> map = new CaseInsensitiveMap();
        assertNull(map.get("foo"));
        map.put("foo", "cheese");
        map.put("Foo", "bar");
        assertEquals("bar", map.get("FOO"));
        assertEquals("bar", map.get("foo"));
        assertEquals("bar", map.get("Foo"));
    }

    /** Removing under a different casing empties the map. */
    public void testRemoveWithDifferentCase() {
        Map<String, Object> map = new CaseInsensitiveMap();
        assertNull(map.get("foo"));
        map.put("foo", "cheese");
        map.put("Foo", "bar");
        assertEquals("bar", map.get("FOO"));
        assertEquals("bar", map.get("foo"));
        assertEquals("bar", map.get("Foo"));
        map.remove("FOO");
        assertEquals(null, map.get("foo"));
        assertEquals(null, map.get("Foo"));
        assertEquals(null, map.get("FOO"));
        assertTrue(map.isEmpty());
    }

    /** putAll from another CaseInsensitiveMap keeps values and original key case. */
    public void testPutAll() {
        Map<String, Object> map = new CaseInsensitiveMap();
        assertNull(map.get("foo"));
        Map<String, Object> other = new CaseInsensitiveMap();
        other.put("Foo", "cheese");
        other.put("bar", 123);
        map.putAll(other);
        assertEquals("cheese", map.get("FOO"));
        assertEquals("cheese", map.get("foo"));
        assertEquals("cheese", map.get("Foo"));
        assertEquals(123, map.get("BAR"));
        assertEquals(123, map.get("bar"));
        assertEquals(123, map.get("BaR"));
        // key case should be preserved
        Map<String, Object> keys = new HashMap<String, Object>();
        keys.putAll(map);
        assertEquals("cheese", keys.get("Foo"));
        assertNull(keys.get("foo"));
        assertNull(keys.get("FOO"));
        assertEquals(123, keys.get("bar"));
        assertNull(keys.get("Bar"));
        assertNull(keys.get("BAR"));
    }

    /** putAll from a plain HashMap behaves the same as from a CaseInsensitiveMap. */
    public void testPutAllOther() {
        Map<String, Object> map = new CaseInsensitiveMap();
        assertNull(map.get("foo"));
        Map<String, Object> other = new HashMap<String, Object>();
        other.put("Foo", "cheese");
        other.put("bar", 123);
        map.putAll(other);
        assertEquals("cheese", map.get("FOO"));
        assertEquals("cheese", map.get("foo"));
        assertEquals("cheese", map.get("Foo"));
        assertEquals(123, map.get("BAR"));
        assertEquals(123, map.get("bar"));
        assertEquals(123, map.get("BaR"));
    }

    /** putAll with an empty source is a no-op. */
    public void testPutAllEmpty() {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("foo", "cheese");
        Map<String, Object> other = new HashMap<String, Object>();
        map.putAll(other);
        assertEquals("cheese", map.get("FOO"));
        assertEquals("cheese", map.get("foo"));
        assertEquals("cheese", map.get("Foo"));
        assertEquals(1, map.size());
    }

    /** The copy constructor accepts an ordinary map. */
    public void testConstructFromOther() {
        Map<String, Object> other = new HashMap<String, Object>();
        other.put("Foo", "cheese");
        other.put("bar", 123);
        Map<String, Object> map = new CaseInsensitiveMap(other);
        assertEquals("cheese", map.get("FOO"));
        assertEquals("cheese", map.get("foo"));
        assertEquals("cheese", map.get("Foo"));
        assertEquals(123, map.get("BAR"));
        assertEquals(123, map.get("bar"));
        assertEquals(123, map.get("BaR"));
    }

    /** keySet().contains must be case-agnostic as well. */
    public void testKeySet() {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("Foo", "cheese");
        map.put("BAR", 123);
        map.put("baZ", "beer");
        Set keys = map.keySet();
        // we should be able to lookup no matter what case
        assertTrue(keys.contains("Foo"));
        assertTrue(keys.contains("foo"));
        assertTrue(keys.contains("FOO"));
        assertTrue(keys.contains("BAR"));
        assertTrue(keys.contains("bar"));
        assertTrue(keys.contains("Bar"));
        assertTrue(keys.contains("baZ"));
        assertTrue(keys.contains("baz"));
        assertTrue(keys.contains("Baz"));
        assertTrue(keys.contains("BAZ"));
    }

    /** Copying into a HashMap keeps the original key case (and only that case). */
    public void testRetainKeysCopyToAnotherMap() {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("Foo", "cheese");
        map.put("BAR", 123);
        map.put("baZ", "beer");
        Map<String, Object> other = new HashMap<String, Object>(map);
        // we should retain the cases of the original keys
        // when its copied to another map
        assertTrue(other.containsKey("Foo"));
        assertFalse(other.containsKey("foo"));
        assertFalse(other.containsKey("FOO"));
        assertTrue(other.containsKey("BAR"));
        assertFalse(other.containsKey("bar"));
        assertFalse(other.containsKey("Bar"));
        assertTrue(other.containsKey("baZ"));
        assertFalse(other.containsKey("baz"));
        assertFalse(other.containsKey("Baz"));
        assertFalse(other.containsKey("BAZ"));
    }

    /** values() exposes the stored values unchanged. */
    public void testValues() {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("Foo", "cheese");
        map.put("BAR", "123");
        map.put("baZ", "Beer");
        Iterator it = map.values().iterator();
        // should be String values
        assertEquals("String", it.next().getClass().getSimpleName());
        assertEquals("String", it.next().getClass().getSimpleName());
        assertEquals("String", it.next().getClass().getSimpleName());
        Collection values = map.values();
        assertEquals(3, values.size());
        assertTrue(values.contains("cheese"));
        assertTrue(values.contains("123"));
        assertTrue(values.contains("Beer"));
    }

    /** Regression test: overwrite via different casing keeps size at one. */
    public void testRomeks() {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("foo", "cheese");
        assertEquals(1, map.size());
        assertEquals("cheese", map.get("fOo"));
        assertEquals(true, map.containsKey("foo"));
        assertEquals(true, map.containsKey("FOO"));
        assertEquals(true, map.keySet().contains("FOO"));
        map.put("FOO", "cake");
        assertEquals(1, map.size());
        assertEquals(true, map.containsKey("foo"));
        assertEquals(true, map.containsKey("FOO"));
        assertEquals("cake", map.get("fOo"));
    }

    /** Contrast: a regular HashMap treats the two casings as distinct keys. */
    public void testRomeksUsingRegularHashMap() {
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("foo", "cheese");
        assertEquals(1, map.size());
        assertEquals(null, map.get("fOo"));
        assertEquals(true, map.containsKey("foo"));
        assertEquals(false, map.containsKey("FOO"));
        assertEquals(false, map.keySet().contains("FOO"));
        map.put("FOO", "cake");
        assertEquals(2, map.size());
        assertEquals(true, map.containsKey("foo"));
        assertEquals(true, map.containsKey("FOO"));
        assertEquals(null, map.get("fOo"));
        assertEquals("cheese", map.get("foo"));
        assertEquals("cake", map.get("FOO"));
    }

    /** After an overwrite, only the last-used key casing survives a copy. */
    public void testRomeksTransferredToHashMapAfterwards() {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("Foo", "cheese");
        map.put("FOO", "cake");
        assertEquals(1, map.size());
        assertEquals(true, map.containsKey("foo"));
        assertEquals(true, map.containsKey("FOO"));
        Map<String, Object> other = new HashMap<String, Object>(map);
        assertEquals(false, other.containsKey("foo"));
        assertEquals(true, other.containsKey("FOO"));
        assertEquals(1, other.size());
    }

    /** The map (including a materialized entry set) must survive serialization. */
    public void testSerialization() throws Exception {
        CaseInsensitiveMap testMap = new CaseInsensitiveMap();
        testMap.put("key", "value");
        // force entry set to be created which could cause the map to be non serializable
        testMap.entrySet();
        ByteArrayOutputStream bStream = new ByteArrayOutputStream();
        ObjectOutputStream objStream = new ObjectOutputStream(bStream);
        objStream.writeObject(testMap);
        ObjectInputStream inStream = new ObjectInputStream(new ByteArrayInputStream(bStream.toByteArray()));
        CaseInsensitiveMap testMapCopy = (CaseInsensitiveMap) inStream.readObject();
        assertTrue(testMapCopy.containsKey("key"));
    }

    /** Copying via entrySet() iteration preserves the original key case. */
    public void testCopyToAnotherMapPreserveKeyCaseEntrySet() {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("Foo", "cheese");
        map.put("BAR", "cake");
        assertEquals(2, map.size());
        assertEquals(true, map.containsKey("foo"));
        assertEquals(true, map.containsKey("bar"));
        Map<String, Object> other = new HashMap<String, Object>();
        for (Map.Entry<String, Object> entry : map.entrySet()) {
            String key = entry.getKey();
            Object value = entry.getValue();
            other.put(key, value);
        }
        assertEquals(false, other.containsKey("foo"));
        assertEquals(true, other.containsKey("Foo"));
        assertEquals(false, other.containsKey("bar"));
        assertEquals(true, other.containsKey("BAR"));
        assertEquals(2, other.size());
    }

    /** Copying via putAll preserves the original key case. */
    public void testCopyToAnotherMapPreserveKeyCasePutAll() {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("Foo", "cheese");
        map.put("BAR", "cake");
        assertEquals(2, map.size());
        assertEquals(true, map.containsKey("foo"));
        assertEquals(true, map.containsKey("bar"));
        Map<String, Object> other = new HashMap<String, Object>();
        other.putAll(map);
        assertEquals(false, other.containsKey("foo"));
        assertEquals(true, other.containsKey("Foo"));
        assertEquals(false, other.containsKey("bar"));
        assertEquals(true, other.containsKey("BAR"));
        assertEquals(2, other.size());
    }

    /** Copying via the HashMap copy constructor preserves the original key case. */
    public void testCopyToAnotherMapPreserveKeyCaseCtr() {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("Foo", "cheese");
        map.put("BAR", "cake");
        assertEquals(2, map.size());
        assertEquals(true, map.containsKey("foo"));
        assertEquals(true, map.containsKey("bar"));
        Map<String, Object> other = new HashMap<String, Object>(map);
        assertEquals(false, other.containsKey("foo"));
        assertEquals(true, other.containsKey("Foo"));
        assertEquals(false, other.containsKey("bar"));
        assertEquals(true, other.containsKey("BAR"));
        assertEquals(2, other.size());
    }

    /** Copying via keySet() (the wrong way) yields lower-cased keys. */
    public void testCopyToAnotherMapPreserveKeyKeySet() {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("Foo", "cheese");
        map.put("BAR", "cake");
        assertEquals(2, map.size());
        assertEquals(true, map.containsKey("foo"));
        assertEquals(true, map.containsKey("bar"));
        Map<String, Object> other = new HashMap<String, Object>();
        // this is wrong!!! you should use entrySet
        for (String key : map.keySet()) {
            Object value = map.get(key);
            other.put(key, value);
        }
        // now the keys will be in lower case
        assertEquals(true, other.containsKey("foo"));
        assertEquals(false, other.containsKey("Foo"));
        assertEquals(true, other.containsKey("bar"));
        assertEquals(false, other.containsKey("BAR"));
        assertEquals(2, other.size());
    }

    /** Many threads doing putAll into a shared map must not lose entries. */
    public void testConcurrent() throws Exception {
        ExecutorService service = Executors.newFixedThreadPool(5);
        final CountDownLatch latch = new CountDownLatch(1000);
        final Map<String, Object> map = new CaseInsensitiveMap();
        // do some stuff concurrently
        for (int i = 0; i < 1000; i++) {
            final int count = i;
            service.submit(new Runnable() {
                public void run() {
                    Map<String, Object> foo = new CaseInsensitiveMap();
                    foo.put("counter" + count, count);
                    foo.put("foo", 123);
                    foo.put("bar", 456);
                    foo.put("cake", "cheese");
                    // copy foo to map as map is a shared resource
                    map.putAll(foo);
                    latch.countDown();
                }
            });
        }
        latch.await(10, TimeUnit.SECONDS);
        // 1000 unique counter keys plus foo/bar/cake.
        assertEquals(1003, map.size());
        assertEquals(true, map.containsKey("counter0"));
        assertEquals(true, map.containsKey("counter500"));
        assertEquals(true, map.containsKey("counter999"));
        assertEquals(123, map.get("FOO"));
        assertEquals(456, map.get("Bar"));
        assertEquals("cheese", map.get("cAKe"));
        service.shutdownNow();
    }

    /** Copying a map of Camel headers repeatedly should not alter the source. */
    public void testCopyMapWithCamelHeadersTest() throws Exception {
        Map<String, Object> map = new CaseInsensitiveMap();
        map.put("CamelA", "A");
        map.put("CamelB", "B");
        map.put("CamelC", "C");
        // retain maps so we can profile that the map doesn't duplicate
        // camel keys as they are intern
        List<Map> maps = new ArrayList<Map>();
        for (int i = 0; i < 10000; i++) {
            Map<String, Object> copy = new CaseInsensitiveMap(map);
            assertEquals(3, copy.size());
            assertEquals("A", copy.get("CamelA"));
            assertEquals("B", copy.get("CamelB"));
            assertEquals("C", copy.get("CamelC"));
            maps.add(copy);
        }
        assertEquals(10000, maps.size());
        assertEquals(3, map.size());
        assertEquals("A", map.get("CamelA"));
        assertEquals("B", map.get("CamelB"));
        assertEquals("C", map.get("CamelC"));
        // use a memory profiler to see memory allocation
        // often you may want to give it time to run so you
        // have chance to capture memory snapshot in profiler
        // Thread.sleep(9999999);
    }
} | chicagozer/rheosoft | camel-core/src/test/java/org/apache/camel/util/CaseInsensitiveMapTest.java | Java | apache-2.0 | 17,444 |
/*
* Copyright DbMaintain.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dbmaintain.script.parser.impl;
import org.dbmaintain.script.parser.ScriptParser;
import org.dbmaintain.script.parser.parsingstate.ParsingState;
import org.dbmaintain.util.DbMaintainException;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.Properties;
/**
* A class for parsing statements out of sql scripts.
* <p/>
* All statements should be separated with a semicolon (;). The last statement will be
* added even if it does not end with a semicolon. The semicolons will not be included in the returned statements.
* <p/>
* This parser also takes quoted literals, double quoted text and in-line (--comment) and block (/ * comment * /)
* into account when parsing the statements.
*
* @author Tim Ducheyne
* @author Filip Neven
* @author Stefan Bangels
*/
public class DefaultScriptParser implements ScriptParser {

    /**
     * The reader for the script content stream (wrapped in a BufferedReader
     * by the constructor)
     */
    protected Reader scriptReader;

    /**
     * Whether backslash escaping is enabled
     */
    protected boolean backSlashEscapingEnabled;

    /**
     * Parameters that must be replaced in the script. Null if there are no such parameters
     */
    protected Properties scriptParameters;

    /**
     * The starting state
     */
    protected ParsingState initialParsingState;

    /**
     * True if the script has ended
     */
    protected boolean endOfScriptReached = false;

    /**
     * The current parsed character
     */
    protected Character currentChar, nextChar;

    /**
     * Constructor for DefaultScriptParser.
     *
     * @param scriptReader the reader that will provide the script content, not null
     * @param initialParsingState the inial state when starting to parse a script, not null
     * @param backSlashEscapingEnabled true if backslash escaping is enabled
     * @param scriptParameters parameters that must be replaced in the script. null if there are no such parameters.
     */
    public DefaultScriptParser(Reader scriptReader, ParsingState initialParsingState, boolean backSlashEscapingEnabled,
                               Properties scriptParameters) {
        this.backSlashEscapingEnabled = backSlashEscapingEnabled;
        this.initialParsingState = initialParsingState;
        this.scriptParameters = scriptParameters;
        // Fix: the original first assigned the raw reader to this.scriptReader
        // and then immediately overwrote it with the buffered wrapper; the
        // dead store is removed.
        this.scriptReader = new BufferedReader(scriptReader);
    }

    /**
     * Parses the next statement out of the given script stream.
     *
     * @return the statements, null if no more statements
     */
    public String getNextStatement() {
        try {
            return getNextStatementImpl();
        } catch (IOException e) {
            throw new DbMaintainException("Unable to parse next statement from script.", e);
        }
    }

    /**
     * Actual implementation of getNextStatement.
     *
     * @return the statements, null if no more statements
     * @throws IOException if a problem occurs reading the script from the file system
     */
    protected String getNextStatementImpl() throws IOException {
        StatementBuilder statementBuilder = createStatementBuilder();

        // Make sure that we read currentChar when we start reading a new script. If not null, currentChar was already
        // set to the first character of the next statement when we read the previous statement.
        if (currentChar == null) {
            currentChar = readNextCharacter();
        }
        while (!endOfScriptReached) {
            if (currentChar == null) {
                endOfScriptReached = true;
            }
            nextChar = readNextCharacter();
            statementBuilder.addCharacter(currentChar, nextChar);
            currentChar = nextChar;
            if (statementBuilder.isComplete()) {
                if (statementBuilder.hasExecutableContent()) {
                    return statementBuilder.buildStatement();
                }
                // Complete but with no executable content (e.g. only comments):
                // discard and start collecting the next statement.
                statementBuilder = createStatementBuilder();
            }
        }
        if (!statementBuilder.isComplete() && statementBuilder.hasExecutableContent()) {
            throw new DbMaintainException("Last statement in script was not ended correctly.");
        }
        return null;
    }

    /**
     * Reads a single character from the script stream.
     *
     * @return the next character, null at end of stream
     */
    protected Character readNextCharacter() throws IOException {
        int charAsInt = scriptReader.read();
        return charAsInt == -1 ? null : (char) charAsInt;
    }

    /**
     * Factory method for the statement builder.
     *
     * @return The statement builder, not null
     */
    protected StatementBuilder createStatementBuilder() {
        return new StatementBuilder(initialParsingState, scriptParameters);
    }
}
| fcamblor/dbmaintain-maven-plugin | dbmaintain/src/main/java/org/dbmaintain/script/parser/impl/DefaultScriptParser.java | Java | apache-2.0 | 5,339 |
import logging
import re
import socket
from mopidy.config import validators
from mopidy.internal import log, path
def decode(value):
    """Turn a raw config string into text, unescaping ``\\\\``, ``\\n`` and ``\\t``.

    Byte strings are first decoded with surrogate escapes so undecodable
    bytes survive a decode/encode round trip.
    """
    if isinstance(value, bytes):
        value = value.decode(errors="surrogateescape")

    result = value
    for special in ("\\", "\n", "\t"):
        escaped = special.encode(encoding="unicode-escape").decode()
        result = result.replace(escaped, special)
    return result
def encode(value):
    """Inverse of :func:`decode`: escape backslash, newline and tab for storage."""
    if isinstance(value, bytes):
        value = value.decode(errors="surrogateescape")

    result = value
    for special in ("\\", "\n", "\t"):
        escaped = special.encode(encoding="unicode-escape").decode()
        result = result.replace(special, escaped)
    return result
class DeprecatedValue:
    # Marker type returned by Deprecated.deserialize/serialize to flag config
    # values that are no longer in use and should be ignored.
    pass
class ConfigValue:
    """Base type describing how a config key's value is handled.

    Subclasses add deserialization behavior and/or validation. Each config
    value supports three actions:

    1. Deserializing from a raw string and validating, raising ValueError on
       failure.
    2. Serializing a value back to a string that can be stored in a config.
    3. Formatting a value to a printable form (useful for masking secrets).

    :class:`None` values should never be deserialized, serialized or
    formatted; code interacting with the config simply skips them.
    """

    def deserialize(self, value):
        """Cast raw string to appropriate type."""
        return decode(value)

    def serialize(self, value, display=False):
        """Convert value back to string for saving."""
        return "" if value is None else str(value)
class Deprecated(ConfigValue):
    """Deprecated value.
    Used for ignoring old config values that are no longer in use, but should
    not cause the config parser to crash.
    """
    def deserialize(self, value):
        # Always yields the sentinel, regardless of the raw input, so the
        # parser accepts but discards the obsolete key.
        return DeprecatedValue()
    def serialize(self, value, display=False):
        # Serializing also yields the sentinel so the key is never written
        # back to a config file.
        return DeprecatedValue()
class String(ConfigValue):
    """String value.

    Is decoded as utf-8 and \\n \\t escapes should work and be preserved.
    """

    def __init__(self, optional=False, choices=None):
        # A value is required unless explicitly marked optional.
        self._required = not optional
        self._choices = choices

    def deserialize(self, value):
        """Decode, strip and validate a raw string value."""
        stripped = decode(value).strip()
        validators.validate_required(stripped, self._required)
        if not stripped:
            return None
        validators.validate_choice(stripped, self._choices)
        return stripped

    def serialize(self, value, display=False):
        """Re-escape the value for storage; None serializes to empty."""
        return "" if value is None else encode(value)
class Secret(String):
    """Secret string value.
    Is decoded as utf-8 and \\n \\t escapes should work and be preserved.
    Should be used for passwords, auth tokens etc. Will mask value when being
    displayed.
    """
    def __init__(self, optional=False, choices=None):
        # ``choices`` is accepted only for signature compatibility with
        # String and is deliberately discarded below.
        self._required = not optional
        self._choices = None  # Choices doesn't make sense for secrets
    def serialize(self, value, display=False):
        """Serialize like String, but mask the value when displaying."""
        if value is not None and display:
            return "********"
        return super().serialize(value, display)
class Integer(ConfigValue):
    """Integer value."""

    def __init__(
        self, minimum=None, maximum=None, choices=None, optional=False
    ):
        self._required = not optional
        self._minimum = minimum
        self._maximum = maximum
        self._choices = choices

    def deserialize(self, value):
        """Parse a raw string into an int, enforcing choice/range limits."""
        raw = decode(value)
        validators.validate_required(raw, self._required)
        if not raw:
            return None
        number = int(raw)
        validators.validate_choice(number, self._choices)
        validators.validate_minimum(number, self._minimum)
        validators.validate_maximum(number, self._maximum)
        return number
class Boolean(ConfigValue):
    """Boolean value.

    Accepts ``1``, ``yes``, ``true``, and ``on`` with any casing as
    :class:`True`.

    Accepts ``0``, ``no``, ``false``, and ``off`` with any casing as
    :class:`False`.
    """

    true_values = ("1", "yes", "true", "on")
    false_values = ("0", "no", "false", "off")

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value):
        """Map a raw string onto True/False, case-insensitively."""
        text = decode(value)
        validators.validate_required(text, self._required)
        if not text:
            return None
        lowered = text.lower()
        if lowered in self.true_values:
            return True
        if lowered in self.false_values:
            return False
        raise ValueError(f"invalid value for boolean: {text!r}")

    def serialize(self, value, display=False):
        """Write booleans as canonical "true"/"false" strings."""
        if value is True:
            return "true"
        if value in (False, None):
            return "false"
        raise ValueError(f"{value!r} is not a boolean")
class List(ConfigValue):
    """List value.

    Supports elements split by commas or newlines. Newlines take precedence
    and empty list items will be filtered out.
    """

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value):
        """Split a raw string into a tuple of stripped, non-empty items."""
        value = decode(value)
        # Newline-separated lists take precedence over comma-separated ones
        # so values containing commas can still be given one per line.
        if "\n" in value:
            values = re.split(r"\s*\n\s*", value)
        else:
            values = re.split(r"\s*,\s*", value)
        values = tuple(v.strip() for v in values if v.strip())
        validators.validate_required(values, self._required)
        # ``values`` is already a tuple; the previous extra tuple() call
        # around it was redundant.
        return values

    def serialize(self, value, display=False):
        """Serialize items one per line, indented for the ini-style format."""
        if not value:
            return ""
        return "\n " + "\n ".join(encode(v) for v in value if v)
class LogColor(ConfigValue):
    """Log color value, restricted to the colors known to the log module."""

    def deserialize(self, value):
        """Normalize to lowercase and validate against the known colors."""
        color = decode(value).lower()
        validators.validate_choice(color, log.COLORS)
        return color

    def serialize(self, value, display=False):
        """Write known colors back in lowercase; unknown ones as empty."""
        color = value.lower()
        return encode(color) if color in log.COLORS else ""
class LogLevel(ConfigValue):
    """Log level value.

    Expects one of ``critical``, ``error``, ``warning``, ``info``, ``debug``,
    ``trace``, or ``all``, with any casing.
    """

    levels = {
        "critical": logging.CRITICAL,
        "error": logging.ERROR,
        "warning": logging.WARNING,
        "info": logging.INFO,
        "debug": logging.DEBUG,
        "trace": log.TRACE_LOG_LEVEL,
        "all": logging.NOTSET,
    }

    def deserialize(self, value):
        """Map a level name (any casing) onto its numeric logging level."""
        name = decode(value).lower()
        validators.validate_choice(name, self.levels.keys())
        return self.levels.get(name)

    def serialize(self, value, display=False):
        """Map a numeric logging level back onto its name, or empty."""
        for name, level in self.levels.items():
            if level == value:
                return encode(name)
        return ""
class Hostname(ConfigValue):
    """Network hostname value."""
    def __init__(self, optional=False):
        self._required = not optional
    def deserialize(self, value, display=False):
        # NOTE(review): the ``display`` parameter is unused here and no other
        # deserialize() in this module takes it -- it looks copy-pasted from a
        # serialize() signature; confirm before removing.
        value = decode(value).strip()
        validators.validate_required(value, self._required)
        if not value:
            return None
        # Values naming a Unix socket are normalized to a "unix:<path>" form.
        socket_path = path.get_unix_socket_path(value)
        if socket_path is not None:
            # Reuse Path deserialization so the socket path is expanded;
            # requiredness is carried over from this field.
            path_str = Path(not self._required).deserialize(socket_path)
            return f"unix:{path_str}"
        try:
            # Resolvability check only; the resolved addresses are discarded.
            socket.getaddrinfo(value, None)
        except OSError:
            raise ValueError("must be a resolveable hostname or valid IP")
        return value
class Port(Integer):
    """Network port value.

    Expects integer in the range 0-65535, zero tells the kernel to simply
    allocate a port for us.
    """

    def __init__(self, choices=None, optional=False):
        # 65535 == 2**16 - 1, the highest valid TCP/UDP port number.
        super().__init__(
            minimum=0, maximum=65535, choices=choices, optional=optional
        )
class _ExpandedPath(str):
def __new__(cls, original, expanded):
return super().__new__(cls, expanded)
def __init__(self, original, expanded):
self.original = original
class Path(ConfigValue):
    """File system path.

    The following expansions of the path will be done:

    - ``~`` to the current user's home directory
    - ``$XDG_CACHE_DIR`` according to the XDG spec
    - ``$XDG_CONFIG_DIR`` according to the XDG spec
    - ``$XDG_DATA_DIR`` according to the XDG spec
    - ``$XDG_MUSIC_DIR`` according to the XDG spec
    """

    def __init__(self, optional=False):
        self._required = not optional

    def deserialize(self, value):
        """Decode, expand and validate a path, keeping the original form."""
        raw = decode(value).strip()
        expanded = path.expand_path(raw)
        validators.validate_required(raw, self._required)
        validators.validate_required(expanded, self._required)
        if not raw or expanded is None:
            return None
        return _ExpandedPath(raw, expanded)

    def serialize(self, value, display=False):
        """Write the original (unexpanded) form back to the config."""
        if isinstance(value, _ExpandedPath):
            value = value.original
        if isinstance(value, bytes):
            value = value.decode(errors="surrogateescape")
        return value
| kingosticks/mopidy | mopidy/config/types.py | Python | apache-2.0 | 9,146 |
// AnyChatCallCenterServerDlg.h : header file
//
#if !defined(AFX_ANYCHATCALLCENTERSERVERDLG_H__69ADA4B7_BCD7_435B_A14D_20271C905BA1__INCLUDED_)
#define AFX_ANYCHATCALLCENTERSERVERDLG_H__69ADA4B7_BCD7_435B_A14D_20271C905BA1__INCLUDED_
#if _MSC_VER > 1000
#pragma once
#endif // _MSC_VER > 1000
#include <list>
// Main dialog of the AnyChat call-center server sample. Collects log output
// and hosts the MFC message handlers for the server-side demo operations.
class CAnyChatCallCenterServerDlg : public CDialog
{
// Construction
public:
	CAnyChatCallCenterServerDlg(CWnd* pParent = NULL);	// standard constructor
public:
	CString	m_strLogInfo;	///< Accumulated log text (original GBK comment: "stores log info")
	// Append a line of text to the dialog's log view
	// (original GBK comment: "display log info")
	void AppendLogString(CString logstr);
	// Initialize the call-center business queues
	// (original GBK comment: "initialize business queue")
	void InitAnyChatQueue(void);
// Dialog Data
	//{{AFX_DATA(CAnyChatCallCenterServerDlg)
	enum { IDD = IDD_ANYCHATCALLCENTERSERVER_DIALOG };
	CEdit	m_ctrlEditLog;
	CComboBox	m_ComboStyle;
	int		m_iTargetId;
	BOOL	m_bShowUserLog;
	//}}AFX_DATA
	// ClassWizard generated virtual function overrides
	//{{AFX_VIRTUAL(CAnyChatCallCenterServerDlg)
	protected:
	virtual void DoDataExchange(CDataExchange* pDX);	// DDX/DDV support
	//}}AFX_VIRTUAL
// Implementation
protected:
	HICON m_hIcon;
	// Generated message map functions
	//{{AFX_MSG(CAnyChatCallCenterServerDlg)
	virtual BOOL OnInitDialog();
	afx_msg void OnPaint();
	afx_msg HCURSOR OnQueryDragIcon();
	afx_msg void OnDestroy();
	afx_msg void OnButtonSendbuf();
	afx_msg void OnButtonTransFile();
	afx_msg void OnButtonTransBufferEx();
	afx_msg void OnButtonTransBuffer();
	afx_msg void OnButtonStartRecord();
	afx_msg void OnButtonStopRecord();
	afx_msg void OnCheckShowLog();
	afx_msg void OnTimer(UINT nIDEvent);
	afx_msg void OnButtonKickOut();
	afx_msg void OnButtonHangUp();
	//}}AFX_MSG
	DECLARE_MESSAGE_MAP()
};
//{{AFX_INSERT_LOCATION}}
// Microsoft Visual C++ will insert additional declarations immediately before the previous line.
#endif // !defined(AFX_ANYCHATCALLCENTERSERVERDLG_H__69ADA4B7_BCD7_435B_A14D_20271C905BA1__INCLUDED_)
| alucard263096/AMKRemoteClass | Documents/AnyChat/AnyChatCoreSDK_Win32_r4840/src/server/c++/AnyChatCallCenterServer/AnyChatCallCenterServerDlg.h | C | apache-2.0 | 1,901 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc (version 1.7.0_09-icedtea) on Sat Mar 30 09:57:52 CET 2013 -->
<meta http-equiv="Content-Type" content="text/html" charset="UTF-8">
<title>org.togglz.core.proxy (Togglz 1.1.1.Final API)</title>
<meta name="date" content="2013-03-30">
<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style">
</head>
<body>
<h1 class="bar"><a href="../../../../org/togglz/core/proxy/package-summary.html" target="classFrame">org.togglz.core.proxy</a></h1>
<div class="indexContainer">
<h2 title="Classes">Classes</h2>
<ul title="Classes">
<li><a href="FeatureProxyInvocationHandler.html" title="class in org.togglz.core.proxy" target="classFrame">FeatureProxyInvocationHandler</a></li>
</ul>
</div>
</body>
</html>
| togglz/togglz-site | apidocs/1.1.1.Final/org/togglz/core/proxy/package-frame.html | HTML | apache-2.0 | 898 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.platform;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteAtomicSequence;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.IgniteDataStreamer;
import org.apache.ignite.IgniteException;
import org.apache.ignite.IgniteLogger;
import org.apache.ignite.cluster.BaselineNode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.NearCacheConfiguration;
import org.apache.ignite.configuration.PlatformConfiguration;
import org.apache.ignite.internal.GridKernalContext;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.binary.BinaryRawReaderEx;
import org.apache.ignite.internal.binary.BinaryRawWriterEx;
import org.apache.ignite.internal.cluster.DetachedClusterNode;
import org.apache.ignite.internal.logger.platform.PlatformLogger;
import org.apache.ignite.internal.processors.GridProcessorAdapter;
import org.apache.ignite.internal.processors.cache.IgniteCacheProxy;
import org.apache.ignite.internal.processors.datastreamer.DataStreamerImpl;
import org.apache.ignite.internal.processors.datastructures.GridCacheAtomicLongImpl;
import org.apache.ignite.internal.processors.platform.binary.PlatformBinaryProcessor;
import org.apache.ignite.internal.processors.platform.cache.PlatformCache;
import org.apache.ignite.internal.processors.platform.cache.PlatformCacheExtension;
import org.apache.ignite.internal.processors.platform.cache.affinity.PlatformAffinity;
import org.apache.ignite.internal.processors.platform.cache.store.PlatformCacheStore;
import org.apache.ignite.internal.processors.platform.cluster.PlatformClusterGroup;
import org.apache.ignite.internal.processors.platform.datastreamer.PlatformDataStreamer;
import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicLong;
import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicReference;
import org.apache.ignite.internal.processors.platform.datastructures.PlatformAtomicSequence;
import org.apache.ignite.internal.processors.platform.dotnet.PlatformDotNetCacheStore;
import org.apache.ignite.internal.processors.platform.memory.PlatformMemory;
import org.apache.ignite.internal.processors.platform.memory.PlatformOutputStream;
import org.apache.ignite.internal.processors.platform.transactions.PlatformTransactions;
import org.apache.ignite.internal.processors.platform.utils.PlatformConfigurationUtils;
import org.apache.ignite.internal.processors.platform.utils.PlatformUtils;
import org.apache.ignite.internal.util.typedef.CI1;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.lang.IgniteFuture;
import org.jetbrains.annotations.Nullable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import static org.apache.ignite.internal.processors.platform.PlatformAbstractTarget.FALSE;
import static org.apache.ignite.internal.processors.platform.PlatformAbstractTarget.TRUE;
import static org.apache.ignite.internal.processors.platform.client.ClientConnectionContext.CURRENT_VER;
/**
* GridGain platform processor.
*/
@SuppressWarnings({"unchecked"})
public class PlatformProcessorImpl extends GridProcessorAdapter implements PlatformProcessor, PlatformTarget {
/** */
private static final int OP_GET_CACHE = 1;
/** */
private static final int OP_CREATE_CACHE = 2;
/** */
private static final int OP_GET_OR_CREATE_CACHE = 3;
/** */
private static final int OP_CREATE_CACHE_FROM_CONFIG = 4;
/** */
private static final int OP_GET_OR_CREATE_CACHE_FROM_CONFIG = 5;
/** */
private static final int OP_DESTROY_CACHE = 6;
/** */
private static final int OP_GET_AFFINITY = 7;
/** */
private static final int OP_GET_DATA_STREAMER = 8;
/** */
private static final int OP_GET_TRANSACTIONS = 9;
/** */
private static final int OP_GET_CLUSTER_GROUP = 10;
/** */
private static final int OP_GET_EXTENSION = 11;
/** */
private static final int OP_GET_ATOMIC_LONG = 12;
/** */
private static final int OP_GET_ATOMIC_REFERENCE = 13;
/** */
private static final int OP_GET_ATOMIC_SEQUENCE = 14;
/** */
private static final int OP_GET_IGNITE_CONFIGURATION = 15;
/** */
private static final int OP_GET_CACHE_NAMES = 16;
/** */
private static final int OP_CREATE_NEAR_CACHE = 17;
/** */
private static final int OP_GET_OR_CREATE_NEAR_CACHE = 18;
/** */
private static final int OP_LOGGER_IS_LEVEL_ENABLED = 19;
/** */
private static final int OP_LOGGER_LOG = 20;
/** */
private static final int OP_GET_BINARY_PROCESSOR = 21;
/** */
private static final int OP_RELEASE_START = 22;
/** */
private static final int OP_ADD_CACHE_CONFIGURATION = 23;
/** */
private static final int OP_SET_BASELINE_TOPOLOGY_VER = 24;
/** */
private static final int OP_SET_BASELINE_TOPOLOGY_NODES = 25;
/** */
private static final int OP_GET_BASELINE_TOPOLOGY = 26;
/** */
private static final int OP_DISABLE_WAL = 27;
/** */
private static final int OP_ENABLE_WAL = 28;
/** */
private static final int OP_IS_WAL_ENABLED = 29;
/** */
private static final int OP_SET_TX_TIMEOUT_ON_PME = 30;
/** Start latch. */
private final CountDownLatch startLatch = new CountDownLatch(1);
/** Stores pending initialization. */
private final Collection<StoreInfo> pendingStores =
Collections.newSetFromMap(new ConcurrentHashMap<StoreInfo, Boolean>());
/** Lock for store lifecycle operations. */
private final ReadWriteLock storeLock = new ReentrantReadWriteLock();
/** Logger. */
@SuppressWarnings("FieldCanBeLocal")
private final IgniteLogger log;
/** Context. */
private final PlatformContext platformCtx;
/** Interop configuration. */
private final PlatformConfigurationEx interopCfg;
/** Extensions. */
private final PlatformPluginExtension[] extensions;
/** Whether processor is started. */
private boolean started;
/** Whether processor if stopped (or stopping). */
private volatile boolean stopped;
/** Cache extensions. */
private final PlatformCacheExtension[] cacheExts;
/** Cluster restart flag for the reconnect callback. */
private volatile boolean clusterRestarted;
/**
* Constructor.
*
* @param ctx Kernal context.
*/
public PlatformProcessorImpl(GridKernalContext ctx) {
super(ctx);
log = ctx.log(PlatformProcessorImpl.class);
PlatformConfiguration interopCfg0 = ctx.config().getPlatformConfiguration();
assert interopCfg0 != null : "Must be checked earlier during component creation.";
if (!(interopCfg0 instanceof PlatformConfigurationEx))
throw new IgniteException("Unsupported platform configuration: " + interopCfg0.getClass().getName());
interopCfg = (PlatformConfigurationEx)interopCfg0;
if (!F.isEmpty(interopCfg.warnings())) {
for (String w : interopCfg.warnings())
U.warn(log, w);
}
platformCtx = new PlatformContextImpl(ctx, interopCfg.gate(), interopCfg.memory(), interopCfg.platform());
// Initialize cache extensions (if any).
cacheExts = prepareCacheExtensions(interopCfg.cacheExtensions());
if (interopCfg.logger() != null)
interopCfg.logger().setContext(platformCtx);
// Initialize extensions (if any).
extensions = prepareExtensions(ctx.plugins().extensions(PlatformPluginExtension.class));
}
/** {@inheritDoc} */
@Override public void start() throws IgniteCheckedException {
try (PlatformMemory mem = platformCtx.memory().allocate()) {
PlatformOutputStream out = mem.output();
BinaryRawWriterEx writer = platformCtx.writer(out);
writer.writeString(ctx.igniteInstanceName());
out.synchronize();
platformCtx.gateway().onStart(new PlatformTargetProxyImpl(this, platformCtx), mem.pointer());
}
// At this moment all necessary native libraries must be loaded, so we can process with store creation.
storeLock.writeLock().lock();
try {
for (StoreInfo store : pendingStores)
registerStore0(store.store, store.convertBinary);
pendingStores.clear();
started = true;
}
finally {
storeLock.writeLock().unlock();
}
// Add Interop node attributes.
ctx.addNodeAttribute(PlatformUtils.ATTR_PLATFORM, interopCfg.platform());
}
/** {@inheritDoc} */
@Override public void onKernalStop(boolean cancel) {
startLatch.countDown();
}
/** {@inheritDoc} */
@Override public void stop(boolean cancel) throws IgniteCheckedException {
if (platformCtx != null) {
stopped = true;
platformCtx.gateway().onStop();
}
}
/** {@inheritDoc} */
@Override public Ignite ignite() {
return ctx.grid();
}
/** {@inheritDoc} */
@Override public long environmentPointer() {
return platformCtx.gateway().environmentPointer();
}
/** {@inheritDoc} */
@Override public void releaseStart() {
startLatch.countDown();
}
/** {@inheritDoc} */
@Override public void awaitStart() throws IgniteCheckedException {
U.await(startLatch);
}
/** {@inheritDoc} */
@Override public PlatformContext context() {
return platformCtx;
}
/** {@inheritDoc} */
@Override public void registerStore(PlatformCacheStore store, boolean convertBinary)
throws IgniteCheckedException {
storeLock.readLock().lock();
try {
if (stopped)
throw new IgniteCheckedException("Failed to initialize interop store because node is stopping: " +
store);
if (started)
registerStore0(store, convertBinary);
else
pendingStores.add(new StoreInfo(store, convertBinary));
}
finally {
storeLock.readLock().unlock();
}
}
/** {@inheritDoc} */
@Override public void onDisconnected(IgniteFuture<?> reconnectFut) throws IgniteCheckedException {
platformCtx.gateway().onClientDisconnected();
// 1) onReconnected is called on all grid components.
// 2) After all of grid components have completed their reconnection, reconnectFut is completed.
reconnectFut.listen(new CI1<IgniteFuture<?>>() {
@Override public void apply(IgniteFuture<?> future) {
platformCtx.gateway().onClientReconnected(clusterRestarted);
}
});
}
/** {@inheritDoc} */
@Override public IgniteInternalFuture<?> onReconnected(boolean clusterRestarted) throws IgniteCheckedException {
// Save the flag value for callback of reconnectFut.
this.clusterRestarted = clusterRestarted;
return null;
}
/**
* Creates new platform cache.
*/
private PlatformTarget createPlatformCache(IgniteCacheProxy cache) {
assert cache != null;
return new PlatformCache(platformCtx, cache, false, cacheExts);
}
/**
* Checks whether logger level is enabled.
*
* @param level Level.
* @return Result.
*/
private boolean loggerIsLevelEnabled(int level) {
IgniteLogger log = ctx.grid().log();
switch (level) {
case PlatformLogger.LVL_TRACE:
return log.isTraceEnabled();
case PlatformLogger.LVL_DEBUG:
return log.isDebugEnabled();
case PlatformLogger.LVL_INFO:
return log.isInfoEnabled();
case PlatformLogger.LVL_WARN:
return true;
case PlatformLogger.LVL_ERROR:
return true;
default:
assert false;
}
return false;
}
/**
* Logs to the Ignite logger.
*
* @param level Level.
* @param message Message.
* @param category Category.
* @param errorInfo Exception.
*/
private void loggerLog(int level, String message, String category, String errorInfo) {
IgniteLogger log = ctx.grid().log();
if (category != null)
log = log.getLogger(category);
Throwable err = errorInfo == null ? null : new IgniteException("Platform error:" + errorInfo);
switch (level) {
case PlatformLogger.LVL_TRACE:
log.trace(message);
break;
case PlatformLogger.LVL_DEBUG:
log.debug(message);
break;
case PlatformLogger.LVL_INFO:
log.info(message);
break;
case PlatformLogger.LVL_WARN:
log.warning(message, err);
break;
case PlatformLogger.LVL_ERROR:
log.error(message, err);
break;
default:
assert false;
}
}
/** {@inheritDoc} */
@Override public long processInLongOutLong(int type, long val) throws IgniteCheckedException {
switch (type) {
case OP_LOGGER_IS_LEVEL_ENABLED: {
return loggerIsLevelEnabled((int) val) ? TRUE : FALSE;
}
case OP_RELEASE_START: {
releaseStart();
return 0;
}
case OP_SET_BASELINE_TOPOLOGY_VER: {
ctx.grid().cluster().setBaselineTopology(val);
return 0;
}
}
return PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public long processInStreamOutLong(int type, BinaryRawReaderEx reader) throws IgniteCheckedException {
switch (type) {
case OP_DESTROY_CACHE: {
ctx.grid().destroyCache(reader.readString());
return 0;
}
case OP_LOGGER_LOG: {
loggerLog(reader.readInt(), reader.readString(), reader.readString(), reader.readString());
return 0;
}
case OP_SET_BASELINE_TOPOLOGY_NODES: {
int cnt = reader.readInt();
Collection<BaselineNode> nodes = new ArrayList<>(cnt);
for (int i = 0; i < cnt; i++) {
Object consId = reader.readObjectDetached();
Map<String, Object> attrs = PlatformUtils.readNodeAttributes(reader);
nodes.add(new DetachedClusterNode(consId, attrs));
}
ctx.grid().cluster().setBaselineTopology(nodes);
return 0;
}
case OP_ADD_CACHE_CONFIGURATION:
CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader, CURRENT_VER);
ctx.grid().addCacheConfiguration(cfg);
return 0;
case OP_DISABLE_WAL:
ctx.grid().cluster().disableWal(reader.readString());
return 0;
case OP_ENABLE_WAL:
ctx.grid().cluster().enableWal(reader.readString());
return 0;
case OP_SET_TX_TIMEOUT_ON_PME:
ctx.grid().cluster().setTxTimeoutOnPartitionMapExchange(reader.readLong());
return 0;
case OP_IS_WAL_ENABLED:
return ctx.grid().cluster().isWalEnabled(reader.readString()) ? TRUE : FALSE;
}
return PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public long processInStreamOutLong(int type, BinaryRawReaderEx reader, PlatformMemory mem) throws IgniteCheckedException {
return processInStreamOutLong(type, reader);
}
/** {@inheritDoc} */
@Override public void processInStreamOutStream(int type, BinaryRawReaderEx reader, BinaryRawWriterEx writer) throws IgniteCheckedException {
PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public PlatformTarget processInStreamOutObject(int type, BinaryRawReaderEx reader) throws IgniteCheckedException {
switch (type) {
case OP_GET_CACHE: {
String name = reader.readString();
IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().cache(name);
if (cache == null)
throw new IllegalArgumentException("Cache doesn't exist: " + name);
return createPlatformCache(cache);
}
case OP_CREATE_CACHE: {
String name = reader.readString();
IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().createCache(name);
return createPlatformCache(cache);
}
case OP_GET_OR_CREATE_CACHE: {
String name = reader.readString();
IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().getOrCreateCache(name);
return createPlatformCache(cache);
}
case OP_CREATE_CACHE_FROM_CONFIG: {
CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader, CURRENT_VER);
IgniteCacheProxy cache = reader.readBoolean()
? (IgniteCacheProxy)ctx.grid().createCache(cfg, PlatformConfigurationUtils.readNearConfiguration(reader))
: (IgniteCacheProxy)ctx.grid().createCache(cfg);
return createPlatformCache(cache);
}
case OP_GET_OR_CREATE_CACHE_FROM_CONFIG: {
CacheConfiguration cfg = PlatformConfigurationUtils.readCacheConfiguration(reader, CURRENT_VER);
IgniteCacheProxy cache = reader.readBoolean()
? (IgniteCacheProxy)ctx.grid().getOrCreateCache(cfg,
PlatformConfigurationUtils.readNearConfiguration(reader))
: (IgniteCacheProxy)ctx.grid().getOrCreateCache(cfg);
return createPlatformCache(cache);
}
case OP_GET_AFFINITY: {
return new PlatformAffinity(platformCtx, ctx, reader.readString());
}
case OP_GET_DATA_STREAMER: {
String cacheName = reader.readString();
boolean keepBinary = reader.readBoolean();
IgniteDataStreamer ldr = ctx.dataStream().dataStreamer(cacheName);
ldr.keepBinary(true);
return new PlatformDataStreamer(platformCtx, cacheName, (DataStreamerImpl)ldr, keepBinary);
}
case OP_GET_EXTENSION: {
int id = reader.readInt();
if (extensions != null && id < extensions.length) {
PlatformPluginExtension ext = extensions[id];
if (ext != null) {
return ext.createTarget();
}
}
throw new IgniteException("Platform extension is not registered [id=" + id + ']');
}
case OP_GET_ATOMIC_LONG: {
String name = reader.readString();
long initVal = reader.readLong();
boolean create = reader.readBoolean();
GridCacheAtomicLongImpl atomicLong = (GridCacheAtomicLongImpl)ignite().atomicLong(name, initVal, create);
if (atomicLong == null)
return null;
return new PlatformAtomicLong(platformCtx, atomicLong);
}
case OP_GET_ATOMIC_REFERENCE: {
String name = reader.readString();
Object initVal = reader.readObjectDetached();
boolean create = reader.readBoolean();
return PlatformAtomicReference.createInstance(platformCtx, name, initVal, create);
}
case OP_GET_ATOMIC_SEQUENCE: {
String name = reader.readString();
long initVal = reader.readLong();
boolean create = reader.readBoolean();
IgniteAtomicSequence atomicSeq = ignite().atomicSequence(name, initVal, create);
if (atomicSeq == null)
return null;
return new PlatformAtomicSequence(platformCtx, atomicSeq);
}
case OP_CREATE_NEAR_CACHE: {
String cacheName = reader.readString();
NearCacheConfiguration cfg = PlatformConfigurationUtils.readNearConfiguration(reader);
IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().createNearCache(cacheName, cfg);
return createPlatformCache(cache);
}
case OP_GET_OR_CREATE_NEAR_CACHE: {
String cacheName = reader.readString();
NearCacheConfiguration cfg = PlatformConfigurationUtils.readNearConfiguration(reader);
IgniteCacheProxy cache = (IgniteCacheProxy)ctx.grid().getOrCreateNearCache(cacheName, cfg);
return createPlatformCache(cache);
}
case OP_GET_TRANSACTIONS: {
String lbl = reader.readString();
return new PlatformTransactions(platformCtx, lbl);
}
}
return PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public PlatformTarget processInObjectStreamOutObjectStream(int type, @Nullable PlatformTarget arg,
BinaryRawReaderEx reader,
BinaryRawWriterEx writer)
throws IgniteCheckedException {
return PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public void processOutStream(int type, BinaryRawWriterEx writer) throws IgniteCheckedException {
switch (type) {
case OP_GET_IGNITE_CONFIGURATION: {
PlatformConfigurationUtils.writeIgniteConfiguration(writer, ignite().configuration(), CURRENT_VER);
return;
}
case OP_GET_CACHE_NAMES: {
Collection<String> names = ignite().cacheNames();
writer.writeInt(names.size());
for (String name : names)
writer.writeString(name);
return;
}
case OP_GET_BASELINE_TOPOLOGY: {
Collection<BaselineNode> blt = ignite().cluster().currentBaselineTopology();
writer.writeInt(blt.size());
for (BaselineNode n : blt) {
writer.writeObjectDetached(n.consistentId());
PlatformUtils.writeNodeAttributes(writer, n.attributes());
}
return;
}
}
PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public PlatformTarget processOutObject(int type) throws IgniteCheckedException {
switch (type) {
case OP_GET_TRANSACTIONS:
return new PlatformTransactions(platformCtx);
case OP_GET_CLUSTER_GROUP:
return new PlatformClusterGroup(platformCtx, ctx.grid().cluster());
case OP_GET_BINARY_PROCESSOR: {
return new PlatformBinaryProcessor(platformCtx);
}
}
return PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public PlatformAsyncResult processInStreamAsync(int type, BinaryRawReaderEx reader) throws IgniteCheckedException {
return PlatformAbstractTarget.throwUnsupported(type);
}
/** {@inheritDoc} */
@Override public Exception convertException(Exception e) {
return e;
}
/**
* Internal store initialization routine.
*
* @param store Store.
* @param convertBinary Convert binary flag.
* @throws IgniteCheckedException If failed.
*/
private void registerStore0(PlatformCacheStore store, boolean convertBinary) throws IgniteCheckedException {
if (store instanceof PlatformDotNetCacheStore) {
PlatformDotNetCacheStore store0 = (PlatformDotNetCacheStore)store;
store0.initialize(ctx, convertBinary);
}
else
throw new IgniteCheckedException("Unsupported interop store: " + store);
}
/**
* Prepare cache extensions.
*
* @param cacheExts Original extensions.
* @return Prepared extensions.
*/
private static PlatformCacheExtension[] prepareCacheExtensions(Collection<PlatformCacheExtension> cacheExts) {
if (!F.isEmpty(cacheExts)) {
int maxExtId = 0;
Map<Integer, PlatformCacheExtension> idToExt = new HashMap<>();
for (PlatformCacheExtension cacheExt : cacheExts) {
if (cacheExt == null)
throw new IgniteException("Platform cache extension cannot be null.");
if (cacheExt.id() < 0)
throw new IgniteException("Platform cache extension ID cannot be negative: " + cacheExt);
PlatformCacheExtension oldCacheExt = idToExt.put(cacheExt.id(), cacheExt);
if (oldCacheExt != null)
throw new IgniteException("Platform cache extensions cannot have the same ID [" +
"id=" + cacheExt.id() + ", first=" + oldCacheExt + ", second=" + cacheExt + ']');
if (cacheExt.id() > maxExtId)
maxExtId = cacheExt.id();
}
PlatformCacheExtension[] res = new PlatformCacheExtension[maxExtId + 1];
for (PlatformCacheExtension cacheExt : cacheExts)
res[cacheExt.id()]= cacheExt;
return res;
}
else
//noinspection ZeroLengthArrayAllocation
return new PlatformCacheExtension[0];
}
/**
 * Prepare extensions.
 * <p>
 * Validates that extensions are non-null, have non-negative IDs and unique IDs, then
 * lays them out in an array addressed directly by extension ID.
 *
 * @param exts Original extensions.
 * @return Prepared extensions (array indexed by extension ID).
 */
private static PlatformPluginExtension[] prepareExtensions(PlatformPluginExtension[] exts) {
    if (!F.isEmpty(exts)) {
        int maxExtId = 0;

        Map<Integer, PlatformPluginExtension> idToExt = new HashMap<>();

        for (PlatformPluginExtension ext : exts) {
            if (ext == null)
                throw new IgniteException("Platform extension cannot be null.");

            if (ext.id() < 0)
                throw new IgniteException("Platform extension ID cannot be negative: " + ext);

            // Fixed copy-paste residue: was named 'oldCacheExt' (copied from prepareCacheExtensions).
            PlatformPluginExtension oldExt = idToExt.put(ext.id(), ext);

            if (oldExt != null)
                throw new IgniteException("Platform extensions cannot have the same ID [" +
                    "id=" + ext.id() + ", first=" + oldExt + ", second=" + ext + ']');

            if (ext.id() > maxExtId)
                maxExtId = ext.id();
        }

        // Direct-addressing table: slot index == extension ID (gaps stay null).
        PlatformPluginExtension[] res = new PlatformPluginExtension[maxExtId + 1];

        for (PlatformPluginExtension ext : exts)
            res[ext.id()] = ext;

        return res;
    }
    else
        //noinspection ZeroLengthArrayAllocation
        return new PlatformPluginExtension[0];
}
/**
 * Immutable holder pairing a platform cache store with its binary-conversion flag,
 * kept until the store can actually be initialized.
 */
private static class StoreInfo {
/** Platform cache store awaiting initialization. */
private final PlatformCacheStore store;
/** Whether values should be converted to/from the binary format on initialization. */
private final boolean convertBinary;
/**
 * Constructor.
 *
 * @param store Store.
 * @param convertBinary Convert binary flag.
 */
private StoreInfo(PlatformCacheStore store, boolean convertBinary) {
this.store = store;
this.convertBinary = convertBinary;
}
}
}
| ptupitsyn/ignite | modules/core/src/main/java/org/apache/ignite/internal/processors/platform/PlatformProcessorImpl.java | Java | apache-2.0 | 29,379 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy
import java.io._
import java.util.jar.JarFile
import java.util.logging.Level
import java.util.zip.{ZipEntry, ZipOutputStream}
import scala.collection.JavaConversions._
import com.google.common.io.{ByteStreams, Files}
import org.apache.spark.{SparkException, Logging}
import org.apache.spark.api.r.RUtils
import org.apache.spark.util.{RedirectThread, Utils}
/**
 * Utilities for detecting, extracting and building R packages that are bundled inside
 * jars (Spark Packages shipping R source under R/pkg, flagged via the jar manifest).
 */
private[deploy] object RPackageUtils extends Logging {

  /** The key in the MANIFEST.mf that we look for, in case a jar contains R code. */
  private final val hasRPackage = "Spark-HasRPackage"

  /** Base of the shell command used in order to install R packages. */
  private final val baseInstallCmd = Seq("R", "CMD", "INSTALL", "-l")

  /** R source code should exist under R/pkg in a jar. */
  private final val RJarEntries = "R/pkg"

  /** Documentation on how the R source file layout should be in the jar. */
  private[deploy] final val RJarDoc =
    s"""In order for Spark to build R packages that are parts of Spark Packages, there are a few
      |requirements. The R source code must be shipped in a jar, with additional Java/Scala
      |classes. The jar must be in the following format:
      |  1- The Manifest (META-INF/MANIFEST.mf) must contain the key-value: $hasRPackage: true
      |  2- The standard R package layout must be preserved under R/pkg/ inside the jar. More
      |  information on the standard R package layout can be found in:
      |  http://cran.r-project.org/doc/contrib/Leisch-CreatingPackages.pdf
      |  An example layout is given below. After running `jar tf $$JAR_FILE | sort`:
      |
      |META-INF/MANIFEST.MF
      |R/
      |R/pkg/
      |R/pkg/DESCRIPTION
      |R/pkg/NAMESPACE
      |R/pkg/R/
      |R/pkg/R/myRcode.R
      |org/
      |org/apache/
      |...
      """.stripMargin.trim

  /** Internal method for logging. We log to a printStream in tests, for debugging purposes. */
  private def print(
      msg: String,
      printStream: PrintStream,
      level: Level = Level.FINE,
      e: Throwable = null): Unit = {
    if (printStream != null) {
      // scalastyle:off println
      printStream.println(msg)
      // scalastyle:on println
      if (e != null) {
        e.printStackTrace(printStream)
      }
    } else {
      level match {
        case Level.INFO => logInfo(msg)
        case Level.WARNING => logWarning(msg)
        case Level.SEVERE => logError(msg, e)
        case _ => logDebug(msg)
      }
    }
  }

  /**
   * Checks the manifest of the Jar whether there is any R source code bundled with it.
   * Exposed for testing.
   */
  private[deploy] def checkManifestForR(jar: JarFile): Boolean = {
    val manifest = jar.getManifest.getMainAttributes
    manifest.getValue(hasRPackage) != null && manifest.getValue(hasRPackage).trim == "true"
  }

  /**
   * Runs the standard R package installation code to build the R package from source.
   * Multiple runs don't cause problems.
   */
  private def rPackageBuilder(
      dir: File,
      printStream: PrintStream,
      verbose: Boolean,
      libDir: String): Boolean = {
    // this code should be always running on the driver.
    val pathToPkg = Seq(dir, "R", "pkg").mkString(File.separator)
    val installCmd = baseInstallCmd ++ Seq(libDir, pathToPkg)
    if (verbose) {
      print(s"Building R package with the command: $installCmd", printStream)
    }
    try {
      val builder = new ProcessBuilder(installCmd)
      builder.redirectErrorStream(true)
      // Put the SparkR package directory into R library search paths in case this R package
      // may depend on SparkR.
      val env = builder.environment()
      val rPackageDir = RUtils.sparkRPackagePath(isDriver = true)
      env.put("SPARKR_PACKAGE_DIR", rPackageDir.mkString(","))
      env.put("R_PROFILE_USER",
        Seq(rPackageDir(0), "SparkR", "profile", "general.R").mkString(File.separator))
      val process = builder.start()
      new RedirectThread(process.getInputStream, printStream, "redirect R packaging").start()
      process.waitFor() == 0
    } catch {
      case e: Throwable =>
        print("Failed to build R package.", printStream, Level.SEVERE, e)
        false
    }
  }

  /**
   * Extracts the files under /R in the jar to a temporary directory for building.
   */
  private def extractRFolder(jar: JarFile, printStream: PrintStream, verbose: Boolean): File = {
    val tempDir = Utils.createTempDir(null)
    val jarEntries = jar.entries()
    while (jarEntries.hasMoreElements) {
      val entry = jarEntries.nextElement()
      val entryRIndex = entry.getName.indexOf(RJarEntries)
      if (entryRIndex > -1) {
        val entryPath = entry.getName.substring(entryRIndex)
        if (entry.isDirectory) {
          val dir = new File(tempDir, entryPath)
          if (verbose) {
            print(s"Creating directory: $dir", printStream)
          }
          dir.mkdirs
        } else {
          val inStream = jar.getInputStream(entry)
          val outPath = new File(tempDir, entryPath)
          Files.createParentDirs(outPath)
          val outStream = new FileOutputStream(outPath)
          if (verbose) {
            print(s"Extracting $entry to $outPath", printStream)
          }
          Utils.copyStream(inStream, outStream, closeStreams = true)
        }
      }
    }
    tempDir
  }

  /**
   * Checks each jar in the comma-separated list for bundled R source code (flagged in the
   * manifest, see [[checkManifestForR]]) and, when present, extracts and builds the R package.
   */
  private[deploy] def checkAndBuildRPackage(
      jars: String,
      printStream: PrintStream = null,
      verbose: Boolean = false): Unit = {
    jars.split(",").foreach { jarPath =>
      val file = new File(Utils.resolveURI(jarPath))
      if (file.exists()) {
        val jar = new JarFile(file)
        // Always close the jar once processed; it was previously leaked.
        try {
          if (checkManifestForR(jar)) {
            print(s"$file contains R source code. Now installing package.", printStream, Level.INFO)
            val rSource = extractRFolder(jar, printStream, verbose)
            if (RUtils.rPackages.isEmpty) {
              RUtils.rPackages = Some(Utils.createTempDir().getAbsolutePath)
            }
            try {
              if (!rPackageBuilder(rSource, printStream, verbose, RUtils.rPackages.get)) {
                print(s"ERROR: Failed to build R package in $file.", printStream)
                print(RJarDoc, printStream)
              }
            } finally {
              // clean up
              // NOTE(review): File.delete() does not delete non-empty directories recursively;
              // confirm whether a recursive delete of the extracted sources was intended here.
              rSource.delete()
            }
          } else {
            if (verbose) {
              print(s"$file doesn't contain R source code, skipping...", printStream)
            }
          }
        } finally {
          jar.close()
        }
      } else {
        print(s"WARN: $file resolved as dependency, but not found.", printStream, Level.WARNING)
      }
    }
  }

  /** Lists all regular files under `dir` recursively, skipping names matching any exclude pattern. */
  private def listFilesRecursively(dir: File, excludePatterns: Seq[String]): Set[File] = {
    if (!dir.exists()) {
      Set.empty[File]
    } else {
      if (dir.isDirectory) {
        val subDir = dir.listFiles(new FilenameFilter {
          override def accept(dir: File, name: String): Boolean = {
            !excludePatterns.map(name.contains).reduce(_ || _) // exclude files with given pattern
          }
        })
        subDir.flatMap(listFilesRecursively(_, excludePatterns)).toSet
      } else {
        Set(dir)
      }
    }
  }

  /** Zips all the R libraries built for distribution to the cluster. */
  private[deploy] def zipRLibraries(dir: File, name: String): File = {
    val filesToBundle = listFilesRecursively(dir, Seq(".zip"))
    // create a zip file from scratch, do not append to existing file.
    val zipFile = new File(dir, name)
    zipFile.delete()
    val zipOutputStream = new ZipOutputStream(new FileOutputStream(zipFile, false))
    try {
      filesToBundle.foreach { file =>
        // get the relative paths for proper naming in the zip file
        val relPath = file.getAbsolutePath.replaceFirst(dir.getAbsolutePath, "")
        val fis = new FileInputStream(file)
        try {
          val zipEntry = new ZipEntry(relPath)
          zipOutputStream.putNextEntry(zipEntry)
          ByteStreams.copy(fis, zipOutputStream)
          zipOutputStream.closeEntry()
        } finally {
          // Close the input stream even if the copy fails.
          fis.close()
        }
      }
    } finally {
      zipOutputStream.close()
    }
    zipFile
  }
}
| practice-vishnoi/dev-spark-1 | core/src/main/scala/org/apache/spark/deploy/RPackageUtils.scala | Scala | apache-2.0 | 8,951 |
//-----------------------------------------------------------------------
// <copyright file="NUnitAssertions.cs" company="Akka.NET Project">
// Copyright (C) 2009-2016 Typesafe Inc. <http://www.typesafe.com>
// Copyright (C) 2013-2016 Akka.NET project <https://github.com/akkadotnet/akka.net>
// </copyright>
//-----------------------------------------------------------------------
using System;
using NUnit.Framework;
namespace Akka.TestKit.NUnit
{
/// <summary>
/// Assertions for NUnit: bridges the TestKit's <see cref="ITestKitAssertions"/>
/// contract onto the NUnit <see cref="Assert"/> API.
/// </summary>
public class NUnitAssertions : ITestKitAssertions
{
    /// <summary>Unconditionally fails the test with the given formatted message.</summary>
    public void Fail(string format = "", params object[] args)
    {
        Assert.Fail(format, args);
    }

    /// <summary>Asserts that <paramref name="condition"/> is true.</summary>
    public void AssertTrue(bool condition, string format = "", params object[] args)
    {
        Assert.IsTrue(condition, format, args);
    }

    /// <summary>Asserts that <paramref name="condition"/> is false.</summary>
    public void AssertFalse(bool condition, string format = "", params object[] args)
    {
        Assert.IsFalse(condition, format, args);
    }

    /// <summary>Asserts equality using NUnit's default equality semantics.</summary>
    public void AssertEqual<T>(T expected, T actual, string format = "", params object[] args)
    {
        Assert.AreEqual(expected, actual, format, args);
    }

    /// <summary>
    /// Asserts equality using a caller-supplied comparer. Throws NUnit's
    /// <see cref="AssertionException"/> directly so the failure is reported like
    /// any other NUnit assertion.
    /// </summary>
    public void AssertEqual<T>(T expected, T actual, Func<T, T, bool> comparer, string format = "", params object[] args)
    {
        if (!comparer(expected, actual))
            throw new AssertionException(string.Format("Assert.AreEqual failed. Expected [{0}]. Actual [{1}]. {2}", FormatValue(expected), FormatValue(actual), string.Format(format, args)));
    }

    /// <summary>Renders a value for failure messages, mapping null to the literal "null".</summary>
    private static string FormatValue<T>(T expected)
    {
        return ReferenceEquals(expected, null) ? "null" : expected.ToString();
    }
}
} | skotzko/akka.net | src/contrib/testkits/Akka.TestKit.NUnit/NUnitAssertions.cs | C# | apache-2.0 | 1,786 |
#!/bin/bash
# Smoke test: fetch the Gerrit web UI from a locally running server and show
# the first few lines of the response to prove the server answered.
#
# -e: exit on any command failure; -u: error on unset variables;
# -o pipefail: a failure anywhere in a pipeline fails the script.
set -euo pipefail

readonly url=http://localhost:8080
readonly tmp_file=gerrit

# Retry until the server accepts connections: up to 20 tries, 5s between
# connection-refused retries, 10s per-attempt timeout.
wget --retry-connrefused --waitretry=5 --timeout=10 --tries=20 -O "/tmp/$tmp_file" "$url"
head -n 4 "/tmp/$tmp_file"
| guymers/docker-by-bazel | test/gerrit/test_web.sh | Shell | apache-2.0 | 197 |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/eks/EKS_EXPORTS.h>
#include <aws/eks/model/Cluster.h>
#include <utility>
namespace Aws
{
template<typename RESULT_TYPE>
class AmazonWebServiceResult;
namespace Utils
{
namespace Json
{
class JsonValue;
} // namespace Json
} // namespace Utils
namespace EKS
{
namespace Model
{
// Result object for the EKS RegisterCluster operation: wraps the Cluster
// description returned by the service. (AWS-SDK generated-code style.)
class AWS_EKS_API RegisterClusterResult
{
public:
    RegisterClusterResult();
    // Deserializing constructor: populates the result from the service's JSON response.
    RegisterClusterResult(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);
    RegisterClusterResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);

    // The full description of the cluster that was registered.
    inline const Cluster& GetCluster() const{ return m_cluster; }

    inline void SetCluster(const Cluster& value) { m_cluster = value; }

    inline void SetCluster(Cluster&& value) { m_cluster = std::move(value); }

    // Fluent setters returning *this for call chaining.
    inline RegisterClusterResult& WithCluster(const Cluster& value) { SetCluster(value); return *this;}

    inline RegisterClusterResult& WithCluster(Cluster&& value) { SetCluster(std::move(value)); return *this;}

private:
    Cluster m_cluster;
};
} // namespace Model
} // namespace EKS
} // namespace Aws
| awslabs/aws-sdk-cpp | aws-cpp-sdk-eks/include/aws/eks/model/RegisterClusterResult.h | C | apache-2.0 | 1,286 |
/**
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2017 the original author or authors.
*/
package org.assertj.core.util.diff;
import java.util.List;
/**
 * Initially copied from https://code.google.com/p/java-diff-utils/.
 * <p>
 * Describes the delete-delta between original and revised texts.
 *
 * @author <a href="dm.naumenko@gmail.com">Dmitry Naumenko</a>
 * @param <T> The type of the compared elements in the 'lines'.
 */
public class DeleteDelta<T> extends Delta<T> {

  /**
   * Creates a delete delta spanning the two given chunks.
   *
   * @param original the original chunk; must not be {@code null}
   * @param revised the revised chunk; must not be {@code null}
   */
  public DeleteDelta(Chunk<T> original, Chunk<T> revised) {
    super(original, revised);
  }

  /**
   * {@inheritDoc}
   */
  @Override
  public void applyTo(List<T> target) throws IllegalStateException {
    verify(target);
    // Clearing the sub-list view removes the whole original chunk from the target in one go,
    // equivalent to repeatedly removing the element at the chunk's start position.
    int start = getOriginal().getPosition();
    target.subList(start, start + getOriginal().size()).clear();
  }

  @Override
  public TYPE getType() {
    return Delta.TYPE.DELETE;
  }

  @Override
  public void verify(List<T> target) throws IllegalStateException {
    getOriginal().verify(target);
  }
}
| ChrisCanCompute/assertj-core | src/main/java/org/assertj/core/util/diff/DeleteDelta.java | Java | apache-2.0 | 1,820 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jms;
import java.util.HashMap;
import java.util.Map;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.Session;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.support.ExchangeHelper;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Before;
import org.junit.Test;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.core.MessageCreator;
import static org.apache.camel.component.jms.JmsComponent.jmsComponentAutoAcknowledge;
/**
 * Verifies that the JMS component can both consume and produce JMS {@link MapMessage}s,
 * converting between {@link Map} payloads and the JMS message body in each direction.
 */
public class ConsumeJmsMapMessageTest extends CamelTestSupport {
    protected JmsTemplate jmsTemplate;
    private MockEndpoint endpoint;

    @Test
    public void testConsumeMapMessage() throws Exception {
        endpoint.expectedMessageCount(1);
        jmsTemplate.setPubSubDomain(false);
        // Send a raw JMS MapMessage directly to the queue the route consumes from.
        jmsTemplate.send("test.map", new MessageCreator() {
            public Message createMessage(Session session) throws JMSException {
                MapMessage mapMessage = session.createMapMessage();
                mapMessage.setString("foo", "abc");
                mapMessage.setString("bar", "xyz");
                return mapMessage;
            }
        });
        endpoint.assertIsSatisfied();
        assertCorrectMapReceived();
    }

    // Asserts that the mock endpoint received exactly the two-entry map sent by the tests.
    protected void assertCorrectMapReceived() {
        Exchange exchange = endpoint.getReceivedExchanges().get(0);
        // This should be a JMS Exchange
        assertNotNull(ExchangeHelper.getBinding(exchange, JmsBinding.class));
        JmsMessage in = (JmsMessage) exchange.getIn();
        assertNotNull(in);
        Map<?, ?> map = exchange.getIn().getBody(Map.class);
        log.info("Received map: " + map);
        assertNotNull("Should have received a map message!", map);
        assertIsInstanceOf(MapMessage.class, in.getJmsMessage());
        assertEquals("map.foo", "abc", map.get("foo"));
        assertEquals("map.bar", "xyz", map.get("bar"));
        assertEquals("map.size", 2, map.size());
    }

    @Test
    public void testSendMapMessage() throws Exception {
        endpoint.expectedMessageCount(1);
        // A plain Java Map body sent to the producer route should arrive as a JMS MapMessage.
        Map<String, String> map = new HashMap<>();
        map.put("foo", "abc");
        map.put("bar", "xyz");
        template.sendBody("direct:test", map);
        endpoint.assertIsSatisfied();
        assertCorrectMapReceived();
    }

    @Override
    @Before
    public void setUp() throws Exception {
        super.setUp();
        endpoint = getMockEndpoint("mock:result");
    }

    // Registers an auto-acknowledge JMS component backed by the test connection factory.
    protected CamelContext createCamelContext() throws Exception {
        CamelContext camelContext = super.createCamelContext();
        ConnectionFactory connectionFactory = CamelJmsTestHelper.createConnectionFactory();
        jmsTemplate = new JmsTemplate(connectionFactory);
        camelContext.addComponent("activemq", jmsComponentAutoAcknowledge(connectionFactory));
        return camelContext;
    }

    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            public void configure() throws Exception {
                from("activemq:test.map").to("mock:result");
                from("direct:test").to("activemq:test.map");
            }
        };
    }
}
| Fabryprog/camel | components/camel-jms/src/test/java/org/apache/camel/component/jms/ConsumeJmsMapMessageTest.java | Java | apache-2.0 | 4,265 |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.impl.constructionheuristic.greedyFit.decider;
/**
 * Controls when the construction heuristic decider may pick a move early,
 * before all candidate moves have been evaluated.
 */
public enum ConstructionHeuristicPickEarlyType {
    /** Never pick early; evaluate all candidates. */
    NEVER,
    // NOTE(review): presumably picks early once a move's score equals or improves the
    // last step score relative to the first — confirm against the decider implementation.
    FIRST_LAST_STEP_SCORE_EQUAL_OR_IMPROVING;
}
| psiroky/optaplanner | optaplanner-core/src/main/java/org/optaplanner/core/impl/constructionheuristic/greedyFit/decider/ConstructionHeuristicPickEarlyType.java | Java | apache-2.0 | 779 |
#
# Author:: Adam Jacob (<adam@chef.io>)
# Author:: Tyler Cloke (<tyler@chef.io>)
# Copyright:: Copyright 2008-2017, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
describe Chef::Resource::RemoteFile do
  let(:resource) { Chef::Resource::RemoteFile.new("fakey_fakerton") }

  describe "name_property" do
    it "the path property is the name_property" do
      expect(resource.path).to eql("fakey_fakerton")
    end
  end

  describe "Actions" do
    it "sets the default action as :create" do
      expect(resource.action).to eql([:create])
    end

    it "supports :create, :create_if_missing, :delete, :touch actions" do
      expect { resource.action :create }.not_to raise_error
      expect { resource.action :create_if_missing }.not_to raise_error
      expect { resource.action :delete }.not_to raise_error
      expect { resource.action :touch }.not_to raise_error
    end
  end

  describe "initialize" do
    it "is a subclass of Chef::Resource::File" do
      expect(resource).to be_a_kind_of(Chef::Resource::File)
    end
  end

  it "says its provider is RemoteFile when the source is an absolute URI" do
    resource.source("http://www.google.com/robots.txt")
    expect(resource.provider_for_action(:create)).to be_kind_of(Chef::Provider::RemoteFile)
  end

  it "says its provider is RemoteFile when the source is a network share" do
    resource.source("\\\\fakey\\fakerton\\fake.txt")
    expect(resource.provider_for_action(:create)).to be_kind_of(Chef::Provider::RemoteFile)
  end

  describe "source" do
    it "does not have a default value for 'source'" do
      expect(resource.source).to eql([])
    end

    it "accepts a URI for the remote file source" do
      resource.source "http://opscode.com/"
      expect(resource.source).to eql([ "http://opscode.com/" ])
    end

    it "accepts a windows network share source" do
      resource.source "\\\\fakey\\fakerton\\fake.txt"
      expect(resource.source).to eql([ "\\\\fakey\\fakerton\\fake.txt" ])
    end

    it "accepts file URIs with spaces" do
      resource.source("file:///C:/foo bar")
      expect(resource.source).to eql(["file:///C:/foo bar"])
    end

    # Typo fixed: "evalutator" -> "evaluator".
    it "accepts a delayed evaluator (string) for the remote file source" do
      resource.source Chef::DelayedEvaluator.new { "http://opscode.com/" }
      expect(resource.source).to eql([ "http://opscode.com/" ])
    end

    it "accepts an array of URIs for the remote file source" do
      resource.source([ "http://opscode.com/", "http://puppetlabs.com/" ])
      expect(resource.source).to eql([ "http://opscode.com/", "http://puppetlabs.com/" ])
    end

    # Typo fixed: "delated" -> "delayed".
    it "accepts a delayed evaluator (array) for the remote file source" do
      resource.source Chef::DelayedEvaluator.new { [ "http://opscode.com/", "http://puppetlabs.com/" ] }
      expect(resource.source).to eql([ "http://opscode.com/", "http://puppetlabs.com/" ])
    end

    it "accepts multiple URIs as arguments for the remote file source" do
      resource.source("http://opscode.com/", "http://puppetlabs.com/")
      expect(resource.source).to eql([ "http://opscode.com/", "http://puppetlabs.com/" ])
    end

    it "only accepts a single argument if a delayed evaluator is used" do
      expect do
        resource.source("http://opscode.com/", Chef::DelayedEvaluator.new { "http://opscode.com/" })
      end.to raise_error(Chef::Exceptions::InvalidRemoteFileURI)
    end

    it "only accepts a single array item if a delayed evaluator is used" do
      expect do
        resource.source(["http://opscode.com/", Chef::DelayedEvaluator.new { "http://opscode.com/" }])
      end.to raise_error(Chef::Exceptions::InvalidRemoteFileURI)
    end

    it "does not accept a non-URI as the source" do
      expect { resource.source("not-a-uri") }.to raise_error(Chef::Exceptions::InvalidRemoteFileURI)
    end

    it "does not accept a non-URI as the source when read from a delayed evaluator" do
      expect do
        resource.source(Chef::DelayedEvaluator.new { "not-a-uri" })
        resource.source
      end.to raise_error(Chef::Exceptions::InvalidRemoteFileURI)
    end

    it "raises an exception when source is an empty array" do
      expect { resource.source([]) }.to raise_error(ArgumentError)
    end
  end

  describe "checksum" do
    it "accepts a string for the checksum object" do
      resource.checksum "asdf"
      expect(resource.checksum).to eql("asdf")
    end

    it "defaults to nil" do
      expect(resource.checksum).to eq(nil)
    end
  end

  describe "ftp_active_mode" do
    it "accepts a boolean for the ftp_active_mode object" do
      resource.ftp_active_mode true
      expect(resource.ftp_active_mode).to be_truthy
    end

    it "defaults to false" do
      expect(resource.ftp_active_mode).to be_falsey
    end
  end

  describe "conditional get options" do
    it "defaults to using etags and last modified" do
      expect(resource.use_etags).to be_truthy
      expect(resource.use_last_modified).to be_truthy
    end

    it "enables or disables etag and last modified options as a group" do
      resource.use_conditional_get(false)
      expect(resource.use_etags).to be_falsey
      expect(resource.use_last_modified).to be_falsey

      resource.use_conditional_get(true)
      expect(resource.use_etags).to be_truthy
      expect(resource.use_last_modified).to be_truthy
    end

    # Typo fixed: "indivdually" -> "individually".
    it "disables etags individually" do
      resource.use_etags(false)
      expect(resource.use_etags).to be_falsey
      expect(resource.use_last_modified).to be_truthy
    end

    it "disables last modified individually" do
      resource.use_last_modified(false)
      expect(resource.use_last_modified).to be_falsey
      expect(resource.use_etags).to be_truthy
    end
  end

  describe "when it has group, mode, owner, source, and checksum" do
    before do
      if Chef::Platform.windows?
        resource.path("C:/temp/origin/file.txt")
        resource.rights(:read, "Everyone")
        resource.deny_rights(:full_control, "Clumsy_Sam")
      else
        resource.path("/this/path/")
        resource.group("pokemon")
        resource.mode("0664")
        resource.owner("root")
      end
      resource.source("https://www.google.com/images/srpr/logo3w.png")
      resource.checksum("1" * 26)
    end

    it "describes its state" do
      state = resource.state_for_resource_reporter
      if Chef::Platform.windows?
        puts state
        expect(state[:rights]).to eq([{ :permissions => :read, :principals => "Everyone" }])
        expect(state[:deny_rights]).to eq([{ :permissions => :full_control, :principals => "Clumsy_Sam" }])
      else
        expect(state[:group]).to eq("pokemon")
        expect(state[:mode]).to eq("0664")
        expect(state[:owner]).to eq("root")
        expect(state[:checksum]).to eq("1" * 26)
      end
    end

    it "returns the path as its identity" do
      if Chef::Platform.windows?
        expect(resource.identity).to eq("C:/temp/origin/file.txt")
      else
        expect(resource.identity).to eq("/this/path/")
      end
    end
  end
end
| Ppjet6/chef | spec/unit/resource/remote_file_spec.rb | Ruby | apache-2.0 | 7,638 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.runners.flink.translation.wrappers.streaming.io;
import com.google.common.annotations.VisibleForTesting;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import org.apache.beam.runners.flink.metrics.FlinkMetricContainer;
import org.apache.beam.runners.flink.metrics.ReaderInvocationUtil;
import org.apache.beam.runners.flink.translation.types.CoderTypeInformation;
import org.apache.beam.runners.flink.translation.utils.SerializedPipelineOptions;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.KvCoder;
import org.apache.beam.sdk.coders.SerializableCoder;
import org.apache.beam.sdk.io.UnboundedSource;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.PaneInfo;
import org.apache.beam.sdk.util.WindowedValue;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.TypeDescriptor;
import org.apache.beam.sdk.values.ValueWithRecordId;
import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.functions.StoppableFunction;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.OperatorStateStore;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.CheckpointListener;
import org.apache.flink.runtime.state.DefaultOperatorStateBackend;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.runtime.state.FunctionSnapshotContext;
import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction;
import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunction;
import org.apache.flink.streaming.api.operators.StreamingRuntimeContext;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback;
import org.joda.time.Instant;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Wrapper for executing {@link UnboundedSource UnboundedSources} as a Flink Source.
*/
public class UnboundedSourceWrapper<
OutputT, CheckpointMarkT extends UnboundedSource.CheckpointMark>
extends RichParallelSourceFunction<WindowedValue<ValueWithRecordId<OutputT>>>
implements ProcessingTimeCallback, StoppableFunction,
CheckpointListener, CheckpointedFunction {
private static final Logger LOG = LoggerFactory.getLogger(UnboundedSourceWrapper.class);

/** Name of the source step; used when reporting metrics. */
private final String stepName;

/**
 * Keep the options so that we can initialize the localReaders.
 */
private final SerializedPipelineOptions serializedOptions;

/**
 * For snapshot and restore. Null when the source provides no checkpoint-mark coder,
 * in which case no snapshots are created.
 */
private final KvCoder<
? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> checkpointCoder;

/**
 * The split sources. We split them in the constructor to ensure that all parallel
 * sources are consistent about the split sources.
 */
private final List<? extends UnboundedSource<OutputT, CheckpointMarkT>> splitSources;

/**
 * The local split sources. Assigned at runtime when the wrapper is executed in parallel.
 */
private transient List<UnboundedSource<OutputT, CheckpointMarkT>> localSplitSources;

/**
 * The local split readers. Assigned at runtime when the wrapper is executed in parallel.
 * Make it a field so that we can access it in {@link #onProcessingTime(long)} for
 * emitting watermarks.
 */
private transient List<UnboundedSource.UnboundedReader<OutputT>> localReaders;

/**
 * Flag to indicate whether the source is running.
 * Initialize here and not in run() to prevent races where we cancel a job before run() is
 * ever called or run() is called after cancel().
 */
private volatile boolean isRunning = true;

/**
 * Make it a field so that we can access it in {@link #onProcessingTime(long)} for registering new
 * triggers.
 */
private transient StreamingRuntimeContext runtimeContext;

/**
 * Make it a field so that we can access it in {@link #onProcessingTime(long)} for emitting
 * watermarks.
 */
private transient SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> context;

/**
 * Pending checkpoints which have not been acknowledged yet.
 */
private transient LinkedHashMap<Long, List<CheckpointMarkT>> pendingCheckpoints;

/**
 * Keep a maximum of 32 checkpoints for {@code CheckpointMark.finalizeCheckpoint()}.
 */
private static final int MAX_NUMBER_PENDING_CHECKPOINTS = 32;

/** Flink operator state holding (source split, checkpoint mark) pairs across restores. */
private transient ListState<KV<? extends
UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>> stateForCheckpoint;

/**
 * false if checkpointCoder is null or no restore state by starting first.
 */
private transient boolean isRestored = false;
/**
 * Constructor.
 *
 * @param stepName Name of the source step, used for metrics reporting.
 * @param pipelineOptions Pipeline options; stored in serialized form so the wrapper itself
 *     stays serializable.
 * @param source The Beam unbounded source to execute.
 * @param parallelism Desired number of splits; the source is split eagerly here so that all
 *     parallel instances agree on the split assignment.
 * @throws Exception If splitting the source fails.
 */
@SuppressWarnings("unchecked")
public UnboundedSourceWrapper(
String stepName,
PipelineOptions pipelineOptions,
UnboundedSource<OutputT, CheckpointMarkT> source,
int parallelism) throws Exception {
this.stepName = stepName;
this.serializedOptions = new SerializedPipelineOptions(pipelineOptions);
if (source.requiresDeduping()) {
// Deduping is silently skipped; records may be duplicated on restore.
LOG.warn("Source {} requires deduping but Flink runner doesn't support this yet.", source);
}
Coder<CheckpointMarkT> checkpointMarkCoder = source.getCheckpointMarkCoder();
if (checkpointMarkCoder == null) {
LOG.info("No CheckpointMarkCoder specified for this source. Won't create snapshots.");
checkpointCoder = null;
} else {
Coder<? extends UnboundedSource<OutputT, CheckpointMarkT>> sourceCoder =
(Coder) SerializableCoder.of(new TypeDescriptor<UnboundedSource>() {
});
checkpointCoder = KvCoder.of(sourceCoder, checkpointMarkCoder);
}
// get the splits early. we assume that the generated splits are stable,
// this is necessary so that the mapping of state to source is correct
// when restoring
splitSources = source.split(parallelism, pipelineOptions);
}
/**
 * Initialize and restore state before starting execution of the source.
 * When restoring, readers are re-created from the (source, checkpoint mark) pairs
 * in {@link #stateForCheckpoint}; otherwise splits are assigned to this subtask
 * round-robin by split index modulo the subtask count.
 *
 * @param parameters Flink configuration (unused here)
 * @throws Exception if a reader cannot be created
 */
@Override
public void open(Configuration parameters) throws Exception {
runtimeContext = (StreamingRuntimeContext) getRuntimeContext();
// figure out which split sources we're responsible for
int subtaskIndex = runtimeContext.getIndexOfThisSubtask();
int numSubtasks = runtimeContext.getNumberOfParallelSubtasks();
localSplitSources = new ArrayList<>();
localReaders = new ArrayList<>();
pendingCheckpoints = new LinkedHashMap<>();
if (isRestored) {
// restore the splitSources from the checkpoint to ensure consistent ordering
for (KV<? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> restored:
stateForCheckpoint.get()) {
localSplitSources.add(restored.getKey());
localReaders.add(restored.getKey().createReader(
serializedOptions.getPipelineOptions(), restored.getValue()));
}
} else {
// initialize localReaders and localSources from scratch
for (int i = 0; i < splitSources.size(); i++) {
if (i % numSubtasks == subtaskIndex) {
UnboundedSource<OutputT, CheckpointMarkT> source =
splitSources.get(i);
// null checkpoint mark: start reading from scratch
UnboundedSource.UnboundedReader<OutputT> reader =
source.createReader(serializedOptions.getPipelineOptions(), null);
localSplitSources.add(source);
localReaders.add(reader);
}
}
}
LOG.info("Unbounded Flink Source {}/{} is reading from sources: {}",
subtaskIndex,
numSubtasks,
localSplitSources);
}
/**
 * Main read loop. Handles three cases depending on how many readers this subtask
 * owns (assigned in {@link #open}):
 * <ul>
 *   <li>no readers: emit a final {@code Long.MAX_VALUE} watermark and idle until
 *       canceled (Flink requires all operators to stay up for checkpointing),</li>
 *   <li>one reader: poll it directly, sleeping briefly when it has no data,</li>
 *   <li>several readers: round-robin over them, sleeping only after a full pass
 *       in which none of them produced data.</li>
 * </ul>
 *
 * @param ctx the Flink source context used for emission and the checkpoint lock
 * @throws Exception on reader failure
 */
@Override
public void run(SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> ctx) throws Exception {
context = ctx;
FlinkMetricContainer metricContainer = new FlinkMetricContainer(getRuntimeContext());
ReaderInvocationUtil<OutputT, UnboundedSource.UnboundedReader<OutputT>> readerInvoker =
new ReaderInvocationUtil<>(
stepName,
serializedOptions.getPipelineOptions(),
metricContainer);
if (localReaders.size() == 0) {
// do nothing, but still look busy ...
// also, output a Long.MAX_VALUE watermark since we know that we're not
// going to emit anything
// we can't return here since Flink requires that all operators stay up,
// otherwise checkpointing would not work correctly anymore
ctx.emitWatermark(new Watermark(Long.MAX_VALUE));
// wait until this is canceled
final Object waitLock = new Object();
while (isRunning) {
try {
// Flink will interrupt us at some point
//noinspection SynchronizationOnLocalVariableOrMethodParameter
synchronized (waitLock) {
// don't wait indefinitely, in case something goes horribly wrong
waitLock.wait(1000);
}
} catch (InterruptedException e) {
if (!isRunning) {
// restore the interrupted state, and fall through the loop
Thread.currentThread().interrupt();
}
}
}
} else if (localReaders.size() == 1) {
// the easy case, we just read from one reader
UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(0);
boolean dataAvailable = readerInvoker.invokeStart(reader);
if (dataAvailable) {
emitElement(ctx, reader);
}
// schedule the first periodic watermark emission
setNextWatermarkTimer(this.runtimeContext);
while (isRunning) {
dataAvailable = readerInvoker.invokeAdvance(reader);
if (dataAvailable) {
emitElement(ctx, reader);
} else {
Thread.sleep(50);
}
}
} else {
// a bit more complicated, we are responsible for several localReaders
// loop through them and sleep if none of them had any data
int numReaders = localReaders.size();
int currentReader = 0;
// start each reader and emit data if immediately available
for (UnboundedSource.UnboundedReader<OutputT> reader : localReaders) {
boolean dataAvailable = readerInvoker.invokeStart(reader);
if (dataAvailable) {
emitElement(ctx, reader);
}
}
// a flag telling us whether any of the localReaders had data
// if no reader had data, sleep for bit
boolean hadData = false;
while (isRunning) {
UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(currentReader);
boolean dataAvailable = readerInvoker.invokeAdvance(reader);
if (dataAvailable) {
emitElement(ctx, reader);
hadData = true;
}
currentReader = (currentReader + 1) % numReaders;
// only sleep (or reset the flag) at the end of a full round over all readers
if (currentReader == 0 && !hadData) {
Thread.sleep(50);
} else if (currentReader == 0) {
hadData = false;
}
}
}
}
/**
 * Emit the current element from the given Reader. The reader is guaranteed to have data.
 * The element is timestamped with the reader's current timestamp, placed in the global
 * window with no pane firing, and paired with the reader's record id for deduping.
 *
 * @param ctx the Flink source context (supplies the checkpoint lock)
 * @param reader the reader whose current element is emitted
 */
private void emitElement(
SourceContext<WindowedValue<ValueWithRecordId<OutputT>>> ctx,
UnboundedSource.UnboundedReader<OutputT> reader) {
// make sure that reader state update and element emission are atomic
// with respect to snapshots
synchronized (ctx.getCheckpointLock()) {
OutputT item = reader.getCurrent();
byte[] recordId = reader.getCurrentRecordId();
Instant timestamp = reader.getCurrentTimestamp();
WindowedValue<ValueWithRecordId<OutputT>> windowedValue =
WindowedValue.of(new ValueWithRecordId<>(item, recordId), timestamp,
GlobalWindow.INSTANCE, PaneInfo.NO_FIRING);
ctx.collectWithTimestamp(windowedValue, timestamp.getMillis());
}
}
/**
 * Closes all local readers. Every reader is closed even if an earlier one fails:
 * the first failure is rethrown and any later failures are attached to it as
 * suppressed exceptions. (The previous implementation stopped at the first
 * failing reader, leaking the remaining ones.)
 *
 * @throws Exception the first exception thrown while closing a reader
 */
@Override
public void close() throws Exception {
  super.close();
  if (localReaders != null) {
    Exception firstFailure = null;
    for (UnboundedSource.UnboundedReader<OutputT> reader : localReaders) {
      try {
        reader.close();
      } catch (Exception e) {
        if (firstFailure == null) {
          firstFailure = e;
        } else {
          firstFailure.addSuppressed(e);
        }
      }
    }
    if (firstFailure != null) {
      throw firstFailure;
    }
  }
}
/**
 * Called by Flink on cancellation (e.g. on failure); signals the {@link #run}
 * loop to exit. Safe to call before {@code run()} since {@code isRunning} is
 * initialized at field declaration.
 */
@Override
public void cancel() {
isRunning = false;
}
/**
 * Called by Flink on a graceful stop; signals the {@link #run} loop to exit.
 */
@Override
public void stop() {
isRunning = false;
}
// ------------------------------------------------------------------------
//  Checkpoint and restore
// ------------------------------------------------------------------------
/**
 * Writes one (source split, checkpoint mark) pair per local reader into the
 * operator state and records the marks in {@link #pendingCheckpoints} so they
 * can be finalized in {@link #notifyCheckpointComplete}. Evicts the oldest
 * pending entries once more than {@link #MAX_NUMBER_PENDING_CHECKPOINTS}
 * checkpoints are outstanding. No-op after the source has stopped or when no
 * checkpoint coder is available.
 *
 * @param functionSnapshotContext provides the id of the checkpoint being taken
 * @throws Exception if a checkpoint mark cannot be obtained or state written
 */
@Override
public void snapshotState(FunctionSnapshotContext functionSnapshotContext) throws Exception {
if (!isRunning) {
LOG.debug("snapshotState() called on closed source");
} else {
if (checkpointCoder == null) {
// no checkpoint coder available in this source
return;
}
stateForCheckpoint.clear();
long checkpointId = functionSnapshotContext.getCheckpointId();
// we checkpoint the sources along with the CheckpointMarkT to ensure
// that we have a correct mapping of checkpoints to sources when
// restoring
List<CheckpointMarkT> checkpointMarks = new ArrayList<>(localSplitSources.size());
for (int i = 0; i < localSplitSources.size(); i++) {
UnboundedSource<OutputT, CheckpointMarkT> source = localSplitSources.get(i);
UnboundedSource.UnboundedReader<OutputT> reader = localReaders.get(i);
@SuppressWarnings("unchecked")
CheckpointMarkT mark = (CheckpointMarkT) reader.getCheckpointMark();
checkpointMarks.add(mark);
KV<UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT> kv =
KV.of(source, mark);
stateForCheckpoint.add(kv);
}
// cleanup old pending checkpoints and add new checkpoint
// (removes the diff+1 oldest entries; LinkedHashMap preserves insertion order)
int diff = pendingCheckpoints.size() - MAX_NUMBER_PENDING_CHECKPOINTS;
if (diff >= 0) {
for (Iterator<Long> iterator = pendingCheckpoints.keySet().iterator();
diff >= 0;
diff--) {
iterator.next();
iterator.remove();
}
}
pendingCheckpoints.put(checkpointId, checkpointMarks);
}
}
/**
 * Creates (and, on recovery, restores) the Flink operator state that holds the
 * (source split, checkpoint mark) pairs written by {@link #snapshotState}.
 * Sets {@link #isRestored} so that {@link #open} re-creates readers from the
 * restored marks instead of starting from scratch. No-op when no checkpoint
 * coder is available.
 *
 * @param context Flink-provided access to the operator state store
 * @throws Exception if the state backend cannot provide the state handle
 */
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
  if (checkpointCoder == null) {
    // no checkpoint coder available in this source
    return;
  }
  OperatorStateStore stateStore = context.getOperatorStateStore();
  CoderTypeInformation<
      KV<? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>>
      typeInformation = (CoderTypeInformation) new CoderTypeInformation<>(checkpointCoder);
  stateForCheckpoint = stateStore.getOperatorState(
      new ListStateDescriptor<>(DefaultOperatorStateBackend.DEFAULT_OPERATOR_STATE_NAME,
          typeInformation.createSerializer(new ExecutionConfig())));
  if (context.isRestored()) {
    isRestored = true;
    // Log messages below previously misspelled the class name ("Unbouned...").
    LOG.info("Having restore state in the UnboundedSourceWrapper.");
  } else {
    LOG.info("No restore state for UnboundedSourceWrapper.");
  }
}
/**
 * Processing-time timer callback: emits the minimum watermark over all local
 * readers ({@code Long.MAX_VALUE} if no reader reports one) under the checkpoint
 * lock, then re-registers itself via {@link #setNextWatermarkTimer}. No-op once
 * the source has stopped.
 *
 * @param timestamp the processing time at which the timer fired (unused)
 * @throws Exception never thrown here directly; required by the timer interface
 */
@Override
public void onProcessingTime(long timestamp) throws Exception {
if (this.isRunning) {
synchronized (context.getCheckpointLock()) {
// find minimum watermark over all localReaders
long watermarkMillis = Long.MAX_VALUE;
for (UnboundedSource.UnboundedReader<OutputT> reader: localReaders) {
Instant watermark = reader.getWatermark();
if (watermark != null) {
watermarkMillis = Math.min(watermark.getMillis(), watermarkMillis);
}
}
context.emitWatermark(new Watermark(watermarkMillis));
}
setNextWatermarkTimer(this.runtimeContext);
}
}
/**
 * Registers this function as a processing-time callback, firing one
 * auto-watermark interval from now. Does nothing once the source has stopped.
 *
 * @param runtime runtime context supplying the watermark interval and timer service
 */
private void setNextWatermarkTimer(StreamingRuntimeContext runtime) {
if (this.isRunning) {
long watermarkInterval = runtime.getExecutionConfig().getAutoWatermarkInterval();
long timeToNextWatermark = getTimeToNextWatermark(watermarkInterval);
runtime.getProcessingTimeService().registerTimer(timeToNextWatermark, this);
}
}
/**
 * Returns the absolute processing time (epoch millis) of the next watermark emission.
 */
private long getTimeToNextWatermark(long watermarkInterval) {
return System.currentTimeMillis() + watermarkInterval;
}
/**
 * Visible so that we can check this in tests. Must not be used for anything else.
 *
 * @return all splits generated in the constructor, across every subtask
 */
@VisibleForTesting
public List<? extends UnboundedSource<OutputT, CheckpointMarkT>> getSplitSources() {
return splitSources;
}
/**
 * Visible so that we can check this in tests. Must not be used for anything else.
 *
 * @return the splits owned by this subtask (populated in {@link #open})
 */
@VisibleForTesting
public List<? extends UnboundedSource<OutputT, CheckpointMarkT>> getLocalSplitSources() {
return localSplitSources;
}
/**
 * Called by Flink when the checkpoint with the given id has completed.
 * Finalizes the {@code CheckpointMark}s recorded for that checkpoint and
 * removes all pending entries up to and including it (older checkpoints are
 * subsumed by the completed one). Unknown ids are ignored.
 *
 * @param checkpointId id of the completed checkpoint
 * @throws Exception if finalizing a checkpoint mark fails
 */
@Override
public void notifyCheckpointComplete(long checkpointId) throws Exception {
List<CheckpointMarkT> checkpointMarks = pendingCheckpoints.get(checkpointId);
if (checkpointMarks != null) {
// remove old checkpoints including the current one
Iterator<Long> iterator = pendingCheckpoints.keySet().iterator();
long currentId;
do {
currentId = iterator.next();
iterator.remove();
} while (currentId != checkpointId);
// confirm all marks
for (CheckpointMarkT mark : checkpointMarks) {
mark.finalizeCheckpoint();
}
}
}
}
| dhalperi/beam | runners/flink/src/main/java/org/apache/beam/runners/flink/translation/wrappers/streaming/io/UnboundedSourceWrapper.java | Java | apache-2.0 | 18,014 |
/*
* Copyright (c) 2014 DataTorrent, Inc. ALL Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
// Angular directive wrapping an nvd3 line chart. Expects `data` in nvd3's
// series format: [{ values: [{ timestamp, value }, ...] }, ...].
angular.module('ui.widgets')
.directive('wtNvd3LineChart', function ($filter) {
return {
restrict: 'A',
replace: true,
templateUrl: 'template/widgets/nvd3LineChart/nvd3LineChart.html',
scope: {
data: '=data',
showLegend: '@',
showTimeRange: '=?',
timeAxisFormat: '=?'
},
controller: function ($scope) {
var filter = $filter('date');
var numberFilter = $filter('number');
// x-axis ticks: format the timestamp with the configured date format.
$scope.xAxisTickFormatFunction = function () {
return function (d) {
return filter(d, $scope.timeAxisFormat);
};
};
// y-axis ticks: abbreviate thousands as 'k' and millions as 'm'.
$scope.yAxisTickFormatFunction = function () {
return function (d) {
if (d > 999) {
var value;
var scale;
if (d < 999999) {
value = Math.round(d/1000);
scale = 'k';
} else {
value = Math.round(d/1000000);
scale = 'm';
}
return numberFilter(value) + scale;
} else {
return numberFilter(d);
}
};
};
// Accessors telling nvd3 which datum fields hold x and y.
$scope.xFunction = function () {
return function (d) {
return d.timestamp;
};
};
$scope.yFunction = function () {
return function (d) {
return d.value;
};
};
},
link: function postLink(scope, element, attrs) {
// showTimeRange defaults to true when the attribute is absent.
if (!_.has(attrs, 'showTimeRange')) {
scope.showTimeRange = true;
}
scope.timeAxisFormat = scope.timeAxisFormat || 'HH:mm';
// Track the first/last timestamps of the first series for the range display.
scope.$watch('data', function (data) {
if (data && data[0] && data[0].values && (data[0].values.length > 1)) {
var timeseries = _.sortBy(data[0].values, function (item) {
return item.timestamp;
});
var start = timeseries[0].timestamp;
var end = timeseries[timeseries.length - 1].timestamp;
scope.start = start;
scope.end = end;
}
});
}
};
}); | DataTorrent/malhar-angular-widgets | src/widgets/nvd3LineChart/nvd3LineChart.js | JavaScript | apache-2.0 | 2,754 |
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package blb
import (
"k8s.io/autoscaler/cluster-autoscaler/cloudprovider/baiducloud/baiducloud-sdk-go/bce"
)
// Endpoint maps a Baidu Cloud region short name to the default
// BLB API endpoint host for that region.
var Endpoint = map[string]string{
"bj": "blb.bj.baidubce.com",
"gz": "blb.gz.baidubce.com",
"su": "blb.su.baidubce.com",
"hk": "blb.hkg.baidubce.com",
"bd": "blb.bd.baidubce.com",
}
// Client is the BLB client implementation for the Baidu Cloud BLB API.
// It embeds *bce.Client, which supplies region, endpoint, and request helpers.
type Client struct {
*bce.Client
}
// NewBLBClient constructs a BLB client on top of a generic BCE client
// built from the supplied configuration.
func NewBLBClient(config *bce.Config) *Client {
	return &Client{Client: bce.NewClient(config)}
}
// GetURL generates the full URL of an HTTP request for the Baidu Cloud BLB API.
// An explicitly configured endpoint takes precedence; otherwise the default
// endpoint for the client's region is used. The API version doubles as the URI path.
func (c *Client) GetURL(version string, params map[string]string) string {
	endpoint := c.Endpoint
	if endpoint == "" {
		endpoint = Endpoint[c.GetRegion()]
	}
	return c.Client.GetURL(endpoint, version, params)
}
| kubernetes/autoscaler | cluster-autoscaler/cloudprovider/baiducloud/baiducloud-sdk-go/blb/client.go | GO | apache-2.0 | 1,471 |
/*
Copyright 2018 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Best-effort prototype extensions (guarded so redefinition errors are ignored):
// Array.peek() returns the last element; String.replaceAll() is a split/join helper.
try { Object.defineProperty(Array.prototype, "peek", { value: function () { return (this.length > 0 ? this[this.length - 1] : undefined); } }); } catch (e) { }
try { Object.defineProperty(String.prototype, "replaceAll", { value: function replaceAll(oldVal, newVal) { return (this.split(oldVal).join(newVal)); } }); } catch (e) { }
// 0x52534D42, the ASCII signature 'RSMB' identifying the raw SMBIOS firmware
// table provider for GetSystemFirmwareTable.
var RSMB = 1381190978;
// Lookup tables for SMBIOS enum fields.
var memoryLocation = { 0x1: 'Other', 0x2: 'Unknown', 0x3: 'System Board', 0x4: 'ISA', 0x5: 'EISA', 0x6: 'PCI', 0x7: 'MCA', 0x8: 'PCMCIA', 0x9: 'Proprietary', 0xA: 'NuBus', 0xA0: 'PC-98/C20', 0xA1: 'PC-98/C24', 0xA2: 'PC-98/E', 0xA3: 'PC-98/LB' };
var wakeReason = ['Reserved', 'Other', 'Unknown', 'APM Timer', 'Modem Ring', 'LAN', 'Power Switch', 'PCI', 'AC Power'];
// Pad `str` on the left with '0' characters until it reaches length `len`.
// Contract (unchanged from the original): a null/undefined `len` yields null;
// a null `str` produces a string of `len` zeros; a `str` already at least
// `len` long is returned unchanged.
function zeroLeftPad(str, len)
{
    if (len == null) { return null; }   // no usable target length
    if (str == null) { str = ''; }      // pad-only request
    var pad = '';
    var missing = len - str.length;     // may be <= 0 (or NaN): no padding then
    while (pad.length < missing) { pad += '0'; }
    return pad + str;
}
// Cross-platform reader for the raw SMBIOS firmware tables.
// On Windows it calls Kernel32's GetSystemFirmwareTable through _GenericMarshal;
// on Linux it shells out to `dmidecode -u` and re-canonicalizes its text output
// back into the binary table format.
function SMBiosTables()
{
this._ObjectID = 'SMBiosTable';
if (process.platform == 'win32') {
this._marshal = require('_GenericMarshal');
this._native = this._marshal.CreateNativeProxy("Kernel32.dll");
this._native.CreateMethod('EnumSystemFirmwareTables');
this._native.CreateMethod('GetSystemFirmwareTable');
}
if (process.platform == 'linux') {
// Rebuild a binary SMBIOS blob from `dmidecode -u` output: hex-decode each
// "Header and Data" section, append its quoted-out strings section, and
// terminate each structure with the required NUL byte(s).
this._canonicalizeData = function _canonicalizeData(data) {
var lines = data.toString().split('Header and Data:\x0A');
var MemoryStream = require('MemoryStream');
var ms = new MemoryStream();
for (var i = 1; i < lines.length; ++i) {
var tokens = lines[i].split('Strings:\x0A');
// strip newlines/spaces/tabs to get a contiguous hex string
var header = tokens[0].split('\x0A\x0A')[0].replaceAll('\x0A', '').trim().replaceAll(' ', '').replaceAll('\x09', '');
ms.write(Buffer.from(header, 'hex'));
if (tokens.length > 1) {
var strings = tokens[1].split('\x0A\x0A')[0].split('\x0A');
var stringsFinal = [];
for (var strx in strings) {
var tmp = strings[strx].trim().replaceAll(' ', '').replaceAll('\x09', '');
// dmidecode prints the decoded string in quotes after the hex; skip it
if (!(tmp[0] == '"')) { stringsFinal.push(tmp); }
}
ms.write(Buffer.from(stringsFinal.join(''), 'hex'));
ms.write(Buffer.from('00', 'hex'));
}
else {
// no strings section: structure ends with a double NUL
ms.write(Buffer.from('0000', 'hex'));
}
}
var retVal = ms.buffer;
retVal.ms = ms;
return (retVal);
};
}
// Split a raw SMBIOS blob into structures, keyed by structure type id.
// Each map entry is an array of Buffers (one per structure instance) holding
// the structure body with the 4-byte header stripped; the structure's trailing
// string-set is attached to the Buffer as `_strings`.
this._parse = function _parse(SMData) {
var ret = {};
var pbyte;
var i = 0
var SMData;
var structcount = 0;
while (SMData && i < SMData.length)
{
var SMtype = SMData[i];
var SMlength = SMData[i + 1];
if (!ret[SMtype]) { ret[SMtype] = []; }
ret[SMtype].push(SMData.slice(i + 4, i + SMlength));
// NOTE(review): the local `var pbyte` above is never assigned in this
// function, so `_ext` is always undefined here; get() assigns an implicit
// global `pbyte` that this local shadows -- confirm intent.
if (process.platform == 'win32') { ret[SMtype].peek()._ext = pbyte; }
i += SMlength;
ret[SMtype].peek()._strings = [];
// collect the NUL-terminated strings that follow the structure body
while (SMData[i] != 0 && i <= SMData.length)
{
var strstart = i;
// Start of String, find end of string
while (SMData[i++] != 0 && i <= SMData.length);
try
{
ret[SMtype].peek()._strings.push(SMData.slice(strstart, i).toString().trim());
}
catch (ee)
{
}
}
// a structure with no strings ends in a double NUL; with strings, one more NUL
i += (ret[SMtype].peek()._strings.length == 0) ? 2 : 1;
++structcount;
//console.log('End of Table[' + SMtype + ']: ' + i);
}
//console.log('Struct Count = ' + structcount);
return (ret);
};
// Fetch and parse the SMBIOS tables. On Windows this is synchronous (the
// parsed map is returned, or passed to `callback` if provided); on Linux it is
// asynchronous via `dmidecode` and always goes through `callback`. On other
// platforms the callback receives (or the function returns) null.
this.get = function get(callback) {
if (process.platform == 'win32') {
// first call sizes the buffer, second call fills it
var size = this._native.GetSystemFirmwareTable(RSMB, 0, 0, 0).Val;
//console.log('Table Size: ' + size);
var PtrSize = this._marshal.CreatePointer()._size;
var buffer = this._marshal.CreateVariable(size);
var written = this._native.GetSystemFirmwareTable(RSMB, 0, buffer, size).Val;
//console.log('Written Size: ' + written);
var rawBuffer = buffer.toBuffer();
// RawSMBIOSData layout: table length at offset 4, table bytes from offset 8
// NOTE(review): `pbyte` and `SMData` are assigned without `var` and become
// implicit globals -- confirm intent.
var length = buffer.Deref(4, 4).toBuffer().readUInt32LE(0);
pbyte = buffer.Deref(8, length);
SMData = pbyte.toBuffer();
if (callback) { callback.apply(this, [this._parse(SMData)]); return; } else { return (this._parse(SMData)); }
}
if (process.platform == 'linux') {
var MemoryStream = require('MemoryStream');
this.child = require('child_process').execFile('/usr/sbin/dmidecode', ['dmidecode', '-u']);
this.child.SMBiosTable = this;
this.child.ms = new MemoryStream();
this.child.ms.callback = callback;
this.child.ms.child = this.child;
this.child.stdout.on('data', function (buffer) { this.parent.ms.write(buffer); });
this.child.on('exit', function () { this.ms.end(); });
this.child.ms.on('end', function () {
//console.log('read ' + this.buffer.length + ' bytes');
// tiny output means dmidecode failed (typically insufficient permission)
if (this.buffer.length < 300) {
//console.log('Not enough permission to read SMBiosTable');
if (this.callback) { this.callback.apply(this.child.SMBiosTable, []); }
}
else {
var SMData = this.child.SMBiosTable._canonicalizeData(this.buffer);
var j = this.child.SMBiosTable._parse(SMData);
if (this.callback) { this.callback.apply(this.child.SMBiosTable, [j]); }
}
});
return;
}
if (callback) { callback.apply(this, [null]); return; } else { return (null); }
};
// Summarize a parsed SMBIOS table set into processor, memory, system, slot,
// and Intel AMT sections. Each section is best-effort: a failure simply
// leaves that key undefined. An oversized result is replaced by {}.
this.parse = function parse(data) {
    var r = {};
    try { r.processorInfo = this.processorInfo(data); } catch (e) { }
    try { r.memoryInfo = this.memoryInfo(data); } catch (e) { }
    try { r.systemInfo = this.systemInfo(data); } catch (e) { }
    // BUGFIX: this previously called systemInfo(data) again, so the
    // systemSlots key carried system info instead of the slot list.
    try { r.systemSlots = this.systemSlots(data); } catch (e) { }
    try { r.amtInfo = this.amtInfo(data); } catch (e) { }
    // Guard against results too large to serialize/transport.
    try { if (JSON.stringify(r).length > 65535) { r = {}; } } catch (ee) { }
    return r;
}
// Extract populated processors from the type-4 (Processor Information)
// structures. NOTE: consumes (pops) data[4] as it iterates. All byte offsets
// are relative to the header-stripped structure body produced by _parse.
this.processorInfo = function processorInfo(data) {
if (!data) { throw ('no data'); }
var ret = [];
var ptype = ['ERROR', 'Other', 'Unknown', 'CPU', 'ALU', 'DSP', 'GPU'];
var statusString = ['Unknown', 'Enabled', 'Disabled by user', 'Disabled by BIOS', 'Idle', 'Reserved', 'Reserved', 'Other'];
var cpuid = 0;
while (data[4] && data[4].length > 0) {
var p = data[4].pop();
// status byte: bit 6 = socket populated, bits 0-2 = CPU status
var populated = p[20] & 0x40;
var status = p[20] & 0x07
if (populated) {
var j = { _ObjectID: 'SMBiosTables.processorInfo' };
j.Processor = ptype[p[1]];
j.MaxSpeed = p.readUInt16LE(16) + ' Mhz';
if (p[31]) { j.Cores = p[31]; }
if (p[33]) { j.Threads = p[33]; }
j.Populated = 1;
j.Status = statusString[status];
// string indices are 1-based references into the structure's string-set
j.Socket = p._strings[p[0] - 1];
j.Manufacturer = p._strings[p[3] - 1];
j.Version = p._strings[p[12] - 1];
ret.push(j);
}
}
return (ret);
};
// Summarize the type-16 (Physical Memory Array) structure: location and
// maximum capacity in KB. 0x80000000 is the spec's "capacity unknown/extended"
// sentinel.
this.memoryInfo = function memoryInfo(data) {
if (!data) { throw ('no data'); }
var retVal = { _ObjectID: 'SMBiosTables.memoryInfo' };
if (data[16]) {
var m = data[16].peek();
retVal.location = memoryLocation[m[0]];
if ((retVal.maxCapacityKb = m.readUInt32LE(3)) == 0x80000000) {
retVal.maxCapacityKb = 'A really big number';
}
}
return (retVal);
};
// Extract the system UUID and wake-up reason from the type-1 (System
// Information) structure. The UUID's first three fields are little-endian,
// the remainder big-endian, matching the SMBIOS on-disk layout.
this.systemInfo = function systemInfo(data)
{
if (!data) { throw ('no data'); }
var retVal = { _ObjectID: 'SMBiosTables.systemInfo' };
if (data[1])
{
var si = data[1].peek();
var uuid = si.slice(4, 20);
retVal.uuid = [zeroLeftPad(uuid.readUInt32LE(0).toString(16), 8),
zeroLeftPad(uuid.readUInt16LE(4).toString(16), 4),
zeroLeftPad(uuid.readUInt16LE(6).toString(16), 4),
zeroLeftPad(uuid.readUInt16BE(8).toString(16), 4),
zeroLeftPad(uuid.slice(10).toString('hex').toLowerCase(), 12)].join('-');
retVal.wakeReason = wakeReason[si[20]];
}
return (retVal);
};
// List slot designations from the type-9 (System Slots) structures.
// NOTE: consumes (pops) data[9] as it iterates.
this.systemSlots = function systemSlots(data) {
if (!data) { throw ('no data'); }
var retVal = [];
if (data[9]) {
while (data[9].length > 0) {
var ss = data[9].pop();
// byte 0 is the 1-based index of the slot-designation string
retVal.push({ name: ss._strings[ss[0] - 1] });
}
}
return (retVal);
};
// Decode Intel AMT capabilities from the OEM type-130 ('$AMT') structure and,
// when present, the type-131 vPro settings structure.
// NOTE(review): data[131] is dereferenced without a presence check; a table set
// containing type 130 but not type 131 would throw here -- confirm intent
// (the throw would be swallowed by parse()'s try/catch).
this.amtInfo = function amtInfo(data) {
if (!data) { throw ('no data'); }
var retVal = { AMT: false };
if (data[130] && data[130].peek().slice(0, 4).toString() == '$AMT') {
var amt = data[130].peek();
retVal.AMT = amt[4] ? true : false;
if (retVal.AMT) {
retVal.enabled = amt[5] ? true : false;
retVal.storageRedirection = amt[6] ? true : false;
retVal.serialOverLan = amt[7] ? true : false;
retVal.kvm = amt[14] ? true : false;
if (data[131].peek() && data[131].peek().slice(52, 56).toString() == 'vPro') {
var settings = data[131].peek();
// byte 0 flag pairs: bit2/bit3 = TXT supported/enabled, bit4/bit5 = VMX
if (settings[0] & 0x04) { retVal.TXT = (settings[0] & 0x08) ? true : false; }
if (settings[0] & 0x10) { retVal.VMX = (settings[0] & 0x20) ? true : false; }
retVal.MEBX = settings.readUInt16LE(4).toString() + '.' + settings.readUInt16LE(6).toString() + '.' + settings.readUInt16LE(8).toString() + '.' + settings.readUInt16LE(10).toString();
var mecap = settings.slice(20, 32);
retVal.ManagementEngine = mecap.readUInt16LE(6).toString() + '.' + mecap.readUInt16LE(4).toString() + '.' + mecap.readUInt16LE(10).toString() + '.' + mecap.readUInt16LE(8).toString();
//var lan = settings.slice(36, 48);
//console.log(lan.toString('hex'));
//retVal.LAN = (lan.readUInt16LE(10) & 0x03).toString() + '/' + ((lan.readUInt16LE(10) & 0xF8) >> 3).toString();
//console.log(lan.readUInt16LE(3));
//retVal.WLAN = (lan.readUInt16LE(3) & 0x07).toString() + '/' + ((lan.readUInt16LE(3) & 0xF8) >> 3).toString() + '/' + (lan.readUInt16LE(3) >> 8).toString();
}
}
}
return (retVal);
};
// Human-readable names for the SMBIOS structure type ids used above.
this.smTableTypes = {
0: 'BIOS information',
1: 'System information',
2: 'Baseboard (or Module) information',
4: 'Processor information',
5: 'memory controller information',
6: 'Memory module information',
7: 'Cache information',
8: 'Port connector information',
9: 'System slots',
10: 'On board devices information',
11: 'OEM strings',
12: 'System configuration options',
13: 'BIOS language information',
14: 'Group associations',
15: 'System event log',
16: 'Physical memory array',
17: 'Memory device',
18: '32bit memory error information',
19: 'Memory array mapped address',
20: 'Memory device mapped address',
21: 'Built-in pointing device',
22: 'Portable battery',
23: 'System reset',
24: 'Hardware security',
25: 'System power controls',
26: 'Voltage probe',
27: 'Cooling device',
28: 'Temperature probe',
29: 'Electrical current probe',
30: 'Out-of-band remote access',
31: 'Boot integrity services (BIS) entry point',
32: 'System boot information',
33: '64bit memory error information',
34: 'Management device',
35: 'Management device component',
36: 'Management device threshold data',
37: 'Memory channel',
38: 'IPMI device information',
39: 'System power supply',
40: 'Additional information',
41: 'Onboard devices extended information',
42: 'Management controller host interface',
126: 'Inactive',
127: 'End-of-table'
}
}
module.exports = new SMBiosTables(); | Ylianst/MeshCentral | agents/modules_meshcore/smbios.js | JavaScript | apache-2.0 | 13,897 |
/*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.mss.internal.mime;
import org.junit.Assert;
import org.junit.Test;
/**
 * Unit tests for {@code MimeMapper}: file-extension to MIME-type resolution.
 */
public class MimeMapperTest {
// A well-known extension resolves to its standard MIME type.
@Test
public void testMimeMappingForKnownExtension() throws MimeMappingException {
String mimeType = MimeMapper.getMimeType("png");
Assert.assertEquals("image/png", mimeType);
}
// An unknown extension is rejected with a MimeMappingException.
@Test(expected = MimeMappingException.class)
public void testMimeMappingForUnknownExtension() throws MimeMappingException {
MimeMapper.getMimeType("unknownext");
}
}
| susinda/product-mss | carbon-mss/components/org.wso2.carbon.mss/src/test/java/org/wso2/carbon/mss/internal/mime/MimeMapperTest.java | Java | apache-2.0 | 1,232 |
/* Form layout: labels float left of their inputs in a fixed column. */
form.style {
clear: both;
}
form.style label {
width:100px;
display:block;
float:left;
padding-top:4px;
font-size:14px;
color:#FFF;
text-align:right;
padding-right:30px;
}
/* Inline label variant (e.g. next to radio buttons). */
form.style label.long {
width: auto;
display: inline;
float:none;
padding: 0;
}
form.style input, form.style textarea {
padding:3px 6px 3px 6px;
font-size:14px;
background-color:#EEE;
border:2px solid #999;
font-family:"Trebuchet MS";
color:#0099FF;
width:200px;
}
form.style textarea { width: 300px}
form.style input.radio {width: 30px;}
/* Highlight the focused field. */
form.style input:focus, form.style textarea:focus {
border-color:#00A8FF
}
form.style input.submit {
color:#FFF;
background-color:#0F414F;
border-color:#000000;
width:100px;
}
form.style fieldset {
border:0
}
form.style h2 {
margin-bottom:25px;
margin-top:10px;
width: 60%;
border-bottom: 1px dashed #00A8FF;
padding-bottom: 7px;
padding-left:10px;
font-size:20px
}
/* Contact page: left column (duplicated margin-left is an old IE hack). */
.contact_l {
margin-left:80px !important; margin-left: 50px;
width: 250px;
float:left
} .contact_l img { vertical-align: middle;}
/* Contact page: detail column. */
.contact_d {
width: 500px;
float:left
} | studiodev/archives | 2010 - Portfolio V4/templates/styles/02_form.css | CSS | apache-2.0 | 1,187 |
# AUTOGENERATED FILE
FROM balenalib/nitrogen6x-debian:stretch-run
# A few reasons for installing distribution-provided OpenJDK:
#
# 1. Oracle. Licensing prevents us from redistributing the official JDK.
#
# 2. Compiling OpenJDK also requires the JDK to be installed, and it gets
# really hairy.
#
# For some sample build times, see Debian's buildd logs:
# https://buildd.debian.org/status/logs.php?pkg=openjdk-11
# Archive tools needed by later package/JDK handling.
RUN apt-get update && apt-get install -y --no-install-recommends \
bzip2 \
unzip \
xz-utils \
&& rm -rf /var/lib/apt/lists/*
# Register the Debian stretch-backports APT source.
RUN echo 'deb http://deb.debian.org/debian stretch-backports main' > /etc/apt/sources.list.d/stretch-backports.list
# Default to UTF-8 file.encoding
ENV LANG C.UTF-8
# add a simple script that can auto-detect the appropriate JAVA_HOME value
# based on whether the JDK or only the JRE is installed
RUN { \
echo '#!/bin/sh'; \
echo 'set -e'; \
echo; \
echo 'dirname "$(dirname "$(readlink -f "$(which javac || which java)")")"'; \
} > /usr/local/bin/docker-java-home \
&& chmod +x /usr/local/bin/docker-java-home
# do some fancy footwork to create a JAVA_HOME that's cross-architecture-safe
RUN ln -svT "/usr/lib/jvm/java-11-openjdk-$(dpkg --print-architecture)" /docker-java-home
ENV JAVA_HOME /docker-java-home
# Install the headless OpenJDK 11 JRE and sanity-check the JAVA_HOME wiring.
RUN set -ex; \
\
# deal with slim variants not having man page directories (which causes "update-alternatives" to fail)
if [ ! -d /usr/share/man/man1 ]; then \
mkdir -p /usr/share/man/man1; \
fi; \
\
apt-get update; \
apt-get install -y --no-install-recommends \
openjdk-11-jre-headless \
; \
rm -rf /var/lib/apt/lists/*; \
\
rm -vf /usr/local/bin/java; \
\
# ca-certificates-java does not work on src:openjdk-11: (https://bugs.debian.org/914424, https://bugs.debian.org/894979, https://salsa.debian.org/java-team/ca-certificates-java/commit/813b8c4973e6c4bb273d5d02f8d4e0aa0b226c50#d4b95d176f05e34cd0b718357c532dc5a6d66cd7_54_56)
keytool -importkeystore -srckeystore /etc/ssl/certs/java/cacerts -destkeystore /etc/ssl/certs/java/cacerts.jks -deststoretype JKS -srcstorepass changeit -deststorepass changeit -noprompt; \
mv /etc/ssl/certs/java/cacerts.jks /etc/ssl/certs/java/cacerts; \
/var/lib/dpkg/info/ca-certificates-java.postinst configure; \
\
# verify that "docker-java-home" returns what we expect
[ "$(readlink -f "$JAVA_HOME")" = "$(docker-java-home)" ]; \
\
# update-alternatives so that future installs of other OpenJDK versions don't change /usr/bin/java
update-alternatives --get-selections | awk -v home="$(readlink -f "$JAVA_HOME")" 'index($3, home) == 1 { $2 = "manual"; print | "update-alternatives --set-selections" }'; \
# ... and verify that it actually worked for one of the alternatives we care about
update-alternatives --query java | grep -q 'Status: manual'
CMD ["echo","'No CMD command was set in Dockerfile! Details about CMD command could be found in Dockerfile Guide section in our Docs. Here's the link: https://balena.io/docs"]
# Drop a balena image-info message and shim /bin/sh to print it once.
RUN [ ! -d /.balena/messages ] && mkdir -p /.balena/messages; echo 'Here are a few details about this Docker image (For more information please visit https://www.balena.io/docs/reference/base-images/base-images/): \nArchitecture: ARM v7 \nOS: Debian Stretch \nVariant: run variant \nDefault variable(s): UDEV=off \nThe following software stack is preinstalled: \nOpenJDK v11-jre \nExtra features: \n- Easy way to install packages with `install_packages <package-name>` command \n- Run anywhere with cross-build feature (for ARM only) \n- Keep the container idling with `balena-idle` command \n- Show base image details with `balena-info` command' > /.balena/messages/image-info
RUN echo '#!/bin/sh.real\nbalena-info\nrm -f /bin/sh\ncp /bin/sh.real /bin/sh\n/bin/sh "$@"' > /bin/sh-shim \
&& chmod +x /bin/sh-shim \
&& cp /bin/sh /bin/sh.real \
&& mv /bin/sh-shim /bin/sh | nghiant2710/base-images | balena-base-images/openjdk/nitrogen6x/debian/stretch/11-jre/run/Dockerfile | Dockerfile | apache-2.0 | 3,851 |
/**
 * vue app
 * Created by HC on 2016/7/19.
 * Registers header/footer/index components and wires up vue-router.
 */
var header = Vue.extend({
template: '#header'
});
// Register the component globally
Vue.component('my-header', header);
var footer = Vue.extend({
template: '#footer'
});
// Register the component globally
Vue.component('my-footer', footer);
var index = Vue.extend({
template: '#index'
});
var App = Vue.extend({});
var router = new VueRouter();
// Route table: '/' renders the index component, '/bar' the footer component.
router.map({
'/': {
component: index
},
'/bar': {
component: footer
}
});
// Now we can start the app!
// The router will create an instance of App and mount to
// the element matching the selector #app.
router.start(App, '#app'); | jlkm2010/blog | src/main/resources/static/front/app.js | JavaScript | apache-2.0 | 653 |
<data name="commentPage" th:currentPage="${param.currentPage}"
th:moduleType="${param.moduleType}" th:moduleId="${param.moduleId}" th:mode="${param.mode}" th:asc="${param.asc}"/>
<fragment name="评论" /> | mhlx/mblog | src/main/java/me/qyh/blog/plugin/comment/template/commentWidget.html | HTML | apache-2.0 | 206 |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Threading.Tasks;
using System.Web.UI.WebControls;
using System.Xml;
namespace OpenRiaServices.DomainServices.Server
{
/// <summary>
/// Represents a domain operation method within a DomainService
/// </summary>
public abstract class DomainOperationEntry
{
private DomainOperation _operation;
private ReadOnlyCollection<DomainOperationParameter> _effectiveParameters;
private bool _hasOutCountParameter;
private string _methodName;
private Attribute _operationAttribute;
private AttributeCollection _attributes;
private Type _associatedType;
private Type _actualReturnType;
private Type _returnType;
private Type _domainServiceType;
private bool? _requiresValidation;
private bool? _requiresAuthorization;
private Func<object, object> _unwrapTaskResultFunc;
/// <summary>
/// Initializes a new instance of the DomainOperationEntry class
/// </summary>
/// <param name="domainServiceType">The <see cref="DomainService"/> Type this operation is a member of.</param>
/// <param name="name">The name of the operation</param>
/// <param name="operation">The <see cref="DomainOperation"/></param>
/// <param name="returnType">The return Type of the operation</param>
/// <param name="parameters">The parameter definitions for the operation</param>
/// <param name="attributes">The method level attributes for the operation</param>
protected DomainOperationEntry(Type domainServiceType, string name, DomainOperation operation, Type returnType, IEnumerable<DomainOperationParameter> parameters, AttributeCollection attributes)
{
if (string.IsNullOrEmpty(name))
{
throw new ArgumentNullException("name");
}
if (returnType == null)
{
throw new ArgumentNullException("returnType");
}
if (parameters == null)
{
throw new ArgumentNullException("parameters");
}
if (attributes == null)
{
throw new ArgumentNullException("attributes");
}
if (domainServiceType == null)
{
throw new ArgumentNullException("domainServiceType");
}
if (operation == DomainOperation.None)
{
throw new ArgumentException(string.Format(CultureInfo.CurrentCulture, Resource.InvalidDomainOperationEntryType, Enum.GetName(typeof(DomainOperation), operation)));
}
bool isTaskType = TypeUtility.IsTaskType(returnType);
this._methodName = isTaskType ? RemoveAsyncFromName(name) : name;
this._actualReturnType = returnType;
this._returnType = isTaskType ? TypeUtility.GetTaskReturnType(returnType) : returnType;
this._attributes = attributes;
this._operation = operation;
this._domainServiceType = domainServiceType;
List<DomainOperationParameter> effectiveParameters = parameters.ToList();
int paramCount = effectiveParameters.Count;
if (paramCount > 0)
{
DomainOperationParameter lastParameter = effectiveParameters[paramCount - 1];
if (lastParameter.IsOut && lastParameter.ParameterType.HasElementType && lastParameter.ParameterType.GetElementType() == typeof(int))
{
this._hasOutCountParameter = true;
effectiveParameters = effectiveParameters.Take(paramCount - 1).ToList();
}
}
this._effectiveParameters = effectiveParameters.AsReadOnly();
}
/// <summary>
/// Removes any trailing "Async" from the specific name.
/// </summary>
/// <param name="name">A name.</param>
/// <returns>name, but without "Async" at the end</returns>
private static string RemoveAsyncFromName(string name)
{
const string async = "Async";
if (name.EndsWith(async) && name.Length > async.Length)
return name.Substring(0, name.Length - async.Length);
else
return name;
}
/// <summary>
/// Gets a string value indicating the logical operation type
/// corresponding to the current <see cref="Operation"/> value.
/// </summary>
/// <value>
/// The value returned by this property is used in <see cref="System.ComponentModel.DataAnnotations.AuthorizationContext.OperationType"/>
/// to describe the category of operation being authorized.
/// <para>This helper property exists to avoid the overhead of <see cref="Enum.GetName"/> and
/// to map"Custom" into "Update". These strings are not localized because they are meant
/// to be used in authorization rules that work independent of culture.
/// </para>
/// </value>
internal string OperationType
{
get
{
switch (this.Operation)
{
case DomainOperation.Query:
return "Query";
case DomainOperation.Insert:
return "Insert";
case DomainOperation.Update:
case DomainOperation.Custom:
return "Update";
case DomainOperation.Delete:
return "Delete";
case DomainOperation.Invoke:
return "Invoke";
default:
System.Diagnostics.Debug.Fail("Unknown DomainOperation type");
return "Unknown";
}
}
}
/// <summary>
/// Gets the <see cref="DomainService"/> Type this operation is a member of.
/// </summary>
public Type DomainServiceType
{
get
{
return this._domainServiceType;
}
}
/// <summary>
/// Gets the name of the operation
/// </summary>
public string Name
{
get
{
return this._methodName;
}
}
/// <summary>
/// Gets the attribute that contains metadata about the operation.
/// </summary>
public Attribute OperationAttribute
{
get
{
this.InitializeOperationAttribute();
return this._operationAttribute;
}
}
/// <summary>
/// Gets a value indicating whether this operation requires validation.
/// </summary>
internal bool RequiresValidation
{
get
{
if (!this._requiresValidation.HasValue)
{
// Determine whether this operation requires validation.
this._requiresValidation = this._attributes[typeof(ValidationAttribute)] != null;
if (!this._requiresValidation.Value)
{
this._requiresValidation = this.Parameters.Any(p => p.Attributes[typeof(ValidationAttribute)] != null);
}
if (!this._requiresValidation.Value)
{
this._requiresValidation = this.Parameters.Any(p =>
{
// Complex parameters need to be validated if validation occurs on the
// type itself.
if (TypeUtility.IsSupportedComplexType(p.ParameterType))
{
Type complexType = TypeUtility.GetElementType(p.ParameterType);
MetaType metaType = MetaType.GetMetaType(complexType);
return metaType.RequiresValidation;
}
return false;
});
}
}
return this._requiresValidation.Value;
}
}
/// <summary>
/// Gets a value indicating whether this operation requires authorization.
/// </summary>
internal bool RequiresAuthorization
{
get
{
if (!this._requiresAuthorization.HasValue)
{
// Determine whether this operation requires authorization. AuthorizationAttributes may appear on
// the DomainService type as well as the DomainOperationEntry method.
this._requiresAuthorization = this._attributes[typeof(AuthorizationAttribute)] != null;
if (!this._requiresAuthorization.Value)
{
this._requiresAuthorization = DomainServiceDescription.GetDescription(this._domainServiceType).Attributes[typeof(AuthorizationAttribute)] != null;
}
}
return this._requiresAuthorization.Value;
}
}
/// <summary>
/// Based on the operation type specified, create the default corresponding attribute
/// if it hasn't been specified explicitly, and add it to the attributes collection.
/// </summary>
private void InitializeOperationAttribute()
{
if (this._operationAttribute != null)
{
return;
}
bool attributeCreated = false;
switch (this._operation)
{
case DomainOperation.Query:
this._operationAttribute = this._attributes[typeof(QueryAttribute)];
if (this._operationAttribute == null)
{
QueryAttribute qa = new QueryAttribute();
// singleton returning query methods aren't composable
qa.IsComposable = TypeUtility.FindIEnumerable(this.ReturnType) != null;
this._operationAttribute = qa;
attributeCreated = true;
}
break;
case DomainOperation.Insert:
this._operationAttribute = this._attributes[typeof(InsertAttribute)];
if (this._operationAttribute == null)
{
this._operationAttribute = new InsertAttribute();
attributeCreated = true;
}
break;
case DomainOperation.Update:
this._operationAttribute = this._attributes[typeof(UpdateAttribute)];
if (this._operationAttribute == null)
{
this._operationAttribute = new UpdateAttribute();
attributeCreated = true;
}
break;
case DomainOperation.Delete:
this._operationAttribute = this._attributes[typeof(DeleteAttribute)];
if (this._operationAttribute == null)
{
this._operationAttribute = new DeleteAttribute();
attributeCreated = true;
}
break;
case DomainOperation.Invoke:
this._operationAttribute = this._attributes[typeof(InvokeAttribute)];
if (this._operationAttribute == null)
{
this._operationAttribute = new InvokeAttribute();
attributeCreated = true;
}
break;
case DomainOperation.Custom:
this._operationAttribute = this._attributes[typeof(EntityActionAttribute)];
if (this._operationAttribute == null)
{
this._operationAttribute = new EntityActionAttribute();
attributeCreated = true;
}
break;
default:
break;
}
if (attributeCreated)
{
if (this._attributes == null)
{
this._attributes = new AttributeCollection(this._operationAttribute);
}
else
{
this._attributes = AttributeCollection.FromExisting(this._attributes, this._operationAttribute);
}
}
}
/// <summary>
/// Gets the attributes for the operation
/// </summary>
public AttributeCollection Attributes
{
get
{
this.InitializeOperationAttribute();
return this._attributes;
}
internal set
{
this._attributes = value;
// need to reset computed flags that are based
// on operation attributes so they will be recomputed
this._requiresValidation = null;
this._requiresAuthorization = null;
}
}
/// <summary>
/// Gets the return Type of the operation
/// </summary>
public Type ReturnType
{
get
{
return this._returnType;
}
}
/// <summary>
/// Gets a value indicating whether the actual return type is a Task or Task{T}.
/// </summary>
public bool IsTaskAsync
{
get { return TypeUtility.IsTaskType(this._actualReturnType); }
}
/// <summary>
/// Gets the parameters of the operation
/// </summary>
public ReadOnlyCollection<DomainOperationParameter> Parameters
{
get
{
return this._effectiveParameters;
}
}
/// <summary>
/// Invokes this <see cref="DomainOperationEntry" />.
/// </summary>
/// <param name="domainService">The <see cref="DomainService"/> instance the operation is being invoked on.</param>
/// <param name="parameters">The parameters to pass to the method.</param>
/// <returns>The return value of the invoked method.</returns>
public abstract object Invoke(DomainService domainService, object[] parameters);
/// <summary>
/// Gets the type of domain operation implemented by the method.
/// </summary>
public DomainOperation Operation
{
get
{
return this._operation;
}
internal set
{
this._operation = value;
}
}
/// <summary>
/// Returns the associated Type this DomainOperation operates on. For query methods
/// this will be the element type of the return type (or the singleton return Type),
/// and for all other methods this will be the Type of the first method parameter.
/// </summary>
public Type AssociatedType
{
get
{
if (this._associatedType == null)
{
if (this.Operation == DomainOperation.Query)
{
Type entityType = TypeUtility.FindIEnumerable(this.ReturnType);
if (entityType != null)
{
entityType = entityType.GetGenericArguments()[0];
}
else
{
entityType = this.ReturnType;
}
this._associatedType = entityType;
}
else
{
if (this.Parameters.Count > 0)
{
this._associatedType = this.Parameters[0].ParameterType;
}
}
}
return this._associatedType;
}
}
private bool HasOutCountParameter
{
get
{
return this._hasOutCountParameter;
}
}
/// <summary>
/// Invokes this <see cref="DomainOperationEntry" />.
/// </summary>
/// <param name="domainService">The <see cref="DomainService"/> instance the operation is being invoked on.</param>
/// <param name="parameters">The parameters to pass to the method.</param>
/// <param name="totalCount">The total number of rows for the input query without any paging applied to it.</param>
/// <returns>The return value of the invoked method.</returns>
internal object Invoke(DomainService domainService, object[] parameters, out int totalCount)
{
if (this.HasOutCountParameter)
{
object[] parametersWithCount = new object[parameters.Length + 1];
parameters.CopyTo(parametersWithCount, 0);
parametersWithCount[parameters.Length] = 0;
object result = this.Invoke(domainService, parametersWithCount);
totalCount = (int)parametersWithCount[parameters.Length];
return result;
}
else
{
totalCount = DomainService.TotalCountUndefined;
return this.Invoke(domainService, parameters);
}
}
internal object UnwrapTaskResult(object result)
{
if (!IsTaskAsync)
return result;
if (_unwrapTaskResultFunc == null)
{
if (ReturnType == typeof (void))
_unwrapTaskResultFunc = UnwrapVoidResult;
else
{
_unwrapTaskResultFunc = (Func<object, object>)Delegate.CreateDelegate(typeof(Func<object, object>),
typeof(DomainOperationEntry).GetMethod("UnwrapGenericResult", BindingFlags.Static | BindingFlags.NonPublic)
.MakeGenericMethod(this.ReturnType));
}
}
return _unwrapTaskResultFunc(result);
}
private static object UnwrapVoidResult(object result)
{
if(result == null)
throw new InvalidOperationException("Task method returned null");
((Task) result).Wait();
return null;
}
private static object UnwrapGenericResult<T>(object result)
{
if(result == null)
throw new InvalidOperationException("Task method returned null");
return ((Task<T>) result).Result;
}
/// <summary>
/// Returns a textual description of the <see cref="DomainOperationEntry"/>.
/// </summary>
/// <returns>A string representation of the <see cref="DomainOperationEntry"/>.</returns>
public override string ToString()
{
StringBuilder output = new StringBuilder();
output.AppendFormat(CultureInfo.InvariantCulture, "{0} {1}(", this.ReturnType, this.Name);
for (int i = 0; i < this.Parameters.Count; i++)
{
if (i > 0)
{
output.Append(", ");
}
output.Append(this.Parameters[i].ToString());
}
output.Append(')');
return output.ToString();
}
}
}
| STAH/OpenRiaServices | OpenRiaServices.DomainServices.Server/Framework/Data/DomainOperationEntry.cs | C# | apache-2.0 | 20,260 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.elasticjob.lite.spring.namespace.job;
import lombok.RequiredArgsConstructor;
import org.apache.shardingsphere.elasticjob.infra.concurrent.BlockUtils;
import org.apache.shardingsphere.elasticjob.lite.api.bootstrap.impl.OneOffJobBootstrap;
import org.apache.shardingsphere.elasticjob.lite.internal.schedule.JobRegistry;
import org.apache.shardingsphere.elasticjob.lite.spring.namespace.fixture.job.DataflowElasticJob;
import org.apache.shardingsphere.elasticjob.lite.spring.namespace.fixture.job.FooSimpleElasticJob;
import org.apache.shardingsphere.elasticjob.lite.spring.namespace.test.AbstractZookeeperJUnit4SpringContextTests;
import org.apache.shardingsphere.elasticjob.reg.base.CoordinatorRegistryCenter;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import javax.annotation.Resource;
import static org.junit.Assert.assertTrue;
@RequiredArgsConstructor
public abstract class AbstractOneOffJobSpringIntegrateTest extends AbstractZookeeperJUnit4SpringContextTests {
private final String simpleJobName;
private final String throughputDataflowJobName;
@Resource
private CoordinatorRegistryCenter regCenter;
@Before
@After
public void reset() {
FooSimpleElasticJob.reset();
DataflowElasticJob.reset();
}
@After
public void tearDown() {
JobRegistry.getInstance().shutdown(simpleJobName);
JobRegistry.getInstance().shutdown(throughputDataflowJobName);
}
@Test
public void assertSpringJobBean() {
assertSimpleElasticJobBean();
assertThroughputDataflowElasticJobBean();
}
private void assertSimpleElasticJobBean() {
OneOffJobBootstrap bootstrap = applicationContext.getBean(simpleJobName, OneOffJobBootstrap.class);
bootstrap.execute();
while (!FooSimpleElasticJob.isCompleted()) {
BlockUtils.waitingShortTime();
}
assertTrue(FooSimpleElasticJob.isCompleted());
assertTrue(regCenter.isExisted("/" + simpleJobName + "/sharding"));
}
private void assertThroughputDataflowElasticJobBean() {
OneOffJobBootstrap bootstrap = applicationContext.getBean(throughputDataflowJobName, OneOffJobBootstrap.class);
bootstrap.execute();
while (!DataflowElasticJob.isCompleted()) {
BlockUtils.waitingShortTime();
}
assertTrue(DataflowElasticJob.isCompleted());
assertTrue(regCenter.isExisted("/" + throughputDataflowJobName + "/sharding"));
}
}
| dangdangdotcom/elastic-job | elasticjob-lite/elasticjob-lite-spring/elasticjob-lite-spring-namespace/src/test/java/org/apache/shardingsphere/elasticjob/lite/spring/namespace/job/AbstractOneOffJobSpringIntegrateTest.java | Java | apache-2.0 | 3,370 |
/*-
* #%L
* ELK Reasoner Core
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2011 - 2016 Department of Computer Science, University of Oxford
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.semanticweb.elk.reasoner.entailments.impl;
import java.util.Collections;
import java.util.List;
import org.semanticweb.elk.owl.interfaces.ElkObjectPropertyAssertionAxiom;
import org.semanticweb.elk.reasoner.entailments.model.DerivedClassInclusionEntailsObjectPropertyAssertionAxiom;
import org.semanticweb.elk.reasoner.entailments.model.Entailment;
import org.semanticweb.elk.reasoner.entailments.model.EntailmentInference;
import org.semanticweb.elk.reasoner.entailments.model.ObjectPropertyAssertionAxiomEntailment;
import org.semanticweb.elk.reasoner.saturation.conclusions.model.SubClassInclusionComposed;
public class DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl
extends
AbstractAxiomEntailmentInference<ElkObjectPropertyAssertionAxiom, ObjectPropertyAssertionAxiomEntailment>
implements DerivedClassInclusionEntailsObjectPropertyAssertionAxiom {
private final SubClassInclusionComposed reason_;
public DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl(
final ObjectPropertyAssertionAxiomEntailment conclusion,
final SubClassInclusionComposed reason) {
super(conclusion);
this.reason_ = reason;
}
@Override
public List<? extends Entailment> getPremises() {
return Collections.emptyList();
}
@Override
public SubClassInclusionComposed getReason() {
return reason_;
}
@Override
public <O> O accept(final EntailmentInference.Visitor<O> visitor) {
return visitor.visit(this);
}
}
| liveontologies/elk-reasoner | elk-reasoner/src/main/java/org/semanticweb/elk/reasoner/entailments/impl/DerivedClassInclusionEntailsObjectPropertyAssertionAxiomImpl.java | Java | apache-2.0 | 2,181 |
// Copyright (C) 2004 Davis E. King (davis@dlib.net)
// License: Boost Software License See LICENSE.txt for the full license.
#ifndef DLIB_MISC_API_KERNEl_1_
#include "misc_api_kernel_2.h"
#endif
| cpearce/HARM | src/dlib/misc_api/posix.h | C | apache-2.0 | 206 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
$:.unshift(File.dirname(__FILE__) + '/../lib')
require 'test/unit'
require File.expand_path(File.join(File.dirname(__FILE__), '../../../../config/environment.rb'))
require 'rubygems'
require 'active_record/fixtures'
config = YAML::load(IO.read( File.join(File.dirname(__FILE__),'database.yml')))
# cleanup logs and databases between test runs
#FileUtils.rm File.join(File.dirname(__FILE__), "debug.log"), :force => true
FileUtils.rm File.join(RAILS_ROOT, config['sqlite3'][:dbfile]), :force => true
ActiveRecord::Base.logger = Logger.new(File.join(File.dirname(__FILE__), "debug.log"))
ActiveRecord::Base.establish_connection(config[ENV['DB'] || 'sqlite3'])
load(File.join(File.dirname(__FILE__), "schema.rb"))
Test::Unit::TestCase.fixture_path = File.dirname(__FILE__) + "/fixtures/"
$LOAD_PATH.unshift(Test::Unit::TestCase.fixture_path)
class Test::Unit::TestCase #:nodoc:
def create_fixtures(*table_names)
if block_given?
Fixtures.create_fixtures(Test::Unit::TestCase.fixture_path, table_names) { yield }
else
Fixtures.create_fixtures(Test::Unit::TestCase.fixture_path, table_names)
end
end
# Turn off transactional fixtures if you're working with MyISAM tables in MySQL
self.use_transactional_fixtures = true
# Instantiated fixtures are slow, but give you @david where you otherwise would need people(:david)
self.use_instantiated_fixtures = false
# Instantiated fixtures are slow, but give you @david where you otherwise would need people(:david)
end | shanti/olio | webapp/rails/trunk/vendor/plugins/acts_as_network/test/test_helper.rb | Ruby | apache-2.0 | 2,297 |
/**
* Copyright (C) 2009-2014 Dell, Inc.
* See annotations for authorship information
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud;
import org.dasein.cloud.admin.AdminServices;
import org.dasein.cloud.ci.CIServices;
import org.dasein.cloud.compute.ComputeServices;
import org.dasein.cloud.identity.IdentityServices;
import org.dasein.cloud.network.NetworkServices;
import org.dasein.cloud.platform.PlatformServices;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
* Simple base implementation of a cloud provider bootstrap object that defaults all services to <code>null</code>.
* @author George Reese
* @version 2013.07 added javadoc, fixed annotations on data center services, made it return an NPE
* @since unknown
*/
public abstract class AbstractCloud extends CloudProvider {
/**
* Constructs a cloud provider instance.
*/
public AbstractCloud() { }
@Override
public @Nullable AdminServices getAdminServices() {
return null;
}
@Override
public @Nullable ComputeServices getComputeServices() {
CloudProvider compute = getComputeCloud();
return (compute == null ? null : compute.getComputeServices());
}
@Override
public @Nonnull ContextRequirements getContextRequirements() {
return new ContextRequirements(
new ContextRequirements.Field("apiKeys", ContextRequirements.FieldType.KEYPAIR),
new ContextRequirements.Field("x509", ContextRequirements.FieldType.KEYPAIR, false)
);
}
@Override
public @Nullable CIServices getCIServices() {
CloudProvider compute = getComputeCloud();
return (compute == null ? null : compute.getCIServices());
}
@Override
public @Nullable IdentityServices getIdentityServices() {
CloudProvider compute = getComputeCloud();
return (compute == null ? null : compute.getIdentityServices());
}
@Override
public @Nullable NetworkServices getNetworkServices() {
CloudProvider compute = getComputeCloud();
return (compute == null ? null : compute.getNetworkServices());
}
@Override
public @Nullable PlatformServices getPlatformServices() {
CloudProvider compute = getComputeCloud();
return ( compute == null ? null : compute.getPlatformServices() );
}
}
| OSS-TheWeatherCompany/dasein-cloud-core | src/main/java/org/dasein/cloud/AbstractCloud.java | Java | apache-2.0 | 3,068 |
package migrations
import "github.com/BurntSushi/migration"
func ReplaceStepLocationWithPlanID(tx migration.LimitedTx) error {
_, err := tx.Exec(`
ALTER TABLE containers DROP COLUMN step_location;
`)
if err != nil {
return err
}
_, err = tx.Exec(`
ALTER TABLE containers ADD COLUMN plan_id text;
`)
return err
}
| homedepot/github-webhook | vendor/github.com/concourse/atc/db/migrations/61_replace_step_location_with_plan_id.go | GO | apache-2.0 | 333 |
#--
# Author:: Daniel DeLeo (<dan@chef.io>)
# Copyright:: Copyright 2012-2018, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef/node/common_api"
require "chef/node/mixin/state_tracking"
class Chef
class Node
# == AttrArray
# AttrArray is identical to Array, except that it keeps a reference to the
# "root" (Chef::Node::Attribute) object, and will trigger a cache
# invalidation on that object when mutated.
class AttrArray < Array
MUTATOR_METHODS = [
:<<,
:[]=,
:clear,
:collect!,
:compact!,
:default=,
:default_proc=,
:delete_at,
:delete_if,
:fill,
:flatten!,
:insert,
:keep_if,
:map!,
:merge!,
:pop,
:push,
:update,
:reject!,
:reverse!,
:replace,
:select!,
:shift,
:slice!,
:sort!,
:sort_by!,
:uniq!,
:unshift,
]
# For all of the methods that may mutate an Array, we override them to
# also invalidate the cached merged_attributes on the root
# Node::Attribute object.
MUTATOR_METHODS.each do |mutator|
define_method(mutator) do |*args, &block|
ret = super(*args, &block)
send_reset_cache
ret
end
end
def delete(key, &block)
send_reset_cache(__path__, key)
super
end
def initialize(data = [])
super(data)
map! { |e| convert_value(e) }
end
# For elements like Fixnums, true, nil...
def safe_dup(e)
e.dup
rescue TypeError
e
end
def dup
Array.new(map { |e| safe_dup(e) })
end
private
def convert_value(value)
case value
when VividMash
value
when AttrArray
value
when Hash
VividMash.new(value, __root__, __node__, __precedence__)
when Array
AttrArray.new(value, __root__, __node__, __precedence__)
else
value
end
end
# needed for __path__
def convert_key(key)
key
end
prepend Chef::Node::Mixin::StateTracking
end
# == VividMash
# VividMash is identical to a Mash, with a few exceptions:
# * It has a reference to the root Chef::Node::Attribute to which it
# belongs, and will trigger cache invalidation on that object when
# mutated.
# * It auto-vivifies, that is a reference to a missing element will result
# in the creation of a new VividMash for that key. (This only works when
# using the element reference method, `[]` -- other methods, such as
# #fetch, work as normal).
# * attr_accessor style element set and get are supported via method_missing
class VividMash < Mash
include CommonAPI
# Methods that mutate a VividMash. Each of them is overridden so that it
# also invalidates the cached merged_attributes on the root Attribute
# object.
MUTATOR_METHODS = [
:clear,
:delete_if,
:keep_if,
:merge!,
:update,
:reject!,
:replace,
:select!,
:shift,
]
# For all of the mutating methods on Mash, override them so that they
# also invalidate the cached `merged_attributes` on the root Attribute
# object.
def delete(key, &block)
send_reset_cache(__path__, key)
super
end
MUTATOR_METHODS.each do |mutator|
define_method(mutator) do |*args, &block|
send_reset_cache
super(*args, &block)
end
end
def initialize(data = {})
super(data)
end
def [](key)
value = super
if !key?(key)
value = self.class.new({}, __root__)
self[key] = value
else
value
end
end
def []=(key, value)
ret = super
send_reset_cache(__path__, key)
ret # rubocop:disable Lint/Void
end
alias :attribute? :has_key?
def convert_key(key)
super
end
# Mash uses #convert_value to mashify values on input.
# We override it here to convert hash or array values to VividMash or
# AttrArray for consistency and to ensure that the added parts of the
# attribute tree will have the correct cache invalidation behavior.
def convert_value(value)
case value
when VividMash
value
when AttrArray
value
when Hash
VividMash.new(value, __root__, __node__, __precedence__)
when Array
AttrArray.new(value, __root__, __node__, __precedence__)
else
value
end
end
def dup
Mash.new(self)
end
prepend Chef::Node::Mixin::StateTracking
end
end
end
| juliandunn/chef | lib/chef/node/attribute_collections.rb | Ruby | apache-2.0 | 5,477 |
////////////////////////////////////////////////////////////////////////////////
/// DISCLAIMER
///
/// Copyright 2014-2020 ArangoDB GmbH, Cologne, Germany
/// Copyright 2004-2014 triAGENS GmbH, Cologne, Germany
///
/// Licensed under the Apache License, Version 2.0 (the "License");
/// you may not use this file except in compliance with the License.
/// You may obtain a copy of the License at
///
/// http://www.apache.org/licenses/LICENSE-2.0
///
/// Unless required by applicable law or agreed to in writing, software
/// distributed under the License is distributed on an "AS IS" BASIS,
/// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
/// See the License for the specific language governing permissions and
/// limitations under the License.
///
/// Copyright holder is ArangoDB GmbH, Cologne, Germany
///
/// @author Lars Maier
////////////////////////////////////////////////////////////////////////////////
#pragma once
#include <iostream>
#include <memory>
#include <utility>
#include "velocypack/Buffer.h"
#include "velocypack/Iterator.h"
#include "velocypack/Slice.h"
namespace arangodb::tests::deserializer {
struct slice_access {
enum class type {
GET,
HAS_KEY,
COPY_STRING,
IS_NUMBER,
IS_ARRAY,
IS_OBJECT,
IS_NONE,
LENGTH,
AT,
GET_NUMBER,
IS_STRING,
IS_BOOL,
GET_BOOL,
GET_NUMERIC_VALUE,
IS_EQUAL_STRING,
STRING_VIEW,
ARRAY_ITER_ACCESS,
OBJECT_ITER_ACCESS,
};
std::string key;
std::string parameter;
type what;
slice_access(std::string key, type what) : key(std::move(key)), what(what) {}
slice_access(std::string key, type what, std::string parameter)
: key(std::move(key)), parameter(std::move(parameter)), what(what) {}
};
struct slice_access_tape {
std::vector<slice_access> tape;
template<typename... S>
void record(S&&... s) {
tape.emplace_back(slice_access{std::forward<S>(s)...});
}
};
static std::ostream& operator<<(std::ostream& os, slice_access::type type) {
#define enum_to_string(s) \
case slice_access::type::s: \
os << #s; \
break;
switch (type) {
enum_to_string(GET) enum_to_string(HAS_KEY) enum_to_string(COPY_STRING)
enum_to_string(IS_NUMBER) enum_to_string(IS_ARRAY)
enum_to_string(IS_OBJECT) enum_to_string(IS_NONE)
enum_to_string(LENGTH) enum_to_string(AT)
enum_to_string(GET_NUMBER) enum_to_string(IS_STRING)
enum_to_string(IS_BOOL) enum_to_string(GET_BOOL)
enum_to_string(GET_NUMERIC_VALUE)
enum_to_string(IS_EQUAL_STRING)
enum_to_string(STRING_VIEW)
enum_to_string(ARRAY_ITER_ACCESS)
enum_to_string(OBJECT_ITER_ACCESS)
}
return os;
#undef enum_to_string
}
// Dumps the whole access tape, one "<key> <type> <parameter>" line per entry.
static inline std::ostream& operator<<(std::ostream& os,
                                       slice_access_tape const& tape) {
  for (auto const& entry : tape.tape) {
    os << entry.key << ' ' << entry.what << ' ' << entry.parameter
       << std::endl;
  }
  return os;
}
// Drop-in stand-in for a subset of the velocypack::Slice API. Every call is
// recorded on a shared slice_access_tape together with a JSON-path-like
// prefix (e.g. "$.foo[2]"), so tests can assert exactly which parts of a
// document a deserializer touched.
struct recording_slice {
  // NOTE(review): default construction leaves `tape` null; any recording
  // call on such a slice dereferences a null shared_ptr — presumably only
  // used as an inert placeholder. Verify against callers.
  explicit recording_slice() = default;
  explicit recording_slice(arangodb::velocypack::Slice slice,
                           std::shared_ptr<slice_access_tape> tape)
      : tape(std::move(tape)), slice(slice) {}
  explicit recording_slice(arangodb::velocypack::Slice slice,
                           std::shared_ptr<slice_access_tape> tape,
                           std::string prefix)
      : tape(std::move(tape)), slice(slice), prefix(std::move(prefix)) {}

  std::shared_ptr<slice_access_tape> tape;  // shared by all derived slices
  arangodb::velocypack::Slice slice;        // the wrapped real slice
  std::string prefix = "$";                 // path of this slice; "$" = root

  // Non-template overload; only declared here — defined out of line
  // elsewhere (not visible in this chunk).
  bool isNumber() const;
  bool isArray() const {
    tape->record(prefix, slice_access::type::IS_ARRAY);
    return slice.isArray();
  }
  bool isString() const {
    tape->record(prefix, slice_access::type::IS_STRING);
    return slice.isString();
  }
  bool isBool() const {
    tape->record(prefix, slice_access::type::IS_BOOL);
    return slice.isBool();
  }
  bool isObject() const {
    tape->record(prefix, slice_access::type::IS_OBJECT);
    return slice.isObject();
  }
  auto length() const {
    tape->record(prefix, slice_access::type::LENGTH);
    return slice.length();
  }
  // Element access by index; the recorded key and the child's prefix both
  // carry the "[i]" suffix.
  template<typename T>
  auto at(T t) const {
    tape->record(prefix + '[' + std::to_string(t) + ']',
                 slice_access::type::AT);
    return recording_slice(slice.at(t), tape,
                           prefix + '[' + std::to_string(t) + ']');
  }
  template<typename T>
  auto hasKey(T&& t) const {
    tape->record(prefix, slice_access::type::HAS_KEY);
    return slice.hasKey(std::forward<T>(t));
  }
  template<typename T>
  auto getNumber() const {
    tape->record(prefix, slice_access::type::GET_NUMBER);
    return slice.getNumber<T>();
  }
  auto copyString() const {
    tape->record(prefix, slice_access::type::COPY_STRING);
    return slice.copyString();
  }
  auto getBool() const {
    tape->record(prefix, slice_access::type::GET_BOOL);
    return slice.getBool();
  }
  // Template overload of the non-template isNumber() declared above.
  template<typename T>
  auto isNumber() const {
    tape->record(prefix, slice_access::type::IS_NUMBER);
    return slice.isNumber<T>();
  }
  template<typename... Ts>
  auto isEqualString(Ts&&... ts) const {
    tape->record(prefix, slice_access::type::IS_EQUAL_STRING);
    return slice.isEqualString(std::forward<Ts>(ts)...);
  }
  // Mirrors velocypack::Slice::nullSlice for API compatibility.
  constexpr static auto nullSlice = arangodb::velocypack::Slice::nullSlice;
  // Attribute access; records the attribute name as the parameter and
  // extends the child's prefix with ".name".
  template<typename T>
  auto get(T&& t) const {
    tape->record(prefix, slice_access::type::GET, t);
    return recording_slice(slice.get(std::forward<T>(t)), tape,
                           prefix + '.' + t);
  }
  // NOTE(review): toJson() is deliberately (?) not recorded on the tape.
  auto toJson() const { return slice.toJson(); }
  auto isNone() const {
    tape->record(prefix, slice_access::type::IS_NONE);
    return slice.isNone();
  }
  auto stringView() const {
    tape->record(prefix, slice_access::type::STRING_VIEW);
    return slice.stringView();
  }
  // Entry point: wrap a builder's buffer in a root slice with a fresh tape.
  static recording_slice from_buffer(
      arangodb::velocypack::Buffer<uint8_t> const& b) {
    return recording_slice(arangodb::velocypack::Slice(b.data()),
                           std::make_shared<slice_access_tape>());
  }
};
// Wraps velocypack::ObjectIterator, recording every key/value access on the
// shared tape and extending the path prefix for the produced child slices.
struct object_iterator {
  object_iterator(arangodb::velocypack::ObjectIterator const& o,
                  std::shared_ptr<slice_access_tape> tape, std::string prefix)
      : iter(o), tape(std::move(tape)), prefix(std::move(prefix)) {}
  // Build from a recording_slice; shares its tape and path prefix.
  object_iterator(recording_slice& slice, bool useSequentialIteration = false)
      : iter(slice.slice, useSequentialIteration),
        tape(slice.tape),
        prefix(slice.prefix) {}

  // Key/value pair where both sides are themselves recording slices.
  struct pair {
    recording_slice key, value;
  };

  object_iterator begin() const { return {iter.begin(), tape, prefix}; }
  object_iterator end() const { return {iter.end(), tape, prefix}; }

  object_iterator& operator++() {
    iter.operator++();
    return *this;
  }
  // Post-increment, added for parity with array_iterator.
  object_iterator operator++(int) {
    object_iterator result(*this);
    ++(*this);
    return result;
  }

  bool operator!=(object_iterator const& other) const {
    return iter.operator!=(other.iter);
  }

  pair operator*() const {
    auto internal = iter.operator*();
    // Materialize the key string once (was computed three times before).
    auto keyName = internal.key.copyString();
    tape->record(prefix, slice_access::type::OBJECT_ITER_ACCESS, keyName);
    return pair{
        recording_slice(internal.key, tape, prefix + "@key[" + keyName + ']'),
        recording_slice(internal.value, tape, prefix + '.' + keyName)};
  }

  arangodb::velocypack::ObjectIterator iter;
  std::shared_ptr<slice_access_tape> tape;
  std::string prefix;
};
// Wraps velocypack::ArrayIterator, recording each element access together
// with its index and extending the path prefix with "[i]".
struct array_iterator {
  array_iterator(arangodb::velocypack::ArrayIterator const& o,
                 std::shared_ptr<slice_access_tape> tape, std::string prefix)
      : iter(o), tape(std::move(tape)), prefix(std::move(prefix)), index(0) {}
  // Build from a recording_slice; shares its tape and path prefix.
  explicit array_iterator(recording_slice& slice)
      : iter(slice.slice), tape(slice.tape), prefix(slice.prefix), index(0) {}

  array_iterator begin() const { return {iter.begin(), tape, prefix}; }
  array_iterator end() const { return {iter.end(), tape, prefix}; }

  recording_slice operator*() const {
    // Stringify the index once (was computed twice before); recording
    // happens before dereferencing, as in the original.
    auto indexStr = std::to_string(index);
    tape->record(prefix, slice_access::type::ARRAY_ITER_ACCESS, indexStr);
    auto internal = iter.operator*();
    return recording_slice(internal, tape, prefix + '[' + indexStr + ']');
  }

  bool operator!=(array_iterator const& other) const {
    return iter.operator!=(other.iter);
  }

  array_iterator& operator++() {
    ++index;
    iter.operator++();
    return *this;
  }
  array_iterator operator++(int) {
    array_iterator result(*this);
    ++(*this);
    return result;
  }

  arangodb::velocypack::ArrayIterator iter;
  std::shared_ptr<slice_access_tape> tape;
  std::string prefix;
  std::size_t index;  // current element index, tracked for the tape records
};
} // namespace arangodb::tests::deserializer
#ifdef DESERIALIZER_SET_TEST_TYPES
namespace deserializer {
// Fix: the test types above live in ::arangodb::tests::deserializer (see the
// namespace opened at the top of this file); the previous aliases referred to
// a non-existent ::deserializer::test namespace and could not have compiled
// with DESERIALIZER_SET_TEST_TYPES defined.
using slice_type = ::arangodb::tests::deserializer::recording_slice;
using object_iterator = ::arangodb::tests::deserializer::object_iterator;
using array_iterator = ::arangodb::tests::deserializer::array_iterator;
}  // namespace deserializer
#endif
| arangodb/arangodb | tests/VPackDeserializer/test-types.h | C | apache-2.0 | 9,276 |