code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9 values | license stringclasses 15 values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*
*/
package org.wso2.andes.client.message;
import java.nio.charset.CharacterCodingException;
import java.nio.charset.Charset;
import javax.jms.JMSException;
import javax.jms.MessageEOFException;
import javax.jms.MessageFormatException;
import javax.jms.MessageNotReadableException;
import javax.jms.MessageNotWriteableException;
import org.apache.mina.common.ByteBuffer;
import org.wso2.andes.AMQException;
import org.wso2.andes.framing.AMQShortString;
import org.wso2.andes.framing.BasicContentHeaderProperties;
/**
* @author Apache Software Foundation
*/
public abstract class AbstractBytesTypedMessage extends AbstractBytesMessage
{
    // Wire-type discriminator bytes. Each value written to the stream is
    // preceded by exactly one of these tags so that readers can validate the
    // type (and perform the JMS-permitted conversions) before decoding.
    protected static final byte BOOLEAN_TYPE = (byte) 1;
    protected static final byte BYTE_TYPE = (byte) 2;
    protected static final byte BYTEARRAY_TYPE = (byte) 3;
    protected static final byte SHORT_TYPE = (byte) 4;
    protected static final byte CHAR_TYPE = (byte) 5;
    protected static final byte INT_TYPE = (byte) 6;
    protected static final byte LONG_TYPE = (byte) 7;
    protected static final byte FLOAT_TYPE = (byte) 8;
    protected static final byte DOUBLE_TYPE = (byte) 9;
    protected static final byte STRING_TYPE = (byte) 10;
    // A null String is encoded as a bare tag with no payload.
    protected static final byte NULL_STRING_TYPE = (byte) 11;
    /**
     * This is set when reading a byte array. The readBytes(byte[]) method supports multiple calls to read
     * a byte array in multiple chunks, hence this is used to track how much is left to be read.
     * A value of -1 means no chunked byte-array read is currently in progress.
     */
    private int _byteArrayRemaining = -1;
    /**
     * Construct an empty, writable message.
     *
     * @param delegateFactory factory for the protocol-specific message delegate
     */
    AbstractBytesTypedMessage(AMQMessageDelegateFactory delegateFactory)
    {
        this(delegateFactory, null);
    }
    /**
     * Construct a stream message with existing data.
     *
     * @param delegateFactory factory for the protocol-specific message delegate
     * @param data the data that comprises this message. If data is null, you get a 1024 byte buffer that is
     *             allocated by the superclass.
     */
    AbstractBytesTypedMessage(AMQMessageDelegateFactory delegateFactory, ByteBuffer data)
    {
        super(delegateFactory, data); // this instantiates a content header
    }
    /**
     * Construct a message around an existing delegate (e.g. for a received message).
     *
     * @param delegate the protocol-specific message delegate
     * @param data     the raw message payload
     * @throws AMQException propagated from the superclass constructor
     */
    AbstractBytesTypedMessage(AMQMessageDelegate delegate, ByteBuffer data) throws AMQException
    {
        super(delegate, data);
    }
    /**
     * Consumes and returns the next wire-type discriminator byte from the buffer.
     *
     * @return the discriminator byte (one of the *_TYPE constants)
     * @throws MessageEOFException      if no byte is available
     * @throws MessageNotReadableException if the message is in write-only mode
     */
    protected byte readWireType() throws MessageFormatException, MessageEOFException,
        MessageNotReadableException
    {
        checkReadable();
        checkAvailable(1);
        return _data.get();
    }
    /**
     * Writes a wire-type discriminator byte and flags the payload as modified.
     *
     * @param type one of the *_TYPE constants
     * @throws MessageNotWriteableException if the message is in read-only mode
     */
    protected void writeTypeDiscriminator(byte type) throws MessageNotWriteableException
    {
        checkWritable();
        _data.put(type);
        _changedData = true;
    }
protected boolean readBoolean() throws JMSException
{
int position = _data.position();
byte wireType = readWireType();
boolean result;
try
{
switch (wireType)
{
case BOOLEAN_TYPE:
checkAvailable(1);
result = readBooleanImpl();
break;
case STRING_TYPE:
checkAvailable(1);
result = Boolean.parseBoolean(readStringImpl());
break;
default:
_data.position(position);
throw new MessageFormatException("Unable to convert " + wireType + " to a boolean");
}
return result;
}
catch (RuntimeException e)
{
_data.position(position);
throw e;
}
}
private boolean readBooleanImpl()
{
return _data.get() != 0;
}
    /**
     * Reads the next value as a byte. Acceptable sources: a stored byte, or a
     * stored String parsed with {@link Byte#parseByte}. On failure the buffer
     * position is restored to allow a re-read.
     *
     * @return the decoded byte
     * @throws MessageFormatException if the stored value is of an incompatible type
     * @throws JMSException on any other read failure
     */
    protected byte readByte() throws JMSException
    {
        int position = _data.position();
        byte wireType = readWireType();
        byte result;
        try
        {
            switch (wireType)
            {
                case BYTE_TYPE:
                    checkAvailable(1);
                    result = readByteImpl();
                    break;
                case STRING_TYPE:
                    // at least the string terminator byte must be present
                    checkAvailable(1);
                    result = Byte.parseByte(readStringImpl());
                    break;
                default:
                    // rewind before reporting the conversion failure
                    _data.position(position);
                    throw new MessageFormatException("Unable to convert " + wireType + " to a byte");
            }
        }
        catch (RuntimeException e)
        {
            // e.g. NumberFormatException or buffer underflow - rewind and rethrow
            _data.position(position);
            throw e;
        }
        return result;
    }
    /** Reads a single raw byte from the buffer. */
    private byte readByteImpl()
    {
        return _data.get();
    }
    /**
     * Reads the next value as a short. Acceptable sources per the JMS
     * conversion matrix: short, String, byte.
     *
     * @return the decoded short
     * @throws MessageFormatException if the stored value is of an incompatible type
     * @throws JMSException on any other read failure
     */
    protected short readShort() throws JMSException
    {
        int position = _data.position();
        byte wireType = readWireType();
        short result;
        try
        {
            switch (wireType)
            {
                case SHORT_TYPE:
                    checkAvailable(2);
                    result = readShortImpl();
                    break;
                case STRING_TYPE:
                    checkAvailable(1);
                    result = Short.parseShort(readStringImpl());
                    break;
                case BYTE_TYPE:
                    checkAvailable(1);
                    result = readByteImpl();
                    break;
                default:
                    _data.position(position);
                    throw new MessageFormatException("Unable to convert " + wireType + " to a short");
            }
        }
        catch (RuntimeException e)
        {
            _data.position(position);
            throw e;
        }
        return result;
    }
    /** Reads two raw bytes as a big-endian short. */
    private short readShortImpl()
    {
        return _data.getShort();
    }
    /**
     * Note that this method reads a unicode character as two bytes from the stream.
     * Per JMS, a char may only be read back as a char; a stored null string
     * triggers a NullPointerException (the catch block still rewinds the buffer
     * since NPE is a RuntimeException).
     *
     * @return the character read from the stream
     * @throws javax.jms.JMSException on conversion or read failure
     */
    protected char readChar() throws JMSException
    {
        int position = _data.position();
        byte wireType = readWireType();
        try
        {
            if(wireType == NULL_STRING_TYPE){
                throw new NullPointerException();
            }
            if (wireType != CHAR_TYPE)
            {
                _data.position(position);
                throw new MessageFormatException("Unable to convert " + wireType + " to a char");
            }
            else
            {
                checkAvailable(2);
                return readCharImpl();
            }
        }
        catch (RuntimeException e)
        {
            _data.position(position);
            throw e;
        }
    }
    /** Reads two raw bytes as a UTF-16 code unit. */
    private char readCharImpl()
    {
        return _data.getChar();
    }
    /**
     * Reads the next value as an int. Acceptable sources: int, short, String,
     * byte (widening conversions per the JMS matrix).
     *
     * @return the decoded int
     * @throws MessageFormatException if the stored value is of an incompatible type
     * @throws JMSException on any other read failure
     */
    protected int readInt() throws JMSException
    {
        int position = _data.position();
        byte wireType = readWireType();
        int result;
        try
        {
            switch (wireType)
            {
                case INT_TYPE:
                    checkAvailable(4);
                    result = readIntImpl();
                    break;
                case SHORT_TYPE:
                    checkAvailable(2);
                    result = readShortImpl();
                    break;
                case STRING_TYPE:
                    checkAvailable(1);
                    result = Integer.parseInt(readStringImpl());
                    break;
                case BYTE_TYPE:
                    checkAvailable(1);
                    result = readByteImpl();
                    break;
                default:
                    _data.position(position);
                    throw new MessageFormatException("Unable to convert " + wireType + " to an int");
            }
            return result;
        }
        catch (RuntimeException e)
        {
            _data.position(position);
            throw e;
        }
    }
    /** Reads four raw bytes as a big-endian int. Protected: also used by subclasses. */
    protected int readIntImpl()
    {
        return _data.getInt();
    }
    /**
     * Reads the next value as a long. Acceptable sources: long, int, short,
     * String, byte (widening conversions per the JMS matrix).
     *
     * @return the decoded long
     * @throws MessageFormatException if the stored value is of an incompatible type
     * @throws JMSException on any other read failure
     */
    protected long readLong() throws JMSException
    {
        int position = _data.position();
        byte wireType = readWireType();
        long result;
        try
        {
            switch (wireType)
            {
                case LONG_TYPE:
                    checkAvailable(8);
                    result = readLongImpl();
                    break;
                case INT_TYPE:
                    checkAvailable(4);
                    result = readIntImpl();
                    break;
                case SHORT_TYPE:
                    checkAvailable(2);
                    result = readShortImpl();
                    break;
                case STRING_TYPE:
                    checkAvailable(1);
                    result = Long.parseLong(readStringImpl());
                    break;
                case BYTE_TYPE:
                    checkAvailable(1);
                    result = readByteImpl();
                    break;
                default:
                    _data.position(position);
                    throw new MessageFormatException("Unable to convert " + wireType + " to a long");
            }
            return result;
        }
        catch (RuntimeException e)
        {
            _data.position(position);
            throw e;
        }
    }
    /** Reads eight raw bytes as a big-endian long. */
    private long readLongImpl()
    {
        return _data.getLong();
    }
    /**
     * Reads the next value as a float. Acceptable sources: float, String.
     *
     * @return the decoded float
     * @throws MessageFormatException if the stored value is of an incompatible type
     * @throws JMSException on any other read failure
     */
    protected float readFloat() throws JMSException
    {
        int position = _data.position();
        byte wireType = readWireType();
        float result;
        try
        {
            switch (wireType)
            {
                case FLOAT_TYPE:
                    checkAvailable(4);
                    result = readFloatImpl();
                    break;
                case STRING_TYPE:
                    checkAvailable(1);
                    result = Float.parseFloat(readStringImpl());
                    break;
                default:
                    _data.position(position);
                    throw new MessageFormatException("Unable to convert " + wireType + " to a float");
            }
            return result;
        }
        catch (RuntimeException e)
        {
            _data.position(position);
            throw e;
        }
    }
    /** Reads four raw bytes as an IEEE-754 float. */
    private float readFloatImpl()
    {
        return _data.getFloat();
    }
    /**
     * Reads the next value as a double. Acceptable sources: double, float,
     * String (widening conversions per the JMS matrix).
     *
     * @return the decoded double
     * @throws MessageFormatException if the stored value is of an incompatible type
     * @throws JMSException on any other read failure
     */
    protected double readDouble() throws JMSException
    {
        int position = _data.position();
        byte wireType = readWireType();
        double result;
        try
        {
            switch (wireType)
            {
                case DOUBLE_TYPE:
                    checkAvailable(8);
                    result = readDoubleImpl();
                    break;
                case FLOAT_TYPE:
                    checkAvailable(4);
                    result = readFloatImpl();
                    break;
                case STRING_TYPE:
                    checkAvailable(1);
                    result = Double.parseDouble(readStringImpl());
                    break;
                default:
                    _data.position(position);
                    throw new MessageFormatException("Unable to convert " + wireType + " to a double");
            }
            return result;
        }
        catch (RuntimeException e)
        {
            _data.position(position);
            throw e;
        }
    }
    /** Reads eight raw bytes as an IEEE-754 double. */
    private double readDoubleImpl()
    {
        return _data.getDouble();
    }
protected String readString() throws JMSException
{
int position = _data.position();
byte wireType = readWireType();
String result;
try
{
switch (wireType)
{
case STRING_TYPE:
checkAvailable(1);
result = readStringImpl();
break;
case NULL_STRING_TYPE:
result = null;
throw new NullPointerException("data is null");
case BOOLEAN_TYPE:
checkAvailable(1);
result = String.valueOf(readBooleanImpl());
break;
case LONG_TYPE:
checkAvailable(8);
result = String.valueOf(readLongImpl());
break;
case INT_TYPE:
checkAvailable(4);
result = String.valueOf(readIntImpl());
break;
case SHORT_TYPE:
checkAvailable(2);
result = String.valueOf(readShortImpl());
break;
case BYTE_TYPE:
checkAvailable(1);
result = String.valueOf(readByteImpl());
break;
case FLOAT_TYPE:
checkAvailable(4);
result = String.valueOf(readFloatImpl());
break;
case DOUBLE_TYPE:
checkAvailable(8);
result = String.valueOf(readDoubleImpl());
break;
case CHAR_TYPE:
checkAvailable(2);
result = String.valueOf(readCharImpl());
break;
default:
_data.position(position);
throw new MessageFormatException("Unable to convert " + wireType + " to a String");
}
return result;
}
catch (RuntimeException e)
{
_data.position(position);
throw e;
}
}
    /**
     * Decodes a NUL-terminated UTF-8 string from the current buffer position.
     *
     * @return the decoded string
     * @throws JMSException wrapping any CharacterCodingException raised while
     *                      decoding malformed UTF-8 (linked and set as cause)
     */
    protected String readStringImpl() throws JMSException
    {
        try
        {
            return _data.getString(Charset.forName("UTF-8").newDecoder());
        }
        catch (CharacterCodingException e)
        {
            JMSException jmse = new JMSException("Error decoding byte stream as a UTF8 string: " + e);
            jmse.setLinkedException(e);
            jmse.initCause(e);
            throw jmse;
        }
    }
    /**
     * Reads a byte array from the stream, supporting chunked reads: callers
     * may invoke this repeatedly with a small buffer until -1 is returned.
     * State between calls is carried in _byteArrayRemaining (-1 = no read in
     * progress, otherwise the number of bytes still to deliver).
     *
     * @param bytes destination buffer, filled from index 0
     * @return the number of bytes copied, or -1 for a null array or when the
     *         current array has been fully delivered
     * @throws MessageFormatException if the next value is not a byte array
     * @throws MessageEOFException    if the declared length exceeds the remaining data
     * @throws JMSException           on any other read failure
     */
    protected int readBytes(byte[] bytes) throws JMSException
    {
        if (bytes == null)
        {
            throw new IllegalArgumentException("byte array must not be null");
        }
        checkReadable();
        // first call
        if (_byteArrayRemaining == -1)
        {
            // type discriminator checked separately so you get a MessageFormatException rather than
            // an EOF even in the case where both would be applicable
            checkAvailable(1);
            byte wireType = readWireType();
            if (wireType != BYTEARRAY_TYPE)
            {
                throw new MessageFormatException("Unable to convert " + wireType + " to a byte array");
            }
            checkAvailable(4);
            int size = _data.getInt();
            // length of -1 indicates null
            if (size == -1)
            {
                return -1;
            }
            else
            {
                if (size > _data.remaining())
                {
                    throw new MessageEOFException("Byte array has stated length " + size + " but message only contains " +
                        _data.remaining() + " bytes");
                }
                else
                {
                    // begin a (possibly multi-call) read of 'size' bytes
                    _byteArrayRemaining = size;
                }
            }
        }
        else if (_byteArrayRemaining == 0)
        {
            // previous call consumed the array exactly: signal completion and reset
            _byteArrayRemaining = -1;
            return -1;
        }
        int returnedSize = readBytesImpl(bytes);
        if (returnedSize < bytes.length)
        {
            // short read means the array is exhausted; clear the chunking state
            _byteArrayRemaining = -1;
        }
        return returnedSize;
    }
    /**
     * Copies up to bytes.length bytes of the in-progress array into the given
     * buffer, decrementing _byteArrayRemaining accordingly.
     *
     * @param bytes destination buffer
     * @return the number of bytes actually copied (0 when nothing remains)
     */
    private int readBytesImpl(byte[] bytes)
    {
        int count = (_byteArrayRemaining >= bytes.length ? bytes.length : _byteArrayRemaining);
        _byteArrayRemaining -= count;
        if (count == 0)
        {
            return 0;
        }
        else
        {
            _data.get(bytes, 0, count);
            return count;
        }
    }
protected Object readObject() throws JMSException
{
int position = _data.position();
byte wireType = readWireType();
Object result = null;
try
{
switch (wireType)
{
case BOOLEAN_TYPE:
checkAvailable(1);
result = readBooleanImpl();
break;
case BYTE_TYPE:
checkAvailable(1);
result = readByteImpl();
break;
case BYTEARRAY_TYPE:
checkAvailable(4);
int size = _data.getInt();
if (size == -1)
{
result = null;
}
else
{
_byteArrayRemaining = size;
byte[] bytesResult = new byte[size];
readBytesImpl(bytesResult);
result = bytesResult;
}
break;
case SHORT_TYPE:
checkAvailable(2);
result = readShortImpl();
break;
case CHAR_TYPE:
checkAvailable(2);
result = readCharImpl();
break;
case INT_TYPE:
checkAvailable(4);
result = readIntImpl();
break;
case LONG_TYPE:
checkAvailable(8);
result = readLongImpl();
break;
case FLOAT_TYPE:
checkAvailable(4);
result = readFloatImpl();
break;
case DOUBLE_TYPE:
checkAvailable(8);
result = readDoubleImpl();
break;
case NULL_STRING_TYPE:
result = null;
break;
case STRING_TYPE:
checkAvailable(1);
result = readStringImpl();
break;
}
return result;
}
catch (RuntimeException e)
{
_data.position(position);
throw e;
}
}
protected void writeBoolean(boolean b) throws JMSException
{
writeTypeDiscriminator(BOOLEAN_TYPE);
_data.put(b ? (byte) 1 : (byte) 0);
}
protected void writeByte(byte b) throws JMSException
{
writeTypeDiscriminator(BYTE_TYPE);
_data.put(b);
}
protected void writeShort(short i) throws JMSException
{
writeTypeDiscriminator(SHORT_TYPE);
_data.putShort(i);
}
protected void writeChar(char c) throws JMSException
{
writeTypeDiscriminator(CHAR_TYPE);
_data.putChar(c);
}
protected void writeInt(int i) throws JMSException
{
writeTypeDiscriminator(INT_TYPE);
writeIntImpl(i);
}
protected void writeIntImpl(int i)
{
_data.putInt(i);
}
protected void writeLong(long l) throws JMSException
{
writeTypeDiscriminator(LONG_TYPE);
_data.putLong(l);
}
protected void writeFloat(float v) throws JMSException
{
writeTypeDiscriminator(FLOAT_TYPE);
_data.putFloat(v);
}
protected void writeDouble(double v) throws JMSException
{
writeTypeDiscriminator(DOUBLE_TYPE);
_data.putDouble(v);
}
protected void writeString(String string) throws JMSException
{
if (string == null)
{
writeTypeDiscriminator(NULL_STRING_TYPE);
}
else
{
writeTypeDiscriminator(STRING_TYPE);
try
{
writeStringImpl(string);
}
catch (CharacterCodingException e)
{
JMSException jmse = new JMSException("Unable to encode string: " + e);
jmse.setLinkedException(e);
jmse.initCause(e);
throw jmse;
}
}
}
protected void writeStringImpl(String string)
throws CharacterCodingException
{
_data.putString(string, Charset.forName("UTF-8").newEncoder());
// we must write the null terminator ourselves
_data.put((byte) 0);
}
protected void writeBytes(byte[] bytes) throws JMSException
{
writeBytes(bytes, 0, bytes == null ? 0 : bytes.length);
}
protected void writeBytes(byte[] bytes, int offset, int length) throws JMSException
{
writeTypeDiscriminator(BYTEARRAY_TYPE);
if (bytes == null)
{
_data.putInt(-1);
}
else
{
_data.putInt(length);
_data.put(bytes, offset, length);
}
}
protected void writeObject(Object object) throws JMSException
{
checkWritable();
Class clazz;
if (object == null)
{
// string handles the output of null values
clazz = String.class;
}
else
{
clazz = object.getClass();
}
if (clazz == Byte.class)
{
writeByte((Byte) object);
}
else if (clazz == Boolean.class)
{
writeBoolean((Boolean) object);
}
else if (clazz == byte[].class)
{
writeBytes((byte[]) object);
}
else if (clazz == Short.class)
{
writeShort((Short) object);
}
else if (clazz == Character.class)
{
writeChar((Character) object);
}
else if (clazz == Integer.class)
{
writeInt((Integer) object);
}
else if (clazz == Long.class)
{
writeLong((Long) object);
}
else if (clazz == Float.class)
{
writeFloat((Float) object);
}
else if (clazz == Double.class)
{
writeDouble((Double) object);
}
else if (clazz == String.class)
{
writeString((String) object);
}
else
{
throw new MessageFormatException("Only primitives plus byte arrays and String are valid types");
}
}
}
| hastef88/andes | modules/andes-core/client/src/main/java/org/wso2/andes/client/message/AbstractBytesTypedMessage.java | Java | apache-2.0 | 23,169 |
package main
import (
"os"
_ "k8s.io/kubernetes/pkg/client/metrics/prometheus" // for client metric registration
"k8s.io/kubernetes/pkg/util/logs"
_ "k8s.io/kubernetes/pkg/version/prometheus" // for version metric registration
"github.com/openshift/origin/pkg/federation/kubefed"
)
// main initialises logging, builds the kubefed root command wired to the
// process's standard streams, and exits non-zero if command execution fails.
func main() {
	logs.InitLogs()
	defer logs.FlushLogs()

	command := kubefed.NewKubeFedCommand(os.Stdin, os.Stdout, os.Stderr)
	err := command.Execute()
	if err != nil {
		os.Exit(1)
	}
}
| kedgeproject/kedge | vendor/github.com/openshift/origin/cmd/kubefed/kubefed.go | GO | apache-2.0 | 470 |
/*********************************************************************
* *
* ConfigFemtoAnalysis.C - configuration macro for the femtoscopic *
* analysis, meant as a QA process for two-particle effects *
* *
* Author: Adam Kisiel (Adam.Kisiel@cern.ch) *
* *
*********************************************************************/
#if !defined(__CINT__) || defined(__MAKECINT_)
#include "AliFemtoManager.h"
#include "AliFemtoEventReaderESDChain.h"
#include "AliFemtoEventReaderESDChainKine.h"
#include "AliFemtoEventReaderAODChain.h"
#include "AliFemtoSimpleAnalysis.h"
#include "AliFemtoSphericityEventCut.h"
//#include "AliFemtoBasicEventCut.h"
#include "AliFemtoESDTrackCut.h"
//#include "AliFemtoKKTrackCut.h"
#include "AliFemtoKpm45TrackCut.h"
#include "AliFemtoCorrFctn.h"
#include "AliFemtoCutMonitorParticleYPt.h"
#include "AliFemtoCutMonitorParticleVertPos.h"
#include "AliFemtoCutMonitorParticleMomRes.h"
#include "AliFemtoCutMonitorParticlePID.h"
#include "AliFemtoCutMonitorEventMult.h"
#include "AliFemtoCutMonitorEventVertex.h"
#include "AliFemtoCutMonitorEventSphericity.h"
#include "AliFemtoShareQualityTPCEntranceSepPairCut.h"
#include "AliFemtoPairCutAntiGamma.h"
#include "AliFemtoPairCutRadialDistance.h"
#include "AliFemtoPairCutRadialDistanceKK.h"
#include "AliFemtoPairCutRadialDistanceKKdist.h"
#include "AliFemtoQinvCorrFctn.h"
#include "AliFemtoShareQualityCorrFctn.h"
#include "AliFemtoTPCInnerCorrFctn.h"
#include "AliFemtoVertexMultAnalysis.h"
#include "AliFemtoCorrFctn3DSpherical.h"
#include "AliFemtoChi2CorrFctn.h"
#include "AliFemtoCorrFctnTPCNcls.h"
#include "AliFemtoBPLCMS3DCorrFctn.h"
#include "AliFemtoBPLCMS3DCorrFctnKK.h"
#include "AliFemtoCorrFctn3DLCMSSym.h"
#include "AliFemtoModelBPLCMSCorrFctn.h"
#include "AliFemtoModelCorrFctn3DSpherical.h"
#include "AliFemtoModelGausLCMSFreezeOutGenerator.h"
#include "AliFemtoModelGausRinvFreezeOutGenerator.h"
#include "AliFemtoModelManager.h"
#include "AliFemtoModelWeightGeneratorBasic.h"
#include "AliFemtoModelWeightGeneratorLednicky.h"
#include "AliFemtoCorrFctnDirectYlm.h"
#include "AliFemtoModelCorrFctnDirectYlm.h"
#include "AliFemtoModelCorrFctnSource.h"
#include "AliFemtoKTPairCut.h"
#endif
//________________________________________________________________________
AliFemtoManager* ConfigFemtoAnalysis() {
double PionMass = 0.13956995;
double KaonMass = 0.493677;
///// const int cMu=4;
//////// const int cKt=3;
const int cMu=3;
const int cKt=2;
//-------Single track cuts------------------------------------------------->
double DCAxy=0.3;//2.4;// cm // our standard is 0.20 cm; super narrow was 0.015cm
double DCAz =0.3;//3.0;// cm // our standard is 0.15 cm;
//-------Single track cuts-------------------------------------------------<
//=======Double track cuts=================================================>
//Dhevan's : PhiStarDifferenceMinimum=0.06; EtaDifferenceMinimum=0.02;
//standart
//double PhiStarDifferenceMinimum=0.017; //[radian]
// double EtaDifferenceMinimum=0.015; //[radian]
//for test
//double PhiStarDifferenceMinimum=0.03; //[radian]
// double EtaDifferenceMinimum=0.02; //[radian]
// double PhiStarDifferenceMinimum=0.04; //[radian]
// double EtaDifferenceMinimum=0.02; //[radian]
//=======Double track cuts=================================================<
// Switches for QA analyses
int runmults[4] = {1, 1, 1, 0};
// int multbins[5] = {0, 900, 300, 500, 900};
//old pp
int multbins[5] = {1, 18, 30, 100, 1000};
//.................................................
int runch[2] = {1, 1};
const char *chrgs[2] = { "Kp", "Km"};
int runktdep = 1;
//YS double ktrng[cKt+1] = {0.2, 0.36, 0.48, 0.6, 1.0, 1.5};
// double ktrng[cKt+1] = {0.2, 0.4, 0.6, 1.5};
//double ktrng[5] = {0.2, 0.4, 0.6, 0.8, 1.3};
// double ktrng[3] = {0.1, 0.4, 1.5};
//double ktrng[10] = {0.1, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.5};
//double ktrng[6] = {0.1, 0.3, 0.5, 0.7, 0.9, 1.5};
// double ktrng[4] = {0.15, 0.45, 0.8, 1.2};
double ktrng[3] = {0.15, 0.5, 1.2};
// double ktrng[8] = {0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 2.0};
int run3d = 0;
int runshlcms = 0;
int runtype = 2; // Types 0 - global, 1 - ITS only, 2 - TPC Inner
int isrealdata = 1;
// AliFemtoEventReaderESDChainKine* Reader=new AliFemtoEventReaderESDChainKine();
// Reader->SetConstrained(true);
// Reader->SetUseTPCOnly(false);
double shqmax;
double shqmaxSH;
int nbinssh = 100;
int nbinsh3D = 160;
//ml if (runshlcms) shqmax = 0.25;
// else shqmax = 0.9;
if (runshlcms) shqmaxSH = 0.25;
shqmax = 0.9;
// AliFemtoEventReaderESDChain* Reader=new AliFemtoEventReaderESDChain();
// Reader->SetConstrained(true);
// Reader->SetUseTPCOnly(false);
// Reader->SetReadTPCInner(false);
// Reader->SetUseMultiplicity(AliFemtoEventReaderESDChain::kV0Centrality);
// if (runtype == 0)
// Reader->SetReadTrackType(AliFemtoEventReaderESDChain::kGlobal);
// else if (runtype == 1)
// Reader->SetReadTrackType(AliFemtoEventReaderESDChain::kITSOnly);
// else if (runtype == 2)
// Reader->SetReadTrackType(AliFemtoEventReaderESDChain::kTPCOnly);
// if (isrealdata)
// Reader->SetUsePhysicsSelection(kTRUE);
// else
// Reader->SetUsePhysicsSelection(kFALSE);
// Reader->SetUseMultiplicity(AliFemtoEventReaderESDChain::kV0Centrality);
/*
AliFemtoEventReaderAODChain *Reader = new AliFemtoEventReaderAODChain();
Reader->SetFilterBit(7);
Reader->SetCentralityPreSelection(0, 900);
Reader->SetDCAglobalTrack(kTRUE);//option the DCA information from global tracks (ITS+TPC)
*/
/*
AliFemtoEventReaderAODChain *Reader = new AliFemtoEventReaderAODChain();
Reader->SetFilterBit(7);
Reader->SetNoCentrality(kTRUE);
*/
/* Run2 PbPb
*/
AliFemtoEventReaderAODChain *Reader = new AliFemtoEventReaderAODChain();
//AliFemtoEventReaderAOD *Reader = new AliFemtoEventReaderAODMultSelection();
// Reader->SetFilterMask(96);
Reader->SetFilterBit(7);
//// Reader->SetEPVZERO(kTRUE);
// Reader->SetUseMultiplicity(AliFemtoEventReaderAOD::kCentrality);
Reader->SetNoCentrality(kTRUE);
// Reader->SetUseMultiplicity(AliFemtoEventReaderAOD::kTPCOnlyRef);
// Reader->SetUseMultiplicity(AliFemtoEventReaderAOD::kReference);
Reader->SetUseMultiplicity(AliFemtoEventReaderAOD::kGlobalCount);
Reader->SetReadMC(kTRUE);
Reader->SetCentralityFlattening(kFALSE);
Reader->SetReadV0(0);
// rdr->SetPrimaryVertexCorrectionTPCPoints(kTRUE);
Reader->SetDCAglobalTrack(kTRUE);
AliFemtoManager* Manager=new AliFemtoManager();
Manager->SetEventReader(Reader);
AliFemtoVertexMultAnalysis *anetaphitpc[20];
// AliFemtoBasicEventCut *mecetaphitpc[20];
AliFemtoSphericityEventCut *mecetaphitpc[20];
AliFemtoCutMonitorEventMult *cutPassEvMetaphitpc[20];
AliFemtoCutMonitorEventMult *cutFailEvMetaphitpc[20];
AliFemtoCutMonitorEventVertex *cutPassEvVetaphitpc[20];
AliFemtoCutMonitorEventVertex *cutFailEvVetaphitpc[20];
AliFemtoCutMonitorCollections *cutPassColletaphitpc[20];
AliFemtoCutMonitorCollections *cutFailColletaphitpc[20];
AliFemtoCutMonitorEventSphericity *cutPassEvSpher[20];
AliFemtoCutMonitorEventSphericity *cutFailEvSpher[20];
// AliFemtoKKTrackCut *dtc1etaphitpc[20];
// AliFemtoKKTrackCut *dtc2etaphitpc[20];
AliFemtoKpm45TrackCut *dtc1etaphitpc[20];
AliFemtoKpm45TrackCut *dtc2etaphitpc[20];
//AliFemtoESDTrackCut *dtc1etaphitpc[20];
//AliFemtoESDTrackCut *dtc2etaphitpc[20];
AliFemtoCutMonitorParticleYPt *cutPass1YPtetaphitpc[20];
AliFemtoCutMonitorParticleYPt *cutFail1YPtetaphitpc[20];
AliFemtoCutMonitorParticlePID *cutPass1PIDetaphitpc[20];
AliFemtoCutMonitorParticlePID *cutFail1PIDetaphitpc[20];
AliFemtoCutMonitorParticleYPt *cutPass2YPtetaphitpc[20];
AliFemtoCutMonitorParticleYPt *cutFail2YPtetaphitpc[20];
AliFemtoCutMonitorParticlePID *cutPass2PIDetaphitpc[20];
AliFemtoCutMonitorParticlePID *cutFail2PIDetaphitpc[20];
//AliFemtoPairCutAntiGamma *sqpcetaphitpc[20];
// AliFemtoShareQualityTPCEntranceSepPairCut *sqpcetaphitpc[20];
// AliFemtoPairCutRadialDistance *sqpcetaphitpc[20];//AliFemto dphi* cut
//// AliFemtoPairCutRadialDistanceKK *sqpcetaphitpc[20];//Dhevan's dphi* cut
AliFemtoPairCutRadialDistanceKKdist *sqpcetaphitpc[20];//Dhevan's dphi* cut
AliFemtoCorrFctnDirectYlm *cylmetaphitpc[20];
AliFemtoCorrFctnDEtaDPhi *cdedpetaphi[20*10];//20->20*10 due to kT
AliFemtoChi2CorrFctn *cchiqinvetaphitpc[20];
AliFemtoKTPairCut *ktpcuts[20*8];
AliFemtoCorrFctnDirectYlm *cylmkttpc[20*8];
AliFemtoQinvCorrFctn *cqinvkttpc[20*8];
// AliFemtoCorrFctn3DLCMSSym *cq3dlcmskttpc[20*8];
AliFemtoBPLCMS3DCorrFctnKK *cq3dlcmskttpc[20*8];
// AliFemtoBPLCMS3DCorrFctn *cq3dlcmskttpc[20*8];
AliFemtoCorrFctnTPCNcls *cqinvnclstpc[20];
AliFemtoShareQualityCorrFctn *cqinvsqtpc[20*10];
AliFemtoChi2CorrFctn *cqinvchi2tpc[20];
AliFemtoTPCInnerCorrFctn *cqinvinnertpc[20*10];
// *** Third QA task - HBT analysis with all pair cuts off, TPC only ***
// *** Begin Kaon-Kaon (positive) analysis ***
int aniter = 0;
bool verbose=false;
for (int imult=0; imult<cMu/*4*/; imult++) {
if (runmults[imult]) {
for (int ichg=0; ichg<2; ichg++) {
if (runch[ichg]) {
aniter = ichg*cMu+imult; //0, 1(ich=0) ,2,3
anetaphitpc[aniter] = new AliFemtoVertexMultAnalysis(10, -10.0, 10.0, 8, multbins[imult], multbins[imult+1]);
anetaphitpc[aniter]->SetNumEventsToMix(10);
anetaphitpc[aniter]->SetMinSizePartCollection(1);
anetaphitpc[aniter]->SetVerboseMode(verbose);
/*
mecetaphitpc[aniter] = new AliFemtoBasicEventCut();
mecetaphitpc[aniter]->SetEventMult(0.01,100000);
// mecetaphitpc[aniter]->SetVertZPos(-10.0,10.0);
mecetaphitpc[aniter]->SetVertZPos(-10.0,10.0);
*/
mecetaphitpc[aniter] = new AliFemtoSphericityEventCut();
mecetaphitpc[aniter]->SetEventMult(0.01,100000);
mecetaphitpc[aniter]->SetVertZPos(-10.0,10.0);
mecetaphitpc[aniter]->SetStMin(0.0);
mecetaphitpc[aniter]->SetStMax(0.3);
/* //was in aliroot 5.03.76
if (isrealdata)
mecetaphitpc[aniter]->SetAcceptOnlyPhysics(kTRUE);
*/
// mecetaphitpc->SetAcceptBadVertex(kTRUE);
cutPassEvMetaphitpc[aniter] = new AliFemtoCutMonitorEventMult(Form("cutPass%stpcM%i", chrgs[ichg], imult));
cutFailEvMetaphitpc[aniter] = new AliFemtoCutMonitorEventMult(Form("cutFail%stpcM%i", chrgs[ichg], imult));
mecetaphitpc[aniter]->AddCutMonitor(cutPassEvMetaphitpc[aniter], cutFailEvMetaphitpc[aniter]);
// cutPassEvVetaphitpc[aniter] = new AliFemtoCutMonitorEventVertex(Form("cutPass%stpcM%i", chrgs[ichg], imult));
// cutFailEvVetaphitpc[aniter] = new AliFemtoCutMonitorEventVertex(Form("cutFail%stpcM%i", chrgs[ichg], imult));
// mecetaphitpc[aniter]->AddCutMonitor(cutPassEvVetaphitpc[aniter], cutFailEvVetaphitpc[aniter]);
cutPassEvSpher[aniter] = new AliFemtoCutMonitorEventSphericity(Form("cutPass%stpcM%i", chrgs[ichg], imult));
cutFailEvSpher[aniter] = new AliFemtoCutMonitorEventSphericity(Form("cutFail%stpcM%i", chrgs[ichg], imult));
mecetaphitpc[aniter]->AddCutMonitor(cutPassEvSpher[aniter], cutFailEvSpher[aniter]);
//Study the collection multiplicity distribution
// cutPassColletaphitpc[aniter] = new AliFemtoCutMonitorCollections(Form("cutPass%stpcM%i", chrgs[ichg], imult));
// cutFailColletaphitpc[aniter] = new AliFemtoCutMonitorCollections(Form("cutFail%stpcM%i", chrgs[ichg], imult));
// mecetaphitpc[aniter]->AddCutMonitor(cutPassColletaphitpc[aniter], cutFailColletaphitpc[aniter]);
dtc1etaphitpc[aniter] = new AliFemtoKpm45TrackCut();
// dtc1etaphitpc[aniter] = new AliFemtoESDTrackCut();
// dtc1etaphitpc[aniter]->SetPidProbPion(0.0,1.001);
// dtc1etaphitpc[aniter]->SetPidProbMuon(0.0,1.0);
// dtc1etaphitpc[aniter]->SetPidProbKaon(0.0,1.0);
// dtc1etaphitpc[aniter]->SetPidProbProton(0.0,1.0);
if (ichg == 0)
dtc1etaphitpc[aniter]->SetCharge(1.0);
else if (ichg == 1)
dtc1etaphitpc[aniter]->SetCharge(-1.0);
dtc1etaphitpc[aniter]->SetPt(0.14,1.5);
// dtc1etaphitpc[aniter]->SetEta(-1.2,1.2);
dtc1etaphitpc[aniter]->SetEta(-0.8,0.8); //0.5
// // dtc1etaphitpc[aniter]->SetEta(-0.5,0.5);
/// dtc1etaphitpc[aniter]->SetMass(PionMass);
dtc1etaphitpc[aniter]->SetMass(KaonMass);
//// dtc1etaphitpc[aniter]->SetminTPCncls(80);
/////// ----!!!!!!
dtc1etaphitpc[aniter]->SetMostProbableKaon(); //!!!!!!
//------------------- November 2013 -----------------------------------<
//New class in AliFemo: PWGCF/FEMTOSCOPY/AliFemtoUser/AliFemtoKKTrackCut.cxx
dtc1etaphitpc[aniter]->SetNsigmaTPCle250(2.0);
dtc1etaphitpc[aniter]->SetNsigmaTPC250_400(2.0);
dtc1etaphitpc[aniter]->SetNsigmaTPC400_450(1.0);
dtc1etaphitpc[aniter]->SetNsigmaTPC450_500(2.0);
dtc1etaphitpc[aniter]->SetNsigmaTPCge500(3.0);
// new cuts are stronger, better separation of pion in TOF
// when momentum is greater then 800 MeV/c
dtc1etaphitpc[aniter]->SetNsigmaTOF500_800(2.0);
dtc1etaphitpc[aniter]->SetNsigmaTOF800_1000(1.5);
dtc1etaphitpc[aniter]->SetNsigmaTOFge1000(1.0);
//------------------- November 2013 ----------------------------------->
//// dtc1etaphitpc[aniter]->SetMostProbablePion();
// // Track quality cuts
if (runtype == 0) {
/*
dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCrefit|AliESDtrack::kITSrefit);
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCrefit);
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kITSrefit);
dtc1etaphitpc[aniter]->SetminTPCncls(80);
dtc1etaphitpc[aniter]->SetRemoveKinks(kTRUE);
dtc1etaphitpc[aniter]->SetLabel(kFALSE);
// dtc1etaphitpc[aniter]->SetMaxITSChiNdof(6.0);
dtc1etaphitpc[aniter]->SetMaxTPCChiNdof(4.0);
dtc1etaphitpc[aniter]->SetMaxImpactXY(DCAxy);
//Poland: dtc1etaphitpc[aniter]->SetMaxImpactXYPtDep(0.0182, 0.0350, -1.01);
dtc1etaphitpc[aniter]->SetMaxImpactZ(DCAz);
// dtc1etaphitpc[aniter]->SetMaxSigmaToVertex(6.0);
*/
}
else if (runtype == 1) {
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCrefit|AliESDtrack::kITSrefit);
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCrefit);
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kITSrefit|AliESDtrack::kITSpureSA);
// dtc1etaphitpc[aniter]->SetminTPCncls(70);
dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kITSrefit);
dtc1etaphitpc[aniter]->SetRemoveKinks(kTRUE);
dtc1etaphitpc[aniter]->SetLabel(kFALSE);
// dtc1etaphitpc[aniter]->SetMaxITSChiNdof(6.0);
// dtc1etaphitpc[aniter]->SetMaxTPCChiNdof(6.0);
dtc1etaphitpc[aniter]->SetMaxImpactXY(DCAxy);
dtc1etaphitpc[aniter]->SetMaxImpactZ(DCAz);
// dtc1etaphitpc[aniter]->SetMaxSigmaToVertex(6.0);
}
else if (runtype == 2) {
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCrefit|AliESDtrack::kITSrefit);
dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCin);
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kTPCrefit);
// dtc1etaphitpc[aniter]->SetStatus(AliESDtrack::kITSrefit);
dtc1etaphitpc[aniter]->SetminTPCncls(80); //was "0"
dtc1etaphitpc[aniter]->SetRemoveKinks(kTRUE);
dtc1etaphitpc[aniter]->SetLabel(kFALSE);
// dtc1etaphitpc[aniter]->SetMaxITSChiNdof(6.0);
dtc1etaphitpc[aniter]->SetMaxTPCChiNdof(4.0);
dtc1etaphitpc[aniter]->SetMaxImpactXY(DCAxy);
//dtc1etaphitpc[aniter]->SetMaxImpactXYPtDep(0.0182, 0.0350, -1.01);
dtc1etaphitpc[aniter]->SetMaxImpactZ(DCAz); //3.0
// dtc1etaphitpc[aniter]->SetMaxSigmaToVertex(6.0);
}
cutPass1YPtetaphitpc[aniter] = new AliFemtoCutMonitorParticleYPt(Form("cutPass1%stpcM%i", chrgs[ichg], imult), 0.493677);
cutFail1YPtetaphitpc[aniter] = new AliFemtoCutMonitorParticleYPt(Form("cutFail1%stpcM%i", chrgs[ichg], imult), 0.493677);
dtc1etaphitpc[aniter]->AddCutMonitor(cutPass1YPtetaphitpc[aniter], cutFail1YPtetaphitpc[aniter]);
cutPass1PIDetaphitpc[aniter] = new AliFemtoCutMonitorParticlePID(Form("cutPass1%stpcM%i", chrgs[ichg], imult),1);
cutFail1PIDetaphitpc[aniter] = new AliFemtoCutMonitorParticlePID(Form("cutFail1%stpcM%i", chrgs[ichg], imult),1);
dtc1etaphitpc[aniter]->AddCutMonitor(cutPass1PIDetaphitpc[aniter], cutFail1PIDetaphitpc[aniter]);
// sqpcetaphitpc[aniter] = new AliFemtoPairCutAntiGamma();
// sqpcetaphitpc[aniter] = new AliFemtoShareQualityTPCEntranceSepPairCut();
if (ichg < 2) {
sqpcetaphitpc[aniter] = new AliFemtoPairCutRadialDistanceKKdist();//AliFemto dphi* cut
// sqpcetaphitpc[aniter] = new AliFemtoPairCutRadialDistanceKK(); //Dhevan's dphi* cut
if (runtype == 0) {
sqpcetaphitpc[aniter]->SetShareQualityMax(1.0);
sqpcetaphitpc[aniter]->SetShareFractionMax(0.05);
sqpcetaphitpc[aniter]->SetRemoveSameLabel(kFALSE);
sqpcetaphitpc[aniter]->SetAverageSeparation(3.0); //0.8
}
else if (runtype == 1) {
sqpcetaphitpc[aniter]->SetShareQualityMax(1.0);
sqpcetaphitpc[aniter]->SetShareFractionMax(1.05);
sqpcetaphitpc[aniter]->SetRemoveSameLabel(kFALSE);
sqpcetaphitpc[aniter]->SetAverageSeparation(3.0); //0.8
}
else if (runtype == 2) {
sqpcetaphitpc[aniter]->SetDataType(AliFemtoPairCut::kAOD);
sqpcetaphitpc[aniter]->SetShareQualityMax(1.0);
sqpcetaphitpc[aniter]->SetShareFractionMax(0.05);
sqpcetaphitpc[aniter]->SetRemoveSameLabel(kFALSE);
sqpcetaphitpc[aniter]->SetAverageSeparation(8.0); //0.8
}
}
anetaphitpc[aniter]->SetEventCut(mecetaphitpc[aniter]);
anetaphitpc[aniter]->SetFirstParticleCut(dtc1etaphitpc[aniter]);
anetaphitpc[aniter]->SetSecondParticleCut(dtc1etaphitpc[aniter]);
anetaphitpc[aniter]->SetPairCut(sqpcetaphitpc[aniter]);
// cylmetaphitpc[aniter] = new AliFemtoCorrFctnDirectYlm(Form("cylm%stpcM%i", chrgs[ichg], imult),3,nbinssh,0.0,shqmaxSH,runshlcms);
/// anetaphitpc[aniter]->AddCorrFctn(cylmetaphitpc[aniter]);
// cqinvnclstpc[aniter] = new AliFemtoCorrFctnTPCNcls(Form("cqinvncls%stpcM%i", chrgs[ichg], imult),nbinssh,0.0,shqmax);
// anetaphitpc[aniter]->AddCorrFctn(cqinvnclstpc[aniter]);
// cqinvchi2tpc[aniter] = new AliFemtoChi2CorrFctn(Form("cqinvchi2%stpcM%i", chrgs[ichg], imult),nbinssh,0.0,shqmax);
// anetaphitpc[aniter]->AddCorrFctn(cqinvchi2tpc[aniter]);
if (runktdep) {
int ktm;
for (int ikt=0; ikt<cKt/*8*/; ikt++) {
ktm = aniter*cKt/*8*/ + ikt;
ktpcuts[ktm] = new AliFemtoKTPairCut(ktrng[ikt], ktrng[ikt+1]);
// cqinvkttpc[ktm] = new AliFemtoQinvCorrFctn(Form("cqinv%stpcM%ikT%i", chrgs[ichg], imult, ikt),nbinssh,0.0,(imult>6)?shqmax*2.5:shqmax);
cqinvkttpc[ktm] = new AliFemtoQinvCorrFctn(Form("cqinv%stpcM%ikT%i", chrgs[ichg], imult, ikt),nbinssh,0.0,2.0);
cqinvkttpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
anetaphitpc[aniter]->AddCorrFctn(cqinvkttpc[ktm]);
// cqinvsqtpc[ktm] = new AliFemtoShareQualityCorrFctn(Form("cqinvsq%stpcM%ikT%i", chrgs[ichg], imult, ikt),nbinssh,0.0,shqmax);
// cqinvsqtpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
// anetaphitpc[aniter]->AddCorrFctn(cqinvsqtpc[ktm]);
// cqinvinnertpc[ktm] = new AliFemtoTPCInnerCorrFctn(Form("cqinvinner%stpcM%ikT%i", chrgs[ichg], imult, ikt),nbinssh,0.0,shqmax);
// cqinvinnertpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
// cqinvinnertpc[ktm]->SetRadius(1.2);
// anetaphitpc[aniter]->AddCorrFctn(cqinvinnertpc[ktm]);
//---- Correlation Function vs Delta_Eta and Delta_Phi (not Phi*)---->>>
// cdedpetaphi[ktm] = new AliFemtoCorrFctnDEtaDPhi(Form("cdedp%stpcM%ikT%i", chrgs[ichg], imult, ikt),100,100);
// anetaphitpc[aniter]->AddCorrFctn(cdedpetaphi[ktm]);
//---- Correlation Function vs Delta_Eta and Delta_Phi (not Phi*)----<<<
if (run3d) {
// cq3dlcmskttpc[ktm] = new AliFemtoCorrFctn3DLCMSSym(Form("cq3d%stpcM%ikT%i", chrgs[ichg], imult, ikt),60,(imult>3)?((imult>6)?((imult>7)?0.6:0.4):0.25):0.15);
// AliFemtoBPLCMS3DCorrFctn *cq3dallpiptpc = new AliFemtoBPLCMS3DCorrFctn("cq3dallpiptpc",100,-1.5,1.5);
//our
cq3dlcmskttpc[ktm] = new AliFemtoBPLCMS3DCorrFctnKK(Form("cq3d%stpcM%ikT%i", chrgs[ichg], imult, ikt),nbinsh3D,-0.3,0.3);
// cq3dlcmskttpc[ktm] = new AliFemtoBPLCMS3DCorrFctn(Form("cq3d%stpcM%ikT%i", chrgs[ichg], imult, ikt),nbinsh3D,-0.3,0.3);
// cq3dlcmskttpc[ktm] = new AliFemtoCorrFctn3DLCMSSym(Form("cq3d%stpcM%ikT%i", chrgs[ichg], imult, ikt),nbinsh3D,0.5);
cq3dlcmskttpc[ktm]->SetPairSelectionCut(ktpcuts[ktm]);
anetaphitpc[aniter]->AddCorrFctn(cq3dlcmskttpc[ktm]);
}
}
}
// cdedpetaphi[aniter] = new AliFemtoCorrFctnDEtaDPhi(Form("cdedp%stpcM%i", chrgs[ichg], imult),24, 24);
// anetaphitpc[aniter]->AddCorrFctn(cdedpetaphi[aniter]);
Manager->AddAnalysis(anetaphitpc[aniter]);
}
}
}
}
// *** End Kaon-Kaon (positive) analysis
return Manager;
}
| fcolamar/AliPhysics | PWGCF/FEMTOSCOPY/macros/Train/malinina/Oct19/MC/KK_13TeV_jets/ConfigFemtoAnalysis.C | C++ | bsd-3-clause | 21,676 |
<?php
namespace Concrete\Controller\SinglePage\Dashboard\Pages\Types;
use Concrete\Core\Error\Error;
use \Concrete\Core\Page\Controller\DashboardPageController;
use Loader;
use PageType;
use PageTemplate;
use \Concrete\Core\Page\Type\PublishTarget\Type\Type as PageTypePublishTargetType;
class Add extends DashboardPageController {

    /**
     * Handles submission of the "add page type" form.
     *
     * Validates the CSRF token, name, handle (must be non-empty, well-formed
     * and unique), the default page template, the selected page templates and
     * the publish target type. On success creates the page type, configures
     * its publish target, and redirects back to the page type list.
     */
    public function submit() {
        $vs = Loader::helper('validation/strings');
        $sec = Loader::helper('security');
        $name = $sec->sanitizeString($this->post('ptName'));
        $handle = $sec->sanitizeString($this->post('ptHandle'));
        if (!$this->token->validate('add_page_type')) {
            $this->error->add(t($this->token->getErrorMessage()));
        }
        if (!$vs->notempty($name)) {
            $this->error->add(t('You must specify a valid name for your page type.'));
        }
        if (!$vs->handle($handle)) {
            $this->error->add(t('You must specify a valid handle for your page type.'));
        } else {
            // Handle must not collide with an existing page type.
            $_pt = PageType::getByHandle($handle);
            if (is_object($_pt)) {
                $this->error->add(t('You must specify a unique handle for your page type.'));
            }
            unset($_pt);
        }
        $defaultTemplate = PageTemplate::getByID($this->post('ptDefaultPageTemplateID'));
        if (!is_object($defaultTemplate)) {
            $this->error->add(t('You must choose a valid default page template.'));
        }
        $templates = array();
        // Fixed: read through the request abstraction instead of $_POST
        // directly. This is consistent with the rest of the method and avoids
        // an undefined-index notice when no template checkbox is submitted.
        $pageTemplateIDs = $this->post('ptPageTemplateID');
        if (is_array($pageTemplateIDs)) {
            foreach($pageTemplateIDs as $pageTemplateID) {
                $pt = PageTemplate::getByID($pageTemplateID);
                if (is_object($pt)) {
                    $templates[] = $pt;
                }
            }
        }
        // 'C' means "custom selection of templates" — at least one required.
        if (count($templates) == 0 && $this->post('ptAllowedPageTemplates') == 'C') {
            $this->error->add(t('You must specify at least one page template.'));
        }
        $target = PageTypePublishTargetType::getByID($this->post('ptPublishTargetTypeID'));
        if (!is_object($target)) {
            $this->error->add(t('Invalid page type publish target type.'));
        } else {
            // Let the target type validate its own request fields.
            $pe = $target->validatePageTypeRequest($this->request);
            if ($pe instanceof Error) {
                $this->error->add($pe);
            }
        }
        if (!$this->error->has()) {
            $data = array(
                'handle' => $handle,
                'name' => $name,
                'defaultTemplate' => $defaultTemplate,
                'ptLaunchInComposer' => $this->post('ptLaunchInComposer'),
                'ptIsFrequentlyAdded' => $this->post('ptIsFrequentlyAdded'),
                'allowedTemplates' => $this->post('ptAllowedPageTemplates'),
                'templates' => $templates
            );
            $pt = PageType::add($data);
            $configuredTarget = $target->configurePageTypePublishTarget($pt, $this->post());
            $pt->setConfiguredPageTypePublishTargetObject($configuredTarget);
            $this->redirect('/dashboard/pages/types', 'page_type_added');
        }
    }
}
| lifejuggler/audrey_site | updates/concrete5.7.5.6/concrete/controllers/single_page/dashboard/pages/types/add.php | PHP | mit | 2,667 |
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
// A light dynamic rigid-body cube for physics testing.
// The dynamic fields at the bottom are read by game scripts, not the engine.
datablock PhysicsShapeData( PhysicsCube )
{
   category = "Physics";                       // Creator/editor grouping.
   shapeName = "art/shapes/cube/cube.dae";     // Collision/render mesh.
   emap = true;                                // Enable environment mapping.

   // Physics properties.
   mass = "0.5";
   friction = "0.4";
   staticFriction = "0.5";
   restitution = "0.3";                        // Bounciness (0..1).
   linearDamping = "0.1";
   angularDamping = "0.2";
   linearSleepThreshold = "1.0";               // Below this speed, body may sleep.
   angularSleepThreshold = "1.0";
   buoyancyDensity = "0.9";                    // < 1.0 floats in water.
   waterDampingScale = "10";

   // Damage - dynamic fields (script-defined, not engine fields).
   radiusDamage = 0;
   damageRadius = 0;
   areaImpulse = 0;
   invulnerable = true;                        // Cube cannot be destroyed.
};
// A heavy, bouncy dynamic boulder; unlike PhysicsCube it is destructible.
datablock PhysicsShapeData( PhysicsBoulder )
{
   category = "Physics";                       // Creator/editor grouping.
   shapeName = "art/shapes/rocks/boulder.dts"; // Collision/render mesh.
   emap = true;                                // Enable environment mapping.

   // Physics properties.
   mass = "20";
   friction = "0.2";
   staticFriction = "0.3";
   restitution = "0.8";                        // Quite bouncy for a rock.
   linearDamping = "0.1";
   angularDamping = "0.2";
   linearSleepThreshold = "1.0";               // Below this speed, body may sleep.
   angularSleepThreshold = "1.0";
   buoyancyDensity = "0.9";                    // < 1.0 floats in water.
   waterDampingScale = "10";

   // Damage - dynamic fields (script-defined, not engine fields).
   radiusDamage = 0;
   damageRadius = 0;
   areaImpulse = 0;
   invulnerable = false;                       // Boulder can take damage.
};
| elfprince13/Torque3D | Templates/Modules/FPSGameplay/scripts/datablocks/physics.cs | C# | mit | 2,409 |
/*
* The MIT License
* Copyright (c) 2012 Microsoft Corporation
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package microsoft.exchange.webservices.data.property.complex;
import microsoft.exchange.webservices.data.core.EwsServiceXmlReader;
import microsoft.exchange.webservices.data.core.EwsServiceXmlWriter;
import microsoft.exchange.webservices.data.core.XmlElementNames;
import microsoft.exchange.webservices.data.core.enumeration.property.StandardUser;
import microsoft.exchange.webservices.data.core.enumeration.misc.XmlNamespace;
import microsoft.exchange.webservices.data.core.exception.service.local.ServiceValidationException;
/**
* Represents a delegate user.
*/
public final class DelegateUser extends ComplexProperty {

  /**
   * The identity of the delegate user.
   */
  private UserId userId = new UserId();

  /**
   * The folder permissions granted to the delegate.
   */
  private DelegatePermissions permissions = new DelegatePermissions();

  /**
   * Whether the delegate receives copies of meeting messages.
   */
  private boolean receiveCopiesOfMeetingMessages;

  /**
   * Whether the delegate may view the principal's private items.
   */
  private boolean viewPrivateItems;

  /**
   * Initializes a new instance of the {@code DelegateUser} class.
   */
  public DelegateUser() {
    super();
    this.receiveCopiesOfMeetingMessages = false;
    this.viewPrivateItems = false;
  }

  /**
   * Initializes a new instance of the {@code DelegateUser} class,
   * identified by primary SMTP address.
   *
   * @param primarySmtpAddress the primary SMTP address
   */
  public DelegateUser(String primarySmtpAddress) {
    this();
    this.userId.setPrimarySmtpAddress(primarySmtpAddress);
  }

  /**
   * Initializes a new instance of the {@code DelegateUser} class,
   * identified by a standard user.
   *
   * @param standardUser the standard user
   */
  public DelegateUser(StandardUser standardUser) {
    this();
    this.userId.setStandardUser(standardUser);
  }

  /**
   * Gets the user Id of the delegate user.
   *
   * @return the user id
   */
  public UserId getUserId() {
    return this.userId;
  }

  /**
   * Gets the list of delegate user's permissions.
   *
   * @return the permissions
   */
  public DelegatePermissions getPermissions() {
    return this.permissions;
  }

  /**
   * Gets a value indicating if the delegate user should receive
   * copies of meeting request.
   *
   * @return the receive copies of meeting messages
   */
  public boolean getReceiveCopiesOfMeetingMessages() {
    return this.receiveCopiesOfMeetingMessages;
  }

  /**
   * Sets the receive copies of meeting messages.
   *
   * @param value the new receive copies of meeting messages
   */
  public void setReceiveCopiesOfMeetingMessages(boolean value) {
    this.receiveCopiesOfMeetingMessages = value;
  }

  /**
   * Gets a value indicating if the delegate user should be
   * able to view the principal's private item.
   *
   * @return the view private item
   */
  public boolean getViewPrivateItems() {
    return this.viewPrivateItems;
  }

  /**
   * Sets a value indicating if the delegate user should be able to
   * view the principal's private item.
   *
   * @param value the new view private item
   */
  public void setViewPrivateItems(boolean value) {
    this.viewPrivateItems = value;
  }

  /**
   * Tries to read element from XML.
   *
   * @param reader the reader
   * @return true, if the current element was recognized and consumed
   * @throws Exception the exception
   */
  public boolean tryReadElementFromXml(EwsServiceXmlReader reader)
      throws Exception {
    if (reader.getLocalName().equals(XmlElementNames.UserId)) {
      this.userId = new UserId();
      this.userId.loadFromXml(reader, reader.getLocalName());
      return true;
    } else if (reader.getLocalName().equals(
        XmlElementNames.DelegatePermissions)) {
      // Fixed: this branch previously compared against XmlElementNames.UserId
      // a second time, which made it unreachable and left the
      // DelegatePermissions element unparsed. It must match the element
      // written by writeElementsToXml().
      this.permissions.reset();
      this.permissions.loadFromXml(reader, reader.getLocalName());
      return true;
    } else if (reader.getLocalName().equals(
        XmlElementNames.ReceiveCopiesOfMeetingMessages)) {
      this.receiveCopiesOfMeetingMessages = reader
          .readElementValue(Boolean.class);
      return true;
    } else if (reader.getLocalName().equals(
        XmlElementNames.ViewPrivateItems)) {
      this.viewPrivateItems = reader.readElementValue(Boolean.class);
      return true;
    } else {
      return false;
    }
  }

  /**
   * Writes elements to XML, mirroring the order expected by
   * tryReadElementFromXml.
   *
   * @param writer the writer
   * @throws Exception the exception
   */
  public void writeElementsToXml(EwsServiceXmlWriter writer)
      throws Exception {
    this.getUserId().writeToXml(writer, XmlElementNames.UserId);
    this.getPermissions().writeToXml(writer,
        XmlElementNames.DelegatePermissions);
    writer.writeElementValue(XmlNamespace.Types,
        XmlElementNames.ReceiveCopiesOfMeetingMessages,
        this.receiveCopiesOfMeetingMessages);
    writer.writeElementValue(XmlNamespace.Types,
        XmlElementNames.ViewPrivateItems, this.viewPrivateItems);
  }

  /**
   * Validates this instance.
   *
   * @throws ServiceValidationException if no valid UserId has been set
   */
  protected void internalValidate() throws ServiceValidationException {
    if (this.getUserId() == null) {
      throw new ServiceValidationException("The UserId in the DelegateUser hasn't been specified.");
    } else if (!this.getUserId().isValid()) {
      throw new ServiceValidationException(
          "The UserId in the DelegateUser is invalid. The StandardUser, PrimarySmtpAddress or SID property must be set.");
    }
  }

  /**
   * Validates this instance for AddDelegate.
   *
   * @throws ServiceValidationException if the permissions are invalid
   * @throws Exception on other validation failures
   */
  protected void validateAddDelegate() throws ServiceValidationException,
      Exception {
    this.permissions.validateAddDelegate();
  }

  /**
   * Validates this instance for UpdateDelegate.
   *
   * @throws Exception if the permissions are invalid
   */
  public void validateUpdateDelegate() throws Exception {
    this.permissions.validateUpdateDelegate();
  }
}
| KatharineYe/ews-java-api | src/main/java/microsoft/exchange/webservices/data/property/complex/DelegateUser.java | Java | mit | 6,929 |
import Vue from 'vue'
describe('Options render', () => {
  it('basic usage', () => {
    // Root instance whose render function builds a <ul> with one <li> per item.
    const vm = new Vue({
      render (h) {
        const children = this.items.map(
          item => h('li', { staticClass: 'task' }, [item.name])
        )
        return h('ul', { staticClass: 'tasks' }, children)
      },
      data: {
        items: [{ id: 1, name: 'task1' }, { id: 2, name: 'task2' }]
      }
    }).$mount()
    expect(vm.$el.tagName).toBe('UL')
    // Every rendered <li> must mirror the corresponding item's name.
    Array.prototype.forEach.call(vm.$el.children, (li, i) => {
      expect(li.tagName).toBe('LI')
      expect(li.textContent).toBe(vm.items[i].name)
    })
  })

  it('allow null data', () => {
    // A null data object must be tolerated; children may be a plain string.
    const vm = new Vue({
      render (h) {
        return h('div', null, 'hello' /* string as children*/)
      }
    }).$mount()
    expect(vm.$el.tagName).toBe('DIV')
    expect(vm.$el.textContent).toBe('hello')
  })

  it('should warn non `render` option and non `template` option', () => {
    // Mounting with neither a render function nor a template must warn.
    new Vue().$mount()
    expect('Failed to mount component: template or render function not defined.').toHaveBeenWarned()
  })
})
| new4/Vue4Fun | vue-2.5.16/test/unit/features/options/render.spec.js | JavaScript | mit | 1,164 |
// Copyright 2013 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/sync_file_system/drive_backend_v1/drive_metadata_store.h"
#include <utility>
#include <vector>
#include "base/bind.h"
#include "base/callback.h"
#include "base/files/file_path.h"
#include "base/location.h"
#include "base/message_loop/message_loop_proxy.h"
#include "base/sequenced_task_runner.h"
#include "base/stl_util.h"
#include "base/strings/string_number_conversions.h"
#include "base/strings/string_util.h"
#include "base/strings/stringprintf.h"
#include "base/task_runner_util.h"
#include "base/values.h"
#include "chrome/browser/sync_file_system/drive_backend/metadata_db_migration_util.h"
#include "chrome/browser/sync_file_system/drive_backend_v1/drive_file_sync_service.h"
#include "chrome/browser/sync_file_system/drive_backend_v1/drive_file_sync_util.h"
#include "chrome/browser/sync_file_system/logger.h"
#include "chrome/browser/sync_file_system/sync_file_system.pb.h"
#include "chrome/browser/sync_file_system/syncable_file_system_util.h"
#include "third_party/leveldatabase/src/include/leveldb/db.h"
#include "third_party/leveldatabase/src/include/leveldb/write_batch.h"
#include "url/gurl.h"
#include "webkit/browser/fileapi/file_system_url.h"
#include "webkit/common/fileapi/file_system_util.h"
using fileapi::FileSystemURL;
namespace sync_file_system {
// Short aliases for the nested container types declared on DriveMetadataStore.
typedef DriveMetadataStore::MetadataMap MetadataMap;
typedef DriveMetadataStore::OriginByResourceId OriginByResourceId;
typedef DriveMetadataStore::PathToMetadata PathToMetadata;
typedef DriveMetadataStore::ResourceIdByOrigin ResourceIdByOrigin;

// Leaf name of the LevelDB database directory, created under |base_dir_|.
const base::FilePath::CharType DriveMetadataStore::kDatabaseName[] =
    FILE_PATH_LITERAL("DriveMetadata");
// In-memory snapshot of everything read from (or created for) the on-disk
// database. Built on the file task runner by LoadDBContents() and handed
// back to DriveMetadataStore::DidInitialize() on the origin thread.
struct DBContents {
  SyncStatusCode status;       // Result of opening/reading the database.
  scoped_ptr<leveldb::DB> db;  // Open handle; NULL when opening failed.
  bool created;                // True if the database was newly created.
  int64 largest_changestamp;   // Value of the "CHANGE_STAMP" row (0 if absent).
  DriveMetadataStore::MetadataMap metadata_map;
  std::string sync_root_directory_resource_id;
  ResourceIdByOrigin incremental_sync_origins;
  ResourceIdByOrigin disabled_origins;

  DBContents()
      : status(SYNC_STATUS_UNKNOWN),
        created(false),
        largest_changestamp(0) {
  }
};
namespace {

// ---- Database schema ----
// "VERSION" holds the schema version consumed by MigrateDatabaseIfNeeded().
const char kDatabaseVersionKey[] = "VERSION";
const int64 kCurrentDatabaseVersion = 2;
// "CHANGE_STAMP" holds the largest changestamp processed so far.
const char kChangeStampKey[] = "CHANGE_STAMP";
// "SYNC_ROOT_DIR" holds the resource id of the sync-root directory.
const char kSyncRootDirectoryKey[] = "SYNC_ROOT_DIR";
// Per-file rows: "METADATA: <origin><space><path>" -> serialized DriveMetadata.
const char kDriveMetadataKeyPrefix[] = "METADATA: ";
const char kMetadataKeySeparator = ' ';
// Per-origin root rows, keyed by origin URL, valued by origin resource id.
const char kDriveIncrementalSyncOriginKeyPrefix[] = "ISYNC_ORIGIN: ";
const char kDriveDisabledOriginKeyPrefix[] = "DISABLED_ORIGIN: ";

// Selects which per-origin key prefix CreateKeyForOriginRoot() produces.
enum OriginSyncType {
  INCREMENTAL_SYNC_ORIGIN,
  DISABLED_ORIGIN
};
// Strips |prefix| from the front of |str| (case-sensitive ASCII match).
// Returns |str| unchanged when it does not start with |prefix|.
std::string RemovePrefix(const std::string& str, const std::string& prefix) {
  if (!StartsWithASCII(str, prefix, true))
    return str;
  return str.substr(prefix.size());
}
// Builds the metadata row key for |path| under |origin|:
// "METADATA: <origin><space><path>".
std::string OriginAndPathToMetadataKey(const GURL& origin,
                                       const base::FilePath& path) {
  return kDriveMetadataKeyPrefix + origin.spec() +
      kMetadataKeySeparator + path.AsUTF8Unsafe();
}
// Convenience wrapper: metadata row key for |url|'s origin and path.
std::string FileSystemURLToMetadataKey(const FileSystemURL& url) {
  return OriginAndPathToMetadataKey(url.origin(), url.path());
}
// Splits a "METADATA: <origin> <path>" row key back into |origin| and |path|.
// NOTE(review): if the separator is absent, find() yields npos, making the
// whole key body the origin and (via npos + 1 == 0) also the path source --
// presumably keys written by OriginAndPathToMetadataKey() are always
// well-formed; confirm against the writers.
void MetadataKeyToOriginAndPath(const std::string& metadata_key,
                                GURL* origin,
                                base::FilePath* path) {
  std::string key_body(RemovePrefix(metadata_key, kDriveMetadataKeyPrefix));
  size_t separator_position = key_body.find(kMetadataKeySeparator);
  *origin = GURL(key_body.substr(0, separator_position));
  *path = base::FilePath::FromUTF8Unsafe(
      key_body.substr(separator_position + 1));
}
// Repoints |origin|'s entry in |map| at |resource_id| and fixes up
// |reverse_map| to match. Returns false when |origin| is not in |map|.
bool UpdateResourceIdMap(ResourceIdByOrigin* map,
                         OriginByResourceId* reverse_map,
                         const GURL& origin,
                         const std::string& resource_id) {
  ResourceIdByOrigin::iterator found = map->find(origin);
  if (found == map->end())
    return false;
  // Drop the stale reverse mapping before inserting the replacement.
  reverse_map->erase(found->second);
  reverse_map->insert(std::make_pair(resource_id, origin));

  found->second = resource_id;
  return true;
}
////////////////////////////////////////////////////////////////////////////////
// Returns true when |db| contains no key/value pairs at all.
bool IsDBEmpty(leveldb::DB* db) {
  DCHECK(db);
  scoped_ptr<leveldb::Iterator> it(db->NewIterator(leveldb::ReadOptions()));
  it->SeekToFirst();
  bool has_any_entry = it->Valid();
  return !has_any_entry;
}
// Opens (creating if missing) the LevelDB database at |path|. On success
// |*created| reports whether the database is brand new (i.e. still empty);
// on failure the returned pointer is NULL and |*status| carries the error.
scoped_ptr<leveldb::DB> OpenDatabase(const base::FilePath& path,
                                     SyncStatusCode* status,
                                     bool* created) {
  DCHECK(status);
  DCHECK(created);

  leveldb::Options options;
  options.max_open_files = 0;  // Use minimum.
  options.create_if_missing = true;
  leveldb::DB* db = NULL;
  leveldb::Status db_status = leveldb::DB::Open(
      options, path.AsUTF8Unsafe(), &db);
  if (db_status.ok()) {
    // create_if_missing gives no "was created" signal; infer it by emptiness.
    *created = IsDBEmpty(db);
  } else {
    delete db;
    db = NULL;
  }
  *status = LevelDBStatusToSyncStatusCode(db_status);

  return make_scoped_ptr(db);
}
// Stamps a freshly created database with the current schema version.
SyncStatusCode WriteInitialData(leveldb::DB* db) {
  DCHECK(db);
  return LevelDBStatusToSyncStatusCode(db->Put(
      leveldb::WriteOptions(),
      kDatabaseVersionKey,
      base::Int64ToString(kCurrentDatabaseVersion)));
}
// Upgrades an existing database to kCurrentDatabaseVersion in place.
// A missing version row is treated as version 0. The switch intentionally
// falls through so each migration step runs in sequence.
SyncStatusCode MigrateDatabaseIfNeeded(leveldb::DB* db) {
  DCHECK(db);
  std::string value;
  leveldb::Status status = db->Get(leveldb::ReadOptions(),
                                   kDatabaseVersionKey, &value);
  int64 version = 0;
  if (status.ok()) {
    if (!base::StringToInt64(value, &version))
      return SYNC_DATABASE_ERROR_FAILED;
  } else {
    // Only "not found" is tolerated; any other read error is fatal.
    if (!status.IsNotFound())
      return SYNC_DATABASE_ERROR_FAILED;
  }

  switch (version) {
    case 0:
      drive_backend::MigrateDatabaseFromV0ToV1(db);
      // fall-through
    case 1:
      drive_backend::MigrateDatabaseFromV1ToV2(db);
      // fall-through
    case 2:
      DCHECK_EQ(2, kCurrentDatabaseVersion);
      return SYNC_STATUS_OK;
    default:
      // A version newer than this build understands cannot be handled.
      return SYNC_DATABASE_ERROR_FAILED;
  }
}
// Populates |contents| from every row of the open database, dispatching on
// the row's key prefix. Rows with unrecognized keys are silently skipped.
// When the Drive API is disabled, resource ids are stored without their WAPI
// prefix, so the prefix is re-added while loading.
SyncStatusCode ReadContents(DBContents* contents) {
  DCHECK(contents);
  DCHECK(contents->db);

  contents->largest_changestamp = 0;
  contents->metadata_map.clear();
  contents->incremental_sync_origins.clear();

  scoped_ptr<leveldb::Iterator> itr(
      contents->db->NewIterator(leveldb::ReadOptions()));
  for (itr->SeekToFirst(); itr->Valid(); itr->Next()) {
    std::string key = itr->key().ToString();
    if (key == kChangeStampKey) {
      bool success = base::StringToInt64(itr->value().ToString(),
                                         &contents->largest_changestamp);
      DCHECK(success);
      continue;
    }

    if (key == kSyncRootDirectoryKey) {
      std::string resource_id = itr->value().ToString();
      if (IsDriveAPIDisabled())
        resource_id = drive_backend::AddWapiFolderPrefix(resource_id);
      contents->sync_root_directory_resource_id = resource_id;
      continue;
    }

    if (StartsWithASCII(key, kDriveMetadataKeyPrefix, true)) {
      // Per-file metadata row: key encodes origin + path, value is a
      // serialized DriveMetadata proto.
      GURL origin;
      base::FilePath path;
      MetadataKeyToOriginAndPath(key, &origin, &path);

      DriveMetadata metadata;
      bool success = metadata.ParseFromString(itr->value().ToString());
      DCHECK(success);

      if (IsDriveAPIDisabled()) {
        metadata.set_resource_id(drive_backend::AddWapiIdPrefix(
            metadata.resource_id(), metadata.type()));
      }

      success = contents->metadata_map[origin].insert(
          std::make_pair(path, metadata)).second;
      DCHECK(success);
      continue;
    }

    if (StartsWithASCII(key, kDriveIncrementalSyncOriginKeyPrefix, true)) {
      GURL origin(RemovePrefix(key, kDriveIncrementalSyncOriginKeyPrefix));
      DCHECK(origin.is_valid());

      std::string origin_resource_id = IsDriveAPIDisabled()
          ? drive_backend::AddWapiFolderPrefix(itr->value().ToString())
          : itr->value().ToString();

      DCHECK(!ContainsKey(contents->incremental_sync_origins, origin));
      contents->incremental_sync_origins[origin] = origin_resource_id;
      continue;
    }

    if (StartsWithASCII(key, kDriveDisabledOriginKeyPrefix, true)) {
      GURL origin(RemovePrefix(key, kDriveDisabledOriginKeyPrefix));
      DCHECK(origin.is_valid());

      std::string origin_resource_id = IsDriveAPIDisabled()
          ? drive_backend::AddWapiFolderPrefix(itr->value().ToString())
          : itr->value().ToString();

      DCHECK(!ContainsKey(contents->disabled_origins, origin));
      contents->disabled_origins[origin] = origin_resource_id;
      continue;
    }
  }

  return SYNC_STATUS_OK;
}
// Runs on the file task runner: opens the database at |db_path|, writes the
// initial schema (new database) or migrates (existing database), then reads
// the whole store into memory. Each step aborts the rest on failure, leaving
// its error in |contents->status|.
scoped_ptr<DBContents> LoadDBContents(const base::FilePath& db_path) {
  scoped_ptr<DBContents> contents(new DBContents);
  contents->db = OpenDatabase(db_path,
                              &contents->status,
                              &contents->created);
  if (contents->status != SYNC_STATUS_OK)
    return contents.Pass();

  if (contents->created) {
    contents->status = WriteInitialData(contents->db.get());
    if (contents->status != SYNC_STATUS_OK)
      return contents.Pass();
  } else {
    contents->status = MigrateDatabaseIfNeeded(contents->db.get());
    if (contents->status != SYNC_STATUS_OK)
      return contents.Pass();
  }

  contents->status = ReadContents(contents.get());
  return contents.Pass();
}
////////////////////////////////////////////////////////////////////////////////
// Returns the per-origin root row key for the given origin.
// For example, when |origin| is "http://www.example.com" and |sync_type| is
// INCREMENTAL_SYNC_ORIGIN, returns "ISYNC_ORIGIN: http://www.example.com".
// (The comment previously referenced BATCH_SYNC_ORIGIN/"BSYNC_ORIGIN: ",
// which no longer exists in the OriginSyncType enum.)
std::string CreateKeyForOriginRoot(const GURL& origin,
                                   OriginSyncType sync_type) {
  DCHECK(origin.is_valid());
  switch (sync_type) {
    case INCREMENTAL_SYNC_ORIGIN:
      return kDriveIncrementalSyncOriginKeyPrefix + origin.spec();
    case DISABLED_ORIGIN:
      return kDriveDisabledOriginKeyPrefix + origin.spec();
  }
  NOTREACHED();
  return std::string();
}
// Appends every origin key of |resource_map| to |all_origins|.
void AddOriginsToVector(std::vector<GURL>* all_origins,
                        const ResourceIdByOrigin& resource_map) {
  ResourceIdByOrigin::const_iterator it = resource_map.begin();
  for (; it != resource_map.end(); ++it)
    all_origins->push_back(it->first);
}
void InsertReverseMap(const ResourceIdByOrigin& forward_map,
OriginByResourceId* backward_map) {
for (ResourceIdByOrigin::const_iterator itr = forward_map.begin();
itr != forward_map.end(); ++itr)
backward_map->insert(std::make_pair(itr->second, itr->first));
}
// Removes |origin| from |map| if present, storing its old resource id in
// |*resource_id|. Returns whether an entry was removed.
bool EraseIfExists(ResourceIdByOrigin* map,
                   const GURL& origin,
                   std::string* resource_id) {
  ResourceIdByOrigin::iterator it = map->find(origin);
  if (it != map->end()) {
    *resource_id = it->second;
    map->erase(it);
    return true;
  }
  return false;
}
// Queues deletion of every metadata row belonging to |origin| onto |batch|.
// No-op when the origin has no metadata.
void AppendMetadataDeletionToBatch(const MetadataMap& metadata_map,
                                   const GURL& origin,
                                   leveldb::WriteBatch* batch) {
  MetadataMap::const_iterator found = metadata_map.find(origin);
  if (found == metadata_map.end())
    return;

  for (PathToMetadata::const_iterator itr = found->second.begin();
       itr != found->second.end(); ++itr)
    batch->Delete(OriginAndPathToMetadataKey(origin, itr->first));
}
// Human-readable name for a DriveMetadata resource type (for dumps/logs).
std::string DriveTypeToString(DriveMetadata_ResourceType drive_type) {
  if (drive_type == DriveMetadata_ResourceType_RESOURCE_TYPE_FILE)
    return "file";
  if (drive_type == DriveMetadata_ResourceType_RESOURCE_TYPE_FOLDER)
    return "folder";
  NOTREACHED();
  return "unknown";
}
} // namespace
// |base_dir| is the directory that will hold the LevelDB database;
// |file_task_runner| is the sequence all database I/O is posted to.
DriveMetadataStore::DriveMetadataStore(
    const base::FilePath& base_dir,
    base::SequencedTaskRunner* file_task_runner)
    : file_task_runner_(file_task_runner),
      base_dir_(base_dir),
      db_status_(SYNC_STATUS_UNKNOWN),
      largest_changestamp_(0) {
  DCHECK(file_task_runner);
}
DriveMetadataStore::~DriveMetadataStore() {
  DCHECK(CalledOnValidThread());
  // The database handle must be destroyed on the file task runner, not here.
  file_task_runner_->DeleteSoon(FROM_HERE, db_.release());
}
// Loads the entire database on the file task runner, then completes via
// DidInitialize() back on this thread.
void DriveMetadataStore::Initialize(const InitializationCallback& callback) {
  DCHECK(CalledOnValidThread());
  base::PostTaskAndReplyWithResult(
      file_task_runner_.get(), FROM_HERE,
      base::Bind(&LoadDBContents, base_dir_.Append(kDatabaseName)),
      base::Bind(&DriveMetadataStore::DidInitialize, AsWeakPtr(), callback));
}
// Completion of Initialize() on the origin thread. On success, adopts the
// database handle and swaps the loaded maps into the member fields; on
// failure, |contents| (possibly holding an open database) is deleted back
// on the file task runner.
void DriveMetadataStore::DidInitialize(const InitializationCallback& callback,
                                       scoped_ptr<DBContents> contents) {
  DCHECK(CalledOnValidThread());
  DCHECK(contents);

  db_status_ = contents->status;
  if (db_status_ != SYNC_STATUS_OK) {
    callback.Run(db_status_, false);
    file_task_runner_->DeleteSoon(FROM_HERE, contents.release());
    return;
  }

  db_ = contents->db.Pass();
  largest_changestamp_ = contents->largest_changestamp;
  metadata_map_.swap(contents->metadata_map);
  sync_root_directory_resource_id_ = contents->sync_root_directory_resource_id;
  incremental_sync_origins_.swap(contents->incremental_sync_origins);
  disabled_origins_.swap(contents->disabled_origins);
  // Rebuild the resource-id -> origin index from both forward maps.
  origin_by_resource_id_.clear();
  InsertReverseMap(incremental_sync_origins_, &origin_by_resource_id_);
  InsertReverseMap(disabled_origins_, &origin_by_resource_id_);

  callback.Run(db_status_, contents->created);
}
// Updates the cached changestamp and persists it under "CHANGE_STAMP".
void DriveMetadataStore::SetLargestChangeStamp(
    int64 largest_changestamp,
    const SyncStatusCallback& callback) {
  DCHECK(CalledOnValidThread());
  DCHECK_EQ(SYNC_STATUS_OK, db_status_);

  largest_changestamp_ = largest_changestamp;
  scoped_ptr<leveldb::WriteBatch> batch(new leveldb::WriteBatch);
  batch->Put(kChangeStampKey, base::Int64ToString(largest_changestamp));
  return WriteToDB(batch.Pass(), callback);
}
// Returns the cached changestamp; valid only after successful Initialize().
int64 DriveMetadataStore::GetLargestChangeStamp() const {
  DCHECK(CalledOnValidThread());
  DCHECK_EQ(SYNC_STATUS_OK, db_status_);
  return largest_changestamp_;
}
// Inserts or overwrites the metadata for |url| in memory, then persists it.
// When the Drive API is disabled, the WAPI prefix is stripped from the
// resource id before serializing to the database (re-added on load).
void DriveMetadataStore::UpdateEntry(
    const FileSystemURL& url,
    const DriveMetadata& metadata,
    const SyncStatusCallback& callback) {
  DCHECK(CalledOnValidThread());
  DCHECK_EQ(SYNC_STATUS_OK, db_status_);
  // An entry may be conflicted or pending fetch, but never both at once.
  DCHECK(!metadata.conflicted() || !metadata.to_be_fetched());

  std::pair<PathToMetadata::iterator, bool> result =
      metadata_map_[url.origin()].insert(std::make_pair(url.path(), metadata));
  if (!result.second)
    result.first->second = metadata;

  std::string value;
  if (IsDriveAPIDisabled()) {
    DriveMetadata metadata_in_db(metadata);
    metadata_in_db.set_resource_id(
        drive_backend::RemoveWapiIdPrefix(metadata.resource_id()));
    bool success = metadata_in_db.SerializeToString(&value);
    DCHECK(success);
  } else {
    bool success = metadata.SerializeToString(&value);
    DCHECK(success);
  }
  scoped_ptr<leveldb::WriteBatch> batch(new leveldb::WriteBatch);
  batch->Put(FileSystemURLToMetadataKey(url), value);
  WriteToDB(batch.Pass(), callback);
}
// Removes the metadata for |url| from memory and the database. Reports
// SYNC_DATABASE_ERROR_NOT_FOUND via |callback| when no such entry exists.
void DriveMetadataStore::DeleteEntry(
    const FileSystemURL& url,
    const SyncStatusCallback& callback) {
  DCHECK(CalledOnValidThread());
  MetadataMap::iterator found = metadata_map_.find(url.origin());
  if (found == metadata_map_.end()) {
    RunSoon(FROM_HERE, base::Bind(callback, SYNC_DATABASE_ERROR_NOT_FOUND));
    return;
  }

  if (found->second.erase(url.path()) == 1) {
    scoped_ptr<leveldb::WriteBatch> batch(new leveldb::WriteBatch);
    batch->Delete(FileSystemURLToMetadataKey(url));
    WriteToDB(batch.Pass(), callback);
    return;
  }

  RunSoon(FROM_HERE, base::Bind(callback, SYNC_DATABASE_ERROR_NOT_FOUND));
}
// Copies the cached metadata for |url| into |*metadata|. Returns
// SYNC_DATABASE_ERROR_NOT_FOUND when either the origin or the path is absent.
SyncStatusCode DriveMetadataStore::ReadEntry(const FileSystemURL& url,
                                             DriveMetadata* metadata) const {
  DCHECK(CalledOnValidThread());
  DCHECK(metadata);

  MetadataMap::const_iterator found_origin = metadata_map_.find(url.origin());
  if (found_origin == metadata_map_.end())
    return SYNC_DATABASE_ERROR_NOT_FOUND;

  PathToMetadata::const_iterator found = found_origin->second.find(url.path());
  if (found == found_origin->second.end())
    return SYNC_DATABASE_ERROR_NOT_FOUND;

  *metadata = found->second;
  return SYNC_STATUS_OK;
}
// Registers |origin| as an incremental-sync origin rooted at |resource_id|.
// The origin must not already be registered or disabled; any stale
// DISABLED_ORIGIN row is deleted in the same write batch.
void DriveMetadataStore::AddIncrementalSyncOrigin(
    const GURL& origin,
    const std::string& resource_id) {
  DCHECK(CalledOnValidThread());
  DCHECK(!IsIncrementalSyncOrigin(origin));
  DCHECK(!IsOriginDisabled(origin));
  DCHECK_EQ(SYNC_STATUS_OK, db_status_);

  incremental_sync_origins_.insert(std::make_pair(origin, resource_id));
  origin_by_resource_id_.insert(std::make_pair(resource_id, origin));
  scoped_ptr<leveldb::WriteBatch> batch(new leveldb::WriteBatch);
  batch->Delete(CreateKeyForOriginRoot(origin, DISABLED_ORIGIN));
  // Resource ids are persisted without the WAPI prefix.
  batch->Put(CreateKeyForOriginRoot(origin, INCREMENTAL_SYNC_ORIGIN),
             drive_backend::RemoveWapiIdPrefix(resource_id));
  WriteToDB(batch.Pass(),
            base::Bind(&DriveMetadataStore::UpdateDBStatus, AsWeakPtr()));
}
// Records |resource_id| as the Drive sync-root directory, caching it in
// memory and persisting it under the fixed sync-root key (WAPI prefix
// stripped on disk).  The write result only updates |db_status_|.
void DriveMetadataStore::SetSyncRootDirectory(const std::string& resource_id) {
  DCHECK(CalledOnValidThread());
  sync_root_directory_resource_id_ = resource_id;

  scoped_ptr<leveldb::WriteBatch> batch(new leveldb::WriteBatch);
  batch->Put(kSyncRootDirectoryKey,
             drive_backend::RemoveWapiIdPrefix(resource_id));
  WriteToDB(batch.Pass(),
            base::Bind(&DriveMetadataStore::UpdateDBStatus, AsWeakPtr()));
}
// Updates the root-directory resource id for an already-known |origin|.
// The origin may currently be tracked either as incremental-sync or as
// disabled; whichever map contains it is updated (together with the reverse
// resource-id index) and the corresponding DB key is rewritten.  If neither
// map update succeeds the method silently returns without touching the DB.
void DriveMetadataStore::SetOriginRootDirectory(
    const GURL& origin,
    const std::string& resource_id) {
  DCHECK(CalledOnValidThread());
  DCHECK(IsKnownOrigin(origin));
  OriginSyncType sync_type;
  if (UpdateResourceIdMap(
          &incremental_sync_origins_, &origin_by_resource_id_,
          origin, resource_id)) {
    sync_type = INCREMENTAL_SYNC_ORIGIN;
  } else if (UpdateResourceIdMap(&disabled_origins_, &origin_by_resource_id_,
                                 origin, resource_id)) {
    sync_type = DISABLED_ORIGIN;
  } else {
    // Not present in either map (should not happen given the DCHECK above).
    return;
  }
  std::string key = CreateKeyForOriginRoot(origin, sync_type);
  DCHECK(!key.empty());
  scoped_ptr<leveldb::WriteBatch> batch(new leveldb::WriteBatch);
  // Stored without the WAPI prefix, consistent with the other origin keys.
  batch->Put(key, drive_backend::RemoveWapiIdPrefix(resource_id));
  WriteToDB(batch.Pass(),
            base::Bind(&DriveMetadataStore::UpdateDBStatus, AsWeakPtr()));
}
// An origin is "known" when it is tracked either as an incremental-sync
// origin or as a disabled origin.
bool DriveMetadataStore::IsKnownOrigin(const GURL& origin) const {
  DCHECK(CalledOnValidThread());
  if (IsIncrementalSyncOrigin(origin))
    return true;
  return IsOriginDisabled(origin);
}
// True when |origin| is tracked in the incremental-sync origin map.
bool DriveMetadataStore::IsIncrementalSyncOrigin(const GURL& origin) const {
  DCHECK(CalledOnValidThread());
  return incremental_sync_origins_.find(origin) !=
         incremental_sync_origins_.end();
}
// True when |origin| is tracked in the disabled-origin map.
bool DriveMetadataStore::IsOriginDisabled(const GURL& origin) const {
  DCHECK(CalledOnValidThread());
  return disabled_origins_.find(origin) != disabled_origins_.end();
}
// Re-enables a previously disabled |origin|.  The origin is removed from
// both in-memory maps and from both origin-root DB keys; it is NOT moved to
// the incremental set here — DriveFileSyncService re-registers it via its
// pending-batch-sync path.  |callback| receives NOT_FOUND when the origin
// is not currently disabled, otherwise the DB write result.
void DriveMetadataStore::EnableOrigin(
    const GURL& origin,
    const SyncStatusCallback& callback) {
  DCHECK(CalledOnValidThread());
  std::map<GURL, std::string>::iterator found = disabled_origins_.find(origin);
  if (found == disabled_origins_.end()) {
    RunSoon(FROM_HERE, base::Bind(callback, SYNC_DATABASE_ERROR_NOT_FOUND));
    // |origin| has not been registered yet.
    return;
  }
  disabled_origins_.erase(found);
  // |origin| goes back to DriveFileSyncService::pending_batch_sync_origins_
  // only and is not stored in drive_metadata_store.
  found = incremental_sync_origins_.find(origin);
  if (found != incremental_sync_origins_.end())
    incremental_sync_origins_.erase(found);
  // Drop both on-disk representations of the origin root.
  scoped_ptr<leveldb::WriteBatch> batch(new leveldb::WriteBatch);
  batch->Delete(CreateKeyForOriginRoot(origin, INCREMENTAL_SYNC_ORIGIN));
  batch->Delete(CreateKeyForOriginRoot(origin, DISABLED_ORIGIN));
  WriteToDB(batch.Pass(), callback);
}
// Moves |origin| from the incremental-sync set to the disabled set, both in
// memory and on disk, and discards all per-file metadata tracked for the
// origin.  |callback| receives NOT_FOUND when the origin is not currently
// an incremental-sync origin, otherwise the DB write result.
void DriveMetadataStore::DisableOrigin(
    const GURL& origin,
    const SyncStatusCallback& callback) {
  DCHECK(CalledOnValidThread());
  std::string resource_id;
  if (!EraseIfExists(&incremental_sync_origins_, origin, &resource_id)) {
    RunSoon(FROM_HERE, base::Bind(callback, SYNC_DATABASE_ERROR_NOT_FOUND));
    return;
  }
  disabled_origins_[origin] = resource_id;
  scoped_ptr<leveldb::WriteBatch> batch(new leveldb::WriteBatch);
  // Swap the origin-root key from INCREMENTAL_SYNC_ORIGIN to DISABLED_ORIGIN.
  batch->Delete(CreateKeyForOriginRoot(origin, INCREMENTAL_SYNC_ORIGIN));
  batch->Put(CreateKeyForOriginRoot(origin, DISABLED_ORIGIN),
             drive_backend::RemoveWapiIdPrefix(resource_id));
  // Per-file metadata is dropped for disabled origins, on disk and in memory.
  AppendMetadataDeletionToBatch(metadata_map_, origin, batch.get());
  metadata_map_.erase(origin);
  WriteToDB(batch.Pass(), callback);
}
// Completely forgets |origin|: removes it from whichever tracking map holds
// it, from the reverse resource-id index, from both origin-root DB keys, and
// deletes all of its per-file metadata.  |callback| receives NOT_FOUND when
// the origin is tracked in neither map, otherwise the DB write result.
void DriveMetadataStore::RemoveOrigin(
    const GURL& origin,
    const SyncStatusCallback& callback) {
  DCHECK(CalledOnValidThread());
  std::string resource_id;
  // Note: short-circuit — the disabled map is only consulted when the origin
  // is not in the incremental map.
  if (!EraseIfExists(&incremental_sync_origins_, origin, &resource_id) &&
      !EraseIfExists(&disabled_origins_, origin, &resource_id)) {
    RunSoon(FROM_HERE, base::Bind(callback, SYNC_DATABASE_ERROR_NOT_FOUND));
    return;
  }
  origin_by_resource_id_.erase(resource_id);
  scoped_ptr<leveldb::WriteBatch> batch(new leveldb::WriteBatch);
  batch->Delete(CreateKeyForOriginRoot(origin, INCREMENTAL_SYNC_ORIGIN));
  batch->Delete(CreateKeyForOriginRoot(origin, DISABLED_ORIGIN));
  AppendMetadataDeletionToBatch(metadata_map_, origin, batch.get());
  metadata_map_.erase(origin);
  WriteToDB(batch.Pass(), callback);
}
// Commits |batch| to leveldb on the file task runner and reports the result
// to |callback| on this thread (via UpdateDBStatusAndInvokeCallback, which
// also folds the result into |db_status_|).  Writes are still allowed while
// |db_status_| is SYNC_DATABASE_ERROR_NOT_FOUND (the DB is being created);
// any other error state fails the write immediately without touching leveldb.
void DriveMetadataStore::WriteToDB(scoped_ptr<leveldb::WriteBatch> batch,
                                   const SyncStatusCallback& callback) {
  DCHECK(CalledOnValidThread());
  if (db_status_ != SYNC_STATUS_OK &&
      db_status_ != SYNC_DATABASE_ERROR_NOT_FOUND) {
    RunSoon(FROM_HERE, base::Bind(callback, SYNC_DATABASE_ERROR_FAILED));
    return;
  }
  DCHECK(db_);
  // |batch| ownership moves to the posted task via base::Owned; the DB
  // pointer itself stays owned by |this| (base::Unretained is safe because
  // |db_| outlives the file task runner usage per this class's contract —
  // NOTE(review): confirm the destructor ordering guarantees this).
  base::PostTaskAndReplyWithResult(
      file_task_runner_.get(),
      FROM_HERE,
      base::Bind(&leveldb::DB::Write,
                 base::Unretained(db_.get()),
                 leveldb::WriteOptions(),
                 base::Owned(batch.release())),
      base::Bind(&DriveMetadataStore::UpdateDBStatusAndInvokeCallback,
                 AsWeakPtr(),
                 callback));
}
// Folds the result of a DB operation into |db_status_|.
//
// BUG FIX: the condition previously inspected the stale |db_status_| instead
// of the incoming |status|.  That inverted the logic: a fresh error arriving
// while the DB was healthy was discarded and |db_status_| reset to
// SYNC_STATUS_OK, while the warning below — which prints |status| — could
// never fire for the first failure.  Test on the NEW status, matching the
// error filter used by WriteToDB().
void DriveMetadataStore::UpdateDBStatus(SyncStatusCode status) {
  DCHECK(CalledOnValidThread());
  if (status != SYNC_STATUS_OK &&
      status != SYNC_DATABASE_ERROR_NOT_FOUND) {
    // TODO(tzik): Handle database corruption. http://crbug.com/153709
    db_status_ = status;
    util::Log(logging::LOG_WARNING,
              FROM_HERE,
              "DriveMetadataStore turned to wrong state: %s",
              SyncStatusCodeToString(status));
    return;
  }
  // OK and NOT_FOUND (DB being created) both count as a healthy store.
  db_status_ = SYNC_STATUS_OK;
}
// Translates a leveldb write result into a SyncStatusCode, records it in
// |db_status_| via UpdateDBStatus, and forwards it to |callback|.
void DriveMetadataStore::UpdateDBStatusAndInvokeCallback(
    const SyncStatusCallback& callback,
    const leveldb::Status& leveldb_status) {
  const SyncStatusCode status =
      LevelDBStatusToSyncStatusCode(leveldb_status);
  UpdateDBStatus(status);
  callback.Run(status);
}
// Fills |urls| with the URL of every tracked entry whose metadata is flagged
// as conflicted, across all origins.  Always returns SYNC_STATUS_OK.
SyncStatusCode DriveMetadataStore::GetConflictURLs(
    fileapi::FileSystemURLSet* urls) const {
  DCHECK(CalledOnValidThread());
  DCHECK_EQ(SYNC_STATUS_OK, db_status_);
  urls->clear();
  for (MetadataMap::const_iterator origin_it = metadata_map_.begin();
       origin_it != metadata_map_.end(); ++origin_it) {
    const PathToMetadata& entries = origin_it->second;
    for (PathToMetadata::const_iterator entry_it = entries.begin();
         entry_it != entries.end(); ++entry_it) {
      if (!entry_it->second.conflicted())
        continue;
      urls->insert(
          CreateSyncableFileSystemURL(origin_it->first, entry_it->first));
    }
  }
  return SYNC_STATUS_OK;
}
// Fills |list| with (URL, metadata) pairs for every tracked entry still
// waiting to be fetched, across all origins.  Always returns SYNC_STATUS_OK.
SyncStatusCode DriveMetadataStore::GetToBeFetchedFiles(
    URLAndDriveMetadataList* list) const {
  DCHECK(CalledOnValidThread());
  DCHECK_EQ(SYNC_STATUS_OK, db_status_);
  list->clear();
  for (MetadataMap::const_iterator origin_it = metadata_map_.begin();
       origin_it != metadata_map_.end(); ++origin_it) {
    const PathToMetadata& entries = origin_it->second;
    for (PathToMetadata::const_iterator entry_it = entries.begin();
         entry_it != entries.end(); ++entry_it) {
      if (!entry_it->second.to_be_fetched())
        continue;
      list->push_back(std::make_pair(
          CreateSyncableFileSystemURL(origin_it->first, entry_it->first),
          entry_it->second));
    }
  }
  return SYNC_STATUS_OK;
}
std::string DriveMetadataStore::GetResourceIdForOrigin(
const GURL& origin) const {
DCHECK(CalledOnValidThread());
// If we don't have valid root directory (this could be reset even after
// initialized) just return empty string, as the origin directories
// in the root directory must have become invalid now too.
if (sync_root_directory().empty())
return std::string();
ResourceIdByOrigin::const_iterator found =
incremental_sync_origins_.find(origin);
if (found != incremental_sync_origins_.end())
return found->second;
found = disabled_origins_.find(origin);
if (found != disabled_origins_.end())
return found->second;
return std::string();
}
void DriveMetadataStore::GetAllOrigins(std::vector<GURL>* origins) {
DCHECK(CalledOnValidThread());
DCHECK(origins);
origins->clear();
origins->reserve(incremental_sync_origins_.size() +
disabled_origins_.size());
AddOriginsToVector(origins, incremental_sync_origins_);
AddOriginsToVector(origins, disabled_origins_);
}
bool DriveMetadataStore::GetOriginByOriginRootDirectoryId(
const std::string& resource_id,
GURL* origin) {
DCHECK(CalledOnValidThread());
DCHECK(origin);
OriginByResourceId::iterator found = origin_by_resource_id_.find(resource_id);
if (found == origin_by_resource_id_.end())
return false;
*origin = found->second;
return true;
}
// Builds a list of dictionaries describing every tracked file under |origin|
// (path/title/type plus a nested "details" dict), for debugging UIs.
// Returns an empty list when the origin has no tracked files.
//
// Fixes: the not-found path allocated a second, redundant ListValue instead
// of returning the already-empty |files|; and |file|/|details| were created
// with the unqualified `new DictionaryValue`, inconsistent with the
// `base::DictionaryValue*` type declared on the same lines.
scoped_ptr<base::ListValue> DriveMetadataStore::DumpFiles(const GURL& origin) {
  DCHECK(CalledOnValidThread());
  scoped_ptr<base::ListValue> files(new base::ListValue);
  MetadataMap::const_iterator found = metadata_map_.find(origin);
  if (found == metadata_map_.end())
    return files.Pass();  // No entries; |files| is already the empty list.
  for (PathToMetadata::const_iterator itr = found->second.begin();
       itr != found->second.end();
       ++itr) {
    // Convert Drive specific metadata to Common File metadata object.
    const DriveMetadata& metadata = itr->second;
    // Ownership of |file| and |details| transfers to the containers below.
    base::DictionaryValue* file = new base::DictionaryValue;
    file->SetString("path", itr->first.AsUTF8Unsafe());
    file->SetString("title", itr->first.BaseName().AsUTF8Unsafe());
    file->SetString("type", DriveTypeToString(metadata.type()));
    base::DictionaryValue* details = new base::DictionaryValue;
    details->SetString("resource_id", metadata.resource_id());
    details->SetString("md5", metadata.md5_checksum());
    details->SetString("dirty", metadata.to_be_fetched() ? "true" : "false");
    file->Set("details", details);
    files->Append(file);
  }
  return files.Pass();
}
} // namespace sync_file_system
| qtekfun/htcDesire820Kernel | external/chromium_org/chrome/browser/sync_file_system/drive_backend_v1/drive_metadata_store.cc | C++ | gpl-2.0 | 26,981 |
/*
Copyright (c) 2003-2021, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
CKEDITOR.plugins.setLang("easyimage","it",{commands:{fullImage:"Immagine a dimensione intera",sideImage:"Immagine laterale",altText:"Cambia testo alternativo dell'immagine",upload:"Carica immagine"},uploadFailed:"L'immagine non può essere caricata a causa di un errore di rete."}); | stweil/TYPO3.CMS | typo3/sysext/rte_ckeditor/Resources/Public/JavaScript/Contrib/plugins/easyimage/lang/it.js | JavaScript | gpl-2.0 | 450 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "ash/shelf/shelf_view.h"
#include <algorithm>
#include <vector>
#include "ash/ash_switches.h"
#include "ash/launcher/launcher.h"
#include "ash/launcher/launcher_types.h"
#include "ash/root_window_controller.h"
#include "ash/shelf/overflow_bubble.h"
#include "ash/shelf/overflow_bubble_view.h"
#include "ash/shelf/shelf_button.h"
#include "ash/shelf/shelf_icon_observer.h"
#include "ash/shelf/shelf_item_delegate_manager.h"
#include "ash/shelf/shelf_layout_manager.h"
#include "ash/shelf/shelf_model.h"
#include "ash/shelf/shelf_tooltip_manager.h"
#include "ash/shelf/shelf_widget.h"
#include "ash/shell.h"
#include "ash/shell_window_ids.h"
#include "ash/test/ash_test_base.h"
#include "ash/test/launcher_test_api.h"
#include "ash/test/overflow_bubble_view_test_api.h"
#include "ash/test/shelf_view_test_api.h"
#include "ash/test/shell_test_api.h"
#include "ash/test/test_shelf_delegate.h"
#include "ash/test/test_shelf_item_delegate.h"
#include "ash/wm/coordinate_conversion.h"
#include "base/basictypes.h"
#include "base/command_line.h"
#include "base/compiler_specific.h"
#include "base/memory/scoped_ptr.h"
#include "base/strings/string_number_conversions.h"
#include "grit/ash_resources.h"
#include "ui/aura/root_window.h"
#include "ui/aura/test/aura_test_base.h"
#include "ui/aura/test/event_generator.h"
#include "ui/aura/window.h"
#include "ui/base/l10n/l10n_util.h"
#include "ui/compositor/layer.h"
#include "ui/events/event.h"
#include "ui/events/event_constants.h"
#include "ui/views/view_model.h"
#include "ui/views/widget/widget.h"
#include "ui/views/widget/widget_delegate.h"
namespace ash {
namespace test {
////////////////////////////////////////////////////////////////////////////////
// ShelfIconObserver tests.

// Observer that records whether OnShelfIconPositionsChanged() has fired
// since the last Reset().  Registers itself with |launcher| on construction
// and unregisters on destruction; a NULL launcher is tolerated.
class TestShelfIconObserver : public ShelfIconObserver {
 public:
  explicit TestShelfIconObserver(Launcher* launcher)
      : launcher_(launcher),
        change_notified_(false) {
    if (launcher_)
      launcher_->AddIconObserver(this);
  }
  virtual ~TestShelfIconObserver() {
    if (launcher_)
      launcher_->RemoveIconObserver(this);
  }
  // ShelfIconObserver implementation.
  virtual void OnShelfIconPositionsChanged() OVERRIDE {
    change_notified_ = true;
  }
  // Fix: |change_notified_| is a bool, so the accessor returns bool rather
  // than int (all call sites use it in EXPECT_TRUE, so this is compatible).
  bool change_notified() const { return change_notified_; }
  void Reset() { change_notified_ = false; }
 private:
  Launcher* launcher_;     // Not owned; may be NULL.
  bool change_notified_;   // Set by the observer callback, cleared by Reset().
  DISALLOW_COPY_AND_ASSIGN(TestShelfIconObserver);
};
// Test fixture for ShelfIconObserver notifications.  Installs a
// TestShelfIconObserver on the primary display's launcher and speeds up
// shelf animations so tests can wait for them synchronously.
class ShelfViewIconObserverTest : public AshTestBase {
 public:
  ShelfViewIconObserverTest() {}
  virtual ~ShelfViewIconObserverTest() {}
  virtual void SetUp() OVERRIDE {
    AshTestBase::SetUp();
    Launcher* launcher = Launcher::ForPrimaryDisplay();
    observer_.reset(new TestShelfIconObserver(launcher));
    shelf_view_test_.reset(new ShelfViewTestAPI(
        LauncherTestAPI(launcher).shelf_view()));
    // 1ms animations keep RunMessageLoopUntilAnimationsDone() fast.
    shelf_view_test_->SetAnimationDuration(1);
  }
  virtual void TearDown() OVERRIDE {
    // The observer must unregister before the shell (and launcher) go away.
    observer_.reset();
    AshTestBase::TearDown();
  }
  TestShelfIconObserver* observer() { return observer_.get(); }
  ShelfViewTestAPI* shelf_view_test() {
    return shelf_view_test_.get();
  }
  // Returns the launcher of the second root window (multi-display tests).
  Launcher* LauncherForSecondaryDisplay() {
    return Launcher::ForWindow(Shell::GetAllRootWindows()[1]);
  }
 private:
  scoped_ptr<TestShelfIconObserver> observer_;
  scoped_ptr<ShelfViewTestAPI> shelf_view_test_;
  DISALLOW_COPY_AND_ASSIGN(ShelfViewIconObserverTest);
};
// Verifies the icon observer fires both when a launcher item is added for a
// window and when the window is detached from its parent (item removal).
TEST_F(ShelfViewIconObserverTest, AddRemove) {
  TestShelfDelegate* shelf_delegate = TestShelfDelegate::instance();
  ASSERT_TRUE(shelf_delegate);
  views::Widget::InitParams params(views::Widget::InitParams::TYPE_WINDOW);
  params.ownership = views::Widget::InitParams::WIDGET_OWNS_NATIVE_WIDGET;
  params.bounds = gfx::Rect(0, 0, 200, 200);
  params.context = CurrentContext();
  scoped_ptr<views::Widget> widget(new views::Widget());
  widget->Init(params);
  // Adding a launcher item should reposition icons and notify the observer.
  shelf_delegate->AddLauncherItem(widget->GetNativeWindow());
  shelf_view_test()->RunMessageLoopUntilAnimationsDone();
  EXPECT_TRUE(observer()->change_notified());
  observer()->Reset();
  // Detaching the window removes its launcher item, which should notify too.
  widget->Show();
  widget->GetNativeWindow()->parent()->RemoveChild(widget->GetNativeWindow());
  shelf_view_test()->RunMessageLoopUntilAnimationsDone();
  EXPECT_TRUE(observer()->change_notified());
  observer()->Reset();
}
// Sometimes fails on trybots on win7_aura. http://crbug.com/177135
#if defined(OS_WIN)
#define MAYBE_AddRemoveWithMultipleDisplays \
        DISABLED_AddRemoveWithMultipleDisplays
#else
#define MAYBE_AddRemoveWithMultipleDisplays \
        AddRemoveWithMultipleDisplays
#endif
// Make sure creating/deleting an window on one displays notifies a
// launcher on external display as well as one on primary.
TEST_F(ShelfViewIconObserverTest, MAYBE_AddRemoveWithMultipleDisplays) {
  UpdateDisplay("400x400,400x400");
  // A second observer watches the launcher on the external display.
  TestShelfIconObserver second_observer(LauncherForSecondaryDisplay());
  TestShelfDelegate* shelf_delegate = TestShelfDelegate::instance();
  ASSERT_TRUE(shelf_delegate);
  views::Widget::InitParams params(views::Widget::InitParams::TYPE_WINDOW);
  params.ownership = views::Widget::InitParams::WIDGET_OWNS_NATIVE_WIDGET;
  params.bounds = gfx::Rect(0, 0, 200, 200);
  params.context = CurrentContext();
  scoped_ptr<views::Widget> widget(new views::Widget());
  widget->Init(params);
  // Adding an item must notify the observers on BOTH displays.
  shelf_delegate->AddLauncherItem(widget->GetNativeWindow());
  shelf_view_test()->RunMessageLoopUntilAnimationsDone();
  EXPECT_TRUE(observer()->change_notified());
  EXPECT_TRUE(second_observer.change_notified());
  observer()->Reset();
  second_observer.Reset();
  // Removing the item (by detaching the window) must notify both as well.
  widget->GetNativeWindow()->parent()->RemoveChild(widget->GetNativeWindow());
  shelf_view_test()->RunMessageLoopUntilAnimationsDone();
  EXPECT_TRUE(observer()->change_notified());
  EXPECT_TRUE(second_observer.change_notified());
  observer()->Reset();
  second_observer.Reset();
}
// Verifies that shrinking the shelf view's bounds triggers an icon-position
// notification synchronously (no animation is involved for bounds changes).
TEST_F(ShelfViewIconObserverTest, BoundsChanged) {
  ShelfWidget* shelf = Shell::GetPrimaryRootWindowController()->shelf();
  Launcher* launcher = Launcher::ForPrimaryDisplay();
  gfx::Size shelf_size =
      shelf->GetWindowBoundsInScreen().size();
  // Halve the width; this must remain a non-degenerate size.
  shelf_size.set_width(shelf_size.width() / 2);
  ASSERT_GT(shelf_size.width(), 0);
  launcher->SetShelfViewBounds(gfx::Rect(shelf_size));
  // No animation happens for ShelfView bounds change.
  EXPECT_TRUE(observer()->change_notified());
  observer()->Reset();
}
////////////////////////////////////////////////////////////////////////////////
// ShelfView tests.

// Simple ShelfDelegate implmentation for ShelfViewTest.OverflowBubbleSize
// and CheckDragAndDropFromOverflowBubbleToShelf
//
// It encodes the launcher id directly as the app id string (and back), so
// tests can map between the two without any registry.
class TestShelfDelegateForShelfView : public ShelfDelegate {
 public:
  explicit TestShelfDelegateForShelfView(ShelfModel* model)
      : model_(model) {}
  virtual ~TestShelfDelegateForShelfView() {}
  // ShelfDelegate overrides:
  virtual void OnLauncherCreated(Launcher* launcher) OVERRIDE {}
  virtual void OnLauncherDestroyed(Launcher* launcher) OVERRIDE {}
  // The app id string is simply the decimal form of the launcher id.
  virtual LauncherID GetLauncherIDForAppID(const std::string& app_id) OVERRIDE {
    LauncherID id = 0;
    EXPECT_TRUE(base::StringToInt(app_id, &id));
    return id;
  }
  virtual const std::string& GetAppIDForLauncherID(LauncherID id) OVERRIDE {
    // Use |app_id_| member variable because returning a reference to local
    // variable is not allowed.
    app_id_ = base::IntToString(id);
    return app_id_;
  }
  virtual void PinAppWithID(const std::string& app_id) OVERRIDE {
  }
  virtual bool IsAppPinned(const std::string& app_id) OVERRIDE {
    // Returns true for ShelfViewTest.OverflowBubbleSize. To test ripping off in
    // that test, an item is already pinned state.
    return true;
  }
  virtual bool CanPin() const OVERRIDE {
    return true;
  }
  // Unpinning removes the item from the model entirely.
  virtual void UnpinAppWithID(const std::string& app_id) OVERRIDE {
    LauncherID id = 0;
    EXPECT_TRUE(base::StringToInt(app_id, &id));
    ASSERT_GT(id, 0);
    int index = model_->ItemIndexByID(id);
    ASSERT_GE(index, 0);
    model_->RemoveItemAt(index);
  }
 private:
  ShelfModel* model_;  // Not owned.
  // Temp member variable for returning a value. See the comment in the
  // GetAppIDForLauncherID().
  std::string app_id_;
  DISALLOW_COPY_AND_ASSIGN(TestShelfDelegateForShelfView);
};
// Main fixture for ShelfView unit tests.  Sets up a shelf wide enough for
// four buttons plus the overflow chevron, speeds up animations, installs a
// browser-shortcut item, and provides helpers to add/remove launcher items
// and to simulate pointer press/click/drag on shelf buttons.
class ShelfViewTest : public AshTestBase {
 public:
  ShelfViewTest() : model_(NULL), shelf_view_(NULL), browser_index_(1) {}
  virtual ~ShelfViewTest() {}
  virtual void SetUp() OVERRIDE {
    AshTestBase::SetUp();
    test::ShellTestApi test_api(Shell::GetInstance());
    model_ = test_api.shelf_model();
    Launcher* launcher = Launcher::ForPrimaryDisplay();
    shelf_view_ = test::LauncherTestAPI(launcher).shelf_view();
    // The bounds should be big enough for 4 buttons + overflow chevron.
    shelf_view_->SetBounds(0, 0, 500,
        internal::ShelfLayoutManager::GetPreferredShelfSize());
    test_api_.reset(new ShelfViewTestAPI(shelf_view_));
    test_api_->SetAnimationDuration(1);  // Speeds up animation for test.
    item_manager_ = Shell::GetInstance()->shelf_item_delegate_manager();
    DCHECK(item_manager_);
    // Add browser shortcut launcher item at index 0 for test.
    AddBrowserShortcut();
  }
  virtual void TearDown() OVERRIDE {
    test_api_.reset();
    AshTestBase::TearDown();
  }
 protected:
  // Installs a stub delegate so the item with |id| can respond to events.
  void CreateAndSetShelfItemDelegateForID(LauncherID id) {
    scoped_ptr<ShelfItemDelegate> delegate(new TestShelfItemDelegate(NULL));
    item_manager_->SetShelfItemDelegate(id, delegate.Pass());
  }
  // Each Add* helper below inserts an item of the given type, wires up a
  // stub delegate, waits for the layout animation, and returns the new id.
  LauncherID AddBrowserShortcut() {
    LauncherItem browser_shortcut;
    browser_shortcut.type = TYPE_BROWSER_SHORTCUT;
    LauncherID id = model_->next_id();
    model_->AddAt(browser_index_, browser_shortcut);
    CreateAndSetShelfItemDelegateForID(id);
    test_api_->RunMessageLoopUntilAnimationsDone();
    return id;
  }
  LauncherID AddAppShortcut() {
    LauncherItem item;
    item.type = TYPE_APP_SHORTCUT;
    item.status = STATUS_CLOSED;
    LauncherID id = model_->next_id();
    model_->Add(item);
    CreateAndSetShelfItemDelegateForID(id);
    test_api_->RunMessageLoopUntilAnimationsDone();
    return id;
  }
  LauncherID AddPanel() {
    LauncherID id = AddPanelNoWait();
    test_api_->RunMessageLoopUntilAnimationsDone();
    return id;
  }
  // "NoWait" variants skip the animation wait, for tests that batch adds.
  LauncherID AddPlatformAppNoWait() {
    LauncherItem item;
    item.type = TYPE_PLATFORM_APP;
    item.status = STATUS_RUNNING;
    LauncherID id = model_->next_id();
    model_->Add(item);
    CreateAndSetShelfItemDelegateForID(id);
    return id;
  }
  LauncherID AddPanelNoWait() {
    LauncherItem item;
    item.type = TYPE_APP_PANEL;
    item.status = STATUS_RUNNING;
    LauncherID id = model_->next_id();
    model_->Add(item);
    CreateAndSetShelfItemDelegateForID(id);
    return id;
  }
  LauncherID AddPlatformApp() {
    LauncherID id = AddPlatformAppNoWait();
    test_api_->RunMessageLoopUntilAnimationsDone();
    return id;
  }
  // Removes the item with |id| and waits for the layout animation.
  void RemoveByID(LauncherID id) {
    model_->RemoveItemAt(model_->ItemIndexByID(id));
    test_api_->RunMessageLoopUntilAnimationsDone();
  }
  internal::ShelfButton* GetButtonByID(LauncherID id) {
    int index = model_->ItemIndexByID(id);
    return test_api_->GetButton(index);
  }
  LauncherItem GetItemByID(LauncherID id) {
    LauncherItems::const_iterator items = model_->ItemByID(id);
    return *items;
  }
  // Asserts that the model's items and buttons appear in exactly the order
  // recorded in |id_map| (id -> button view pairs).
  void CheckModelIDs(
      const std::vector<std::pair<LauncherID, views::View*> >& id_map) {
    size_t map_index = 0;
    for (size_t model_index = 0;
         model_index < model_->items().size();
         ++model_index) {
      LauncherItem item = model_->items()[model_index];
      LauncherID id = item.id;
      EXPECT_EQ(id_map[map_index].first, id);
      EXPECT_EQ(id_map[map_index].second, GetButtonByID(id));
      ++map_index;
    }
    ASSERT_EQ(map_index, id_map.size());
  }
  // Checks that every visible button lies fully inside the shelf view.
  void VerifyLauncherItemBoundsAreValid() {
    for (int i=0;i <= test_api_->GetLastVisibleIndex(); ++i) {
      if (test_api_->GetButton(i)) {
        gfx::Rect shelf_view_bounds = shelf_view_->GetLocalBounds();
        gfx::Rect item_bounds = test_api_->GetBoundsByIndex(i);
        EXPECT_TRUE(item_bounds.x() >= 0);
        EXPECT_TRUE(item_bounds.y() >= 0);
        EXPECT_TRUE(item_bounds.right() <= shelf_view_bounds.width());
        EXPECT_TRUE(item_bounds.bottom() <= shelf_view_bounds.height());
      }
    }
  }
  // Synthesizes a mouse press on the button at |button_index| and reports it
  // to the shelf via the ShelfButtonHost interface.  Returns the button.
  views::View* SimulateButtonPressed(
      internal::ShelfButtonHost::Pointer pointer,
      int button_index) {
    internal::ShelfButtonHost* button_host = shelf_view_;
    views::View* button = test_api_->GetButton(button_index);
    ui::MouseEvent click_event(ui::ET_MOUSE_PRESSED,
                               button->bounds().origin(),
                               button->GetBoundsInScreen().origin(), 0);
    button_host->PointerPressedOnButton(button, pointer, click_event);
    return button;
  }
  // Press followed by release (i.e. a click) on the button at |button_index|.
  views::View* SimulateClick(internal::ShelfButtonHost::Pointer pointer,
                             int button_index) {
    internal::ShelfButtonHost* button_host = shelf_view_;
    views::View* button = SimulateButtonPressed(pointer, button_index);
    button_host->PointerReleasedOnButton(button,
                                         internal::ShelfButtonHost::MOUSE,
                                         false);
    return button;
  }
  // Press on |button_index| then drag to the position of |destination_index|
  // (no release).  Returns the dragged button.
  views::View* SimulateDrag(internal::ShelfButtonHost::Pointer pointer,
                            int button_index,
                            int destination_index) {
    internal::ShelfButtonHost* button_host = shelf_view_;
    views::View* button = SimulateButtonPressed(pointer, button_index);
    // Drag.
    views::View* destination = test_api_->GetButton(destination_index);
    ui::MouseEvent drag_event(ui::ET_MOUSE_DRAGGED,
                              destination->bounds().origin(),
                              destination->GetBoundsInScreen().origin(), 0);
    button_host->PointerDraggedOnButton(button, pointer, drag_event);
    return button;
  }
  // Populates |id_map| with the existing buttons, then appends five app
  // shortcuts so drag tests have enough items to reorder.
  void SetupForDragTest(
      std::vector<std::pair<LauncherID, views::View*> >* id_map) {
    // Initialize |id_map| with the automatically-created launcher buttons.
    for (size_t i = 0; i < model_->items().size(); ++i) {
      internal::ShelfButton* button = test_api_->GetButton(i);
      id_map->push_back(std::make_pair(model_->items()[i].id, button));
    }
    ASSERT_NO_FATAL_FAILURE(CheckModelIDs(*id_map));
    // Add 5 app launcher buttons for testing.
    for (int i = 0; i < 5; ++i) {
      LauncherID id = AddAppShortcut();
      // App Icon is located at index 0, and browser shortcut is located at
      // index 1. So we should start to add app shortcut at index 2.
      id_map->insert(id_map->begin() + (i + browser_index_ + 1),
                     std::make_pair(id, GetButtonByID(id)));
    }
    ASSERT_NO_FATAL_FAILURE(CheckModelIDs(*id_map));
  }
  views::View* GetTooltipAnchorView() {
    return shelf_view_->tooltip_manager()->anchor_;
  }
  // Keeps adding app shortcuts until the overflow chevron appears.
  void AddButtonsUntilOverflow() {
    int items_added = 0;
    while (!test_api_->IsOverflowButtonVisible()) {
      AddAppShortcut();
      ++items_added;
      ASSERT_LT(items_added, 10000);  // Guard against an infinite loop.
    }
  }
  void ShowTooltip() {
    shelf_view_->tooltip_manager()->ShowInternal();
  }
  // Rips the last visible item off the overflow bubble with the mouse, drops
  // it at index 1 of the main shelf, and — depending on |cancel| — either
  // aborts the drag (mouse-capture loss) or releases the button.  Verifies
  // that item ordering in both the shelf and the bubble matches the expected
  // outcome for that path.
  void TestDraggingAnItemFromOverflowToShelf(bool cancel) {
    test_api_->ShowOverflowBubble();
    ASSERT_TRUE(test_api_->overflow_bubble() &&
                test_api_->overflow_bubble()->IsShowing());
    ash::test::ShelfViewTestAPI test_api_for_overflow(
      test_api_->overflow_bubble()->shelf_view());
    int total_item_count = model_->item_count();
    // Snapshot item ids around the shelf/overflow boundary so we can verify
    // positions after the drag completes or is canceled.
    int last_visible_item_id_in_shelf =
        model_->items()[test_api_->GetLastVisibleIndex()].id;
    int second_last_visible_item_id_in_shelf =
        model_->items()[test_api_->GetLastVisibleIndex() - 1].id;
    int first_visible_item_id_in_overflow =
        model_->items()[test_api_for_overflow.GetFirstVisibleIndex()].id;
    int second_last_visible_item_id_in_overflow =
        model_->items()[test_api_for_overflow.GetLastVisibleIndex() - 1].id;
    int drag_item_index =
        test_api_for_overflow.GetLastVisibleIndex();
    LauncherID drag_item_id = model_->items()[drag_item_index].id;
    internal::ShelfButton* drag_button =
        test_api_for_overflow.GetButton(drag_item_index);
    gfx::Point center_point_of_drag_item =
        drag_button->GetBoundsInScreen().CenterPoint();
    aura::test::EventGenerator generator(ash::Shell::GetPrimaryRootWindow(),
                                         center_point_of_drag_item);
    // Rip an item off to OverflowBubble.
    generator.PressLeftButton();
    gfx::Point rip_off_point(center_point_of_drag_item.x(), 0);
    generator.MoveMouseTo(rip_off_point);
    test_api_for_overflow.RunMessageLoopUntilAnimationsDone();
    ASSERT_TRUE(test_api_for_overflow.IsRippedOffFromShelf());
    ASSERT_FALSE(test_api_for_overflow.DraggedItemFromOverflowToShelf());
    // Move a dragged item into Shelf at |drop_index|.
    int drop_index = 1;
    gfx::Point drop_point =
        test_api_->GetButton(drop_index)->GetBoundsInScreen().CenterPoint();
    int item_width = test_api_for_overflow.GetButtonSize();
    // To insert at |drop_index|, more smaller x-axis value of |drop_point|
    // should be used.
    gfx::Point modified_drop_point(drop_point.x() - item_width / 4,
                                   drop_point.y());
    generator.MoveMouseTo(modified_drop_point);
    test_api_for_overflow.RunMessageLoopUntilAnimationsDone();
    test_api_->RunMessageLoopUntilAnimationsDone();
    ASSERT_TRUE(test_api_for_overflow.IsRippedOffFromShelf());
    ASSERT_TRUE(test_api_for_overflow.DraggedItemFromOverflowToShelf());
    if (cancel)
      drag_button->OnMouseCaptureLost();
    else
      generator.ReleaseLeftButton();
    test_api_for_overflow.RunMessageLoopUntilAnimationsDone();
    test_api_->RunMessageLoopUntilAnimationsDone();
    ASSERT_FALSE(test_api_for_overflow.IsRippedOffFromShelf());
    ASSERT_FALSE(test_api_for_overflow.DraggedItemFromOverflowToShelf());
    // Compare pre-stored items' id with newly positioned items' after dragging
    // is canceled or finished.
    if (cancel) {
      // Canceled: everything must be back in its original position.
      EXPECT_EQ(model_->items()[test_api_->GetLastVisibleIndex()].id,
                last_visible_item_id_in_shelf);
      EXPECT_EQ(model_->items()[test_api_->GetLastVisibleIndex() - 1].id,
                second_last_visible_item_id_in_shelf);
      EXPECT_EQ(
          model_->items()[test_api_for_overflow.GetFirstVisibleIndex()].id,
          first_visible_item_id_in_overflow);
      EXPECT_EQ(
          model_->items()[test_api_for_overflow.GetLastVisibleIndex() - 1].id,
          second_last_visible_item_id_in_overflow);
    } else {
      // Completed: the dragged item landed at |drop_index| and every other
      // item shifted one slot toward the overflow bubble.
      LauncherID drop_item_id = model_->items()[drop_index].id;
      EXPECT_EQ(drop_item_id, drag_item_id);
      EXPECT_EQ(model_->item_count(), total_item_count);
      EXPECT_EQ(
          model_->items()[test_api_for_overflow.GetFirstVisibleIndex()].id,
          last_visible_item_id_in_shelf);
      EXPECT_EQ(model_->items()[test_api_->GetLastVisibleIndex()].id,
                second_last_visible_item_id_in_shelf);
      EXPECT_EQ(
          model_->items()[test_api_for_overflow.GetFirstVisibleIndex() + 1].id,
          first_visible_item_id_in_overflow);
      EXPECT_EQ(model_->items()[test_api_for_overflow.GetLastVisibleIndex()].id,
                second_last_visible_item_id_in_overflow);
    }
  }
  ShelfModel* model_;                     // Not owned.
  internal::ShelfView* shelf_view_;       // Not owned.
  int browser_index_;                     // Model index of the browser item.
  ShelfItemDelegateManager* item_manager_;  // Not owned.
  scoped_ptr<ShelfViewTestAPI> test_api_;
 private:
  DISALLOW_COPY_AND_ASSIGN(ShelfViewTest);
};
// Variant of ShelfViewTest that runs with the legacy (non-alternate) shelf
// layout; the browser shortcut sits at index 0 instead of 1 there.
class ShelfViewLegacyShelfLayoutTest : public ShelfViewTest {
 public:
  ShelfViewLegacyShelfLayoutTest() : ShelfViewTest() {
    browser_index_ = 0;
  }
  virtual ~ShelfViewLegacyShelfLayoutTest() {}
  virtual void SetUp() OVERRIDE {
    // Must be appended before ShelfViewTest::SetUp() creates the shelf.
    CommandLine::ForCurrentProcess()->AppendSwitch(
        ash::switches::kAshDisableAlternateShelfLayout);
    ShelfViewTest::SetUp();
  }
 private:
  DISALLOW_COPY_AND_ASSIGN(ShelfViewLegacyShelfLayoutTest);
};
// RAII helper that switches the ICU locale to Hebrew (an RTL locale) for the
// lifetime of the object when constructed with |is_rtl| == true, restoring
// the original locale on destruction.  No-op for LTR.
class ScopedTextDirectionChange {
 public:
  ScopedTextDirectionChange(bool is_rtl)
      : is_rtl_(is_rtl) {
    original_locale_ = l10n_util::GetApplicationLocale(std::string());
    if (is_rtl_)
      base::i18n::SetICUDefaultLocale("he");
    CheckTextDirectionIsCorrect();
  }
  ~ScopedTextDirectionChange() {
    if (is_rtl_)
      base::i18n::SetICUDefaultLocale(original_locale_);
  }
 private:
  // Sanity-check that the locale switch actually changed text direction.
  void CheckTextDirectionIsCorrect() {
    ASSERT_EQ(is_rtl_, base::i18n::IsRTL());
  }
  bool is_rtl_;
  std::string original_locale_;
};
// Parameterized fixture that runs ShelfView tests under both LTR (param
// false) and RTL (param true) text directions.
class ShelfViewTextDirectionTest
    : public ShelfViewTest,
      public testing::WithParamInterface<bool> {
 public:
  // The direction switch must happen before ShelfViewTest::SetUp() so the
  // shelf is laid out under the requested direction.
  ShelfViewTextDirectionTest() : text_direction_change_(GetParam()) {}
  virtual ~ShelfViewTextDirectionTest() {}
  virtual void SetUp() OVERRIDE {
    ShelfViewTest::SetUp();
  }
  virtual void TearDown() OVERRIDE {
    ShelfViewTest::TearDown();
  }
 private:
  ScopedTextDirectionChange text_direction_change_;
  DISALLOW_COPY_AND_ASSIGN(ShelfViewTextDirectionTest);
};
// Checks that the ideal item icon bounds match the view's bounds in the screen
// in both LTR and RTL.
TEST_P(ShelfViewTextDirectionTest, IdealBoundsOfItemIcon) {
  LauncherID id = AddPlatformApp();
  internal::ShelfButton* button = GetButtonByID(id);
  // Actual icon position = button screen bounds offset by the icon's origin
  // within the button.
  gfx::Rect item_bounds = button->GetBoundsInScreen();
  gfx::Point icon_offset = button->GetIconBounds().origin();
  item_bounds.Offset(icon_offset.OffsetFromOrigin());
  // Ideal bounds are shelf-view-relative; convert to screen coordinates.
  gfx::Rect ideal_bounds = shelf_view_->GetIdealBoundsOfItemIcon(id);
  gfx::Point screen_origin;
  views::View::ConvertPointToScreen(shelf_view_, &screen_origin);
  ideal_bounds.Offset(screen_origin.x(), screen_origin.y());
  EXPECT_EQ(item_bounds.x(), ideal_bounds.x());
  EXPECT_EQ(item_bounds.y(), ideal_bounds.y());
}
// Checks that shelf view contents are considered in the correct drag group.
// Platform apps, windowed apps, the app list and panels each form their own
// group; app shortcuts and the browser shortcut share a group.
TEST_F(ShelfViewTest, EnforceDragType) {
  EXPECT_TRUE(test_api_->SameDragType(TYPE_PLATFORM_APP, TYPE_PLATFORM_APP));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_PLATFORM_APP, TYPE_APP_SHORTCUT));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_PLATFORM_APP,
                                       TYPE_BROWSER_SHORTCUT));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_PLATFORM_APP, TYPE_WINDOWED_APP));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_PLATFORM_APP, TYPE_APP_LIST));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_PLATFORM_APP, TYPE_APP_PANEL));
  EXPECT_TRUE(test_api_->SameDragType(TYPE_APP_SHORTCUT, TYPE_APP_SHORTCUT));
  EXPECT_TRUE(test_api_->SameDragType(TYPE_APP_SHORTCUT,
                                      TYPE_BROWSER_SHORTCUT));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_APP_SHORTCUT,
                                       TYPE_WINDOWED_APP));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_APP_SHORTCUT, TYPE_APP_LIST));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_APP_SHORTCUT, TYPE_APP_PANEL));
  EXPECT_TRUE(test_api_->SameDragType(TYPE_BROWSER_SHORTCUT,
                                      TYPE_BROWSER_SHORTCUT));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_BROWSER_SHORTCUT,
                                       TYPE_WINDOWED_APP));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_BROWSER_SHORTCUT, TYPE_APP_LIST));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_BROWSER_SHORTCUT, TYPE_APP_PANEL));
  EXPECT_TRUE(test_api_->SameDragType(TYPE_WINDOWED_APP, TYPE_WINDOWED_APP));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_WINDOWED_APP, TYPE_APP_LIST));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_WINDOWED_APP, TYPE_APP_PANEL));
  EXPECT_TRUE(test_api_->SameDragType(TYPE_APP_LIST, TYPE_APP_LIST));
  EXPECT_FALSE(test_api_->SameDragType(TYPE_APP_LIST, TYPE_APP_PANEL));
  EXPECT_TRUE(test_api_->SameDragType(TYPE_APP_PANEL, TYPE_APP_PANEL));
}
// Adds platform app button until overflow and verifies that the last added
// platform app button is hidden.
TEST_F(ShelfViewTest, AddBrowserUntilOverflow) {
  // All buttons should be visible.
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());
  // Add platform app button until overflow.
  int items_added = 0;
  LauncherID last_added = AddPlatformApp();
  while (!test_api_->IsOverflowButtonVisible()) {
    // Added button is visible after animation while in this loop.
    EXPECT_TRUE(GetButtonByID(last_added)->visible());
    last_added = AddPlatformApp();
    ++items_added;
    ASSERT_LT(items_added, 10000);  // Guard against an infinite loop.
  }
  // The last added button should be invisible.
  EXPECT_FALSE(GetButtonByID(last_added)->visible());
}
// Adds one platform app button then adds app shortcut until overflow. Verifies
// that the browser button gets hidden on overflow and last added app shortcut
// is still visible.
TEST_F(ShelfViewTest, AddAppShortcutWithBrowserButtonUntilOverflow) {
  // All buttons should be visible.
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());
  LauncherID browser_button_id = AddPlatformApp();
  // Add app shortcut until overflow.
  int items_added = 0;
  LauncherID last_added = AddAppShortcut();
  while (!test_api_->IsOverflowButtonVisible()) {
    // Added button is visible after animation while in this loop.
    EXPECT_TRUE(GetButtonByID(last_added)->visible());
    last_added = AddAppShortcut();
    ++items_added;
    ASSERT_LT(items_added, 10000);
  }
  // The last added app short button should be visible.  (This check was
  // missing even though the test's header comment promises it; the legacy
  // layout twin of this test performs the same verification.)
  EXPECT_TRUE(GetButtonByID(last_added)->visible());
  // And the platform app button is invisible.
  EXPECT_FALSE(GetButtonByID(browser_button_id)->visible());
}
// Legacy-layout variant: adds one platform app button then app shortcuts
// until overflow. Verifies the platform app button gets hidden while the last
// added app shortcut remains visible.
TEST_F(ShelfViewLegacyShelfLayoutTest,
       AddAppShortcutWithBrowserButtonUntilOverflow) {
  // Sanity check: all current buttons are visible.
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());

  LauncherID browser_button_id = AddPlatformApp();

  // Add app shortcuts until the overflow chevron appears.
  int items_added = 0;
  LauncherID last_added = AddAppShortcut();
  while (!test_api_->IsOverflowButtonVisible()) {
    // Shortcuts added before overflow kicked in must still be visible.
    EXPECT_TRUE(GetButtonByID(last_added)->visible());
    last_added = AddAppShortcut();
    ++items_added;
    ASSERT_LT(items_added, 10000);  // Guard against an infinite loop.
  }

  // The last added app shortcut button should be visible...
  EXPECT_TRUE(GetButtonByID(last_added)->visible());
  // ...while the platform app button is pushed into overflow.
  EXPECT_FALSE(GetButtonByID(browser_button_id)->visible());
}
// Fills the shelf to one button short of overflow, then verifies that adding
// a single panel triggers overflow and removing it clears overflow again.
TEST_F(ShelfViewTest, AddPanelHidesPlatformAppButton) {
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());

  // Add platform app buttons until overflow, then remove the one that
  // triggered it so the shelf is exactly full.
  int items_added = 0;
  LauncherID first_added = AddPlatformApp();
  EXPECT_TRUE(GetButtonByID(first_added)->visible());
  while (true) {
    LauncherID added = AddPlatformApp();
    if (test_api_->IsOverflowButtonVisible()) {
      EXPECT_FALSE(GetButtonByID(added)->visible());
      RemoveByID(added);
      break;
    }
    ++items_added;
    ASSERT_LT(items_added, 10000);  // Guard against an infinite loop.
  }

  // On a full shelf, one panel tips it into overflow; removing the panel
  // restores the non-overflow state.
  LauncherID panel = AddPanel();
  EXPECT_TRUE(test_api_->IsOverflowButtonVisible());
  RemoveByID(panel);
  EXPECT_FALSE(test_api_->IsOverflowButtonVisible());
}
// Legacy-layout variant: fills the shelf until overflow, remembering the last
// platform app button that was still visible, and verifies a panel displaces
// that button (and gives the slot back when the panel is removed).
TEST_F(ShelfViewLegacyShelfLayoutTest, AddPanelHidesPlatformAppButton) {
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());

  // Add platform app buttons until overflow; remember the last visible one.
  int items_added = 0;
  LauncherID first_added = AddPlatformApp();
  EXPECT_TRUE(GetButtonByID(first_added)->visible());
  LauncherID last_visible = first_added;
  while (true) {
    LauncherID added = AddPlatformApp();
    if (test_api_->IsOverflowButtonVisible()) {
      EXPECT_FALSE(GetButtonByID(added)->visible());
      break;
    }
    last_visible = added;
    ++items_added;
    ASSERT_LT(items_added, 10000);  // Guard against an infinite loop.
  }

  // The panel takes the slot of the last visible platform app button.
  LauncherID panel = AddPanel();
  EXPECT_TRUE(GetButtonByID(panel)->visible());
  EXPECT_FALSE(GetButtonByID(last_visible)->visible());
  // Removing the panel gives the slot back.
  RemoveByID(panel);
  EXPECT_TRUE(GetButtonByID(last_visible)->visible());
}
// When there are more panels than platform app buttons we should hide panels
// rather than platform apps.
TEST_F(ShelfViewTest, PlatformAppHidesExcessPanels) {
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());

  // Add one platform app button and one panel; both fit and are visible.
  LauncherID platform_app = AddPlatformApp();
  LauncherID first_panel = AddPanel();

  EXPECT_TRUE(GetButtonByID(platform_app)->visible());
  EXPECT_TRUE(GetButtonByID(first_panel)->visible());

  // Add panels until there is an overflow.
  LauncherID last_panel = first_panel;
  int items_added = 0;
  while (!test_api_->IsOverflowButtonVisible()) {
    last_panel = AddPanel();
    ++items_added;
    ASSERT_LT(items_added, 10000);  // Guard against an infinite loop.
  }

  // On overflow it is the oldest panel that gets hidden, not the platform
  // app button.
  EXPECT_FALSE(GetButtonByID(first_panel)->visible());
  EXPECT_TRUE(GetButtonByID(last_panel)->visible());
  EXPECT_TRUE(GetButtonByID(platform_app)->visible());

  // Adding platform apps should eventually begin to hide platform apps. We
  // will add platform apps until either the last panel or platform app is
  // hidden.
  items_added = 0;
  while (GetButtonByID(platform_app)->visible() &&
         GetButtonByID(last_panel)->visible()) {
    platform_app = AddPlatformApp();
    ++items_added;
    ASSERT_LT(items_added, 10000);  // Guard against an infinite loop.
  }
  // It is the platform app, not the panel, that loses visibility.
  EXPECT_TRUE(GetButtonByID(last_panel)->visible());
  EXPECT_FALSE(GetButtonByID(platform_app)->visible());
}
// Adds buttons until overflow then removes the first added one. Verifies that
// the last added one changes from invisible to visible and the overflow
// chevron is gone.
TEST_F(ShelfViewTest, RemoveButtonRevealsOverflowed) {
  // Sanity check: all current buttons are visible.
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());

  // Add platform app buttons until overflow.
  int items_added = 0;
  LauncherID first_added = AddPlatformApp();
  LauncherID last_added = first_added;
  while (!test_api_->IsOverflowButtonVisible()) {
    last_added = AddPlatformApp();
    ++items_added;
    ASSERT_LT(items_added, 10000);  // Guard against an infinite loop.
  }

  // More than one button was added; the first is visible, the last is not.
  EXPECT_NE(first_added, last_added);
  EXPECT_TRUE(GetButtonByID(first_added)->visible());
  EXPECT_FALSE(GetButtonByID(last_added)->visible());

  // Remove the first added button.
  RemoveByID(first_added);

  // The last added button becomes visible (and fully opaque) and the
  // overflow chevron is gone.
  EXPECT_TRUE(GetButtonByID(last_added)->visible());
  EXPECT_EQ(1.0f, GetButtonByID(last_added)->layer()->opacity());
  EXPECT_FALSE(test_api_->IsOverflowButtonVisible());
}
// Verifies that removing the most recently added (overflowed) button makes
// the overflow chevron disappear again.
TEST_F(ShelfViewTest, RemoveLastOverflowed) {
  // Sanity check: every button currently on the shelf is visible.
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());

  // Keep adding platform app buttons until the overflow chevron shows up.
  LauncherID newest_id = AddPlatformApp();
  for (int attempts = 0; !test_api_->IsOverflowButtonVisible();) {
    newest_id = AddPlatformApp();
    ++attempts;
    ASSERT_LT(attempts, 10000);  // Infinite-loop guard.
  }

  // Dropping the button that overflowed should hide the chevron.
  RemoveByID(newest_id);
  EXPECT_FALSE(test_api_->IsOverflowButtonVisible());
}
// Adds platform app buttons back-to-back without waiting for the add
// animations, then verifies that every non-overflow button ends up visible
// and fully opaque once the animations complete.
TEST_F(ShelfViewTest, AddButtonQuickly) {
  // Sanity check: every button currently on the shelf is visible.
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());

  // Pump in buttons with no animation waits until overflow kicks in.
  int added_count = 0;
  while (!test_api_->IsOverflowButtonVisible()) {
    AddPlatformAppNoWait();
    ++added_count;
    ASSERT_LT(added_count, 10000);  // Infinite-loop guard.
  }

  // The shelf must have had room for at least 3 extra buttons.
  ASSERT_GE(added_count, 3);

  // Let all pending add animations run to completion.
  test_api_->RunMessageLoopUntilAnimationsDone();

  // Every non-overflow button should now be visible at full opacity.
  for (int index = 0; index <= test_api_->GetLastVisibleIndex(); ++index) {
    internal::ShelfButton* shelf_button = test_api_->GetButton(index);
    if (!shelf_button)
      continue;
    EXPECT_TRUE(shelf_button->visible()) << "button index=" << index;
    EXPECT_EQ(1.0f, shelf_button->layer()->opacity())
        << "button index=" << index;
  }
}
// Check that model changes are handled correctly while a launcher icon is
// being dragged.
TEST_F(ShelfViewTest, ModelChangesWhileDragging) {
  internal::ShelfButtonHost* button_host = shelf_view_;

  // |id_map| tracks the expected (id, view) ordering; CheckModelIDs compares
  // it against the actual model after every manipulation.
  std::vector<std::pair<LauncherID, views::View*> > id_map;
  SetupForDragTest(&id_map);

  // Drag the browser shortcut at index 1 to index 3; the expected map is
  // rotated to match the drag result.
  EXPECT_TRUE(model_->items()[1].type == TYPE_BROWSER_SHORTCUT);
  views::View* dragged_button = SimulateDrag(
      internal::ShelfButtonHost::MOUSE, 1, 3);
  std::rotate(id_map.begin() + 1,
              id_map.begin() + 2,
              id_map.begin() + 4);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));
  button_host->PointerReleasedOnButton(dragged_button,
                                       internal::ShelfButtonHost::MOUSE,
                                       false);
  EXPECT_TRUE(model_->items()[3].type == TYPE_BROWSER_SHORTCUT);

  // Dragging changes model order.
  dragged_button = SimulateDrag(internal::ShelfButtonHost::MOUSE, 1, 3);
  std::rotate(id_map.begin() + 1,
              id_map.begin() + 2,
              id_map.begin() + 4);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));

  // Cancelling the drag operation (release with canceled == true) restores
  // the previous order.
  button_host->PointerReleasedOnButton(dragged_button,
                                       internal::ShelfButtonHost::MOUSE,
                                       true);
  std::rotate(id_map.begin() + 1,
              id_map.begin() + 3,
              id_map.begin() + 4);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));

  // Deleting an item mid-drag keeps the remaining items intact.
  dragged_button = SimulateDrag(internal::ShelfButtonHost::MOUSE, 1, 3);
  model_->RemoveItemAt(1);
  id_map.erase(id_map.begin() + 1);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));
  button_host->PointerReleasedOnButton(dragged_button,
                                       internal::ShelfButtonHost::MOUSE,
                                       false);

  // Adding a launcher item cancels the drag and respects the order.
  dragged_button = SimulateDrag(internal::ShelfButtonHost::MOUSE, 1, 3);
  LauncherID new_id = AddAppShortcut();
  id_map.insert(id_map.begin() + 6,
                std::make_pair(new_id, GetButtonByID(new_id)));
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));
  button_host->PointerReleasedOnButton(dragged_button,
                                       internal::ShelfButtonHost::MOUSE,
                                       false);

  // Adding a launcher item at the end (i.e. a panel) cancels the drag and
  // respects the order.
  dragged_button = SimulateDrag(internal::ShelfButtonHost::MOUSE, 1, 3);
  new_id = AddPanel();
  id_map.insert(id_map.begin() + 7,
                std::make_pair(new_id, GetButtonByID(new_id)));
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));
  button_host->PointerReleasedOnButton(dragged_button,
                                       internal::ShelfButtonHost::MOUSE,
                                       false);
}
// Legacy-layout variant of ModelChangesWhileDragging; the browser shortcut
// sits at index 0 here (no app-list button in front), so all indices are
// shifted down by one relative to the ShelfViewTest version.
TEST_F(ShelfViewLegacyShelfLayoutTest, ModelChangesWhileDragging) {
  internal::ShelfButtonHost* button_host = shelf_view_;

  // |id_map| tracks the expected (id, view) ordering; CheckModelIDs compares
  // it against the actual model after every manipulation.
  std::vector<std::pair<LauncherID, views::View*> > id_map;
  SetupForDragTest(&id_map);

  // Drag the browser shortcut at index 0 to index 2.
  EXPECT_TRUE(model_->items()[0].type == TYPE_BROWSER_SHORTCUT);
  views::View* dragged_button = SimulateDrag(
      internal::ShelfButtonHost::MOUSE, 0, 2);
  std::rotate(id_map.begin(),
              id_map.begin() + 1,
              id_map.begin() + 3);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));
  button_host->PointerReleasedOnButton(dragged_button,
                                       internal::ShelfButtonHost::MOUSE,
                                       false);
  EXPECT_TRUE(model_->items()[2].type == TYPE_BROWSER_SHORTCUT);

  // Dragging changes model order.
  dragged_button = SimulateDrag(internal::ShelfButtonHost::MOUSE, 0, 2);
  std::rotate(id_map.begin(),
              id_map.begin() + 1,
              id_map.begin() + 3);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));

  // Cancelling the drag operation (release with canceled == true) restores
  // the previous order.
  button_host->PointerReleasedOnButton(dragged_button,
                                       internal::ShelfButtonHost::MOUSE,
                                       true);
  std::rotate(id_map.begin(),
              id_map.begin() + 2,
              id_map.begin() + 3);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));

  // Deleting an item mid-drag keeps the remaining items intact.
  dragged_button = SimulateDrag(internal::ShelfButtonHost::MOUSE, 0, 2);
  model_->RemoveItemAt(1);
  id_map.erase(id_map.begin() + 1);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));
  button_host->PointerReleasedOnButton(dragged_button,
                                       internal::ShelfButtonHost::MOUSE,
                                       false);

  // Adding a launcher item cancels the drag and respects the order.
  dragged_button = SimulateDrag(internal::ShelfButtonHost::MOUSE, 0, 2);
  LauncherID new_id = AddAppShortcut();
  id_map.insert(id_map.begin() + 5,
                std::make_pair(new_id, GetButtonByID(new_id)));
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));
  button_host->PointerReleasedOnButton(dragged_button,
                                       internal::ShelfButtonHost::MOUSE,
                                       false);

  // Adding a launcher item at the end (i.e. a panel) cancels the drag and
  // respects the order.
  dragged_button = SimulateDrag(internal::ShelfButtonHost::MOUSE, 0, 2);
  new_id = AddPanel();
  id_map.insert(id_map.begin() + 7,
                std::make_pair(new_id, GetButtonByID(new_id)));
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));
  button_host->PointerReleasedOnButton(dragged_button,
                                       internal::ShelfButtonHost::MOUSE,
                                       false);
}
// Check that a 2nd drag from the other pointer (mouse vs. touch) is ignored
// while the first drag is still in progress.
TEST_F(ShelfViewTest, SimultaneousDrag) {
  internal::ShelfButtonHost* button_host = shelf_view_;

  // |id_map| tracks the expected (id, view) ordering for CheckModelIDs.
  std::vector<std::pair<LauncherID, views::View*> > id_map;
  SetupForDragTest(&id_map);

  // Start a mouse drag (index 1 -> 3); the model reorders.
  views::View* dragged_button_mouse = SimulateDrag(
      internal::ShelfButtonHost::MOUSE, 1, 3);
  std::rotate(id_map.begin() + 1,
              id_map.begin() + 2,
              id_map.begin() + 4);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));
  // Attempt a touch drag before the mouse drag finishes.
  views::View* dragged_button_touch = SimulateDrag(
      internal::ShelfButtonHost::TOUCH, 4, 2);

  // Nothing changes since the 2nd drag is ignored.
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));

  // Finish the mouse drag.
  button_host->PointerReleasedOnButton(dragged_button_mouse,
                                       internal::ShelfButtonHost::MOUSE,
                                       false);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));

  // Now start a touch drag (index 4 -> 2); this one takes effect.
  dragged_button_touch = SimulateDrag(internal::ShelfButtonHost::TOUCH, 4, 2);
  std::rotate(id_map.begin() + 3,
              id_map.begin() + 4,
              id_map.begin() + 5);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));

  // And attempt a mouse drag before the touch drag finishes.
  dragged_button_mouse = SimulateDrag(internal::ShelfButtonHost::MOUSE, 1, 2);

  // Nothing changes since the 2nd drag is ignored.
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));

  button_host->PointerReleasedOnButton(dragged_button_touch,
                                       internal::ShelfButtonHost::TOUCH,
                                       false);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));
}
// Check that clicking first on one item and then dragging another works as
// expected.
TEST_F(ShelfViewTest, ClickOneDragAnother) {
  internal::ShelfButtonHost* button_host = shelf_view_;

  // |id_map| tracks the expected (id, view) ordering for CheckModelIDs.
  std::vector<std::pair<LauncherID, views::View*> > id_map;
  SetupForDragTest(&id_map);

  // A click on item 1 is simulated.
  SimulateClick(internal::ShelfButtonHost::MOUSE, 1);

  // Dragging the browser shortcut at index 1 should change the model order
  // correctly even after the preceding click.
  EXPECT_TRUE(model_->items()[1].type == TYPE_BROWSER_SHORTCUT);
  views::View* dragged_button = SimulateDrag(
      internal::ShelfButtonHost::MOUSE, 1, 3);
  std::rotate(id_map.begin() + 1,
              id_map.begin() + 2,
              id_map.begin() + 4);
  ASSERT_NO_FATAL_FAILURE(CheckModelIDs(id_map));
  button_host->PointerReleasedOnButton(dragged_button,
                                       internal::ShelfButtonHost::MOUSE,
                                       false);
  EXPECT_TRUE(model_->items()[3].type == TYPE_BROWSER_SHORTCUT);
}
// Confirm that item status changes are reflected in the buttons.
TEST_F(ShelfViewTest, LauncherItemStatus) {
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());

  // Add a platform app button; it starts in the RUNNING state.
  LauncherID last_added = AddPlatformApp();
  LauncherItem item = GetItemByID(last_added);
  int index = model_->ItemIndexByID(last_added);
  internal::ShelfButton* button = GetButtonByID(last_added);
  ASSERT_EQ(internal::ShelfButton::STATE_RUNNING, button->state());
  // Setting the item ACTIVE in the model updates the button state.
  item.status = STATUS_ACTIVE;
  model_->Set(index, item);
  ASSERT_EQ(internal::ShelfButton::STATE_ACTIVE, button->state());
  // Same for ATTENTION.
  item.status = STATUS_ATTENTION;
  model_->Set(index, item);
  ASSERT_EQ(internal::ShelfButton::STATE_ATTENTION, button->state());
}
// Verifies that a button's icon position shifts when its state changes
// (legacy layout): a hovered button's icon moves along the axis perpendicular
// to the shelf.
TEST_F(ShelfViewLegacyShelfLayoutTest,
       LauncherItemPositionReflectedOnStateChanged) {
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());

  // Add 2 items to the launcher.
  LauncherID item1_id = AddPlatformApp();
  LauncherID item2_id = AddPlatformAppNoWait();
  internal::ShelfButton* item1_button = GetButtonByID(item1_id);
  internal::ShelfButton* item2_button = GetButtonByID(item2_id);

  // Mask covering every state bit, used to reset both buttons to a known
  // (state-less) baseline before comparing icon bounds.
  internal::ShelfButton::State state_mask =
      static_cast<internal::ShelfButton::State>(
          internal::ShelfButton::STATE_NORMAL |
          internal::ShelfButton::STATE_HOVERED |
          internal::ShelfButton::STATE_RUNNING |
          internal::ShelfButton::STATE_ACTIVE |
          internal::ShelfButton::STATE_ATTENTION |
          internal::ShelfButton::STATE_FOCUSED);

  // Clear the button states.
  item1_button->ClearState(state_mask);
  item2_button->ClearState(state_mask);

  // Since default alignment in tests is bottom, state is reflected in the
  // y-axis: identical states -> identical icon y; hovering one button shifts
  // its icon.
  ASSERT_EQ(item1_button->GetIconBounds().y(),
            item2_button->GetIconBounds().y());
  item1_button->AddState(internal::ShelfButton::STATE_HOVERED);
  ASSERT_NE(item1_button->GetIconBounds().y(),
            item2_button->GetIconBounds().y());

  // Leave the buttons in their original (unhovered) state.
  item1_button->ClearState(internal::ShelfButton::STATE_HOVERED);
}
// Confirm that item status changes are reflected in the buttons
// for platform apps.
// NOTE(review): this body is currently identical to LauncherItemStatus
// above — confirm whether a platform-app-specific difference was intended.
TEST_F(ShelfViewTest, LauncherItemStatusPlatformApp) {
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());

  // Add a platform app button; it starts in the RUNNING state.
  LauncherID last_added = AddPlatformApp();
  LauncherItem item = GetItemByID(last_added);
  int index = model_->ItemIndexByID(last_added);
  internal::ShelfButton* button = GetButtonByID(last_added);
  ASSERT_EQ(internal::ShelfButton::STATE_RUNNING, button->state());
  // Setting the item ACTIVE in the model updates the button state.
  item.status = STATUS_ACTIVE;
  model_->Set(index, item);
  ASSERT_EQ(internal::ShelfButton::STATE_ACTIVE, button->state());
  // Same for ATTENTION.
  item.status = STATUS_ATTENTION;
  model_->Set(index, item);
  ASSERT_EQ(internal::ShelfButton::STATE_ATTENTION, button->state());
}
// Confirm that launcher item bounds are correctly updated on shelf changes.
TEST_F(ShelfViewTest, LauncherItemBoundsCheck) {
  // Bounds must be valid in the initial state...
  VerifyLauncherItemBoundsAreValid();
  // ...after switching to always auto-hide...
  shelf_view_->shelf_layout_manager()->SetAutoHideBehavior(
      SHELF_AUTO_HIDE_BEHAVIOR_ALWAYS);
  test_api_->RunMessageLoopUntilAnimationsDone();
  VerifyLauncherItemBoundsAreValid();
  // ...and after switching back to never auto-hide.
  shelf_view_->shelf_layout_manager()->SetAutoHideBehavior(
      SHELF_AUTO_HIDE_BEHAVIOR_NEVER);
  test_api_->RunMessageLoopUntilAnimationsDone();
  VerifyLauncherItemBoundsAreValid();
}
// Exercises the shelf tooltip show/hide/switch behavior when the mouse moves
// between buttons.
TEST_F(ShelfViewTest, ShelfTooltipTest) {
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());

  // Prepare some items on the launcher.
  LauncherID app_button_id = AddAppShortcut();
  LauncherID platform_button_id = AddPlatformApp();

  internal::ShelfButton* app_button = GetButtonByID(app_button_id);
  internal::ShelfButton* platform_button = GetButtonByID(platform_button_id);

  internal::ShelfButtonHost* button_host = shelf_view_;
  internal::ShelfTooltipManager* tooltip_manager =
      shelf_view_->tooltip_manager();
  button_host->MouseEnteredButton(app_button);
  // There's a delay to show the tooltip, so it's not visible yet.
  EXPECT_FALSE(tooltip_manager->IsVisible());
  EXPECT_EQ(app_button, GetTooltipAnchorView());

  ShowTooltip();
  EXPECT_TRUE(tooltip_manager->IsVisible());

  // Once it's visible, it keeps visibility and is pointing to the same
  // item even after the mouse leaves the button.
  button_host->MouseExitedButton(app_button);
  EXPECT_TRUE(tooltip_manager->IsVisible());
  EXPECT_EQ(app_button, GetTooltipAnchorView());

  // When entering another item, it switches to the new item. There is no
  // delay for the visibility.
  button_host->MouseEnteredButton(platform_button);
  EXPECT_TRUE(tooltip_manager->IsVisible());
  EXPECT_EQ(platform_button, GetTooltipAnchorView());

  button_host->MouseExitedButton(platform_button);
  tooltip_manager->Close();

  // Next time: enter app_button -> move immediately to platform_button; the
  // closed tooltip does not re-open without the show delay, but the anchor
  // still tracks the hovered button.
  button_host->MouseEnteredButton(app_button);
  button_host->MouseExitedButton(app_button);
  button_host->MouseEnteredButton(platform_button);
  EXPECT_FALSE(tooltip_manager->IsVisible());
  EXPECT_EQ(platform_button, GetTooltipAnchorView());
}
// Verify a fix for a crash caused by a tooltip update for a deleted launcher
// button, see crbug.com/288838.
TEST_F(ShelfViewTest, RemovingItemClosesTooltip) {
  internal::ShelfButtonHost* button_host = shelf_view_;
  internal::ShelfTooltipManager* tooltip_manager =
      shelf_view_->tooltip_manager();

  // Add an item to the launcher.
  LauncherID app_button_id = AddAppShortcut();
  internal::ShelfButton* app_button = GetButtonByID(app_button_id);

  // Spawn a tooltip on that item.
  button_host->MouseEnteredButton(app_button);
  ShowTooltip();
  EXPECT_TRUE(tooltip_manager->IsVisible());

  // Removing the app shortcut while the tooltip is open should close the
  // tooltip.
  RemoveByID(app_button_id);
  EXPECT_FALSE(tooltip_manager->IsVisible());

  // Change the shelf layout. This should not crash (the regression this test
  // guards against).
  Shell::GetInstance()->SetShelfAlignment(SHELF_ALIGNMENT_LEFT,
                                          Shell::GetPrimaryRootWindow());
}
// Changing the shelf alignment closes any open tooltip.
TEST_F(ShelfViewTest, ShelfAlignmentClosesTooltip) {
  internal::ShelfButtonHost* host = shelf_view_;
  internal::ShelfTooltipManager* tooltips = shelf_view_->tooltip_manager();

  // Put one app shortcut on the launcher and hover it to spawn a tooltip.
  LauncherID shortcut_id = AddAppShortcut();
  internal::ShelfButton* shortcut_button = GetButtonByID(shortcut_id);
  host->MouseEnteredButton(shortcut_button);
  ShowTooltip();
  EXPECT_TRUE(tooltips->IsVisible());

  // Moving the shelf to the left edge must dismiss the tooltip.
  Shell::GetInstance()->SetShelfAlignment(SHELF_ALIGNMENT_LEFT,
                                          Shell::GetPrimaryRootWindow());
  EXPECT_FALSE(tooltips->IsVisible());
}
// Exercises ShelfView::ShouldHideTooltip() for points on buttons, in the gap
// between buttons, and just outside the union of all button bounds.
TEST_F(ShelfViewTest, ShouldHideTooltipTest) {
  LauncherID app_button_id = AddAppShortcut();
  LauncherID platform_button_id = AddPlatformApp();

  // The tooltip shouldn't hide if the mouse is on normal buttons.
  for (int i = 0; i < test_api_->GetButtonCount(); i++) {
    internal::ShelfButton* button = test_api_->GetButton(i);
    if (!button)
      continue;

    EXPECT_FALSE(shelf_view_->ShouldHideTooltip(
        button->GetMirroredBounds().CenterPoint()))
        << "ShelfView tries to hide on button " << i;
  }

  // The tooltip should not hide on the app-list button.
  views::View* app_list_button = shelf_view_->GetAppListButtonView();
  EXPECT_FALSE(shelf_view_->ShouldHideTooltip(
      app_list_button->GetMirroredBounds().CenterPoint()));

  // The tooltip shouldn't hide if the mouse is in the gap between two
  // buttons. The union's center point lies in that gap because the two
  // button rects are verified not to intersect.
  gfx::Rect app_button_rect = GetButtonByID(app_button_id)->GetMirroredBounds();
  gfx::Rect platform_button_rect =
      GetButtonByID(platform_button_id)->GetMirroredBounds();
  ASSERT_FALSE(app_button_rect.Intersects(platform_button_rect));
  EXPECT_FALSE(shelf_view_->ShouldHideTooltip(
      gfx::UnionRects(app_button_rect, platform_button_rect).CenterPoint()));

  // The tooltip should hide if it's outside of all buttons. Build the union
  // of every button rect (including the app-list button) first.
  gfx::Rect all_area;
  for (int i = 0; i < test_api_->GetButtonCount(); i++) {
    internal::ShelfButton* button = test_api_->GetButton(i);
    if (!button)
      continue;

    all_area.Union(button->GetMirroredBounds());
  }
  all_area.Union(shelf_view_->GetAppListButtonView()->GetMirroredBounds());
  // Inside the union (inclusive of the last pixel): don't hide.
  EXPECT_FALSE(shelf_view_->ShouldHideTooltip(all_area.origin()));
  EXPECT_FALSE(shelf_view_->ShouldHideTooltip(
      gfx::Point(all_area.right() - 1, all_area.bottom() - 1)));
  // One pixel past each edge: hide.
  EXPECT_TRUE(shelf_view_->ShouldHideTooltip(
      gfx::Point(all_area.right(), all_area.y())));
  EXPECT_TRUE(shelf_view_->ShouldHideTooltip(
      gfx::Point(all_area.x() - 1, all_area.y())));
  EXPECT_TRUE(shelf_view_->ShouldHideTooltip(
      gfx::Point(all_area.x(), all_area.y() - 1)));
  EXPECT_TRUE(shelf_view_->ShouldHideTooltip(
      gfx::Point(all_area.x(), all_area.bottom())));
}
// While the app list window is open, the tooltip should hide over the
// app-list button but not over the other buttons.
TEST_F(ShelfViewTest, ShouldHideTooltipWithAppListWindowTest) {
  Shell::GetInstance()->ToggleAppList(NULL);
  ASSERT_TRUE(Shell::GetInstance()->GetAppListWindow());

  // The tooltip shouldn't hide if the mouse is on normal buttons.
  // NOTE(review): this loop starts at index 1 (the sibling test above starts
  // at 0) — presumably to skip a button affected by the open app list;
  // confirm which index-0 button is being excluded.
  for (int i = 1; i < test_api_->GetButtonCount(); i++) {
    internal::ShelfButton* button = test_api_->GetButton(i);
    if (!button)
      continue;

    EXPECT_FALSE(shelf_view_->ShouldHideTooltip(
        button->GetMirroredBounds().CenterPoint()))
        << "ShelfView tries to hide on button " << i;
  }

  // The tooltip should hide on the app-list button while the app list shows.
  views::View* app_list_button = shelf_view_->GetAppListButtonView();
  EXPECT_TRUE(shelf_view_->ShouldHideTooltip(
      app_list_button->GetMirroredBounds().CenterPoint()));
}
// Test that moving the mouse cursor off the button onto the tooltip bubble
// closes the bubble.
TEST_F(ShelfViewTest, ShouldHideTooltipWhenHoveringOnTooltip) {
  internal::ShelfTooltipManager* tooltip_manager =
      shelf_view_->tooltip_manager();
  // Zero-delay timer so the tooltip shows as soon as the loop is pumped.
  tooltip_manager->CreateZeroDelayTimerForTest();
  aura::test::EventGenerator generator(Shell::GetPrimaryRootWindow());

  // Move the mouse off any item and check that no tooltip is shown.
  generator.MoveMouseTo(gfx::Point(0, 0));
  EXPECT_FALSE(tooltip_manager->IsVisible());

  // Move the mouse over the app-list button and check that the tooltip
  // becomes visible once the (zero-delay) timer fires.
  views::View* app_list_button = shelf_view_->GetAppListButtonView();
  gfx::Rect bounds = app_list_button->GetBoundsInScreen();
  generator.MoveMouseTo(bounds.CenterPoint());
  // Wait for the timer to go off.
  RunAllPendingInMessageLoop();
  EXPECT_TRUE(tooltip_manager->IsVisible());

  // Move the mouse cursor slightly to the right of the item. The tooltip
  // should stay open.
  generator.MoveMouseBy(bounds.width() / 2 + 5, 0);
  // Make sure there is no delayed close.
  RunAllPendingInMessageLoop();
  EXPECT_TRUE(tooltip_manager->IsVisible());

  // Move back - it should still stay open.
  generator.MoveMouseBy(-(bounds.width() / 2 + 5), 0);
  // Make sure there is no delayed close.
  RunAllPendingInMessageLoop();
  EXPECT_TRUE(tooltip_manager->IsVisible());

  // Now move the mouse cursor slightly above the item - so that it is over
  // the tooltip bubble. Now it should disappear.
  generator.MoveMouseBy(0, -(bounds.height() / 2 + 5));
  // Wait until the delayed close kicks in.
  RunAllPendingInMessageLoop();
  EXPECT_FALSE(tooltip_manager->IsVisible());
}
// Resizing the shelf view while an add animation without fade-in is running
// (which happens when overflow occurs). The app list button should end up in
// its new ideal bounds.
TEST_F(ShelfViewTest, ResizeDuringOverflowAddAnimation) {
  // Sanity check: all current buttons are visible.
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());

  // Add buttons until overflow. Let the non-overflow add animations finish
  // but leave the last one running.
  int items_added = 0;
  AddPlatformAppNoWait();
  while (!test_api_->IsOverflowButtonVisible()) {
    test_api_->RunMessageLoopUntilAnimationsDone();
    AddPlatformAppNoWait();
    ++items_added;
    ASSERT_LT(items_added, 10000);  // Guard against an infinite loop.
  }

  // Resize the shelf view while that animation is running; the shelf stays
  // overflown.
  gfx::Rect bounds = shelf_view_->bounds();
  bounds.set_width(bounds.width() - kLauncherPreferredSize);
  shelf_view_->SetBoundsRect(bounds);
  ASSERT_TRUE(test_api_->IsOverflowButtonVisible());

  // Finish the animation.
  test_api_->RunMessageLoopUntilAnimationsDone();

  // The app list button (last index) should end up in its new ideal bounds.
  const int app_list_button_index = test_api_->GetButtonCount() - 1;
  const gfx::Rect& app_list_ideal_bounds =
      test_api_->GetIdealBoundsByIndex(app_list_button_index);
  const gfx::Rect& app_list_bounds =
      test_api_->GetBoundsByIndex(app_list_button_index);
  EXPECT_EQ(app_list_bounds, app_list_ideal_bounds);
}
// Checks the overflow bubble size when an item is ripped off and re-inserted:
// the bubble shrinks by one item width during the rip-off and returns to its
// original width once the item is re-inserted.
TEST_F(ShelfViewTest, OverflowBubbleSize) {
  // Replace the ShelfDelegate with a test delegate wired to |model_| so the
  // drag/rip-off path works under test.
  test::ShellTestApi test_api(Shell::GetInstance());
  test_api.SetShelfDelegate(NULL);
  ShelfDelegate *delegate = new TestShelfDelegateForShelfView(model_);
  test_api.SetShelfDelegate(delegate);
  test::LauncherTestAPI(
      Launcher::ForPrimaryDisplay()).SetShelfDelegate(delegate);
  test_api_->SetShelfDelegate(delegate);

  AddButtonsUntilOverflow();

  // Show the overflow bubble.
  test_api_->ShowOverflowBubble();
  ASSERT_TRUE(test_api_->overflow_bubble() &&
              test_api_->overflow_bubble()->IsShowing());

  // Test API for the shelf view inside the overflow bubble.
  ShelfViewTestAPI test_for_overflow_view(
      test_api_->overflow_bubble()->shelf_view());

  int ripped_index = test_for_overflow_view.GetLastVisibleIndex();
  gfx::Size bubble_size = test_for_overflow_view.GetPreferredSize();
  int item_width = test_for_overflow_view.GetButtonSize() +
      test_for_overflow_view.GetButtonSpacing();

  aura::test::EventGenerator generator(Shell::GetPrimaryRootWindow(),
                                       gfx::Point());
  internal::ShelfButton* button =
      test_for_overflow_view.GetButton(ripped_index);
  // Rip off the last visible item: press on its center, then drag it up to
  // y == 0, off the bubble.
  gfx::Point start_point = button->GetBoundsInScreen().CenterPoint();
  gfx::Point rip_off_point(start_point.x(), 0);
  generator.MoveMouseTo(start_point.x(), start_point.y());
  base::MessageLoop::current()->RunUntilIdle();
  generator.PressLeftButton();
  base::MessageLoop::current()->RunUntilIdle();
  generator.MoveMouseTo(rip_off_point.x(), rip_off_point.y());
  base::MessageLoop::current()->RunUntilIdle();
  test_for_overflow_view.RunMessageLoopUntilAnimationsDone();

  // While ripped off, the bubble is one item narrower.
  EXPECT_EQ(bubble_size.width() - item_width,
            test_for_overflow_view.GetPreferredSize().width());
  ASSERT_TRUE(test_api_->overflow_bubble() &&
              test_api_->overflow_bubble()->IsShowing());

  // Re-insert the item by dragging it back over the first visible button.
  int first_index = test_for_overflow_view.GetFirstVisibleIndex();
  button = test_for_overflow_view.GetButton(first_index);

  // The bubble regains its original width once the item hovers back in...
  generator.MoveMouseTo(button->GetBoundsInScreen().CenterPoint());
  test_for_overflow_view.RunMessageLoopUntilAnimationsDone();
  EXPECT_EQ(bubble_size.width(),
            test_for_overflow_view.GetPreferredSize().width());

  // ...and keeps it after the drop.
  generator.ReleaseLeftButton();
  test_for_overflow_view.RunMessageLoopUntilAnimationsDone();
  EXPECT_EQ(bubble_size.width(),
            test_for_overflow_view.GetPreferredSize().width());
}
// Check that the first item in the list follows Fitts's law by including the
// first pixel and therefore being wider than the others.
TEST_F(ShelfViewLegacyShelfLayoutTest, CheckFittsLaw) {
  // Sanity check: all current buttons are visible.
  ASSERT_EQ(test_api_->GetLastVisibleIndex() + 1,
            test_api_->GetButtonCount());
  // The edge-most button (index 0) gets the extra edge pixels, so its ideal
  // bounds are wider than those of the next button.
  gfx::Rect ideal_bounds_0 = test_api_->GetIdealBoundsByIndex(0);
  gfx::Rect ideal_bounds_1 = test_api_->GetIdealBoundsByIndex(1);
  EXPECT_GT(ideal_bounds_0.width(), ideal_bounds_1.width());
}
// Check the drag insertion bounds of a scrolled overflow bubble: only the
// currently scrolled-into-view end of the bubble is inside the bounds.
TEST_F(ShelfViewTest, CheckDragInsertBoundsOfScrolledOverflowBubble) {
  // Small display so the overflow bubble itself becomes scrollable.
  UpdateDisplay("400x300");

  EXPECT_EQ(2, model_->item_count());

  AddButtonsUntilOverflow();

  // Show the overflow bubble.
  test_api_->ShowOverflowBubble();
  ASSERT_TRUE(test_api_->overflow_bubble() &&
              test_api_->overflow_bubble()->IsShowing());

  int item_width = test_api_->GetButtonSize() +
      test_api_->GetButtonSpacing();
  internal::OverflowBubbleView* bubble_view =
      test_api_->overflow_bubble()->bubble_view();
  test::OverflowBubbleViewTestAPI bubble_view_api(bubble_view);

  // Add more buttons until the OverflowBubble is scrollable and it has 3
  // invisible items.
  while (bubble_view_api.GetContentsSize().width() <
         (bubble_view->GetContentsBounds().width() + 3 * item_width))
    AddAppShortcut();

  ASSERT_TRUE(test_api_->overflow_bubble() &&
              test_api_->overflow_bubble()->IsShowing());

  ShelfViewTestAPI test_for_overflow_view(
      test_api_->overflow_bubble()->shelf_view());
  int first_index = test_for_overflow_view.GetFirstVisibleIndex();
  int last_index = test_for_overflow_view.GetLastVisibleIndex();

  internal::ShelfButton* first_button =
      test_for_overflow_view.GetButton(first_index);
  internal::ShelfButton* last_button =
      test_for_overflow_view.GetButton(last_index);
  gfx::Point first_point = first_button->GetBoundsInScreen().CenterPoint();
  gfx::Point last_point = last_button->GetBoundsInScreen().CenterPoint();
  gfx::Rect drag_reinsert_bounds =
      test_for_overflow_view.GetBoundsForDragInsertInScreen();
  // Unscrolled: the first visible item is inside the drag-insert bounds,
  // the last (scrolled-out) item is not.
  EXPECT_TRUE(drag_reinsert_bounds.Contains(first_point));
  EXPECT_FALSE(drag_reinsert_bounds.Contains(last_point));

  // Scroll sufficiently to show the last item.
  bubble_view_api.ScrollByXOffset(3 * item_width);
  drag_reinsert_bounds =
      test_for_overflow_view.GetBoundsForDragInsertInScreen();
  first_point = first_button->GetBoundsInScreen().CenterPoint();
  last_point = last_button->GetBoundsInScreen().CenterPoint();
  // After scrolling, the containment flips: now the last item is inside and
  // the first has scrolled out of the drag-insert bounds.
  EXPECT_FALSE(drag_reinsert_bounds.Contains(first_point));
  EXPECT_TRUE(drag_reinsert_bounds.Contains(last_point));
}
// Check the drag insertion bounds of shelf view in multi monitor environment.
TEST_F(ShelfViewTest, CheckDragInsertBoundsWithMultiMonitor) {
  // win8-aura doesn't support multiple display.
  if (!SupportsMultipleDisplays())
    return;
  UpdateDisplay("800x600,800x600");
  Launcher* secondary_launcher =
      Launcher::ForWindow(Shell::GetAllRootWindows()[1]);
  internal::ShelfView* shelf_view_for_secondary =
      test::LauncherTestAPI(secondary_launcher).shelf_view();
  // The bounds should be big enough for 4 buttons + overflow chevron.
  shelf_view_for_secondary->SetBounds(0, 0, 500,
      internal::ShelfLayoutManager::GetPreferredShelfSize());
  ShelfViewTestAPI test_api_for_secondary(shelf_view_for_secondary);
  // Speeds up animation for test.
  test_api_for_secondary.SetAnimationDuration(1);
  AddButtonsUntilOverflow();
  // Test #1: Test drag insertion bounds of primary shelf.
  // Show overflow bubble.
  test_api_->ShowOverflowBubble();
  ASSERT_TRUE(test_api_->overflow_bubble() &&
              test_api_->overflow_bubble()->IsShowing());
  ShelfViewTestAPI test_api_for_overflow_view(
      test_api_->overflow_bubble()->shelf_view());
  // Use the last visible item of the bubble as a representative point that
  // must lie inside the bubble's drag-insert region.
  internal::ShelfButton* button = test_api_for_overflow_view.GetButton(
      test_api_for_overflow_view.GetLastVisibleIndex());
  // Checks that a point in shelf is contained in drag insert bounds.
  gfx::Point point_in_shelf_view = button->GetBoundsInScreen().CenterPoint();
  gfx::Rect drag_reinsert_bounds =
      test_api_for_overflow_view.GetBoundsForDragInsertInScreen();
  EXPECT_TRUE(drag_reinsert_bounds.Contains(point_in_shelf_view));
  // Checks that a point out of shelf is not contained in drag insert bounds.
  // (y == 0 is the top edge of the display, well outside the bottom shelf.)
  EXPECT_FALSE(drag_reinsert_bounds.Contains(
      gfx::Point(point_in_shelf_view.x(), 0)));
  // Test #2: Test drag insertion bounds of secondary shelf.
  // Show overflow bubble.
  test_api_for_secondary.ShowOverflowBubble();
  ASSERT_TRUE(test_api_for_secondary.overflow_bubble() &&
              test_api_for_secondary.overflow_bubble()->IsShowing());
  ShelfViewTestAPI test_api_for_overflow_view_of_secondary(
      test_api_for_secondary.overflow_bubble()->shelf_view());
  internal::ShelfButton* button_in_secondary =
      test_api_for_overflow_view_of_secondary.GetButton(
          test_api_for_overflow_view_of_secondary.GetLastVisibleIndex());
  // Checks that a point in shelf is contained in drag insert bounds.
  gfx::Point point_in_secondary_shelf_view =
      button_in_secondary->GetBoundsInScreen().CenterPoint();
  gfx::Rect drag_reinsert_bounds_in_secondary =
      test_api_for_overflow_view_of_secondary.GetBoundsForDragInsertInScreen();
  EXPECT_TRUE(drag_reinsert_bounds_in_secondary.Contains(
      point_in_secondary_shelf_view));
  // Checks that a point out of shelf is not contained in drag insert bounds.
  EXPECT_FALSE(drag_reinsert_bounds_in_secondary.Contains(
      gfx::Point(point_in_secondary_shelf_view.x(), 0)));
  // Checks that a point of overflow bubble in primary shelf should not be
  // contained by insert bounds of secondary shelf.
  EXPECT_FALSE(drag_reinsert_bounds_in_secondary.Contains(point_in_shelf_view));
}
// Checks that an item can be ripped off a left-aligned shelf on the
// secondary monitor.
TEST_F(ShelfViewTest, CheckRipOffFromLeftShelfAlignmentWithMultiMonitor) {
  // win8-aura doesn't support multiple display.
  if (!SupportsMultipleDisplays())
    return;

  UpdateDisplay("800x600,800x600");
  ASSERT_EQ(2U, Shell::GetAllRootWindows().size());

  // Left-align the shelf on the secondary display.
  aura::Window* secondary_root = Shell::GetAllRootWindows()[1];
  Shell::GetInstance()->SetShelfAlignment(SHELF_ALIGNMENT_LEFT,
                                          secondary_root);
  ASSERT_EQ(SHELF_ALIGNMENT_LEFT,
            Shell::GetInstance()->GetShelfAlignment(secondary_root));

  // Initially, app list and browser shortcut are added.
  EXPECT_EQ(2, model_->item_count());
  int browser_index = model_->GetItemIndexForType(TYPE_BROWSER_SHORTCUT);
  EXPECT_GT(browser_index, 0);

  // Locate the browser shortcut button on the secondary shelf.
  Launcher* secondary_launcher = Launcher::ForWindow(secondary_root);
  internal::ShelfView* secondary_shelf_view =
      test::LauncherTestAPI(secondary_launcher).shelf_view();
  ShelfViewTestAPI secondary_test_api(secondary_shelf_view);
  internal::ShelfButton* browser_button =
      secondary_test_api.GetButton(browser_index);

  // Drag the browser item far enough from the shelf to rip it off.
  gfx::Point drag_origin = browser_button->GetBoundsInScreen().CenterPoint();
  wm::ConvertPointFromScreen(secondary_root, &drag_origin);
  aura::test::EventGenerator generator(secondary_root, drag_origin);
  generator.PressLeftButton();
  generator.MoveMouseTo(drag_origin.x() + 400, drag_origin.y());
  secondary_test_api.RunMessageLoopUntilAnimationsDone();
  EXPECT_TRUE(secondary_test_api.IsRippedOffFromShelf());
}
// Checks various drag and drop operations from OverflowBubble to Shelf.
TEST_F(ShelfViewTest, CheckDragAndDropFromOverflowBubbleToShelf) {
  // Swap in a test delegate so the drag-and-drop flow runs through
  // TestShelfDelegateForShelfView.
  test::ShellTestApi shell_test_api(Shell::GetInstance());
  shell_test_api.SetShelfDelegate(NULL);
  ShelfDelegate* delegate = new TestShelfDelegateForShelfView(model_);
  shell_test_api.SetShelfDelegate(delegate);
  test::LauncherTestAPI(
      Launcher::ForPrimaryDisplay()).SetShelfDelegate(delegate);
  test_api_->SetShelfDelegate(delegate);

  AddButtonsUntilOverflow();

  // Exercise both the cancelled and the completed drag paths.
  TestDraggingAnItemFromOverflowToShelf(false);
  TestDraggingAnItemFromOverflowToShelf(true);
}
// Parameterized fixture: the bool parameter toggles an LTR/RTL text
// direction change (via ScopedTextDirectionChange) for each test run.
class ShelfViewVisibleBoundsTest : public ShelfViewTest,
                                   public testing::WithParamInterface<bool> {
 public:
  ShelfViewVisibleBoundsTest() : text_direction_change_(GetParam()) {}
  // Verifies that every visible shelf button, and the app list button, lie
  // within the shelf's visible-items bounds, which in turn lie within the
  // launcher bounds.
  void CheckAllItemsAreInBounds() {
    gfx::Rect visible_bounds = shelf_view_->GetVisibleItemsBoundsInScreen();
    gfx::Rect launcher_bounds = shelf_view_->GetBoundsInScreen();
    EXPECT_TRUE(launcher_bounds.Contains(visible_bounds));
    // GetButton() may return NULL for non-button slots; skip those.
    for (int i = 0; i < test_api_->GetButtonCount(); ++i)
      if (internal::ShelfButton* button = test_api_->GetButton(i))
        EXPECT_TRUE(visible_bounds.Contains(button->GetBoundsInScreen()));
    CheckAppListButtonIsInBounds();
  }
  void CheckAppListButtonIsInBounds() {
    gfx::Rect visible_bounds = shelf_view_->GetVisibleItemsBoundsInScreen();
    gfx::Rect app_list_button_bounds = shelf_view_->GetAppListButtonView()->
        GetBoundsInScreen();
    EXPECT_TRUE(visible_bounds.Contains(app_list_button_bounds));
  }
 private:
  // Applies (and on destruction reverts) the text direction for this run.
  ScopedTextDirectionChange text_direction_change_;
  DISALLOW_COPY_AND_ASSIGN(ShelfViewVisibleBoundsTest);
};
TEST_P(ShelfViewVisibleBoundsTest, ItemsAreInBounds) {
  // Add a few items while still leaving empty space on the shelf.
  for (int added = 0; added < 3; ++added)
    AddAppShortcut();
  test_api_->RunMessageLoopUntilAnimationsDone();
  EXPECT_FALSE(test_api_->IsOverflowButtonVisible());
  CheckAllItemsAreInBounds();

  // Now fill the shelf until the overflow button appears and re-check.
  while (!test_api_->IsOverflowButtonVisible())
    AddAppShortcut();
  test_api_->RunMessageLoopUntilAnimationsDone();
  CheckAllItemsAreInBounds();
}
// Instantiate the parameterized suites for both LTR and RTL text directions.
INSTANTIATE_TEST_CASE_P(LtrRtl, ShelfViewTextDirectionTest, testing::Bool());
INSTANTIATE_TEST_CASE_P(VisibleBounds, ShelfViewVisibleBoundsTest,
                        testing::Bool());
} // namespace test
} // namespace ash
| qtekfun/htcDesire820Kernel | external/chromium_org/ash/shelf/shelf_view_unittest.cc | C++ | gpl-2.0 | 66,772 |
<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* This file contains the backup structure for the lesson module
*
* This is the "graphical" structure of the lesson module:
*
* lesson ------------>---------------|-------------->-----------|------------->------------|
* (CL,pk->id) | | |
* | | | |
* | lesson_grades lesson_high_scores lesson_timer
* | (UL, pk->id,fk->lessonid) (UL, pk->id,fk->lessonid) (UL, pk->id,fk->lessonid)
* | |
* | |
* | |
* | |
* lesson_pages----------->---------lesson_branch
* (CL,pk->id,fk->lessonid) (UL, pk->id,fk->pageid)
* |
* |
* |
* lesson_answers
* (CL,pk->id,fk->pageid)
* |
* |
* |
* lesson_attempts
* (UL,pk->id,fk->answerid)
*
* Meaning: pk->primary key field of the table
* fk->foreign key to link with parent
* nt->nested field (recursive data)
* CL->course level info
* UL->user level info
 * files->table may have files
*
* @package mod_lesson
* @copyright 2010 Sam Hemelryk
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
/**
* Structure step class that informs a backup task how to backup the lesson module.
*
* @copyright 2010 Sam Hemelryk
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
class backup_lesson_activity_structure_step extends backup_activity_structure_step {
    /**
     * Defines the full backup structure for the lesson activity.
     *
     * Builds the nested element tree documented in the file header comment,
     * attaches each element to its source table, annotates user ids and file
     * areas, and wraps the result in the standard activity structure.
     *
     * @return backup_nested_element the wrapped lesson structure
     */
    protected function define_structure() {
        // The lesson table
        // This table contains all of the goodness for the lesson module, quite
        // a lot goes into it but nothing relational other than course which will
        // need to be corrected upon restore
        $lesson = new backup_nested_element('lesson', array('id'), array(
            'course','name','practice','modattempts','usepassword','password',
            'dependency','conditions','grade','custom','ongoing','usemaxgrade',
            'maxanswers','maxattempts','review','nextpagedefault','feedback',
            'minquestions','maxpages','timed','maxtime','retake','activitylink',
            'mediafile','mediaheight','mediawidth','mediaclose','slideshow',
            'width','height','bgcolor','displayleft','displayleftif','progressbar',
            'showhighscores','maxhighscores','available','deadline','timemodified'
        ));
        // Tell the lesson element about the showhighscores elements mapping to the highscores
        // database field.
        $lesson->set_source_alias('highscores', 'showhighscores');
        // The lesson_pages table
        // Grouped within a `pages` element, important to note that page is relational
        // to the lesson, and also to the previous/next page in the series.
        // Upon restore prevpageid and nextpageid will need to be corrected.
        $pages = new backup_nested_element('pages');
        $page = new backup_nested_element('page', array('id'), array(
            'prevpageid','nextpageid','qtype','qoption','layout',
            'display','timecreated','timemodified','title','contents',
            'contentsformat'
        ));
        // The lesson_answers table
        // Grouped within an answers `element`, the lesson_answers table relates
        // to the page and lesson with `pageid` and `lessonid` that will both need
        // to be corrected during restore.
        $answers = new backup_nested_element('answers');
        $answer = new backup_nested_element('answer', array('id'), array(
            'jumpto','grade','score','flags','timecreated','timemodified','answer_text',
            'response', 'answerformat', 'responseformat'
        ));
        // Tell the answer element about the answer_text elements mapping to the answer
        // database field.
        $answer->set_source_alias('answer', 'answer_text');
        // The lesson_attempts table
        // Grouped by an `attempts` element this is relational to the page, lesson,
        // and user.
        $attempts = new backup_nested_element('attempts');
        $attempt = new backup_nested_element('attempt', array('id'), array(
            'userid','retry','correct','useranswer','timeseen'
        ));
        // The lesson_branch table
        // Grouped by a `branch` element this is relational to the page, lesson,
        // and user.
        $branches = new backup_nested_element('branches');
        $branch = new backup_nested_element('branch', array('id'), array(
            'userid','retry','flag','timeseen'
        ));
        // The lesson_grades table
        // Grouped by a grades element this is relational to the lesson and user.
        $grades = new backup_nested_element('grades');
        $grade = new backup_nested_element('grade', array('id'), array(
            'userid','grade','late','completed'
        ));
        // The lesson_high_scores table
        // Grouped by a highscores element this is relational to the lesson, user,
        // and possibly a grade.
        $highscores = new backup_nested_element('highscores');
        $highscore = new backup_nested_element('highscore', array('id'), array(
            'gradeid','userid','nickname'
        ));
        // The lesson_timer table
        // Grouped by a `timers` element this is relational to the lesson and user.
        $timers = new backup_nested_element('timers');
        $timer = new backup_nested_element('timer', array('id'), array(
            'userid','starttime','lessontime'
        ));
        // Now that we have all of the elements created we've got to put them
        // together correctly. The nesting mirrors the diagram in the file header.
        $lesson->add_child($pages);
        $pages->add_child($page);
        $page->add_child($answers);
        $answers->add_child($answer);
        $answer->add_child($attempts);
        $attempts->add_child($attempt);
        $page->add_child($branches);
        $branches->add_child($branch);
        $lesson->add_child($grades);
        $grades->add_child($grade);
        $lesson->add_child($highscores);
        $highscores->add_child($highscore);
        $lesson->add_child($timers);
        $timers->add_child($timer);
        // Set the source table for the elements that aren't reliant on the user
        // at this point (lesson, lesson_pages, lesson_answers)
        $lesson->set_source_table('lesson', array('id' => backup::VAR_ACTIVITYID));
        //we use SQL here as it must be ordered by prevpageid so that restore gets the pages in the right order.
        $page->set_source_table('lesson_pages', array('lessonid' => backup::VAR_PARENTID), 'prevpageid ASC');
        // We use SQL here as answers must be ordered by id so that the restore gets them in the right order
        $answer->set_source_table('lesson_answers', array('pageid' => backup::VAR_PARENTID), 'id ASC');
        // Check if we are also backing up user information
        if ($this->get_setting_value('userinfo')) {
            // Set the source table for elements that are reliant on the user
            // lesson_attempts, lesson_branch, lesson_grades, lesson_high_scores, lesson_timer
            $attempt->set_source_table('lesson_attempts', array('answerid' => backup::VAR_PARENTID));
            $branch->set_source_table('lesson_branch', array('pageid' => backup::VAR_PARENTID));
            $grade->set_source_table('lesson_grades', array('lessonid'=>backup::VAR_PARENTID));
            $highscore->set_source_table('lesson_high_scores', array('lessonid' => backup::VAR_PARENTID));
            $timer->set_source_table('lesson_timer', array('lessonid' => backup::VAR_PARENTID));
        }
        // Annotate the user id's where required.
        $attempt->annotate_ids('user', 'userid');
        $branch->annotate_ids('user', 'userid');
        $grade->annotate_ids('user', 'userid');
        $highscore->annotate_ids('user', 'userid');
        $timer->annotate_ids('user', 'userid');
        // Annotate the file areas in use by the lesson module.
        $lesson->annotate_files('mod_lesson', 'mediafile', null);
        $page->annotate_files('mod_lesson', 'page_contents', 'id');
        $answer->annotate_files('mod_lesson', 'page_answers', 'id');
        $answer->annotate_files('mod_lesson', 'page_responses', 'id');
        // Prepare and return the structure we have just created for the lesson module.
        return $this->prepare_activity_structure($lesson);
    }
}
| Quidd1tch/moodle | mod/lesson/backup/moodle2/backup_lesson_stepslib.php | PHP | gpl-3.0 | 9,660 |
/*
* Copyright 2015 Cloudius Systems
*/
/*
* This file is part of Scylla.
*
* Scylla is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Scylla is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Scylla. If not, see <http://www.gnu.org/licenses/>.
*/
#define BOOST_TEST_DYN_LINK
#include <boost/test/unit_test.hpp>
#include "tests/test-utils.hh"
#include "message/messaging_service.hh"
#include "gms/failure_detector.hh"
#include "gms/gossiper.hh"
#include "core/reactor.hh"
SEASTAR_TEST_CASE(test_boot_shutdown){
    // Boot the messaging service, failure detector and gossiper, then shut
    // them down in reverse order. A flat continuation chain replaces the
    // original nested lambdas; the sequencing is identical.
    return net::get_messaging_service().start(gms::inet_address("127.0.0.1")).then([] {
        return gms::get_failure_detector().start();
    }).then([] {
        return gms::get_gossiper().start();
    }).then([] {
        return gms::get_gossiper().stop();
    }).then([] {
        return gms::get_failure_detector().stop();
    }).then([] {
        return net::get_messaging_service().stop();
    }).then([] {
        return make_ready_future<>();
    });
}
| stamhe/scylla | tests/gossip_test.cc | C++ | agpl-3.0 | 1,586 |
# merge_frontend.py
import sys
import io
import os
import pygit2
import collections
import typing
# Text encoding used when serializing str merge results to binary streams
# (see MergeDriver.to_file).
ENCODING = 'utf-8'
class MergeReturn(typing.NamedTuple):
    """Outcome of MergeDriver.merge().

    success is True only when the merge completed cleanly (no conflict
    markers). merge_result holds the merged content (bytes, str, or a
    driver-specific object), or None when the driver produced no output.
    """
    success: bool
    merge_result: typing.Optional[object]
class MergeDriver:
    """Base class for custom Git merge drivers.

    Subclasses implement merge() and may override to_file() for result
    types other than bytes/str. main() is the entry point: it supports both
    normal merge-driver invocation by Git and a --posthoc mode that re-runs
    the driver over an already-conflicted work tree.
    """

    # Identifier matching the `merge=<driver_id>` value in .gitattributes;
    # required for post-hoc mode so conflicted paths can be matched to this
    # driver (see _applies_to).
    driver_id: typing.Optional[str] = None

    def pre_announce(self, path: str):
        """
        Called before merge() is called, with a human-friendly path for output.
        """
        print(f"Merging {self.driver_id}: {path}")

    def merge(self, base: typing.BinaryIO, left: typing.BinaryIO, right: typing.BinaryIO) -> MergeReturn:
        """
        Read from three BinaryIOs: base (common ancestor), left (ours), and
        right (theirs). Perform the actual three-way merge operation. Leave
        conflict markers if necessary.

        Return (False, None) to indicate the merge driver totally failed.
        Return (False, merge_result) if the result contains conflict markers.
        Return (True, merge_result) if everything went smoothly.
        """
        raise NotImplementedError

    def to_file(self, output: typing.BinaryIO, merge_result: object):
        """
        Save the merge() result to the given output stream.

        Override this if the merge() result is not bytes or str.
        """
        if isinstance(merge_result, bytes):
            output.write(merge_result)
        elif isinstance(merge_result, str):
            # Encode directly instead of wrapping `output` in an
            # io.TextIOWrapper context manager: the wrapper's __exit__ would
            # close the caller's stream, and its default newline handling
            # would rewrite '\n' to os.linesep. A merge result should be
            # written back byte-for-byte.
            output.write(merge_result.encode(ENCODING))
        else:
            raise NotImplementedError

    def post_announce(self, success: bool, merge_result: object):
        """
        Called after merge() is called, to warn the user if action is needed.
        """
        if not success:
            print("!!! Manual merge required")
            if merge_result:
                print("  A best-effort merge was performed. You must finish the job yourself.")
            else:
                print("  No merge was possible. You must resolve the conflict yourself.")

    def main(self, args: typing.List[str] = None):
        """Entry point: dispatch to _main() with CLI args.

        NOTE(review): `args or sys.argv[1:]` also falls back to sys.argv when
        an explicit empty list is passed; preserved as-is since callers may
        rely on it.
        """
        return _main(self, args or sys.argv[1:])
def _main(driver: MergeDriver, args: typing.List[str]):
    """Dispatch: --posthoc mode when requested, normal merge driver otherwise."""
    if args and args[0] == '--posthoc':
        return _posthoc_main(driver, args[1:])
    return _driver_main(driver, args)
def _driver_main(driver: MergeDriver, args: typing.List[str]):
    """
    Act like a normal Git merge driver, called by Git during a merge.

    args holds the five placeholders configured in .gitconfig
    (%P %O %A %B %L): the display path, the base/ours/theirs temp-file
    paths, and the conflict-marker size (unused here). The merged result
    is written back over the "ours" file, which Git then takes as the
    merge outcome. Returns 1 on usage error or unresolved conflict,
    None (falsy, i.e. success) otherwise.
    """
    if len(args) != 5:
        print("merge driver called with wrong number of arguments")
        print("  usage: %P %O %A %B %L")
        return 1
    path, path_base, path_left, path_right, _ = args
    driver.pre_announce(path)
    # Open all three read-only inputs in a single compound `with` instead of
    # a three-deep pyramid; they are closed before the result is written.
    with open(path_base, 'rb') as io_base, \
            open(path_left, 'rb') as io_left, \
            open(path_right, 'rb') as io_right:
        success, merge_result = driver.merge(io_base, io_left, io_right)
    if merge_result:
        # If we got anything, write it to the working directory.
        with open(path_left, 'wb') as io_output:
            driver.to_file(io_output, merge_result)
    driver.post_announce(success, merge_result)
    if not success:
        # If we were not successful, do not mark the conflict as resolved.
        return 1
def _posthoc_main(driver: MergeDriver, args: typing.List[str]):
    """
    Apply merge driver logic to a repository which is already in a conflicted
    state, running the driver on any conflicted files.

    Returns 0 if there was nothing to do, 1 if some merge could not be
    completed, and None (success) otherwise. `args` is currently unused.
    """
    repo_dir = pygit2.discover_repository(os.getcwd())
    repo = pygit2.Repository(repo_dir)
    conflicts = repo.index.conflicts
    if not conflicts:
        print("There are no unresolved conflicts.")
        return 0
    all_success = True
    index_changed = False
    any_attempted = False
    # Iterate over a snapshot: resolving an entry deletes it from `conflicts`,
    # so mutating while iterating the live view would be unsafe.
    for base, left, right in list(conflicts):
        if not base or not left or not right:
            # (not left) or (not right): deleted in one branch, modified in the other.
            # (not base): added differently in both branches.
            # In either case, there's nothing we can do for now.
            continue
        path = left.path
        if not _applies_to(repo, driver, path):
            # Skip the file if it's not the right extension.
            continue
        any_attempted = True
        driver.pre_announce(path)
        # Feed the driver the three conflicted blob contents from the index.
        io_base = io.BytesIO(repo[base.id].data)
        io_left = io.BytesIO(repo[left.id].data)
        io_right = io.BytesIO(repo[right.id].data)
        success, merge_result = driver.merge(io_base, io_left, io_right)
        if merge_result:
            # If we got anything, write it to the working directory.
            with open(os.path.join(repo.workdir, path), 'wb') as io_output:
                driver.to_file(io_output, merge_result)
            if success:
                # If we were successful, mark the conflict as resolved.
                # Re-read what to_file() actually wrote (a driver override may
                # serialize differently from the in-memory merge_result).
                with open(os.path.join(repo.workdir, path), 'rb') as io_readback:
                    contents = io_readback.read()
                merged_id = repo.create_blob(contents)
                repo.index.add(pygit2.IndexEntry(path, merged_id, left.mode))
                del conflicts[path]
                index_changed = True
        if not success:
            all_success = False
        driver.post_announce(success, merge_result)
    if index_changed:
        repo.index.write()
    if not any_attempted:
        print("There are no unresolved", driver.driver_id, "conflicts.")
    if not all_success:
        # Not usually observed, but indicate the failure just in case.
        return 1
def _applies_to(repo: pygit2.Repository, driver: MergeDriver, path: str):
    """
    Check if the current merge driver is a candidate to handle a given path.
    """
    driver_id = driver.driver_id
    if not driver_id:
        raise ValueError('Driver must have ID to perform post-hoc merge')
    merge_attr = repo.get_attr(path, 'merge')
    return merge_attr == driver_id
| erwgd/-tg-station | tools/hooks/merge_frontend.py | Python | agpl-3.0 | 5,937 |
// ========================================================================
// $Id: SessionManager.java,v 1.18 2005/03/15 10:03:58 gregwilkins Exp $
// Copyright 1996-2004 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ========================================================================
package org.openqa.jetty.jetty.servlet;
import java.io.Serializable;
import java.util.EventListener;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.openqa.jetty.util.LifeCycle;
/* --------------------------------------------------------------------- */
/** Session Manager.
* The API required to manage sessions for a servlet context.
*
* @version $Id: SessionManager.java,v 1.18 2005/03/15 10:03:58 gregwilkins Exp $
* @author Greg Wilkins
*/
public interface SessionManager extends LifeCycle, Serializable
{
    /* ------------------------------------------------------------ */
    /** Session cookie name.
     * Defaults to JSESSIONID, but can be set with the
     * org.openqa.jetty.jetty.servlet.SessionCookie system property.
     */
    public final static String __SessionCookie=
        System.getProperty("org.openqa.jetty.jetty.servlet.SessionCookie","JSESSIONID");
    /* ------------------------------------------------------------ */
    /** Session URL parameter name.
     * Defaults to jsessionid, but can be set with the
     * org.openqa.jetty.jetty.servlet.SessionURL system property.
     */
    public final static String __SessionURL =
        System.getProperty("org.openqa.jetty.jetty.servlet.SessionURL","jsessionid");
    /** Prefix used when encoding the session id into a URL,
     * e.g. ";jsessionid=" with the default parameter name.
     */
    final static String __SessionUrlPrefix=";"+__SessionURL+"=";
    /* ------------------------------------------------------------ */
    /** Session Domain.
     * If this property is set as a ServletContext InitParam, then it is
     * used as the domain for session cookies. If it is not set, then
     * no domain is specified for the session cookie.
     */
    public final static String __SessionDomain=
        "org.openqa.jetty.jetty.servlet.SessionDomain";
    /* ------------------------------------------------------------ */
    /** Session Path.
     * If this property is set as a ServletContext InitParam, then it is
     * used as the path for the session cookie. If it is not set, then
     * the context path is used as the path for the cookie.
     */
    public final static String __SessionPath=
        "org.openqa.jetty.jetty.servlet.SessionPath";
    /* ------------------------------------------------------------ */
    /** Session Max Age.
     * If this property is set as a ServletContext InitParam, then it is
     * used as the max age for the session cookie. If it is not set, then
     * a max age of -1 is used.
     */
    public final static String __MaxAge=
        "org.openqa.jetty.jetty.servlet.MaxAge";
    /* ------------------------------------------------------------ */
    /** Initialize this session manager with the ServletHandler it serves. */
    public void initialize(ServletHandler handler);
    /* ------------------------------------------------------------ */
    /** Look up an existing session by its id.
     * @param id The session id.
     */
    public HttpSession getHttpSession(String id);
    /* ------------------------------------------------------------ */
    /** Create a new session for the given request. */
    public HttpSession newHttpSession(HttpServletRequest request);
    /* ------------------------------------------------------------ */
    /** @return true if session cookies should be secure
     */
    public boolean getSecureCookies();
    /* ------------------------------------------------------------ */
    /** @return true if session cookies should be httponly (microsoft extension)
     */
    public boolean getHttpOnly();
    /* ------------------------------------------------------------ */
    /** @return the default max inactive interval, in seconds, for new sessions. */
    public int getMaxInactiveInterval();
    /* ------------------------------------------------------------ */
    /** Set the default max inactive interval for new sessions.
     * @param seconds Max inactive interval in seconds.
     */
    public void setMaxInactiveInterval(int seconds);
    /* ------------------------------------------------------------ */
    /** Add an event listener.
     * @param listener An Event Listener. Individual SessionManagers
     * implemetations may accept arbitrary listener types, but they
     * are expected to at least handle
     *    HttpSessionActivationListener,
     *    HttpSessionAttributeListener,
     *    HttpSessionBindingListener,
     *    HttpSessionListener
     * @exception IllegalArgumentException If an unsupported listener
     * is passed.
     */
    public void addEventListener(EventListener listener)
        throws IllegalArgumentException;
    /* ------------------------------------------------------------ */
    /** Remove an event listener previously added with addEventListener. */
    public void removeEventListener(EventListener listener);
    /* ------------------------------------------------------------ */
    /** Get a Cookie for a session.
     * @param session
     * @return A Cookie object
     */
    public Cookie getSessionCookie(HttpSession session,boolean requestIsSecure);
    /* ------------------------------------------------------------ */
    /* ------------------------------------------------------------ */
    /** Extended session interface exposing manager-internal operations. */
    public interface Session extends HttpSession
    {
        /* ------------------------------------------------------------ */
        /** @return true if this session has not been invalidated. */
        public boolean isValid();
        /* ------------------------------------------------------------ */
        /** Record an access to this session.
         * NOTE(review): presumably updates the last-accessed time used for
         * inactivity timeouts — confirm against the implementation.
         */
        public void access();
    }
}
| jknguyen/josephknguyen-selenium | java/server/src/org/openqa/jetty/jetty/servlet/SessionManager.java | Java | apache-2.0 | 5,960 |
// mksyscall.pl -tags linux,s390x syscall_linux.go syscall_linux_s390x.go
// Code generated by the command above; see README.md. DO NOT EDIT.
// +build linux,s390x
package unix
import (
"syscall"
"unsafe"
)
var _ syscall.Errno
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// fchmodat is the raw SYS_FCHMODAT wrapper: it changes the mode of the file
// at path, resolved relative to dirfd. NOTE(review): the unexported name
// suggests an exported Fchmodat elsewhere layers flag validation on top —
// confirm in syscall_linux.go. Hand edits here are clobbered on regeneration.
func fchmodat(dirfd int, path string, mode uint32) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_FCHMODAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
// ioctl issues the ioctl request req on fd, passing arg as the raw argument.
func ioctl(fd int, req uint, arg uintptr) (err error) {
	_, _, e1 := Syscall(SYS_IOCTL, uintptr(fd), uintptr(req), uintptr(arg))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Linkat(olddirfd int, oldpath string, newdirfd int, newpath string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(oldpath)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(newpath)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_LINKAT, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)), uintptr(flags), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func openat(dirfd int, path string, flags int, mode uint32) (fd int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
r0, _, e1 := Syscall6(SYS_OPENAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags), uintptr(mode), 0, 0)
fd = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func ppoll(fds *PollFd, nfds int, timeout *Timespec, sigmask *Sigset_t) (n int, err error) {
r0, _, e1 := Syscall6(SYS_PPOLL, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(unsafe.Pointer(timeout)), uintptr(unsafe.Pointer(sigmask)), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Readlinkat(dirfd int, path string, buf []byte) (n int, err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
var _p1 unsafe.Pointer
if len(buf) > 0 {
_p1 = unsafe.Pointer(&buf[0])
} else {
_p1 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall6(SYS_READLINKAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(buf)), 0, 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Symlinkat(oldpath string, newdirfd int, newpath string) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(oldpath)
if err != nil {
return
}
var _p1 *byte
_p1, err = BytePtrFromString(newpath)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_SYMLINKAT, uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Unlinkat(dirfd int, path string, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall(SYS_UNLINKAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func utimensat(dirfd int, path string, times *[2]Timespec, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_UTIMENSAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)), uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Getcwd(buf []byte) (n int, err error) {
var _p0 unsafe.Pointer
if len(buf) > 0 {
_p0 = unsafe.Pointer(&buf[0])
} else {
_p0 = unsafe.Pointer(&_zero)
}
r0, _, e1 := Syscall(SYS_GETCWD, uintptr(_p0), uintptr(len(buf)), 0)
n = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func wait4(pid int, wstatus *_C_int, options int, rusage *Rusage) (wpid int, err error) {
r0, _, e1 := Syscall6(SYS_WAIT4, uintptr(pid), uintptr(unsafe.Pointer(wstatus)), uintptr(options), uintptr(unsafe.Pointer(rusage)), 0, 0)
wpid = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func KeyctlInt(cmd int, arg2 int, arg3 int, arg4 int, arg5 int) (ret int, err error) {
r0, _, e1 := Syscall6(SYS_KEYCTL, uintptr(cmd), uintptr(arg2), uintptr(arg3), uintptr(arg4), uintptr(arg5), 0)
ret = int(r0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// KeyctlBuffer invokes keyctl(2) for commands taking a byte buffer (arg3/arg4
// are the buffer pointer and length). For an empty slice a pointer to _zero is
// passed so the kernel still sees a valid, non-nil address.
func KeyctlBuffer(cmd int, arg2 int, buf []byte, arg5 int) (ret int, err error) {
	var _p0 unsafe.Pointer
	if len(buf) > 0 {
		_p0 = unsafe.Pointer(&buf[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall6(SYS_KEYCTL, uintptr(cmd), uintptr(arg2), uintptr(_p0), uintptr(len(buf)), uintptr(arg5), 0)
	ret = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// keyctlJoin invokes keyctl(2) for commands whose single argument is a
// string (converted to a NUL-terminated *byte via BytePtrFromString).
func keyctlJoin(cmd int, arg2 string) (ret int, err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(arg2)
	if err != nil {
		return
	}
	r0, _, e1 := Syscall(SYS_KEYCTL, uintptr(cmd), uintptr(unsafe.Pointer(_p0)), 0)
	ret = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// keyctlSearch invokes keyctl(2) for commands taking two string arguments
// (arg3, arg4) plus integer arg2/arg5.
func keyctlSearch(cmd int, arg2 int, arg3 string, arg4 string, arg5 int) (ret int, err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(arg3)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(arg4)
	if err != nil {
		return
	}
	r0, _, e1 := Syscall6(SYS_KEYCTL, uintptr(cmd), uintptr(arg2), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(arg5), 0)
	ret = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// keyctlIOV invokes keyctl(2) for commands whose payload is an iovec array;
// the slice length is passed as the vector count.
func keyctlIOV(cmd int, arg2 int, payload []Iovec, arg5 int) (err error) {
	var _p0 unsafe.Pointer
	if len(payload) > 0 {
		_p0 = unsafe.Pointer(&payload[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	_, _, e1 := Syscall6(SYS_KEYCTL, uintptr(cmd), uintptr(arg2), uintptr(_p0), uintptr(len(payload)), uintptr(arg5), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// keyctlDH invokes keyctl(2) with a KeyctlDHParams struct and an output
// buffer (presumably KEYCTL_DH_COMPUTE — confirm against callers).
func keyctlDH(cmd int, arg2 *KeyctlDHParams, buf []byte) (ret int, err error) {
	var _p0 unsafe.Pointer
	if len(buf) > 0 {
		_p0 = unsafe.Pointer(&buf[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall6(SYS_KEYCTL, uintptr(cmd), uintptr(unsafe.Pointer(arg2)), uintptr(_p0), uintptr(len(buf)), 0, 0)
	ret = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// ptrace invokes ptrace(2) with raw addr/data words.
func ptrace(request int, pid int, addr uintptr, data uintptr) (err error) {
	_, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// reboot invokes reboot(2); arg is passed as a NUL-terminated string.
func reboot(magic1 uint, magic2 uint, cmd int, arg string) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(arg)
	if err != nil {
		return
	}
	_, _, e1 := Syscall6(SYS_REBOOT, uintptr(magic1), uintptr(magic2), uintptr(cmd), uintptr(unsafe.Pointer(_p0)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// mount invokes mount(2); source/target/fstype are converted to
// NUL-terminated strings, data is passed through as a raw byte pointer.
func mount(source string, target string, fstype string, flags uintptr, data *byte) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(source)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(target)
	if err != nil {
		return
	}
	var _p2 *byte
	_p2, err = BytePtrFromString(fstype)
	if err != nil {
		return
	}
	_, _, e1 := Syscall6(SYS_MOUNT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(unsafe.Pointer(_p2)), uintptr(flags), uintptr(unsafe.Pointer(data)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Acct invokes acct(2) on the given accounting file path.
func Acct(path string) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_ACCT, uintptr(unsafe.Pointer(_p0)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// AddKey invokes add_key(2), installing payload under keyType/description in
// the keyring ringid; returns the new key's serial id.
func AddKey(keyType string, description string, payload []byte, ringid int) (id int, err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(keyType)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(description)
	if err != nil {
		return
	}
	var _p2 unsafe.Pointer
	if len(payload) > 0 {
		_p2 = unsafe.Pointer(&payload[0])
	} else {
		_p2 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall6(SYS_ADD_KEY, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(payload)), uintptr(ringid), 0)
	id = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Adjtimex invokes adjtimex(2); returns the kernel clock state value.
func Adjtimex(buf *Timex) (state int, err error) {
	r0, _, e1 := Syscall(SYS_ADJTIMEX, uintptr(unsafe.Pointer(buf)), 0, 0)
	state = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Chdir invokes chdir(2).
func Chdir(path string) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Chroot invokes chroot(2).
func Chroot(path string) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_CHROOT, uintptr(unsafe.Pointer(_p0)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// ClockGettime invokes clock_gettime(2), filling *time for clockid.
func ClockGettime(clockid int32, time *Timespec) (err error) {
	_, _, e1 := Syscall(SYS_CLOCK_GETTIME, uintptr(clockid), uintptr(unsafe.Pointer(time)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Close invokes close(2).
func Close(fd int) (err error) {
	_, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// CopyFileRange invokes copy_file_range(2); returns bytes copied.
func CopyFileRange(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n int, err error) {
	r0, _, e1 := Syscall6(SYS_COPY_FILE_RANGE, uintptr(rfd), uintptr(unsafe.Pointer(roff)), uintptr(wfd), uintptr(unsafe.Pointer(woff)), uintptr(len), uintptr(flags))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Dup invokes dup(2); returns the new descriptor.
func Dup(oldfd int) (fd int, err error) {
	r0, _, e1 := Syscall(SYS_DUP, uintptr(oldfd), 0, 0)
	fd = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Dup3 invokes dup3(2).
func Dup3(oldfd int, newfd int, flags int) (err error) {
	_, _, e1 := Syscall(SYS_DUP3, uintptr(oldfd), uintptr(newfd), uintptr(flags))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// EpollCreate1 invokes epoll_create1(2) via RawSyscall (no scheduler
// interaction needed for this non-blocking call).
func EpollCreate1(flag int) (fd int, err error) {
	r0, _, e1 := RawSyscall(SYS_EPOLL_CREATE1, uintptr(flag), 0, 0)
	fd = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// EpollCtl invokes epoll_ctl(2) to add/modify/delete fd on epfd.
func EpollCtl(epfd int, op int, fd int, event *EpollEvent) (err error) {
	_, _, e1 := RawSyscall6(SYS_EPOLL_CTL, uintptr(epfd), uintptr(op), uintptr(fd), uintptr(unsafe.Pointer(event)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Eventfd is implemented via eventfd2(2) (note: SYS_EVENTFD2, so flags are
// honored on all kernels that provide it).
func Eventfd(initval uint, flags int) (fd int, err error) {
	r0, _, e1 := Syscall(SYS_EVENTFD2, uintptr(initval), uintptr(flags), 0)
	fd = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Exit terminates all threads via exit_group(2); it cannot fail, hence
// SyscallNoError and no error return.
func Exit(code int) {
	SyscallNoError(SYS_EXIT_GROUP, uintptr(code), 0, 0)
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Fallocate invokes fallocate(2).
func Fallocate(fd int, mode uint32, off int64, len int64) (err error) {
	_, _, e1 := Syscall6(SYS_FALLOCATE, uintptr(fd), uintptr(mode), uintptr(off), uintptr(len), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Fchdir invokes fchdir(2).
func Fchdir(fd int) (err error) {
	_, _, e1 := Syscall(SYS_FCHDIR, uintptr(fd), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Fchmod invokes fchmod(2).
func Fchmod(fd int, mode uint32) (err error) {
	_, _, e1 := Syscall(SYS_FCHMOD, uintptr(fd), uintptr(mode), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Fchownat invokes fchownat(2) on path relative to dirfd.
func Fchownat(dirfd int, path string, uid int, gid int, flags int) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall6(SYS_FCHOWNAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid), uintptr(flags), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// fcntl invokes fcntl(2); returns the command-dependent result value.
func fcntl(fd int, cmd int, arg int) (val int, err error) {
	r0, _, e1 := Syscall(SYS_FCNTL, uintptr(fd), uintptr(cmd), uintptr(arg))
	val = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Fdatasync invokes fdatasync(2).
func Fdatasync(fd int) (err error) {
	_, _, e1 := Syscall(SYS_FDATASYNC, uintptr(fd), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Fgetxattr invokes fgetxattr(2); returns the attribute value size.
func Fgetxattr(fd int, attr string, dest []byte) (sz int, err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(attr)
	if err != nil {
		return
	}
	var _p1 unsafe.Pointer
	if len(dest) > 0 {
		_p1 = unsafe.Pointer(&dest[0])
	} else {
		_p1 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall6(SYS_FGETXATTR, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(dest)), 0, 0)
	sz = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Flistxattr invokes flistxattr(2); returns the size of the name list.
func Flistxattr(fd int, dest []byte) (sz int, err error) {
	var _p0 unsafe.Pointer
	if len(dest) > 0 {
		_p0 = unsafe.Pointer(&dest[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall(SYS_FLISTXATTR, uintptr(fd), uintptr(_p0), uintptr(len(dest)))
	sz = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Flock invokes flock(2).
func Flock(fd int, how int) (err error) {
	_, _, e1 := Syscall(SYS_FLOCK, uintptr(fd), uintptr(how), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Fremovexattr invokes fremovexattr(2).
func Fremovexattr(fd int, attr string) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(attr)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_FREMOVEXATTR, uintptr(fd), uintptr(unsafe.Pointer(_p0)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Fsetxattr invokes fsetxattr(2) with dest as the attribute value.
func Fsetxattr(fd int, attr string, dest []byte, flags int) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(attr)
	if err != nil {
		return
	}
	var _p1 unsafe.Pointer
	if len(dest) > 0 {
		_p1 = unsafe.Pointer(&dest[0])
	} else {
		_p1 = unsafe.Pointer(&_zero)
	}
	_, _, e1 := Syscall6(SYS_FSETXATTR, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(dest)), uintptr(flags), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Fsync invokes fsync(2).
func Fsync(fd int) (err error) {
	_, _, e1 := Syscall(SYS_FSYNC, uintptr(fd), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getdents is implemented via getdents64(2) (note: SYS_GETDENTS64, so buf
// receives 64-bit dirent records).
func Getdents(fd int, buf []byte) (n int, err error) {
	var _p0 unsafe.Pointer
	if len(buf) > 0 {
		_p0 = unsafe.Pointer(&buf[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall(SYS_GETDENTS64, uintptr(fd), uintptr(_p0), uintptr(len(buf)))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getpgid invokes getpgid(2).
func Getpgid(pid int) (pgid int, err error) {
	r0, _, e1 := RawSyscall(SYS_GETPGID, uintptr(pid), 0, 0)
	pgid = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getpid invokes getpid(2); it cannot fail.
func Getpid() (pid int) {
	r0, _ := RawSyscallNoError(SYS_GETPID, 0, 0, 0)
	pid = int(r0)
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getppid invokes getppid(2); it cannot fail.
func Getppid() (ppid int) {
	r0, _ := RawSyscallNoError(SYS_GETPPID, 0, 0, 0)
	ppid = int(r0)
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getpriority invokes getpriority(2).
func Getpriority(which int, who int) (prio int, err error) {
	r0, _, e1 := Syscall(SYS_GETPRIORITY, uintptr(which), uintptr(who), 0)
	prio = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getrandom invokes getrandom(2); returns the number of bytes filled.
func Getrandom(buf []byte, flags int) (n int, err error) {
	var _p0 unsafe.Pointer
	if len(buf) > 0 {
		_p0 = unsafe.Pointer(&buf[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall(SYS_GETRANDOM, uintptr(_p0), uintptr(len(buf)), uintptr(flags))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getrusage invokes getrusage(2).
func Getrusage(who int, rusage *Rusage) (err error) {
	_, _, e1 := RawSyscall(SYS_GETRUSAGE, uintptr(who), uintptr(unsafe.Pointer(rusage)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getsid invokes getsid(2).
func Getsid(pid int) (sid int, err error) {
	r0, _, e1 := RawSyscall(SYS_GETSID, uintptr(pid), 0, 0)
	sid = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Gettid invokes gettid(2); it cannot fail.
func Gettid() (tid int) {
	r0, _ := RawSyscallNoError(SYS_GETTID, 0, 0, 0)
	tid = int(r0)
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getxattr invokes getxattr(2); returns the attribute value size.
func Getxattr(path string, attr string, dest []byte) (sz int, err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(attr)
	if err != nil {
		return
	}
	var _p2 unsafe.Pointer
	if len(dest) > 0 {
		_p2 = unsafe.Pointer(&dest[0])
	} else {
		_p2 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall6(SYS_GETXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(dest)), 0, 0)
	sz = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// InotifyAddWatch invokes inotify_add_watch(2); returns the watch descriptor.
func InotifyAddWatch(fd int, pathname string, mask uint32) (watchdesc int, err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(pathname)
	if err != nil {
		return
	}
	r0, _, e1 := Syscall(SYS_INOTIFY_ADD_WATCH, uintptr(fd), uintptr(unsafe.Pointer(_p0)), uintptr(mask))
	watchdesc = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// InotifyInit1 invokes inotify_init1(2).
func InotifyInit1(flags int) (fd int, err error) {
	r0, _, e1 := RawSyscall(SYS_INOTIFY_INIT1, uintptr(flags), 0, 0)
	fd = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// InotifyRmWatch invokes inotify_rm_watch(2).
func InotifyRmWatch(fd int, watchdesc uint32) (success int, err error) {
	r0, _, e1 := RawSyscall(SYS_INOTIFY_RM_WATCH, uintptr(fd), uintptr(watchdesc), 0)
	success = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Kill invokes kill(2).
func Kill(pid int, sig syscall.Signal) (err error) {
	_, _, e1 := RawSyscall(SYS_KILL, uintptr(pid), uintptr(sig), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Klogctl invokes syslog(2) (the kernel log control call, SYS_SYSLOG).
func Klogctl(typ int, buf []byte) (n int, err error) {
	var _p0 unsafe.Pointer
	if len(buf) > 0 {
		_p0 = unsafe.Pointer(&buf[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall(SYS_SYSLOG, uintptr(typ), uintptr(_p0), uintptr(len(buf)))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Lgetxattr invokes lgetxattr(2) (does not follow symlinks).
func Lgetxattr(path string, attr string, dest []byte) (sz int, err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(attr)
	if err != nil {
		return
	}
	var _p2 unsafe.Pointer
	if len(dest) > 0 {
		_p2 = unsafe.Pointer(&dest[0])
	} else {
		_p2 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall6(SYS_LGETXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(dest)), 0, 0)
	sz = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Listxattr invokes listxattr(2); returns the size of the name list.
func Listxattr(path string, dest []byte) (sz int, err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	var _p1 unsafe.Pointer
	if len(dest) > 0 {
		_p1 = unsafe.Pointer(&dest[0])
	} else {
		_p1 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall(SYS_LISTXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(dest)))
	sz = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Llistxattr invokes llistxattr(2) (does not follow symlinks).
func Llistxattr(path string, dest []byte) (sz int, err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	var _p1 unsafe.Pointer
	if len(dest) > 0 {
		_p1 = unsafe.Pointer(&dest[0])
	} else {
		_p1 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall(SYS_LLISTXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(dest)))
	sz = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Lremovexattr invokes lremovexattr(2) (does not follow symlinks).
func Lremovexattr(path string, attr string) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(attr)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_LREMOVEXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Lsetxattr invokes lsetxattr(2) (does not follow symlinks).
func Lsetxattr(path string, attr string, data []byte, flags int) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(attr)
	if err != nil {
		return
	}
	var _p2 unsafe.Pointer
	if len(data) > 0 {
		_p2 = unsafe.Pointer(&data[0])
	} else {
		_p2 = unsafe.Pointer(&_zero)
	}
	_, _, e1 := Syscall6(SYS_LSETXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(data)), uintptr(flags), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// MemfdCreate invokes memfd_create(2); returns the anonymous file fd.
func MemfdCreate(name string, flags int) (fd int, err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(name)
	if err != nil {
		return
	}
	r0, _, e1 := Syscall(SYS_MEMFD_CREATE, uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0)
	fd = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Mkdirat invokes mkdirat(2).
func Mkdirat(dirfd int, path string, mode uint32) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_MKDIRAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Mknodat invokes mknodat(2).
func Mknodat(dirfd int, path string, mode uint32, dev int) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall6(SYS_MKNODAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(dev), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Nanosleep invokes nanosleep(2); leftover receives remaining time if
// interrupted.
func Nanosleep(time *Timespec, leftover *Timespec) (err error) {
	_, _, e1 := Syscall(SYS_NANOSLEEP, uintptr(unsafe.Pointer(time)), uintptr(unsafe.Pointer(leftover)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// PerfEventOpen invokes perf_event_open(2); returns the event fd.
func PerfEventOpen(attr *PerfEventAttr, pid int, cpu int, groupFd int, flags int) (fd int, err error) {
	r0, _, e1 := Syscall6(SYS_PERF_EVENT_OPEN, uintptr(unsafe.Pointer(attr)), uintptr(pid), uintptr(cpu), uintptr(groupFd), uintptr(flags), 0)
	fd = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// PivotRoot invokes pivot_root(2).
func PivotRoot(newroot string, putold string) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(newroot)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(putold)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_PIVOT_ROOT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// prlimit invokes prlimit64(2): sets *newlimit (if non-nil) and reads the
// previous limit into *old.
func prlimit(pid int, resource int, newlimit *Rlimit, old *Rlimit) (err error) {
	_, _, e1 := RawSyscall6(SYS_PRLIMIT64, uintptr(pid), uintptr(resource), uintptr(unsafe.Pointer(newlimit)), uintptr(unsafe.Pointer(old)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Prctl invokes prctl(2) with raw word arguments.
func Prctl(option int, arg2 uintptr, arg3 uintptr, arg4 uintptr, arg5 uintptr) (err error) {
	_, _, e1 := Syscall6(SYS_PRCTL, uintptr(option), uintptr(arg2), uintptr(arg3), uintptr(arg4), uintptr(arg5), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Pselect invokes pselect6(2); returns the number of ready descriptors.
func Pselect(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timespec, sigmask *Sigset_t) (n int, err error) {
	r0, _, e1 := Syscall6(SYS_PSELECT6, uintptr(nfd), uintptr(unsafe.Pointer(r)), uintptr(unsafe.Pointer(w)), uintptr(unsafe.Pointer(e)), uintptr(unsafe.Pointer(timeout)), uintptr(unsafe.Pointer(sigmask)))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// read invokes read(2); returns bytes read into p.
func read(fd int, p []byte) (n int, err error) {
	var _p0 unsafe.Pointer
	if len(p) > 0 {
		_p0 = unsafe.Pointer(&p[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(_p0), uintptr(len(p)))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Removexattr invokes removexattr(2).
func Removexattr(path string, attr string) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(attr)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_REMOVEXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Renameat invokes renameat(2).
func Renameat(olddirfd int, oldpath string, newdirfd int, newpath string) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(oldpath)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(newpath)
	if err != nil {
		return
	}
	_, _, e1 := Syscall6(SYS_RENAMEAT, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Renameat2 invokes renameat2(2) (renameat with exchange/noreplace flags).
func Renameat2(olddirfd int, oldpath string, newdirfd int, newpath string, flags uint) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(oldpath)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(newpath)
	if err != nil {
		return
	}
	_, _, e1 := Syscall6(SYS_RENAMEAT2, uintptr(olddirfd), uintptr(unsafe.Pointer(_p0)), uintptr(newdirfd), uintptr(unsafe.Pointer(_p1)), uintptr(flags), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// RequestKey invokes request_key(2); returns the found/instantiated key id.
func RequestKey(keyType string, description string, callback string, destRingid int) (id int, err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(keyType)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(description)
	if err != nil {
		return
	}
	var _p2 *byte
	_p2, err = BytePtrFromString(callback)
	if err != nil {
		return
	}
	r0, _, e1 := Syscall6(SYS_REQUEST_KEY, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(unsafe.Pointer(_p2)), uintptr(destRingid), 0, 0)
	id = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setdomainname invokes setdomainname(2).
func Setdomainname(p []byte) (err error) {
	var _p0 unsafe.Pointer
	if len(p) > 0 {
		_p0 = unsafe.Pointer(&p[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	_, _, e1 := Syscall(SYS_SETDOMAINNAME, uintptr(_p0), uintptr(len(p)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Sethostname invokes sethostname(2).
func Sethostname(p []byte) (err error) {
	var _p0 unsafe.Pointer
	if len(p) > 0 {
		_p0 = unsafe.Pointer(&p[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	_, _, e1 := Syscall(SYS_SETHOSTNAME, uintptr(_p0), uintptr(len(p)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setpgid invokes setpgid(2).
func Setpgid(pid int, pgid int) (err error) {
	_, _, e1 := RawSyscall(SYS_SETPGID, uintptr(pid), uintptr(pgid), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setsid invokes setsid(2); returns the new session id.
func Setsid() (pid int, err error) {
	r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0)
	pid = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Settimeofday invokes settimeofday(2) (timezone argument omitted).
func Settimeofday(tv *Timeval) (err error) {
	_, _, e1 := RawSyscall(SYS_SETTIMEOFDAY, uintptr(unsafe.Pointer(tv)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setns invokes setns(2), moving the caller into the namespace held by fd.
func Setns(fd int, nstype int) (err error) {
	_, _, e1 := Syscall(SYS_SETNS, uintptr(fd), uintptr(nstype), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setpriority invokes setpriority(2).
func Setpriority(which int, who int, prio int) (err error) {
	_, _, e1 := Syscall(SYS_SETPRIORITY, uintptr(which), uintptr(who), uintptr(prio))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setxattr invokes setxattr(2).
func Setxattr(path string, attr string, data []byte, flags int) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	var _p1 *byte
	_p1, err = BytePtrFromString(attr)
	if err != nil {
		return
	}
	var _p2 unsafe.Pointer
	if len(data) > 0 {
		_p2 = unsafe.Pointer(&data[0])
	} else {
		_p2 = unsafe.Pointer(&_zero)
	}
	_, _, e1 := Syscall6(SYS_SETXATTR, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(_p2), uintptr(len(data)), uintptr(flags), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Statx invokes statx(2), filling *stat for path relative to dirfd.
func Statx(dirfd int, path string, flags int, mask int, stat *Statx_t) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall6(SYS_STATX, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(flags), uintptr(mask), uintptr(unsafe.Pointer(stat)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Sync invokes sync(2); it cannot fail.
func Sync() {
	SyscallNoError(SYS_SYNC, 0, 0, 0)
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Syncfs invokes syncfs(2).
func Syncfs(fd int) (err error) {
	_, _, e1 := Syscall(SYS_SYNCFS, uintptr(fd), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Sysinfo invokes sysinfo(2).
func Sysinfo(info *Sysinfo_t) (err error) {
	_, _, e1 := RawSyscall(SYS_SYSINFO, uintptr(unsafe.Pointer(info)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Tee invokes tee(2); returns bytes duplicated between the pipes.
func Tee(rfd int, wfd int, len int, flags int) (n int64, err error) {
	r0, _, e1 := Syscall6(SYS_TEE, uintptr(rfd), uintptr(wfd), uintptr(len), uintptr(flags), 0, 0)
	n = int64(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Tgkill invokes tgkill(2), signalling tid within thread group tgid.
func Tgkill(tgid int, tid int, sig syscall.Signal) (err error) {
	_, _, e1 := RawSyscall(SYS_TGKILL, uintptr(tgid), uintptr(tid), uintptr(sig))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Times invokes times(2); returns the clock-tick counter.
func Times(tms *Tms) (ticks uintptr, err error) {
	r0, _, e1 := RawSyscall(SYS_TIMES, uintptr(unsafe.Pointer(tms)), 0, 0)
	ticks = uintptr(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Umask invokes umask(2); it cannot fail and returns the previous mask.
func Umask(mask int) (oldmask int) {
	r0, _ := RawSyscallNoError(SYS_UMASK, uintptr(mask), 0, 0)
	oldmask = int(r0)
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Uname invokes uname(2).
func Uname(buf *Utsname) (err error) {
	_, _, e1 := RawSyscall(SYS_UNAME, uintptr(unsafe.Pointer(buf)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Unmount is implemented via umount2(2) (note: SYS_UMOUNT2, so flags such
// as detach/force are honored).
func Unmount(target string, flags int) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(target)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_UMOUNT2, uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Unshare invokes unshare(2).
func Unshare(flags int) (err error) {
	_, _, e1 := Syscall(SYS_UNSHARE, uintptr(flags), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// write invokes write(2); returns bytes written from p.
func write(fd int, p []byte) (n int, err error) {
	var _p0 unsafe.Pointer
	if len(p) > 0 {
		_p0 = unsafe.Pointer(&p[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// exitThread invokes exit(2) (terminates the calling thread only, unlike
// Exit's exit_group).
func exitThread(code int) (err error) {
	_, _, e1 := Syscall(SYS_EXIT, uintptr(code), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// readlen invokes read(2) with an explicit pointer/length pair.
func readlen(fd int, p *byte, np int) (n int, err error) {
	r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(unsafe.Pointer(p)), uintptr(np))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// writelen invokes write(2) with an explicit pointer/length pair.
func writelen(fd int, p *byte, np int) (n int, err error) {
	r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(unsafe.Pointer(p)), uintptr(np))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// munmap invokes munmap(2) on the raw address range.
func munmap(addr uintptr, length uintptr) (err error) {
	_, _, e1 := Syscall(SYS_MUNMAP, uintptr(addr), uintptr(length), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Madvise invokes madvise(2) over the memory backing b.
func Madvise(b []byte, advice int) (err error) {
	var _p0 unsafe.Pointer
	if len(b) > 0 {
		_p0 = unsafe.Pointer(&b[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	_, _, e1 := Syscall(SYS_MADVISE, uintptr(_p0), uintptr(len(b)), uintptr(advice))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Mprotect invokes mprotect(2) over the memory backing b.
func Mprotect(b []byte, prot int) (err error) {
	var _p0 unsafe.Pointer
	if len(b) > 0 {
		_p0 = unsafe.Pointer(&b[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	_, _, e1 := Syscall(SYS_MPROTECT, uintptr(_p0), uintptr(len(b)), uintptr(prot))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Mlock invokes mlock(2) over the memory backing b.
func Mlock(b []byte) (err error) {
	var _p0 unsafe.Pointer
	if len(b) > 0 {
		_p0 = unsafe.Pointer(&b[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	_, _, e1 := Syscall(SYS_MLOCK, uintptr(_p0), uintptr(len(b)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Mlockall invokes mlockall(2).
func Mlockall(flags int) (err error) {
	_, _, e1 := Syscall(SYS_MLOCKALL, uintptr(flags), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Msync invokes msync(2) over the memory backing b.
func Msync(b []byte, flags int) (err error) {
	var _p0 unsafe.Pointer
	if len(b) > 0 {
		_p0 = unsafe.Pointer(&b[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	_, _, e1 := Syscall(SYS_MSYNC, uintptr(_p0), uintptr(len(b)), uintptr(flags))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Munlock invokes munlock(2) over the memory backing b.
func Munlock(b []byte) (err error) {
	var _p0 unsafe.Pointer
	if len(b) > 0 {
		_p0 = unsafe.Pointer(&b[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	_, _, e1 := Syscall(SYS_MUNLOCK, uintptr(_p0), uintptr(len(b)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Munlockall invokes munlockall(2).
func Munlockall() (err error) {
	_, _, e1 := Syscall(SYS_MUNLOCKALL, 0, 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// faccessat invokes faccessat(2) (no flags argument at this syscall level).
func faccessat(dirfd int, path string, mode uint32) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_FACCESSAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(mode))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Dup2 invokes dup2(2).
func Dup2(oldfd int, newfd int) (err error) {
	_, _, e1 := Syscall(SYS_DUP2, uintptr(oldfd), uintptr(newfd), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// EpollCreate invokes epoll_create(2).
func EpollCreate(size int) (fd int, err error) {
	r0, _, e1 := RawSyscall(SYS_EPOLL_CREATE, uintptr(size), 0, 0)
	fd = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// EpollWait invokes epoll_wait(2); returns the number of ready events
// written into events.
func EpollWait(epfd int, events []EpollEvent, msec int) (n int, err error) {
	var _p0 unsafe.Pointer
	if len(events) > 0 {
		_p0 = unsafe.Pointer(&events[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall6(SYS_EPOLL_WAIT, uintptr(epfd), uintptr(_p0), uintptr(len(events)), uintptr(msec), 0, 0)
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Fadvise is implemented via fadvise64(2) (SYS_FADVISE64).
func Fadvise(fd int, offset int64, length int64, advice int) (err error) {
	_, _, e1 := Syscall6(SYS_FADVISE64, uintptr(fd), uintptr(offset), uintptr(length), uintptr(advice), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fchown(fd int, uid int, gid int) (err error) {
_, _, e1 := Syscall(SYS_FCHOWN, uintptr(fd), uintptr(uid), uintptr(gid))
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fstat(fd int, stat *Stat_t) (err error) {
_, _, e1 := Syscall(SYS_FSTAT, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fstatat(dirfd int, path string, stat *Stat_t, flags int) (err error) {
var _p0 *byte
_p0, err = BytePtrFromString(path)
if err != nil {
return
}
_, _, e1 := Syscall6(SYS_NEWFSTATAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), uintptr(flags), 0, 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Fstatfs(fd int, buf *Statfs_t) (err error) {
_, _, e1 := Syscall(SYS_FSTATFS, uintptr(fd), uintptr(unsafe.Pointer(buf)), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
func Ftruncate(fd int, length int64) (err error) {
_, _, e1 := Syscall(SYS_FTRUNCATE, uintptr(fd), uintptr(length), 0)
if e1 != 0 {
err = errnoErr(e1)
}
return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getegid wraps getegid(2); it cannot fail, hence the error-free RawSyscallNoError.
func Getegid() (egid int) {
	r0, _ := RawSyscallNoError(SYS_GETEGID, 0, 0, 0)
	egid = int(r0)
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Geteuid wraps geteuid(2); it cannot fail.
func Geteuid() (euid int) {
	r0, _ := RawSyscallNoError(SYS_GETEUID, 0, 0, 0)
	euid = int(r0)
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getgid wraps getgid(2); it cannot fail.
func Getgid() (gid int) {
	r0, _ := RawSyscallNoError(SYS_GETGID, 0, 0, 0)
	gid = int(r0)
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getrlimit wraps getrlimit(2): reads the limit for resource into rlim.
func Getrlimit(resource int, rlim *Rlimit) (err error) {
	_, _, e1 := RawSyscall(SYS_GETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Getuid wraps getuid(2); it cannot fail.
func Getuid() (uid int) {
	r0, _ := RawSyscallNoError(SYS_GETUID, 0, 0, 0)
	uid = int(r0)
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// InotifyInit wraps inotify_init(2): returns a new inotify instance descriptor.
func InotifyInit() (fd int, err error) {
	r0, _, e1 := RawSyscall(SYS_INOTIFY_INIT, 0, 0, 0)
	fd = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Lchown wraps lchown(2): like chown but does not follow a trailing symlink.
func Lchown(path string, uid int, gid int) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_LCHOWN, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Lstat wraps lstat(2): stats path without following a trailing symlink.
func Lstat(path string, stat *Stat_t) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_LSTAT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Pause wraps pause(2): sleeps until a signal is delivered.
func Pause() (err error) {
	_, _, e1 := Syscall(SYS_PAUSE, 0, 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Pread wraps pread64(2): reads into p from the given file offset without
// moving the file position.
func Pread(fd int, p []byte, offset int64) (n int, err error) {
	var _p0 unsafe.Pointer
	if len(p) > 0 {
		_p0 = unsafe.Pointer(&p[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall6(SYS_PREAD64, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), 0, 0)
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Pwrite wraps pwrite64(2): writes p at the given file offset without moving
// the file position.
func Pwrite(fd int, p []byte, offset int64) (n int, err error) {
	var _p0 unsafe.Pointer
	if len(p) > 0 {
		_p0 = unsafe.Pointer(&p[0])
	} else {
		_p0 = unsafe.Pointer(&_zero)
	}
	r0, _, e1 := Syscall6(SYS_PWRITE64, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), 0, 0)
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Seek wraps lseek(2): repositions the file offset and returns the new offset.
func Seek(fd int, offset int64, whence int) (off int64, err error) {
	r0, _, e1 := Syscall(SYS_LSEEK, uintptr(fd), uintptr(offset), uintptr(whence))
	off = int64(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Select wraps select(2): waits until any descriptor in the given sets is ready.
func Select(nfd int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (n int, err error) {
	r0, _, e1 := Syscall6(SYS_SELECT, uintptr(nfd), uintptr(unsafe.Pointer(r)), uintptr(unsafe.Pointer(w)), uintptr(unsafe.Pointer(e)), uintptr(unsafe.Pointer(timeout)), 0)
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// sendfile wraps sendfile(2): copies up to count bytes from infd to outfd in-kernel.
func sendfile(outfd int, infd int, offset *int64, count int) (written int, err error) {
	r0, _, e1 := Syscall6(SYS_SENDFILE, uintptr(outfd), uintptr(infd), uintptr(unsafe.Pointer(offset)), uintptr(count), 0, 0)
	written = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setfsgid wraps setfsgid(2): sets the group ID used for filesystem access checks.
func Setfsgid(gid int) (err error) {
	_, _, e1 := Syscall(SYS_SETFSGID, uintptr(gid), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setfsuid wraps setfsuid(2): sets the user ID used for filesystem access checks.
func Setfsuid(uid int) (err error) {
	_, _, e1 := Syscall(SYS_SETFSUID, uintptr(uid), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setregid wraps setregid(2): sets real and effective group IDs.
func Setregid(rgid int, egid int) (err error) {
	_, _, e1 := RawSyscall(SYS_SETREGID, uintptr(rgid), uintptr(egid), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setresgid wraps setresgid(2): sets real, effective and saved group IDs.
func Setresgid(rgid int, egid int, sgid int) (err error) {
	_, _, e1 := RawSyscall(SYS_SETRESGID, uintptr(rgid), uintptr(egid), uintptr(sgid))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setresuid wraps setresuid(2): sets real, effective and saved user IDs.
func Setresuid(ruid int, euid int, suid int) (err error) {
	_, _, e1 := RawSyscall(SYS_SETRESUID, uintptr(ruid), uintptr(euid), uintptr(suid))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setrlimit wraps setrlimit(2): sets the limit for resource from rlim.
func Setrlimit(resource int, rlim *Rlimit) (err error) {
	_, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(resource), uintptr(unsafe.Pointer(rlim)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Setreuid wraps setreuid(2): sets real and effective user IDs.
func Setreuid(ruid int, euid int) (err error) {
	_, _, e1 := RawSyscall(SYS_SETREUID, uintptr(ruid), uintptr(euid), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Splice wraps splice(2): moves up to len bytes between two descriptors,
// at least one of which must be a pipe.
func Splice(rfd int, roff *int64, wfd int, woff *int64, len int, flags int) (n int64, err error) {
	r0, _, e1 := Syscall6(SYS_SPLICE, uintptr(rfd), uintptr(unsafe.Pointer(roff)), uintptr(wfd), uintptr(unsafe.Pointer(woff)), uintptr(len), uintptr(flags))
	n = int64(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Stat wraps stat(2): fills stat with metadata for path (follows symlinks).
func Stat(path string, stat *Stat_t) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_STAT, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Statfs wraps statfs(2): fills buf with statistics for the filesystem containing path.
func Statfs(path string, buf *Statfs_t) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_STATFS, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(buf)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// SyncFileRange wraps sync_file_range(2): syncs a byte range of fd to disk.
func SyncFileRange(fd int, off int64, n int64, flags int) (err error) {
	_, _, e1 := Syscall6(SYS_SYNC_FILE_RANGE, uintptr(fd), uintptr(off), uintptr(n), uintptr(flags), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Truncate wraps truncate(2): resizes the file at path to length bytes.
func Truncate(path string, length int64) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_TRUNCATE, uintptr(unsafe.Pointer(_p0)), uintptr(length), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Ustat wraps ustat(2): returns statistics for the mounted filesystem identified by dev.
func Ustat(dev int, ubuf *Ustat_t) (err error) {
	_, _, e1 := Syscall(SYS_USTAT, uintptr(dev), uintptr(unsafe.Pointer(ubuf)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// getgroups wraps getgroups(2): fills list with up to n supplementary group IDs.
func getgroups(n int, list *_Gid_t) (nn int, err error) {
	r0, _, e1 := RawSyscall(SYS_GETGROUPS, uintptr(n), uintptr(unsafe.Pointer(list)), 0)
	nn = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// setgroups wraps setgroups(2): sets the supplementary group ID list.
func setgroups(n int, list *_Gid_t) (err error) {
	_, _, e1 := RawSyscall(SYS_SETGROUPS, uintptr(n), uintptr(unsafe.Pointer(list)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// futimesat wraps futimesat(2): sets access/modification times of path relative to dirfd.
func futimesat(dirfd int, path string, times *[2]Timeval) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_FUTIMESAT, uintptr(dirfd), uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)))
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Gettimeofday wraps gettimeofday(2): fills tv with the current wall-clock time.
func Gettimeofday(tv *Timeval) (err error) {
	_, _, e1 := RawSyscall(SYS_GETTIMEOFDAY, uintptr(unsafe.Pointer(tv)), 0, 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// Utime wraps utime(2): sets access/modification times of path from buf.
func Utime(path string, buf *Utimbuf) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_UTIME, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(buf)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// utimes wraps utimes(2): sets access/modification times of path from times.
func utimes(path string, times *[2]Timeval) (err error) {
	var _p0 *byte
	_p0, err = BytePtrFromString(path)
	if err != nil {
		return
	}
	_, _, e1 := Syscall(SYS_UTIMES, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(times)), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// pipe2 wraps pipe2(2): creates a pipe, storing the two descriptors in p.
func pipe2(p *[2]_C_int, flags int) (err error) {
	_, _, e1 := RawSyscall(SYS_PIPE2, uintptr(unsafe.Pointer(p)), uintptr(flags), 0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}

// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT

// poll wraps poll(2): waits up to timeout milliseconds for events on nfds descriptors.
func poll(fds *PollFd, nfds int, timeout int) (n int, err error) {
	r0, _, e1 := Syscall(SYS_POLL, uintptr(unsafe.Pointer(fds)), uintptr(nfds), uintptr(timeout))
	n = int(r0)
	if e1 != 0 {
		err = errnoErr(e1)
	}
	return
}
| laijs/moby | vendor/golang.org/x/sys/unix/zsyscall_linux_s390x.go | GO | apache-2.0 | 50,840 |
/**
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.bitcoin.core;
import com.google.bitcoin.store.BlockStore;
import com.google.bitcoin.store.BlockStoreException;
import com.google.bitcoin.store.FullPrunedBlockStore;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.BufferedInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Map;
import java.util.TreeMap;
import static com.google.common.base.Preconditions.*;
/**
* <p>Vends hard-coded {@link StoredBlock}s for blocks throughout the chain. Checkpoints serve two purposes:</p>
* <ol>
* <li>They act as a safety mechanism against huge re-orgs that could rewrite large chunks of history, thus
* constraining the block chain to be a consensus mechanism only for recent parts of the timeline.</li>
* <li>They allow synchronization to the head of the chain for new wallets/users much faster than syncing all
* headers from the genesis block.</li>
* </ol>
*
* <p>Checkpoints are used by the SPV {@link BlockChain} to initialize fresh
* {@link com.google.bitcoin.store.SPVBlockStore}s. They are not used by fully validating mode, which instead has a
* different concept of checkpoints that are used to hard-code the validity of blocks that violate BIP30 (duplicate
* coinbase transactions). Those "checkpoints" can be found in NetworkParameters.</p>
*
* <p>The file format consists of the string "CHECKPOINTS 1", followed by a uint32 containing the number of signatures
* to read. The value may not be larger than 256 (so it could have been a byte but isn't for historical reasons).
* If the number of signatures is larger than zero, each 65 byte ECDSA secp256k1 signature then follows. The signatures
* sign the hash of all bytes that follow the last signature.</p>
*
* <p>After the signatures come an int32 containing the number of checkpoints in the file. Then each checkpoint follows
* one after the other. A checkpoint is 12 bytes for the total work done field, 4 bytes for the height, 80 bytes
* for the block header and then 1 zero byte at the end (i.e. number of transactions in the block: always zero).</p>
*/
public class CheckpointManager {
    private static final Logger log = LoggerFactory.getLogger(CheckpointManager.class);

    // The file format limits the signature count to 256 (historical; see class javadoc).
    private static final int MAX_SIGNATURES = 256;

    // Map of block header time (seconds since the epoch) to data. Populated only in the
    // constructor; never mutated afterwards, so reads need no locking.
    protected final TreeMap<Long, StoredBlock> checkpoints = new TreeMap<Long, StoredBlock>();

    protected final NetworkParameters params;
    protected final Sha256Hash dataHash;

    /**
     * Loads checkpoints from the given stream, which must follow the format described in the
     * class documentation. The stream is fully consumed and closed before this returns.
     *
     * @param params network parameters used to deserialize the block headers
     * @param inputStream checkpoint data; consumed and closed by this constructor
     * @throws IOException if the header is wrong, the stream is truncated, or a block
     *         fails to parse
     */
    public CheckpointManager(NetworkParameters params, InputStream inputStream) throws IOException {
        this.params = checkNotNull(params);
        checkNotNull(inputStream);
        DataInputStream dis = null;
        try {
            MessageDigest digest = MessageDigest.getInstance("SHA-256");
            DigestInputStream digestInputStream = new DigestInputStream(inputStream, digest);
            dis = new DataInputStream(digestInputStream);
            // The data hash covers only the bytes after the last signature, so digesting is
            // switched off while the header and signatures are consumed.
            digestInputStream.on(false);
            String magic = "CHECKPOINTS 1";
            byte[] header = new byte[magic.length()];
            dis.readFully(header);
            if (!Arrays.equals(header, magic.getBytes("US-ASCII")))
                throw new IOException("Header bytes did not match expected version");
            int numSignatures = checkPositionIndex(dis.readInt(), MAX_SIGNATURES, "Num signatures out of range");
            for (int i = 0; i < numSignatures; i++) {
                byte[] sig = new byte[65];
                dis.readFully(sig);
                // TODO: Do something with the signature here.
            }
            digestInputStream.on(true);
            int numCheckpoints = dis.readInt();
            checkState(numCheckpoints > 0);
            final int size = StoredBlock.COMPACT_SERIALIZED_SIZE;
            ByteBuffer buffer = ByteBuffer.allocate(size);
            for (int i = 0; i < numCheckpoints; i++) {
                // Bug fix: the previous code used dis.read(...) and treated a short read as
                // corruption, but InputStream.read may legally return fewer bytes than
                // requested without being at end of stream (e.g. on chunked or network
                // sources). readFully blocks until the whole record is available and throws
                // EOFException (a subclass of IOException) on genuine truncation.
                dis.readFully(buffer.array(), 0, size);
                StoredBlock block = StoredBlock.deserializeCompact(params, buffer);
                buffer.position(0);
                checkpoints.put(block.getHeader().getTimeSeconds(), block);
            }
            dataHash = new Sha256Hash(digest.digest());
            log.info("Read {} checkpoints, hash is {}", checkpoints.size(), dataHash);
        } catch (NoSuchAlgorithmException e) {
            throw new RuntimeException(e); // Cannot happen: SHA-256 is a mandatory algorithm.
        } catch (ProtocolException e) {
            throw new IOException(e);
        } finally {
            if (dis != null) dis.close();
            inputStream.close();
        }
    }

    /**
     * Returns a {@link StoredBlock} representing the last checkpoint before the given time, for example, normally
     * you would want to know the checkpoint before the earliest wallet birthday. Falls back to the
     * genesis block when no checkpoint precedes the requested time.
     *
     * @param time seconds since the epoch; must be later than the genesis block's timestamp
     */
    public StoredBlock getCheckpointBefore(long time) {
        try {
            checkArgument(time > params.getGenesisBlock().getTimeSeconds());
            // This is thread safe because the map never changes after creation.
            Map.Entry<Long, StoredBlock> entry = checkpoints.floorEntry(time);
            if (entry != null) return entry.getValue();
            Block genesis = params.getGenesisBlock().cloneAsHeader();
            return new StoredBlock(genesis, genesis.getWork(), 0);
        } catch (VerificationException e) {
            throw new RuntimeException(e); // Cannot happen.
        }
    }

    /** Returns the number of checkpoints that were loaded. */
    public int numCheckpoints() {
        return checkpoints.size();
    }

    /** Returns a hash of the concatenated checkpoint data. */
    public Sha256Hash getDataHash() {
        return dataHash;
    }

    /**
     * <p>Convenience method that creates a CheckpointManager, loads the given data, gets the checkpoint for the given
     * time, then inserts it into the store and sets that to be the chain head. Useful when you have just created
     * a new store from scratch and want to use configure it all in one go.</p>
     *
     * <p>Note that time is adjusted backwards by a week to account for possible clock drift in the block headers.</p>
     */
    public static void checkpoint(NetworkParameters params, InputStream checkpoints, BlockStore store, long time)
            throws IOException, BlockStoreException {
        checkNotNull(params);
        checkNotNull(store);
        checkArgument(!(store instanceof FullPrunedBlockStore), "You cannot use checkpointing with a full store.");
        // Rewind one week of seconds to tolerate clock drift in block header timestamps.
        time -= 86400 * 7;
        BufferedInputStream stream = new BufferedInputStream(checkpoints);
        CheckpointManager manager = new CheckpointManager(params, stream);
        StoredBlock checkpoint = manager.getCheckpointBefore(time);
        store.put(checkpoint);
        store.setChainHead(checkpoint);
    }
}
| hardbitcn/HardbitSafetyCheck | src/com/google/bitcoin/core/CheckpointManager.java | Java | apache-2.0 | 7,782 |
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package sort provides primitives for sorting slices and user-defined
// collections.
package sort
import "math"
// A type, typically a collection, that satisfies sort.Interface can be
// sorted by the routines in this package. The methods require that the
// elements of the collection be enumerated by an integer index.
// The routines here (Sort, IsSorted) operate purely through these three
// methods and never touch the underlying storage directly.
type Interface interface {
	// Len is the number of elements in the collection.
	Len() int
	// Less returns whether the element with index i should sort
	// before the element with index j.
	Less(i, j int) bool
	// Swap swaps the elements with indexes i and j.
	Swap(i, j int)
}
// min returns the smaller of its two integer arguments.
func min(a, b int) int {
	if b < a {
		return b
	}
	return a
}
// insertionSort sorts data[a:b] in place using insertion sort. It performs
// well only on short ranges; quickSort delegates small subranges here.
func insertionSort(data Interface, a, b int) {
	for i := a + 1; i < b; i++ {
		// Walk the element at i leftwards until it is no longer
		// smaller than its predecessor.
		j := i
		for j > a && data.Less(j, j-1) {
			data.Swap(j, j-1)
			j--
		}
	}
}
// siftDown implements the heap property on data[lo, hi).
// first is an offset into the array where the root of the heap lies.
func siftDown(data Interface, lo, hi, first int) {
	root := lo
	for {
		child := 2*root + 1 // left child in the implicit binary max-heap
		if child >= hi {
			break // root is a leaf; nothing left to sift
		}
		if child+1 < hi && data.Less(first+child, first+child+1) {
			child++ // use the larger of the two children
		}
		if !data.Less(first+root, first+child) {
			return // root already >= both children: heap property holds
		}
		data.Swap(first+root, first+child)
		root = child // continue sifting the displaced element down
	}
}
// heapSort sorts data[a:b] in place via an implicit max-heap. It is used by
// quickSort as the fallback once the recursion depth limit is reached.
func heapSort(data Interface, a, b int) {
	first := a
	lo := 0
	hi := b - a
	// Build heap with greatest element at top.
	for i := (hi - 1) / 2; i >= 0; i-- {
		siftDown(data, i, hi, first)
	}
	// Pop elements, largest first, into end of data.
	for i := hi - 1; i >= 0; i-- {
		data.Swap(first, first+i)
		siftDown(data, lo, i, first)
	}
}
// Quicksort, following Bentley and McIlroy,
// ``Engineering a Sort Function,'' SP&E November 1993.
// medianOfThree moves the median of the three values data[a], data[b], data[c] into data[a].
func medianOfThree(data Interface, a, b, c int) {
	// After the three conditional swaps below, the median ends up at m1 == a,
	// which is where doPivot expects its pivot candidate.
	m0 := b
	m1 := a
	m2 := c
	// bubble sort on 3 elements
	if data.Less(m1, m0) {
		data.Swap(m1, m0)
	}
	if data.Less(m2, m1) {
		data.Swap(m2, m1)
	}
	if data.Less(m1, m0) {
		data.Swap(m1, m0)
	}
	// now data[m0] <= data[m1] <= data[m2]
}
// swapRange exchanges the n-element windows that start at indexes a and b.
// The two windows are assumed not to overlap.
func swapRange(data Interface, a, b, n int) {
	for offset := 0; offset < n; offset++ {
		data.Swap(a+offset, b+offset)
	}
}
// doPivot partitions data[lo:hi] around a pivot chosen by median-of-three
// (or Tukey's ninther for large ranges) and returns midlo, midhi such that
// data[lo:midlo] < pivot, data[midlo:midhi] == pivot, data[midhi:hi] > pivot.
func doPivot(data Interface, lo, hi int) (midlo, midhi int) {
	m := lo + (hi-lo)/2 // Written like this to avoid integer overflow.
	if hi-lo > 40 {
		// Tukey's ``Ninther,'' median of three medians of three.
		s := (hi - lo) / 8
		medianOfThree(data, lo, lo+s, lo+2*s)
		medianOfThree(data, m, m-s, m+s)
		medianOfThree(data, hi-1, hi-1-s, hi-1-2*s)
	}
	medianOfThree(data, lo, m, hi-1)
	// Invariants are:
	//	data[lo] = pivot (set up by ChoosePivot)
	//	data[lo <= i < a] = pivot
	//	data[a <= i < b] < pivot
	//	data[b <= i < c] is unexamined
	//	data[c <= i < d] > pivot
	//	data[d <= i < hi] = pivot
	//
	// Once b meets c, can swap the "= pivot" sections
	// into the middle of the slice.
	pivot := lo
	a, b, c, d := lo+1, lo+1, hi, hi
	for b < c {
		if data.Less(b, pivot) { // data[b] < pivot
			b++
			continue
		}
		if !data.Less(pivot, b) { // data[b] = pivot
			data.Swap(a, b)
			a++
			b++
			continue
		}
		if data.Less(pivot, c-1) { // data[c-1] > pivot
			c--
			continue
		}
		if !data.Less(c-1, pivot) { // data[c-1] = pivot
			data.Swap(c-1, d-1)
			c--
			d--
			continue
		}
		// data[b] > pivot; data[c-1] < pivot
		data.Swap(b, c-1)
		b++
		c--
	}
	// Move the "= pivot" runs from the edges into the middle of the slice.
	// The swapped windows cannot overlap because n is bounded by both gaps.
	n := min(b-a, a-lo)
	swapRange(data, lo, b-n, n)
	n = min(hi-d, d-c)
	swapRange(data, c, hi-n, n)
	return lo + b - a, hi - (d - c)
}
// quickSort sorts data[a:b]. It is an introsort: plain quicksort that falls
// back to heapSort when maxDepth partitions have been used, and hands ranges
// of 7 or fewer elements to insertionSort.
func quickSort(data Interface, a, b, maxDepth int) {
	for b-a > 7 {
		if maxDepth == 0 {
			heapSort(data, a, b)
			return
		}
		maxDepth--
		mlo, mhi := doPivot(data, a, b)
		// Avoiding recursion on the larger subproblem guarantees
		// a stack depth of at most lg(b-a).
		if mlo-a < b-mhi {
			quickSort(data, a, mlo, maxDepth)
			a = mhi // i.e., quickSort(data, mhi, b)
		} else {
			quickSort(data, mhi, b, maxDepth)
			b = mlo // i.e., quickSort(data, a, mlo)
		}
	}
	if b-a > 1 {
		insertionSort(data, a, b)
	}
}
// Sort sorts data.
// It makes one call to data.Len to determine n, and O(n*log(n)) calls to
// data.Less and data.Swap. The sort is not guaranteed to be stable.
func Sort(data Interface) {
	// Switch to heapsort if depth of 2*ceil(lg(n+1)) is reached.
	n := data.Len()
	maxDepth := 0
	for i := n; i > 0; i >>= 1 { // count bits of n: maxDepth = ceil(lg(n+1))
		maxDepth++
	}
	maxDepth *= 2
	quickSort(data, 0, n, maxDepth)
}
// IsSorted reports whether data is sorted in the order defined by data.Less.
func IsSorted(data Interface) bool {
	for i := data.Len() - 1; i > 0; i-- {
		if data.Less(i, i-1) {
			// An element sorts before its predecessor: out of order.
			return false
		}
	}
	return true
}
// Convenience types for common cases
// IntSlice attaches the methods of Interface to []int, sorting in increasing order.
type IntSlice []int

// Len reports the number of elements (part of Interface).
func (p IntSlice) Len() int { return len(p) }

// Less orders ints by the natural < comparison (part of Interface).
func (p IntSlice) Less(i, j int) bool { return p[i] < p[j] }

// Swap exchanges the elements at indexes i and j (part of Interface).
func (p IntSlice) Swap(i, j int) { p[i], p[j] = p[j], p[i] }

// Sort is a convenience method.
func (p IntSlice) Sort() { Sort(p) }
// Float64Slice attaches the methods of Interface to []float64, sorting in increasing order.
type Float64Slice []float64

// Len reports the number of elements (part of Interface).
func (p Float64Slice) Len() int { return len(p) }

// Less treats a NaN as smaller than any non-NaN value, so NaNs gather at the
// front of a sorted slice rather than leaving the order unspecified.
func (p Float64Slice) Less(i, j int) bool { return p[i] < p[j] || math.IsNaN(p[i]) && !math.IsNaN(p[j]) }

// Swap exchanges the elements at indexes i and j (part of Interface).
func (p Float64Slice) Swap(i, j int) { p[i], p[j] = p[j], p[i] }

// Sort is a convenience method.
func (p Float64Slice) Sort() { Sort(p) }
// StringSlice attaches the methods of Interface to []string, sorting in increasing order.
type StringSlice []string

// Len reports the number of elements (part of Interface).
func (p StringSlice) Len() int { return len(p) }

// Less orders strings with Go's built-in < (bytewise comparison).
func (p StringSlice) Less(i, j int) bool { return p[i] < p[j] }

// Swap exchanges the elements at indexes i and j (part of Interface).
func (p StringSlice) Swap(i, j int) { p[i], p[j] = p[j], p[i] }

// Sort is a convenience method.
func (p StringSlice) Sort() { Sort(p) }
// Convenience wrappers for common cases.
// Each wrapper is a zero-copy type conversion to the matching slice type.

// Ints sorts a slice of ints in increasing order.
func Ints(a []int) { Sort(IntSlice(a)) }

// Float64s sorts a slice of float64s in increasing order.
func Float64s(a []float64) { Sort(Float64Slice(a)) }

// Strings sorts a slice of strings in increasing order.
func Strings(a []string) { Sort(StringSlice(a)) }

// IntsAreSorted tests whether a slice of ints is sorted in increasing order.
func IntsAreSorted(a []int) bool { return IsSorted(IntSlice(a)) }

// Float64sAreSorted tests whether a slice of float64s is sorted in increasing order.
func Float64sAreSorted(a []float64) bool { return IsSorted(Float64Slice(a)) }

// StringsAreSorted tests whether a slice of strings is sorted in increasing order.
func StringsAreSorted(a []string) bool { return IsSorted(StringSlice(a)) }
| rflanagan/reginaldflanagan-project1 | src/pkg/sort/sort.go | GO | bsd-3-clause | 6,831 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Drawing.Printing
{
public abstract partial class PrintController
{
    /// <summary>
    /// Initializes a new instance of the <see cref='PrintController'/> class.
    /// </summary>
    protected PrintController()
    {
    }

    /// <summary>
    /// Gets a value indicating whether the controller is used for print preview.
    /// The base implementation always returns false.
    /// </summary>
    public virtual bool IsPreview => false;

    /// <summary>
    /// When overridden in a derived class, begins the control sequence of when and how to print a page in a document.
    /// The base implementation returns null.
    /// </summary>
    public virtual Graphics OnStartPage(PrintDocument document, PrintPageEventArgs e)
    {
        return null;
    }

    /// <summary>
    /// When overridden in a derived class, completes the control sequence of when and how to print a page in a document.
    /// The base implementation does nothing.
    /// </summary>
    public virtual void OnEndPage(PrintDocument document, PrintPageEventArgs e)
    {
    }
}
}
| shimingsg/corefx | src/System.Drawing.Common/src/System/Drawing/Printing/PrintController.cs | C# | mit | 989 |
var test = require("testling")
, sinon = require("sinon")
, some = require("../../").someSync
, createItem = require("..").createItem
// someSync must visit entries in order and short-circuit: the spy returns a
// truthy value for key "b", so the third entry is never visited and the
// matching value is returned.
test("some calls each iterator", function (t) {
    var item = createItem()
      , iterator = sinon.spy(function (v) {
            if (v === "b1") {
                return v
            }
        })

    var result = some(item, iterator)

    // Each invocation receives (value, key, item).
    t.ok(iterator.calledTwice, "iterator is not called two times")
    t.deepEqual(iterator.args[0], ["a1", "a", item],
        "iterator called with wrong arguments")
    t.deepEqual(iterator.args[1], ["b1", "b", item],
        "iterator called with wrong arguments")
    t.equal(result, "b1", "result is incorrect")
    t.end()
})
// When no entry satisfies the predicate, someSync visits every entry
// (three in the fixture) and the result is the iterator's falsy return (null).
test("some returns false if all fail", function (t) {
    var item = createItem()
      , iterator = sinon.spy(function (v) {
            return null
        })

    var result = some(item, iterator)

    t.ok(iterator.calledThrice, "iterator was not called three times")
    t.equal(result, null, "result is not false")
    t.end()
})
// The optional third argument to someSync becomes the iterator's `this` binding.
test("some calls iterator with correct this value", function (t) {
    var item = createItem()
      , iterator = sinon.spy()
      , thisValue = {}

    some(item, iterator, thisValue)

    t.ok(iterator.calledOn(thisValue), "this value is incorrect")
    t.end()
})
package com.common.utils;
//
//import android.annotation.TargetApi;
//import android.app.Activity;
//import android.app.AlertDialog;
//import android.app.ProgressDialog;
//import android.content.Context;
//import android.content.Intent;
//import android.graphics.Bitmap;
//import android.net.Uri;
//import android.os.Build;
//import android.os.Bundle;
//import android.provider.MediaStore;
//import android.util.Log;
//import android.view.View;
//import android.view.View.OnClickListener;
//import android.widget.ImageView;
//import android.widget.TextView;
//import android.widget.Toast;
//import android.widget.VideoView;
//
//import com.common.utils.social.DialogListener;
//import com.common.utils.social.SocialAuthAdapter;
//import com.common.utils.social.SocialAuthError;
//import com.common.utils.social.SocialAuthListener;
//
//import org.brickred.socialauth.Contact;
//import org.brickred.socialauth.Profile;
//
//import java.io.IOException;
//import java.io.Serializable;
//import java.util.List;
//
public class CommonActivity //extends Activity implements OnClickListener {
{
//
// TextView text1, text2, text3, text4, text5, text6, text7, text8;
// Context mContext;
// Activity mActivity;
// private AlertDialog dialog;
// private ProgressDialog mDialog;
// private SocialAuthAdapter adapter;
// private int provider = 0;
// private ImageView ivImage;
// private VideoView vvVideo;
// private ProgressDialog pDialog;
//
// @Override
// protected void onCreate(Bundle savedInstanceState) {
// super.onCreate(savedInstanceState);
// setContentView(R.layout.activity_main);
// mContext = CommonActivity.this;
// mActivity = CommonActivity.this;
// adapter = new SocialAuthAdapter(new ResponseListener());
//
// ivImage = (ImageView) findViewById(R.id.imageView1);
// vvVideo = (VideoView) findViewById(R.id.videoView1);
// pDialog = new ProgressDialog(this);
// pDialog.setMessage("Please Wait");
//
// text1 = (TextView) findViewById(R.id.text1);
// text2 = (TextView) findViewById(R.id.text2);
// text3 = (TextView) findViewById(R.id.text3);
// text4 = (TextView) findViewById(R.id.text4);
// text5 = (TextView) findViewById(R.id.text5);
// text6 = (TextView) findViewById(R.id.text6);
// text7 = (TextView) findViewById(R.id.text7);
// text8 = (TextView) findViewById(R.id.text8);
//
// text1.setOnClickListener(this);
// text2.setOnClickListener(this);
// text3.setOnClickListener(this);
// text4.setOnClickListener(this);
// text5.setOnClickListener(this);
// text6.setOnClickListener(this);
// text7.setOnClickListener(this);
// text8.setOnClickListener(this);
//
// }
//
//
// @Override
// public void onClick(View v) {
//
// if (v == text1) {
//// Common.captureImage(this, 101);
// provider = 1;
// pDialog.show();
// adapter.authorize(CommonActivity.this, SocialAuthAdapter.Provider.LINKEDIN);
// } else if (v == text2) {
// text2.setVisibility(View.GONE);
// adapter.signOut(CommonActivity.this, SocialAuthAdapter.Provider.LINKEDIN.toString());
// } else if (v == text3) {
//// Common.recordVideo(this, 102);
// provider = 2;
// pDialog.show();
// adapter.authorize(CommonActivity.this, SocialAuthAdapter.Provider.FACEBOOK);
// } else if (v == text4) {
// text4.setVisibility(View.GONE);
// adapter.signOut(CommonActivity.this, SocialAuthAdapter.Provider.FACEBOOK.toString());
// } else if (v == text5) {
//// Common.pickImage(this, 103);
// provider = 3;
// pDialog.show();
// adapter.authorize(CommonActivity.this, SocialAuthAdapter.Provider.TWITTER);
// } else if (v == text6) {
// text6.setVisibility(View.GONE);
// adapter.signOut(CommonActivity.this, SocialAuthAdapter.Provider.TWITTER.toString());
// } else if (v == text7) {
//// Common.pickVideo(this, 104);
// provider = 4;
// pDialog.show();
// adapter.authorize(CommonActivity.this, SocialAuthAdapter.Provider.GOOGLEPLUS);
// } else if (v == text8) {
// text8.setVisibility(View.GONE);
// adapter.signOut(CommonActivity.this, SocialAuthAdapter.Provider.GOOGLEPLUS.toString());
// }
// }
//
// @Override
// protected void onSaveInstanceState(Bundle outState) {
// super.onSaveInstanceState(outState);
// Common.onSaveInstanceState(outState);
// }
//
// @Override
// protected void onRestoreInstanceState(Bundle savedInstanceState) {
// super.onRestoreInstanceState(savedInstanceState);
// Common.onRestoreInstanceState(savedInstanceState);
// }
//
// @TargetApi(Build.VERSION_CODES.KITKAT)
// @Override
// protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// super.onActivityResult(requestCode, resultCode, data);
//
// if (resultCode == RESULT_OK) {
// switch (requestCode) {
// case 101:
// Common.previewCapturedImage(ivImage);
// break;
// case 102:
// Common.previewVideo(vvVideo);
// break;
// case 103:
// Uri uri = data.getData();
// try {
// Bitmap bm = MediaStore.Images.Media.getBitmap(this.getContentResolver(), uri);
// ivImage.setImageBitmap(bm);
// } catch (IOException e) {
// e.printStackTrace();
// }
//// BitmapFactory.Options options = new BitmapFactory.Options();
//// final Bitmap bitmap = BitmapFactory.decodeFile(uri.getPath(), options);
// break;
// case 104:
//
//
// Uri fileUri = data.getData();
//// final int takeFlags = data.getFlags()
//// & (Intent.FLAG_GRANT_READ_URI_PERMISSION
//// | Intent.FLAG_GRANT_WRITE_URI_PERMISSION);
//// // Check for the freshest data.
//// getContentResolver().takePersistableUriPermission(fileUri, takeFlags);
////// Uri fileUri = MediaStore.Video.Media.getContentUri(data.getDataString());
//// Log.d("URI " , " video Uri : " + fileUri.getPath());
// vvVideo.setVideoPath(Common.getPath(this, fileUri));
// // start playing
// vvVideo.start();
// }
// }
// }
//
//
//
// // To get status of message after authentication
// private final class MessageListener implements SocialAuthListener<Integer> {
// @Override
// public void onExecute(String provider, Integer t) {
// Integer status = t;
// if (status.intValue() == 200 || status.intValue() == 201 || status.intValue() == 204)
// Toast.makeText(CommonActivity.this, "Message posted on" + provider, Toast.LENGTH_LONG).show();
// else
// Toast.makeText(CommonActivity.this, "Message not posted" + provider, Toast.LENGTH_LONG).show();
// }
//
// @Override
// public void onError(SocialAuthError e) {
// e.getInnerException().printStackTrace();
// }
// }
//
// private final class ResponseListener implements DialogListener {
//
// @Override
// public void onComplete(Bundle values) {
//
// Log.d("Custom-UI", "Successful");
//
// // Changing Sign In Text to Sign Out
// pDialog.dismiss();
// Toast.makeText(CommonActivity.this, "Connected", Toast.LENGTH_SHORT).show();
//
//// mDialog = new ProgressDialog(CommonActivity.this);
//// mDialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
//// mDialog.setMessage("Loading...");
//// mDialog.show();
// switch (provider) {
// case 1:
// text2.setVisibility(View.VISIBLE);
// break;
// case 2:
// text4.setVisibility(View.VISIBLE);
// break;
// case 3:
// text6.setVisibility(View.VISIBLE);
// break;
// case 4:
// text8.setVisibility(View.VISIBLE);
// break;
// }
//// if (provider != 4 && provider != 1){
//// Bitmap bitmap = Common.drawableTobitmap(CommonActivity.this, R.drawable.facebook);
//// try {
//// adapter.uploadImageAsync("Image Message", "icon.png", bitmap, 0,
//// new UploadImageListener());
//// } catch (Exception e) {
//// e.printStackTrace();
//// }
//// Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
//// photoPickerIntent.setType("image/*");
//// startActivityForResult(photoPickerIntent, 101);
//// }
//// adapter.updateStatus("Hi Test Status Update @" + Calendar.getInstance().getTimeInMillis(), new MessageListener(), false);
//// adapter.getUserProfileAsync(new ProfileDataListener());
// adapter.getContactListAsync(new ContactDataListener());
//
// }
//
// @Override
// public void onError(SocialAuthError error) {
// Log.d("Custom-UI", "Error");
// pDialog.dismiss();
// error.printStackTrace();
// }
//
// @Override
// public void onCancel() {
// Log.d("Custom-UI", "Cancelled");
// pDialog.dismiss();
// }
//
// @Override
// public void onBack() {
// pDialog.dismiss();
// Log.d("Custom-UI", "Dialog Closed by pressing Back Key");
//
// }
// }
//
// private final class ContactDataListener implements SocialAuthListener<List<Contact>> {
//
// @Override
// public void onExecute(String provider, List<Contact> t) {
//
// Log.d("Custom-UI", "Receiving Data");
//// mDialog.dismiss();
// List<Contact> contactsList = t;
//
// if (contactsList != null && contactsList.size() > 0) {
// Intent intent = new Intent(CommonActivity.this, ContactActivity.class);
// intent.putExtra("provider", provider);
// intent.putExtra("contact", (Serializable) contactsList);
// startActivity(intent);
// } else {
// Log.d("Custom-UI", "Contact List Empty");
// }
// }
//
// @Override
// public void onError(SocialAuthError e) {
//
// }
// }
// // To receive the profile response after authentication
// private final class ProfileDataListener implements SocialAuthListener<Profile> {
//
// @Override
// public void onExecute(String provider, Profile t) {
//
// Log.d("Custom-UI", "Receiving Data");
//// mDialog.dismiss();
// Profile profileMap = t;
//
// Intent intent = new Intent(CommonActivity.this, ProfileActivity.class);
// intent.putExtra("provider", provider);
// intent.putExtra("profile", profileMap);
// startActivity(intent);
// }
//
// @Override
// public void onError(SocialAuthError e) {
//
// }
// }
//
// // To get status of image upload after authentication
// private final class UploadImageListener implements SocialAuthListener<Integer> {
//
// @Override
// public void onExecute(String provider, Integer t) {
//// mDialog.dismiss();
// Integer status = t;
// Log.d("Custom-UI", String.valueOf(status));
// if (status.intValue() == 200 || status.intValue() == 201 || status.intValue() == 204)
// Toast.makeText(CommonActivity.this, "Image Uploaded", Toast.LENGTH_SHORT).show();
// else
// Toast.makeText(CommonActivity.this, "Image not Uploaded", Toast.LENGTH_SHORT).show();
// }
//
// @Override
// public void onError(SocialAuthError e) {
//
// }
// }
//
//
} | goofwear/android | utils/src/main/java/com/common/utils/CommonActivity.java | Java | mit | 12,589 |
<?php
namespace Concrete\Core\Permission;
use Concrete\Core\Foundation\Repetition\AbstractRepetition;
use Database;
use Loader;
class Duration extends AbstractRepetition
{
    /** @var int|null Primary key in PermissionDurationObjects; null until saved. */
    protected $pdID;

    /**
     * Filter permission access list items, keeping items that either have no
     * duration object at all (they apply unconditionally) or whose duration
     * object is currently active.
     *
     * @param \Concrete\Core\Permission\Access\ListItem\ListItem[] $list
     * @return \Concrete\Core\Permission\Access\ListItem\ListItem[]
     */
    public static function filterByActive($list)
    {
        $filteredList = array();
        foreach ($list as $l) {
            $pd = $l->getPermissionDurationObject();
            if (is_object($pd)) {
                if ($pd->isActive()) {
                    $filteredList[] = $l;
                }
            } else {
                // No duration attached: the item is always in effect.
                $filteredList[] = $l;
            }
        }
        return $filteredList;
    }

    /**
     * Build and persist a Duration object from the submitted form data, or
     * return null when neither a start nor an end date was posted.
     *
     * Superglobal reads are guarded with !empty()/isset() so absent checkboxes
     * or fields no longer raise undefined-index notices (warnings on PHP 8)
     * while keeping the original truthy semantics.
     *
     * @return Duration|null
     */
    public static function createFromRequest()
    {
        $dt = Loader::helper('form/date_time');
        $dateStart = $dt->translate('pdStartDate');
        $dateEnd = $dt->translate('pdEndDate');
        if ($dateStart || $dateEnd) {
            // create a Duration object
            $pd = new Duration();
            if (!empty($_REQUEST['pdStartDateAllDayActivate'])) {
                $pd->setStartDateAllDay(1);
                // All-day start: snap to the beginning of the day.
                $dateStart = date('Y-m-d 00:00:00', strtotime($dateStart));
            } else {
                $pd->setStartDateAllDay(0);
            }
            if (!empty($_REQUEST['pdEndDateAllDayActivate'])) {
                $pd->setEndDateAllDay(1);
                // All-day end: snap to the end of the day.
                $dateEnd = date('Y-m-d 23:59:59', strtotime($dateEnd));
            } else {
                $pd->setEndDateAllDay(0);
            }
            $pd->setStartDate($dateStart);
            $pd->setEndDate($dateEnd);
            if (!empty($_POST['pdRepeatPeriod']) && !empty($_POST['pdRepeat'])) {
                if ($_POST['pdRepeatPeriod'] == 'daily') {
                    $pd->setRepeatPeriod(Duration::REPEAT_DAILY);
                    $pd->setRepeatEveryNum($_POST['pdRepeatPeriodDaysEvery']);
                } elseif ($_POST['pdRepeatPeriod'] == 'weekly') {
                    $pd->setRepeatPeriod(Duration::REPEAT_WEEKLY);
                    $pd->setRepeatEveryNum($_POST['pdRepeatPeriodWeeksEvery']);
                    $pd->setRepeatPeriodWeekDays($_POST['pdRepeatPeriodWeeksDays']);
                } elseif ($_POST['pdRepeatPeriod'] == 'monthly') {
                    $pd->setRepeatPeriod(Duration::REPEAT_MONTHLY);
                    $repeat_by = isset($_POST['pdRepeatPeriodMonthsRepeatBy'])
                        ? $_POST['pdRepeatPeriodMonthsRepeatBy'] : null;
                    // Default to weekly repetition when the value is missing/unknown.
                    $repeat = self::MONTHLY_REPEAT_WEEKLY;
                    switch ($repeat_by) {
                        case 'week':
                            $repeat = self::MONTHLY_REPEAT_WEEKLY;
                            break;
                        case 'month':
                            $repeat = self::MONTHLY_REPEAT_MONTHLY;
                            break;
                        case 'lastweekday':
                            $repeat = self::MONTHLY_REPEAT_LAST_WEEKDAY;
                            $dotw = !empty($_POST['pdRepeatPeriodMonthsRepeatLastDay'])
                                ? $_POST['pdRepeatPeriodMonthsRepeatLastDay'] : 0;
                            $pd->setRepeatMonthLastWeekday($dotw);
                            break;
                    }
                    $pd->setRepeatMonthBy($repeat);
                    $pd->setRepeatEveryNum($_POST['pdRepeatPeriodMonthsEvery']);
                }
                $pd->setRepeatPeriodEnd($dt->translate('pdEndRepeatDateSpecific'));
            } else {
                $pd->setRepeatPeriod(Duration::REPEAT_NONE);
            }
            $pd->save();
            return $pd;
        }
        return null;
    }

    /**
     * Load a Duration by its id.
     *
     * Note: the object is stored serialized in the database and is restored
     * with unserialize(); only trusted, self-written rows are expected here.
     *
     * @param int $pdID
     * @return \Concrete\Core\Permission\Duration|null Null when no row exists.
     */
    public static function getByID($pdID)
    {
        $db = Database::connection();
        $pdObject = $db->fetchColumn('SELECT pdObject FROM PermissionDurationObjects WHERE pdID = ?', array($pdID));
        if ($pdObject) {
            $pd = unserialize($pdObject);
            return $pd;
        }
        return null;
    }

    /**
     * Persist this object (serialized) to PermissionDurationObjects.
     *
     * On first save a placeholder row is inserted just to obtain an
     * auto-increment id; the UPDATE below then stores the real serialized
     * object including that id.
     */
    public function save()
    {
        $db = Database::connection();
        if (!$this->pdID) {
            $pd = new Duration();
            $pdObject = serialize($pd);
            $db->executeQuery('INSERT INTO PermissionDurationObjects (pdObject) VALUES (?)', array($pdObject));
            $this->pdID = $db->lastInsertId();
        }
        $pdObject = serialize($this);
        $db->executeQuery(
            'UPDATE PermissionDurationObjects SET pdObject = ? WHERE pdID = ?',
            array($pdObject, $this->pdID)
        );
    }

    /**
     * @return int|null Alias of getPermissionDurationID().
     */
    public function getID()
    {
        return $this->getPermissionDurationID();
    }

    /**
     * @return int|null The row id, or null when the object was never saved.
     */
    public function getPermissionDurationID()
    {
        return $this->pdID;
    }
}
| lifejuggler/audrey_site | updates/concrete5.7.5.6/concrete/src/Permission/Duration.php | PHP | mit | 4,819 |
/****************************************************************************************
* Copyright (c) 2011 Norbert Nagold <norbert.nagold@gmail.com> *
* Copyright (c) 2014 Houssam Salem <houssam.salem.au@gmail.com> *
* *
* This program is free software; you can redistribute it and/or modify it under *
* the terms of the GNU General Public License as published by the Free Software *
* Foundation; either version 3 of the License, or (at your option) any later *
* version. *
* *
* This program is distributed in the hope that it will be useful, but WITHOUT ANY *
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A *
* PARTICULAR PURPOSE. See the GNU General Public License for more details. *
* *
* You should have received a copy of the GNU General Public License along with *
* this program. If not, see <http://www.gnu.org/licenses/>. *
****************************************************************************************/
package com.ichi2.libanki;
import android.database.Cursor;
import android.util.Pair;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
/**
 * In-memory representation of a row of the "notes" table: the fields and tags
 * from which one or more cards are generated. Mirrors libanki's Note class.
 */
public class Note implements Cloneable {

    private Collection mCol;

    /** Note id; primary key of the "notes" table. */
    private long mId;
    private String mGuId;
    private JSONObject mModel;
    private long mMid;
    private List<String> mTags;
    private String[] mFields;
    private int mFlags;
    private String mData;
    /** Maps field name to (ordinal, field JSON object) for this note's model. */
    private Map<String, Pair<Integer, JSONObject>> mFMap;
    private long mScm;
    private int mUsn;
    private long mMod;
    private boolean mNewlyAdded;


    /** Load an existing note by id. */
    public Note(Collection col, Long id) {
        this(col, null, id);
    }


    /** Create a new, empty note for the given model. */
    public Note(Collection col, JSONObject model) {
        this(col, model, null);
    }


    /**
     * @param col The collection this note belongs to.
     * @param model Model for a new note, or null when loading by id.
     * @param id Id of an existing note, or null when creating a new one.
     */
    public Note(Collection col, JSONObject model, Long id) {
        // At most one of model/id may be provided.
        assert !(model != null && id != null);
        mCol = col;
        if (id != null) {
            mId = id;
            load();
        } else {
            mId = Utils.timestampID(mCol.getDb(), "notes");
            mGuId = Utils.guid64();
            mModel = model;
            try {
                mMid = model.getLong("id");
                mTags = new ArrayList<String>();
                // One empty string per field defined by the model.
                mFields = new String[model.getJSONArray("flds").length()];
                Arrays.fill(mFields, "");
            } catch (JSONException e) {
                throw new RuntimeException(e);
            }
            mFlags = 0;
            mData = "";
            mFMap = mCol.getModels().fieldMap(mModel);
            mScm = mCol.getScm();
        }
    }


    /**
     * (Re)load this note's row from the database.
     *
     * @throws RuntimeException when no row exists for mId.
     */
    public void load() {
        Cursor cursor = null;
        try {
            cursor = mCol.getDb().getDatabase()
                    .rawQuery("SELECT guid, mid, mod, usn, tags, flds, flags, data FROM notes WHERE id = " + mId, null);
            if (!cursor.moveToFirst()) {
                throw new RuntimeException("Notes.load(): No result from query for note " + mId);
            }
            mGuId = cursor.getString(0);
            mMid = cursor.getLong(1);
            mMod = cursor.getLong(2);
            mUsn = cursor.getInt(3);
            mTags = mCol.getTags().split(cursor.getString(4));
            mFields = Utils.splitFields(cursor.getString(5));
            mFlags = cursor.getInt(6);
            mData = cursor.getString(7);
            mModel = mCol.getModels().get(mMid);
            mFMap = mCol.getModels().fieldMap(mModel);
            mScm = mCol.getScm();
        } finally {
            if (cursor != null) {
                cursor.close();
            }
        }
    }


    /*
     * If fields or tags have changed, write changes to disk.
     */
    public void flush() {
        flush(null);
    }


    public void flush(Long mod) {
        flush(mod, true);
    }


    /**
     * Write this note to disk.
     *
     * @param mod Modification timestamp to record, or null to use now.
     * @param changeUsn Whether to stamp the note with the collection's usn.
     */
    public void flush(Long mod, boolean changeUsn) {
        assert mScm == mCol.getScm();
        _preFlush();
        if (changeUsn) {
            mUsn = mCol.usn();
        }
        String sfld = Utils.stripHTMLMedia(mFields[mCol.getModels().sortIdx(mModel)]);
        String tags = stringTags();
        String fields = joinedFields();
        // NOTE(review): this format string contains no format specifiers, so
        // String.format() silently drops mId/tags/fields and the SQL keeps
        // literal '?' placeholders that are never bound. The "skip write when
        // unchanged" check therefore cannot work as intended — confirm against
        // DB.queryScalar's API and bind the parameters properly.
        if (mod == null && mCol.getDb().queryScalar(String.format(Locale.US,
                "select 1 from notes where id = ? and tags = ? and flds = ?",
                mId, tags, fields)) > 0) {
            return;
        }
        long csum = Utils.fieldChecksum(mFields[0]);
        mMod = mod != null ? mod : Utils.intNow();
        mCol.getDb().execute("insert or replace into notes values (?,?,?,?,?,?,?,?,?,?,?)",
                new Object[] { mId, mGuId, mMid, mMod, mUsn, tags, fields, sfld, csum, mFlags, mData });
        mCol.getTags().register(mTags);
        _postFlush();
    }


    /** @return All fields joined with the libanki field separator. */
    public String joinedFields() {
        return Utils.joinFields(mFields);
    }


    /** @return All cards generated from this note, ordered by template ordinal. */
    public ArrayList<Card> cards() {
        ArrayList<Card> cards = new ArrayList<Card>();
        Cursor cur = null;
        try {
            cur = mCol.getDb().getDatabase()
                    .rawQuery("SELECT id FROM cards WHERE nid = " + mId + " ORDER BY ord", null);
            while (cur.moveToNext()) {
                cards.add(mCol.getCard(cur.getLong(0)));
            }
        } finally {
            if (cur != null) {
                cur.close();
            }
        }
        return cards;
    }


    public JSONObject model() {
        return mModel;
    }


    /**
     * Dict interface
     * ***********************************************************
     */

    /** @return The note's field names. */
    public String[] keys() {
        // Fix: Set.toArray() with no argument returns Object[]; casting that to
        // String[] throws ClassCastException at runtime. Use the typed overload.
        return mFMap.keySet().toArray(new String[mFMap.size()]);
    }


    public String[] values() {
        return mFields;
    }


    /** @return (field name, field value) pairs ordered by field ordinal. */
    public String[][] items() {
        // TODO: Revisit this method. The field order returned differs from Anki.
        // The items here are only used in the note editor, so it's a low priority.
        String[][] result = new String[mFMap.size()][2];
        for (String fname : mFMap.keySet()) {
            int i = mFMap.get(fname).first;
            result[i][0] = fname;
            result[i][1] = mFields[i];
        }
        return result;
    }


    /** @return The ordinal of the named field in this note's model. */
    private int _fieldOrd(String key) {
        return mFMap.get(key).first;
    }


    public String getitem(String key) {
        return mFields[_fieldOrd(key)];
    }


    public void setitem(String key, String value) {
        mFields[_fieldOrd(key)] = value;
    }


    public boolean contains(String key) {
        return mFMap.containsKey(key);
    }


    /**
     * Tags
     * ***********************************************************
     */

    public boolean hasTag(String tag) {
        return mCol.getTags().inList(tag, mTags);
    }


    /** @return The tags canonified and joined into the on-disk string form. */
    public String stringTags() {
        return mCol.getTags().join(mCol.getTags().canonify(mTags));
    }


    public void setTagsFromStr(String str) {
        mTags = mCol.getTags().split(str);
    }


    /** Remove all tags matching the given one, case-insensitively. */
    public void delTag(String tag) {
        // Collect first, then remove, to avoid mutating the list mid-iteration.
        List<String> rem = new LinkedList<String>();
        for (String t : mTags) {
            if (t.equalsIgnoreCase(tag)) {
                rem.add(t);
            }
        }
        for (String r : rem) {
            mTags.remove(r);
        }
    }


    /*
     * duplicates will be stripped on save
     */
    public void addTag(String tag) {
        mTags.add(tag);
    }


    /**
     * Unique/duplicate check
     * ***********************************************************
     */

    /**
     *
     * @return 1 if first is empty; 2 if first is a duplicate, null otherwise.
     */
    public Integer dupeOrEmpty() {
        String val = mFields[0];
        if (val.trim().length() == 0) {
            return 1;
        }
        long csum = Utils.fieldChecksum(val);
        // find any matching csums and compare
        for (String flds : mCol.getDb().queryColumn(
                String.class,
                "SELECT flds FROM notes WHERE csum = " + csum + " AND id != " + (mId != 0 ? mId : 0) + " AND mid = "
                        + mMid, 0)) {
            if (Utils.stripHTMLMedia(
                    Utils.splitFields(flds)[0]).equals(Utils.stripHTMLMedia(mFields[0]))) {
                return 2;
            }
        }
        return null;
    }


    /**
     * Flushing cloze notes
     * ***********************************************************
     */

    /*
     * have we been added yet?
     */
    private void _preFlush() {
        mNewlyAdded = mCol.getDb().queryScalar("SELECT 1 FROM cards WHERE nid = " + mId) == 0;
    }


    /*
     * generate missing cards
     */
    private void _postFlush() {
        if (!mNewlyAdded) {
            mCol.genCards(new long[] { mId });
        }
    }


    /*
     * ***********************************************************
     * The methods below are not in LibAnki.
     * ***********************************************************
     */

    public long getMid() {
        return mMid;
    }


    /**
     * @return the mId
     */
    public long getId() {
        // TODO: Conflicting method name and return value. Reconsider.
        return mId;
    }


    public Collection getCol() {
        return mCol;
    }


    /** @return The sort field as stored in the database. */
    public String getSFld() {
        return mCol.getDb().queryString("SELECT sfld FROM notes WHERE id = " + mId);
    }


    public String[] getFields() {
        return mFields;
    }


    public void setField(int index, String value) {
        mFields[index] = value;
    }


    public long getMod() {
        return mMod;
    }


    @Override
    public Note clone() {
        try {
            return (Note) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new RuntimeException(e);
        }
    }


    public List<String> getTags() {
        return mTags;
    }
}
| thinhhung/Anki-Android | AnkiDroid/src/main/java/com/ichi2/libanki/Note.java | Java | gpl-3.0 | 10,327 |
<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* Class for loading/storing competency frameworks from the DB.
*
* @package core_competency
* @copyright 2015 Damyon Wiese
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
namespace core_competency;
defined('MOODLE_INTERNAL') || die();
use stdClass;
use cm_info;
use context;
use context_helper;
use context_system;
use context_course;
use context_module;
use context_user;
use coding_exception;
use require_login_exception;
use moodle_exception;
use moodle_url;
use required_capability_exception;
/**
* Class for doing things with competency frameworks.
*
* @copyright 2015 Damyon Wiese
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
class api {
    /**
     * Returns whether competencies are enabled.
     *
     * This method should never do more than checking the config setting, the reason
     * being that some other code could be checking the config value directly
     * to avoid having to load this entire file into memory.
     *
     * @return boolean True when enabled.
     */
    public static function is_enabled() {
        // The raw config value is returned; callers rely on its truthiness.
        return get_config('core_competency', 'enabled');
    }
/**
* Throws an exception if competencies are not enabled.
*
* @return void
* @throws moodle_exception
*/
public static function require_enabled() {
if (!static::is_enabled()) {
throw new moodle_exception('competenciesarenotenabled', 'core_competency');
}
}
    /**
     * Checks whether a scale is used anywhere in the plugin.
     *
     * This public API has two exceptions:
     * - It MUST NOT perform any capability checks.
     * - It MUST ignore whether competencies are enabled or not ({@link self::is_enabled()}).
     *
     * @param int $scaleid The scale ID.
     * @return bool
     */
    public static function is_scale_used_anywhere($scaleid) {
        global $DB;
        // A single query covering both places that can reference a scale:
        // competency frameworks and individual competencies.
        $sql = "SELECT s.id
                  FROM {scale} s
             LEFT JOIN {" . competency_framework::TABLE ."} f
                    ON f.scaleid = :scaleid1
             LEFT JOIN {" . competency::TABLE ."} c
                    ON c.scaleid = :scaleid2
                 WHERE f.id IS NOT NULL
                    OR c.id IS NOT NULL";
        return $DB->record_exists_sql($sql, ['scaleid1' => $scaleid, 'scaleid2' => $scaleid]);
    }
    /**
     * Validate if the current user has access to the course module if hidden.
     *
     * @param mixed $cmmixed The cm_info class, course module record or its ID.
     * @param bool $throwexception Throw an exception or not.
     * @return bool True when the module is visible to the current user.
     * @throws require_login_exception When hidden and $throwexception is true.
     */
    protected static function validate_course_module($cmmixed, $throwexception = true) {
        $cm = $cmmixed;
        if (!is_object($cm)) {
            // An ID was passed: resolve it to a cm_info via the course modinfo cache.
            $cmrecord = get_coursemodule_from_id(null, $cmmixed);
            $modinfo = get_fast_modinfo($cmrecord->course);
            $cm = $modinfo->get_cm($cmmixed);
        } else if (!$cm instanceof cm_info) {
            // Assume we got a course module record.
            $modinfo = get_fast_modinfo($cm->course);
            $cm = $modinfo->get_cm($cm->id);
        }
        // uservisible already accounts for visibility and access restrictions.
        if (!$cm->uservisible) {
            if ($throwexception) {
                throw new require_login_exception('Course module is hidden');
            } else {
                return false;
            }
        }
        return true;
    }
/**
* Validate if current user have acces to the course if hidden.
*
* @param mixed $courseorid The course or it ID.
* @param bool $throwexception Throw an exception or not.
* @return bool
*/
protected static function validate_course($courseorid, $throwexception = true) {
$course = $courseorid;
if (!is_object($course)) {
$course = get_course($course);
}
$coursecontext = context_course::instance($course->id);
if (!$course->visible and !has_capability('moodle/course:viewhiddencourses', $coursecontext)) {
if ($throwexception) {
throw new require_login_exception('Course is hidden');
} else {
return false;
}
}
return true;
}
    /**
     * Create a competency from a record containing all the data for the class.
     *
     * Requires moodle/competency:competencymanage capability at the system context.
     *
     * @param stdClass $record Record containing all the data for an instance of the class.
     * @return competency The newly created (persisted) competency.
     */
    public static function create_competency(stdClass $record) {
        static::require_enabled();
        $competency = new competency(0, $record);

        // First we do a permissions check.
        require_capability('moodle/competency:competencymanage', $competency->get_context());

        // Reset the sortorder, use reorder instead.
        $competency->set('sortorder', 0);
        $competency->create();

        \core\event\competency_created::create_from_competency($competency)->trigger();

        // Reset the rule of the parent: its set of children just changed,
        // so any existing completion rule may no longer be valid.
        $parent = $competency->get_parent();
        if ($parent) {
            $parent->reset_rule();
            $parent->update();
        }

        return $competency;
    }
    /**
     * Delete a competency by id.
     *
     * Requires moodle/competency:competencymanage capability at the system context.
     *
     * @param int $id The record to delete. This will delete alot of related data - you better be sure.
     * @return boolean False when the competency or one of its descendants cannot be deleted.
     */
    public static function delete_competency($id) {
        global $DB;
        static::require_enabled();
        $competency = new competency($id);

        // First we do a permissions check.
        require_capability('moodle/competency:competencymanage', $competency->get_context());

        $events = array();
        $competencyids = array(intval($competency->get('id')));
        $contextid = $competency->get_context()->id;
        // Deleting a competency deletes its whole subtree.
        $competencyids = array_merge(competency::get_descendants_ids($competency), $competencyids);
        if (!competency::can_all_be_deleted($competencyids)) {
            return false;
        }
        $transaction = $DB->start_delegated_transaction();

        try {
            // Reset the rule of the parent.
            $parent = $competency->get_parent();
            if ($parent) {
                $parent->reset_rule();
                $parent->update();
            }

            // Delete the competency separately so the after_delete event can be triggered.
            $competency->delete();

            // Delete the competencies.
            competency::delete_multiple($competencyids);

            // Delete the competencies relation.
            related_competency::delete_multiple_relations($competencyids);

            // Delete competency evidences.
            user_evidence_competency::delete_by_competencyids($competencyids);

            // Register the competencies deleted events.
            $events = \core\event\competency_deleted::create_multiple_from_competencyids($competencyids, $contextid);

        } catch (\Exception $e) {
            $transaction->rollback($e);
        }

        $transaction->allow_commit();
        // Trigger events only after the transaction committed successfully.
        foreach ($events as $event) {
            $event->trigger();
        }

        return true;
    }
    /**
     * Reorder this competency: move it one position down among its siblings.
     *
     * Requires moodle/competency:competencymanage capability at the system context.
     *
     * @param int $id The id of the competency to move.
     * @return boolean False when the competency is already last.
     */
    public static function move_down_competency($id) {
        static::require_enabled();
        $current = new competency($id);

        // First we do a permissions check.
        require_capability('moodle/competency:competencymanage', $current->get_context());

        // $max becomes the highest valid sortorder among siblings (count - 1).
        $max = self::count_competencies(array('parentid' => $current->get('parentid'),
                                              'competencyframeworkid' => $current->get('competencyframeworkid')));
        if ($max > 0) {
            $max--;
        }

        $sortorder = $current->get('sortorder');
        if ($sortorder >= $max) {
            return false;
        }
        $sortorder = $sortorder + 1;
        $current->set('sortorder', $sortorder);

        // Swap: any sibling already at the target position moves up one slot.
        $filters = array('parentid' => $current->get('parentid'),
                         'competencyframeworkid' => $current->get('competencyframeworkid'),
                         'sortorder' => $sortorder);
        $children = self::list_competencies($filters, 'id');
        foreach ($children as $needtoswap) {
            $needtoswap->set('sortorder', $sortorder - 1);
            $needtoswap->update();
        }

        // OK - all set.
        $result = $current->update();

        return $result;
    }
    /**
     * Reorder this competency: move it one position up among its siblings.
     *
     * Requires moodle/competency:competencymanage capability at the system context.
     *
     * @param int $id The id of the competency to move.
     * @return boolean False when the competency is already first.
     */
    public static function move_up_competency($id) {
        static::require_enabled();
        $current = new competency($id);

        // First we do a permissions check.
        require_capability('moodle/competency:competencymanage', $current->get_context());

        $sortorder = $current->get('sortorder');
        if ($sortorder == 0) {
            return false;
        }

        $sortorder = $sortorder - 1;
        $current->set('sortorder', $sortorder);

        // Swap: any sibling already at the target position moves down one slot.
        $filters = array('parentid' => $current->get('parentid'),
                         'competencyframeworkid' => $current->get('competencyframeworkid'),
                         'sortorder' => $sortorder);
        $children = self::list_competencies($filters, 'id');
        foreach ($children as $needtoswap) {
            $needtoswap->set('sortorder', $sortorder + 1);
            $needtoswap->update();
        }

        // OK - all set.
        $result = $current->update();

        return $result;
    }
/**
* Move this competency so it sits in a new parent.
*
* Requires moodle/competency:competencymanage capability at the system context.
*
* @param int $id The id of the competency to move.
* @param int $newparentid The new parent id for the competency.
* @return boolean
*/
public static function set_parent_competency($id, $newparentid) {
global $DB;
static::require_enabled();
$current = new competency($id);
// First we do a permissions check.
require_capability('moodle/competency:competencymanage', $current->get_context());
if ($id == $newparentid) {
throw new coding_exception('Can not set a competency as a parent of itself.');
} if ($newparentid == $current->get('parentid')) {
throw new coding_exception('Can not move a competency to the same location.');
}
// Some great variable assignment right here.
$currentparent = $current->get_parent();
$parent = !empty($newparentid) ? new competency($newparentid) : null;
$parentpath = !empty($parent) ? $parent->get('path') : '/0/';
// We're going to change quite a few things.
$transaction = $DB->start_delegated_transaction();
// If we are moving a node to a child of itself:
// - promote all the child nodes by one level.
// - remove the rule on self.
// - re-read the parent.
$newparents = explode('/', $parentpath);
if (in_array($current->get('id'), $newparents)) {
$children = competency::get_records(array('parentid' => $current->get('id')), 'id');
foreach ($children as $child) {
$child->set('parentid', $current->get('parentid'));
$child->update();
}
// Reset the rule on self as our children have changed.
$current->reset_rule();
// The destination parent is one of our descendants, we need to re-fetch its values (path, parentid).
$parent->read();
}
// Reset the rules of initial parent and destination.
if (!empty($currentparent)) {
$currentparent->reset_rule();
$currentparent->update();
}
if (!empty($parent)) {
$parent->reset_rule();
$parent->update();
}
// Do the actual move.
$current->set('parentid', $newparentid);
$result = $current->update();
// All right, let's commit this.
$transaction->allow_commit();
return $result;
}
    /**
     * Update the details for a competency.
     *
     * Requires moodle/competency:competencymanage capability at the system context.
     *
     * @param stdClass $record The new details for the competency.
     *                         Note - must contain an id that points to the competency to update.
     * @return boolean True on success.
     */
    public static function update_competency($record) {
        static::require_enabled();
        $competency = new competency($record->id);

        // First we do a permissions check.
        require_capability('moodle/competency:competencymanage', $competency->get_context());

        // Some things should not be changed in an update - they should use a more specific method.
        $record->sortorder = $competency->get('sortorder');
        $record->parentid = $competency->get('parentid');
        $record->competencyframeworkid = $competency->get('competencyframeworkid');

        $competency->from_record($record);
        // Re-check: from_record() may have changed which context applies.
        require_capability('moodle/competency:competencymanage', $competency->get_context());

        // OK - all set.
        $result = $competency->update();

        // Trigger the update event.
        \core\event\competency_updated::create_from_competency($competency)->trigger();

        return $result;
    }
    /**
     * Read the details for a single competency and return it.
     *
     * Requires moodle/competency:competencyview capability at the system context.
     *
     * @param int $id The id of the competency to read.
     * @param bool $includerelated Include related competencies or not.
     * @return competency The competency, with a relatedcompetencies property when requested.
     */
    public static function read_competency($id, $includerelated = false) {
        static::require_enabled();
        $competency = new competency($id);

        // First we do a permissions check.
        $context = $competency->get_context();
        if (!has_any_capability(array('moodle/competency:competencyview', 'moodle/competency:competencymanage'), $context)) {
             throw new required_capability_exception($context, 'moodle/competency:competencyview', 'nopermissions', '');
        }

        // OK - all set.
        if ($includerelated) {
            $relatedcompetency = new related_competency();
            // Only attach the property when relations actually exist.
            if ($related = $relatedcompetency->list_relations($id)) {
                $competency->relatedcompetencies = $related;
            }
        }

        return $competency;
    }
    /**
     * Perform a text search and return all results and their parents.
     *
     * Requires moodle/competency:competencyview capability at the framework context.
     *
     * @param string $textsearch A string to search for.
     * @param int $competencyframeworkid The id of the framework to limit the search.
     * @return array of competencies
     */
    public static function search_competencies($textsearch, $competencyframeworkid) {
        static::require_enabled();
        $framework = new competency_framework($competencyframeworkid);

        // First we do a permissions check.
        $context = $framework->get_context();
        if (!has_any_capability(array('moodle/competency:competencyview', 'moodle/competency:competencymanage'), $context)) {
             throw new required_capability_exception($context, 'moodle/competency:competencyview', 'nopermissions', '');
        }

        // OK - all set.
        $competencies = competency::search($textsearch, $competencyframeworkid);
        return $competencies;
    }
/**
* Perform a search based on the provided filters and return a paginated list of records.
*
* Requires moodle/competency:competencyview capability at some context.
*
* @param array $filters A list of filters to apply to the list.
* @param string $sort The column to sort on
* @param string $order ('ASC' or 'DESC')
* @param int $skip Number of records to skip (pagination)
* @param int $limit Max of records to return (pagination)
* @return array of competencies
*/
public static function list_competencies($filters, $sort = '', $order = 'ASC', $skip = 0, $limit = 0) {
static::require_enabled();
if (!isset($filters['competencyframeworkid'])) {
$context = context_system::instance();
} else {
$framework = new competency_framework($filters['competencyframeworkid']);
$context = $framework->get_context();
}
// First we do a permissions check.
if (!has_any_capability(array('moodle/competency:competencyview', 'moodle/competency:competencymanage'), $context)) {
throw new required_capability_exception($context, 'moodle/competency:competencyview', 'nopermissions', '');
}
// OK - all set.
return competency::get_records($filters, $sort, $order, $skip, $limit);
}
/**
 * Count the competencies matching the provided filters.
 *
 * Requires moodle/competency:competencyview capability at some context.
 *
 * @param array $filters A list of filters to apply to the list.
 * @return int
 */
public static function count_competencies($filters) {
    static::require_enabled();
    // Resolve the context to check against: the framework's context when a
    // framework filter is given, the system context otherwise.
    if (isset($filters['competencyframeworkid'])) {
        $framework = new competency_framework($filters['competencyframeworkid']);
        $context = $framework->get_context();
    } else {
        $context = context_system::instance();
    }
    // The caller needs at least one of the view/manage capabilities.
    $caps = array('moodle/competency:competencyview', 'moodle/competency:competencymanage');
    if (!has_any_capability($caps, $context)) {
        throw new required_capability_exception($context, 'moodle/competency:competencyview', 'nopermissions', '');
    }
    return competency::count_records($filters);
}
/**
 * Create a competency framework from a record containing all the data for the class.
 *
 * Requires moodle/competency:competencymanage capability at the framework's context.
 *
 * @param stdClass $record Record containing all the data for an instance of the class.
 * @return competency_framework The newly created framework.
 */
public static function create_framework(stdClass $record) {
    static::require_enabled();
    // The framework must be instantiated first so its context can be resolved
    // for the permission check.
    $framework = new competency_framework(0, $record);
    require_capability('moodle/competency:competencymanage', $framework->get_context());
    // Account for different formats of taxonomies.
    if (isset($record->taxonomies)) {
        $framework->set('taxonomies', $record->taxonomies);
    }
    $framework = $framework->create();
    // Trigger a competency framework created event.
    \core\event\competency_framework_created::create_from_framework($framework)->trigger();
    return $framework;
}
/**
 * Duplicate a competency framework by id.
 *
 * The framework, its whole competency tree, the relations between those
 * competencies and their rules are all copied inside one transaction.
 *
 * Requires moodle/competency:competencymanage capability at the framework's context.
 *
 * @param int $id The record to duplicate. All competencies associated and related will be duplicated.
 * @return competency_framework the framework duplicated
 */
public static function duplicate_framework($id) {
    global $DB;
    static::require_enabled();
    $framework = new competency_framework($id);
    require_capability('moodle/competency:competencymanage', $framework->get_context());
    // Starting transaction.
    $transaction = $DB->start_delegated_transaction();
    try {
        // Get a unique idnumber based on the origin framework.
        $idnumber = competency_framework::get_unused_idnumber($framework->get('idnumber'));
        $framework->set('idnumber', $idnumber);
        // Adding the suffix copy to the shortname.
        $framework->set('shortname', get_string('duplicateditemname', 'core_competency', $framework->get('shortname')));
        // Resetting the id makes create() insert a new record instead of updating the original.
        $framework->set('id', 0);
        $framework = $framework->create();
        // Array that matches the old competency ids with the new ones, used when copying related competencies.
        $frameworkcompetency = competency::get_framework_tree($id);
        $matchids = self::duplicate_competency_tree($framework->get('id'), $frameworkcompetency, 0, 0);
        // Copy the related competencies.
        $relcomps = related_competency::get_multiple_relations(array_keys($matchids));
        foreach ($relcomps as $relcomp) {
            $compid = $relcomp->get('competencyid');
            $relcompid = $relcomp->get('relatedcompetencyid');
            if (isset($matchids[$compid]) && isset($matchids[$relcompid])) {
                $newcompid = $matchids[$compid]->get('id');
                $newrelcompid = $matchids[$relcompid]->get('id');
                // Store the pair with the smaller id first so the relation is kept in canonical order.
                if ($newcompid < $newrelcompid) {
                    $relcomp->set('competencyid', $newcompid);
                    $relcomp->set('relatedcompetencyid', $newrelcompid);
                } else {
                    $relcomp->set('competencyid', $newrelcompid);
                    $relcomp->set('relatedcompetencyid', $newcompid);
                }
                // Reset the id so create() inserts a copy rather than updating the source relation.
                $relcomp->set('id', 0);
                $relcomp->create();
            } else {
                // Debugging message when there is no match found.
                debugging('related competency id not found');
            }
        }
        // Setting rules on duplicated competencies.
        self::migrate_competency_tree_rules($frameworkcompetency, $matchids);
        $transaction->allow_commit();
    } catch (\Exception $e) {
        // NOTE(review): assumes rollback() rethrows $e (Moodle delegated
        // transactions do) — otherwise the event below would still fire.
        $transaction->rollback($e);
    }
    // Trigger a competency framework created event.
    \core\event\competency_framework_created::create_from_framework($framework)->trigger();
    return $framework;
}
/**
 * Delete a competency framework by id.
 *
 * Requires moodle/competency:competencymanage capability at the framework's context.
 *
 * @param int $id The record to delete. This will delete a lot of related data - you better be sure.
 * @return boolean
 */
public static function delete_framework($id) {
    global $DB;
    static::require_enabled();
    $framework = new competency_framework($id);
    require_capability('moodle/competency:competencymanage', $framework->get_context());
    $events = array();
    $competenciesid = competency::get_ids_by_frameworkid($id);
    $contextid = $framework->get('contextid');
    // Bail out early if any competency in the framework is still in use.
    if (!competency::can_all_be_deleted($competenciesid)) {
        return false;
    }
    $transaction = $DB->start_delegated_transaction();
    try {
        if (!empty($competenciesid)) {
            // Delete competencies.
            competency::delete_by_frameworkid($id);
            // Delete the related competencies.
            related_competency::delete_multiple_relations($competenciesid);
            // Delete the evidences for competencies.
            user_evidence_competency::delete_by_competencyids($competenciesid);
        }
        // Create a competency framework deleted event (only triggered after the commit below).
        $event = \core\event\competency_framework_deleted::create_from_framework($framework);
        $result = $framework->delete();
        // Register the deleted events competencies.
        $events = \core\event\competency_deleted::create_multiple_from_competencyids($competenciesid, $contextid);
    } catch (\Exception $e) {
        // NOTE(review): assumes rollback() rethrows $e (Moodle delegated
        // transactions do); otherwise $event/$result below would be undefined.
        $transaction->rollback($e);
    }
    // Commit the transaction.
    $transaction->allow_commit();
    // If all operations are successful then trigger the delete event.
    $event->trigger();
    // Trigger deleted event competencies.
    foreach ($events as $event) {
        $event->trigger();
    }
    return $result;
}
/**
 * Update the details for a competency framework.
 *
 * Requires moodle/competency:competencymanage capability at the framework's context.
 *
 * @param stdClass $record The new details for the framework. Note - must contain an id that points to the framework to update.
 * @return boolean True when the update was persisted.
 */
public static function update_framework($record) {
    static::require_enabled();
    $framework = new competency_framework($record->id);
    // Check the permissions before update.
    require_capability('moodle/competency:competencymanage', $framework->get_context());
    // Account for different formats of taxonomies.
    $framework->from_record($record);
    if (isset($record->taxonomies)) {
        $framework->set('taxonomies', $record->taxonomies);
    }
    // Persist first, then announce: previously the updated event was triggered
    // before update(), so a failed update still emitted the event.
    $result = $framework->update();
    if ($result) {
        // Trigger a competency framework updated event.
        \core\event\competency_framework_updated::create_from_framework($framework)->trigger();
    }
    return $result;
}
/**
 * Read the details for a single competency framework and return it.
 *
 * Requires moodle/competency:competencyview capability at the framework's context.
 *
 * @param int $id The id of the framework to read.
 * @return competency_framework
 */
public static function read_framework($id) {
    static::require_enabled();
    $framework = new competency_framework($id);
    $caps = array('moodle/competency:competencyview', 'moodle/competency:competencymanage');
    if (!has_any_capability($caps, $framework->get_context())) {
        throw new required_capability_exception($framework->get_context(), 'moodle/competency:competencyview',
            'nopermissions', '');
    }
    return $framework;
}
/**
 * Log the competency framework viewed event.
 *
 * @param competency_framework|int $frameworkorid The competency_framework object or competency framework id
 * @return bool
 */
public static function competency_framework_viewed($frameworkorid) {
    static::require_enabled();
    // Accept either the persistent itself or its id.
    $framework = is_object($frameworkorid) ? $frameworkorid : new competency_framework($frameworkorid);
    $caps = array('moodle/competency:competencyview', 'moodle/competency:competencymanage');
    if (!has_any_capability($caps, $framework->get_context())) {
        throw new required_capability_exception($framework->get_context(), 'moodle/competency:competencyview',
            'nopermissions', '');
    }
    \core\event\competency_framework_viewed::create_from_framework($framework)->trigger();
    return true;
}
/**
 * Log the competency viewed event.
 *
 * @param competency|int $competencyorid The competency object or competency id
 * @return bool
 */
public static function competency_viewed($competencyorid) {
    static::require_enabled();
    // Accept either the persistent itself or its id.
    $competency = is_object($competencyorid) ? $competencyorid : new competency($competencyorid);
    $caps = array('moodle/competency:competencyview', 'moodle/competency:competencymanage');
    if (!has_any_capability($caps, $competency->get_context())) {
        throw new required_capability_exception($competency->get_context(), 'moodle/competency:competencyview',
            'nopermissions', '');
    }
    \core\event\competency_viewed::create_from_competency($competency)->trigger();
    return true;
}
/**
 * Perform a search based on the provided filters and return a paginated list of records.
 *
 * Requires moodle/competency:competencyview capability in at least one of the
 * contexts derived from $context/$includes.
 *
 * @param string $sort The column to sort on
 * @param string $order ('ASC' or 'DESC')
 * @param int $skip Number of records to skip (pagination)
 * @param int $limit Max of records to return (pagination)
 * @param context $context The parent context of the frameworks.
 * @param string $includes Defines what other contexts to fetch frameworks from.
 *                         Accepted values are:
 *                          - children: All descendants
 *                          - parents: All parents, grand parents, etc...
 *                          - self: Context passed only.
 * @param bool $onlyvisible If true return only visible frameworks
 * @param string $query A string to use to filter down the frameworks.
 * @return array of competency_framework
 */
public static function list_frameworks($sort, $order, $skip, $limit, $context, $includes = 'children',
        $onlyvisible = false, $query = '') {
    global $DB;
    static::require_enabled();
    // Get all the relevant contexts, filtered down to those where the user can view or manage.
    $contexts = self::get_related_contexts($context, $includes,
        array('moodle/competency:competencyview', 'moodle/competency:competencymanage'));
    if (empty($contexts)) {
        throw new required_capability_exception($context, 'moodle/competency:competencyview', 'nopermissions', '');
    }
    // OK - all set. Restrict to frameworks living in one of the allowed contexts.
    list($insql, $inparams) = $DB->get_in_or_equal(array_keys($contexts), SQL_PARAMS_NAMED);
    $select = "contextid $insql";
    if ($onlyvisible) {
        $select .= " AND visible = :visible";
        $inparams['visible'] = 1;
    }
    // The is_numeric() check keeps a query of '0' (which is empty()) working.
    if (!empty($query) || is_numeric($query)) {
        // Case-insensitive match on either shortname or idnumber.
        $sqlnamelike = $DB->sql_like('shortname', ':namelike', false);
        $sqlidnlike = $DB->sql_like('idnumber', ':idnlike', false);
        $select .= " AND ($sqlnamelike OR $sqlidnlike) ";
        $inparams['namelike'] = '%' . $DB->sql_like_escape($query) . '%';
        $inparams['idnlike'] = '%' . $DB->sql_like_escape($query) . '%';
    }
    return competency_framework::get_records_select($select, $inparams, $sort . ' ' . $order, '*', $skip, $limit);
}
/**
 * Count the frameworks visible from the given context.
 *
 * Requires moodle/competency:competencyview capability in at least one of the
 * contexts derived from $context/$includes.
 *
 * @param context $context The parent context of the frameworks.
 * @param string $includes Defines what other contexts to fetch frameworks from.
 *                         Accepted values are:
 *                          - children: All descendants
 *                          - parents: All parents, grand parents, etc...
 *                          - self: Context passed only.
 * @return int
 */
public static function count_frameworks($context, $includes) {
    global $DB;
    static::require_enabled();
    // Gather the contexts where the user may view or manage frameworks.
    $relevantcontexts = self::get_related_contexts($context, $includes,
        array('moodle/competency:competencyview', 'moodle/competency:competencymanage'));
    if (!$relevantcontexts) {
        throw new required_capability_exception($context, 'moodle/competency:competencyview', 'nopermissions', '');
    }
    // Count the frameworks living in any of the allowed contexts.
    list($insql, $params) = $DB->get_in_or_equal(array_keys($relevantcontexts), SQL_PARAMS_NAMED);
    return competency_framework::count_records_select("contextid $insql", $params);
}
/**
 * Fetches all the relevant contexts.
 *
 * Note: This currently only supports system, category and user contexts. However user contexts
 * behave a bit differently and will fallback on the system context. This is what makes the most
 * sense because a user context does not have descendants, and only has system as a parent.
 *
 * @param context $context The context to start from.
 * @param string $includes Defines what other contexts to find.
 *                         Accepted values are:
 *                          - children: All descendants
 *                          - parents: All parents, grand parents, etc...
 *                          - self: Context passed only.
 * @param array $hasanycapability Array of capabilities passed to {@link has_any_capability()} in each context.
 * @return context[] An array of contexts where keys are context IDs.
 */
public static function get_related_contexts($context, $includes, array $hasanycapability = null) {
    global $DB;
    static::require_enabled();
    if (!in_array($includes, array('children', 'parents', 'self'))) {
        throw new coding_exception('Invalid parameter value for \'includes\'.');
    }
    // A user context has no descendants and only system above it: swap it for the system context.
    if ($context->contextlevel == CONTEXT_USER) {
        $context = context_system::instance();
    }
    $result = array($context->id => $context);
    if ($includes === 'children') {
        // Fetch all course category contexts below the given one in a single query.
        $pathlike = $DB->sql_like('path', ':path');
        $sql = "contextlevel = :coursecatlevel AND $pathlike";
        $params = array('coursecatlevel' => CONTEXT_COURSECAT, 'path' => $context->path . '/%');
        $rs = $DB->get_recordset_select('context', $sql, $params);
        foreach ($rs as $record) {
            $ctxid = $record->id;
            // Preload so instance_by_id() does not hit the database again.
            context_helper::preload_from_record($record);
            $result[$ctxid] = context::instance_by_id($ctxid);
        }
        $rs->close();
    } else if ($includes === 'parents') {
        foreach ($context->get_parent_contexts() as $parent) {
            $result[$parent->id] = $parent;
        }
    }
    // Drop any context in which the caller holds none of the required capabilities.
    if (!empty($hasanycapability)) {
        foreach ($result as $id => $candidate) {
            if (!has_any_capability($hasanycapability, $candidate)) {
                unset($result[$id]);
            }
        }
    }
    return $result;
}
/**
 * Count all the courses using a competency that the current user can see.
 *
 * @param int $competencyid The id of the competency to check.
 * @return int
 */
public static function count_courses_using_competency($competencyid) {
    static::require_enabled();
    $count = 0;
    $capabilities = array('moodle/competency:coursecompetencyview', 'moodle/competency:coursecompetencymanage');
    // Check permissions course by course.
    foreach (course_competency::list_courses_min($competencyid) as $course) {
        // Skip courses the current user cannot access at all.
        if (!self::validate_course($course, false)) {
            continue;
        }
        // Skip courses where the user may neither view nor manage course competencies.
        if (!has_any_capability($capabilities, context_course::instance($course->id))) {
            continue;
        }
        $count++;
    }
    return $count;
}
/**
 * List all the course modules using a competency in a course.
 *
 * @param int $competencyid The id of the competency to check.
 * @param int $courseid The id of the course to check.
 * @return array[int] Array of course modules ids.
 */
public static function list_course_modules_using_competency($competencyid, $courseid) {
    static::require_enabled();
    self::validate_course($courseid);
    $coursecontext = context_course::instance($courseid);
    // We will not check each module - course permissions should be enough.
    if (!has_any_capability(array('moodle/competency:coursecompetencyview',
            'moodle/competency:coursecompetencymanage'), $coursecontext)) {
        throw new required_capability_exception($coursecontext, 'moodle/competency:coursecompetencyview', 'nopermissions', '');
    }
    // Keep only the modules the current user can access.
    $result = array();
    foreach (course_module_competency::list_course_modules($competencyid, $courseid) as $cmid) {
        if (self::validate_course_module($cmid, false)) {
            $result[] = $cmid;
        }
    }
    return $result;
}
/**
 * List all the competencies linked to a course module.
 *
 * @param mixed $cmorid The course module, or its ID.
 * @return array[competency] Array of competency records.
 */
public static function list_course_module_competencies_in_course_module($cmorid) {
    static::require_enabled();
    $cm = is_object($cmorid) ? $cmorid : get_coursemodule_from_id('', $cmorid, 0, true, MUST_EXIST);
    // Check the user has access to the course module.
    self::validate_course_module($cm);
    $context = context_module::instance($cm->id);
    if (!has_any_capability(array('moodle/competency:coursecompetencyview',
            'moodle/competency:coursecompetencymanage'), $context)) {
        throw new required_capability_exception($context, 'moodle/competency:coursecompetencyview', 'nopermissions', '');
    }
    // Re-index as a plain numeric array.
    return array_values(course_module_competency::list_course_module_competencies($cm->id));
}
/**
 * List all the courses using a competency that the current user can see.
 *
 * @param int $competencyid The id of the competency to check.
 * @return array[stdClass] Array of stdClass containing id and shortname.
 */
public static function list_courses_using_competency($competencyid) {
    static::require_enabled();
    $result = array();
    $capabilities = array('moodle/competency:coursecompetencyview', 'moodle/competency:coursecompetencymanage');
    // Check permissions course by course, keeping only the visible ones.
    foreach (course_competency::list_courses($competencyid) as $course) {
        if (!has_any_capability($capabilities, context_course::instance($course->id))) {
            continue;
        }
        if (!self::validate_course($course, false)) {
            continue;
        }
        $result[] = $course;
    }
    return $result;
}
/**
 * Count the proficient competencies in a course for one user.
 *
 * @param int $courseid The id of the course to check.
 * @param int $userid The id of the user to check.
 * @return int
 */
public static function count_proficient_competencies_in_course_for_user($courseid, $userid) {
    static::require_enabled();
    // Check the user has access to the course.
    self::validate_course($courseid);
    // Permission check: view or manage course competencies.
    $context = context_course::instance($courseid);
    if (!has_any_capability(array('moodle/competency:coursecompetencyview',
            'moodle/competency:coursecompetencymanage'), $context)) {
        throw new required_capability_exception($context, 'moodle/competency:coursecompetencyview', 'nopermissions', '');
    }
    return user_competency_course::count_proficient_competencies($courseid, $userid);
}
/**
 * Count all the competencies in a course.
 *
 * @param int $courseid The id of the course to check.
 * @return int
 */
public static function count_competencies_in_course($courseid) {
    static::require_enabled();
    // Check the user has access to the course.
    self::validate_course($courseid);
    // Permission check: view or manage course competencies.
    $context = context_course::instance($courseid);
    if (!has_any_capability(array('moodle/competency:coursecompetencyview',
            'moodle/competency:coursecompetencymanage'), $context)) {
        throw new required_capability_exception($context, 'moodle/competency:coursecompetencyview', 'nopermissions', '');
    }
    return course_competency::count_competencies($courseid);
}
/**
 * List the competencies associated to a course.
 *
 * @param mixed $courseorid The course, or its ID.
 * @return array( array(
 *                    'competency' => \core_competency\competency,
 *                    'coursecompetency' => \core_competency\course_competency
 *                ))
 */
public static function list_course_competencies($courseorid) {
    static::require_enabled();
    $course = is_object($courseorid) ? $courseorid : get_course($courseorid);
    // Check the user has access to the course.
    self::validate_course($course);
    $context = context_course::instance($course->id);
    if (!has_any_capability(array('moodle/competency:coursecompetencyview',
            'moodle/competency:coursecompetencymanage'), $context)) {
        throw new required_capability_exception($context, 'moodle/competency:coursecompetencyview', 'nopermissions', '');
    }
    // TODO We could improve the performance of this into one single query.
    $coursecompetencies = course_competency::list_course_competencies($course->id);
    $competencies = course_competency::list_competencies($course->id);
    // Pair each link record with its competency.
    $result = array();
    foreach ($coursecompetencies as $coursecompetency) {
        $result[] = array(
            'competency' => $competencies[$coursecompetency->get('competencyid')],
            'coursecompetency' => $coursecompetency
        );
    }
    return $result;
}
/**
 * Get a user competency, creating the relation on the fly when missing.
 *
 * @param int $userid The user ID.
 * @param int $competencyid The competency ID.
 * @return user_competency
 */
public static function get_user_competency($userid, $competencyid) {
    static::require_enabled();
    $matches = user_competency::get_multiple($userid, array($competencyid));
    $uc = array_pop($matches);
    // Create the relation when it does not exist yet.
    if (empty($uc)) {
        $uc = user_competency::create_relation($userid, $competencyid);
        $uc->create();
    }
    if (!$uc->can_read()) {
        throw new required_capability_exception($uc->get_context(), 'moodle/competency:usercompetencyview',
            'nopermissions', '');
    }
    return $uc;
}
/**
 * Get a user competency by ID.
 *
 * @param int $usercompetencyid The user competency ID.
 * @return user_competency
 */
public static function get_user_competency_by_id($usercompetencyid) {
    static::require_enabled();
    $usercompetency = new user_competency($usercompetencyid);
    // The persistent itself decides who may read it.
    if (!$usercompetency->can_read()) {
        throw new required_capability_exception($usercompetency->get_context(), 'moodle/competency:usercompetencyview',
            'nopermissions', '');
    }
    return $usercompetency;
}
/**
 * List the competencies associated to a course module.
 *
 * @param mixed $cmorid The course module, or its ID.
 * @return array( array(
 *                    'competency' => \core_competency\competency,
 *                    'coursemodulecompetency' => \core_competency\course_module_competency
 *                ))
 */
public static function list_course_module_competencies($cmorid) {
    static::require_enabled();
    $cm = $cmorid;
    if (!is_object($cmorid)) {
        $cm = get_coursemodule_from_id('', $cmorid, 0, true, MUST_EXIST);
    }
    // Check the user has access to the course module.
    self::validate_course_module($cm);
    $context = context_module::instance($cm->id);
    $capabilities = array('moodle/competency:coursecompetencyview', 'moodle/competency:coursecompetencymanage');
    if (!has_any_capability($capabilities, $context)) {
        throw new required_capability_exception($context, 'moodle/competency:coursecompetencyview', 'nopermissions', '');
    }
    $result = array();
    // TODO We could improve the performance of this into one single query.
    // NOTE(review): this calls course_competency::list_course_module_competencies()
    // while sibling methods use course_module_competency for cm-scoped lookups —
    // confirm this is intentional and not a misplaced class reference.
    $coursemodulecompetencies = course_competency::list_course_module_competencies($cm->id);
    $competencies = course_module_competency::list_competencies($cm->id);
    // Build the return values: pair each link record with its competency.
    foreach ($coursemodulecompetencies as $key => $coursemodulecompetency) {
        $result[] = array(
            'competency' => $competencies[$coursemodulecompetency->get('competencyid')],
            'coursemodulecompetency' => $coursemodulecompetency
        );
    }
    return $result;
}
/**
 * Get a user competency in a course, creating the relation when missing.
 *
 * @param int $courseid The id of the course to check.
 * @param int $userid The id of the user to check.
 * @param int $competencyid The id of the competency.
 * @return user_competency_course
 */
public static function get_user_competency_in_course($courseid, $userid, $competencyid) {
    static::require_enabled();
    // First we do a permissions check.
    $context = context_course::instance($courseid);
    if (!has_any_capability(array('moodle/competency:coursecompetencyview',
            'moodle/competency:coursecompetencymanage'), $context)) {
        throw new required_capability_exception($context, 'moodle/competency:coursecompetencyview', 'nopermissions', '');
    }
    if (!user_competency::can_read_user_in_course($userid, $courseid)) {
        throw new required_capability_exception($context, 'moodle/competency:usercompetencyview', 'nopermissions', '');
    }
    // This will throw an exception if the competency does not belong to the course.
    $competency = course_competency::get_competency($courseid, $competencyid);
    // Fetch the existing record, or create it on the fly.
    $filters = array('courseid' => $courseid, 'userid' => $userid, 'competencyid' => $competencyid);
    $ucc = user_competency_course::get_record($filters);
    if (!$ucc) {
        $ucc = user_competency_course::create_relation($userid, $competency->get('id'), $courseid);
        $ucc->create();
    }
    return $ucc;
}
/**
 * List all the user competencies in a course.
 *
 * Missing user_competency_course records are created on the fly, so the
 * returned list always matches the competencies linked to the course, in
 * course sort order.
 *
 * @param int $courseid The id of the course to check.
 * @param int $userid The id of the user to check.
 * @return array of user_competency_course objects, indexed by their id.
 */
public static function list_user_competencies_in_course($courseid, $userid) {
    static::require_enabled();
    // First we do a permissions check.
    $context = context_course::instance($courseid);
    $capabilities = array('moodle/competency:coursecompetencyview', 'moodle/competency:coursecompetencymanage');
    if (!has_any_capability($capabilities, $context)) {
        throw new required_capability_exception($context, 'moodle/competency:coursecompetencyview', 'nopermissions', '');
    } else if (!user_competency::can_read_user_in_course($userid, $courseid)) {
        throw new required_capability_exception($context, 'moodle/competency:usercompetencyview', 'nopermissions', '');
    }
    // OK - all set.
    $competencylist = course_competency::list_competencies($courseid, false);
    $existing = user_competency_course::get_multiple($userid, $courseid, $competencylist);
    // Index the existing records by competency id so the loop below is a
    // constant-time lookup instead of a nested scan.
    $existingbycompetency = array();
    foreach ($existing as $usercompetencycourse) {
        $existingbycompetency[$usercompetencycourse->get('competencyid')] = $usercompetencycourse;
    }
    // Walk the course competencies in order, creating any missing records.
    $orderedusercompetencycourses = array();
    foreach ($competencylist as $coursecompetency) {
        $competencyid = $coursecompetency->get('id');
        if (isset($existingbycompetency[$competencyid])) {
            $ucc = $existingbycompetency[$competencyid];
        } else {
            $ucc = user_competency_course::create_relation($userid, $competencyid, $courseid);
            $ucc->create();
        }
        $orderedusercompetencycourses[$ucc->get('id')] = $ucc;
    }
    return $orderedusercompetencycourses;
}
/**
 * List the user competencies to review.
 *
 * The method returns values in this format:
 *
 * array(
 *     'competencies' => array(
 *         (stdClass)(
 *             'usercompetency' => (user_competency),
 *             'competency' => (competency),
 *             'user' => (user)
 *         )
 *     ),
 *     'count' => (int)
 * )
 *
 * @param int $skip The number of records to skip.
 * @param int $limit The number of results to return.
 * @param int $userid The user we're getting the competencies to review for.
 * @return array Containing the keys 'count', and 'competencies'. The 'competencies' key contains an object
 *               which contains 'competency', 'usercompetency' and 'user'.
 */
public static function list_user_competencies_to_review($skip = 0, $limit = 50, $userid = null) {
    global $DB, $USER;
    static::require_enabled();
    if ($userid === null) {
        $userid = $USER->id;
    }
    $capability = 'moodle/competency:usercompetencyreview';
    // Build the select lists with prefixed aliases so each persistent can be
    // extracted from the joined row below.
    $ucfields = user_competency::get_sql_fields('uc', 'uc_');
    $compfields = competency::get_sql_fields('c', 'c_');
    $usercols = array('id') + get_user_fieldnames();
    $userfields = array();
    foreach ($usercols as $field) {
        $userfields[] = "u." . $field . " AS usr_" . $field;
    }
    $userfields = implode(',', $userfields);
    $select = "SELECT $ucfields, $compfields, $userfields";
    $countselect = "SELECT COUNT('x')";
    // Matches competencies waiting for review, or in review by this reviewer.
    $sql = " FROM {" . user_competency::TABLE . "} uc
              JOIN {" . competency::TABLE . "} c
                ON c.id = uc.competencyid
              JOIN {user} u
                ON u.id = uc.userid
             WHERE (uc.status = :waitingforreview
                OR (uc.status = :inreview AND uc.reviewerid = :reviewerid))
               AND u.deleted = 0";
    $ordersql = " ORDER BY c.shortname ASC";
    $params = array(
        'inreview' => user_competency::STATUS_IN_REVIEW,
        'reviewerid' => $userid,
        'waitingforreview' => user_competency::STATUS_WAITING_FOR_REVIEW,
    );
    // NOTE(review): this assignment is redundant — $countsql is recomputed
    // below (with the permission filter) before it is ever used.
    $countsql = $countselect . $sql;
    // Primary check to avoid the hard work of getting the users in which the user has permission.
    $count = $DB->count_records_sql($countselect . $sql, $params);
    if ($count < 1) {
        return array('count' => 0, 'competencies' => array());
    }
    // TODO MDL-52243 Use core function.
    // Restrict to users on whom the reviewer holds the review capability.
    list($insql, $inparams) = self::filter_users_with_capability_on_user_context_sql(
        $capability, $userid, SQL_PARAMS_NAMED);
    $params += $inparams;
    $countsql = $countselect . $sql . " AND uc.userid $insql";
    $getsql = $select . $sql . " AND uc.userid $insql " . $ordersql;
    // Extracting the results: rebuild each persistent from its prefixed columns.
    $competencies = array();
    $records = $DB->get_recordset_sql($getsql, $params, $skip, $limit);
    foreach ($records as $record) {
        $objects = (object) array(
            'usercompetency' => new user_competency(0, user_competency::extract_record($record, 'uc_')),
            'competency' => new competency(0, competency::extract_record($record, 'c_')),
            'user' => persistent::extract_record($record, 'usr_'),
        );
        $competencies[] = $objects;
    }
    $records->close();
    return array(
        'count' => $DB->count_records_sql($countsql, $params),
        'competencies' => $competencies
    );
}
/**
 * Add a competency to this course module.
 *
 * Requires moodle/competency:coursecompetencymanage capability at the course module context,
 * and the competency must already be linked to the module's course.
 *
 * @param mixed $cmorid The course module, or id of the course module
 * @param int $competencyid The id of the competency
 * @return bool True when a new link was created, false when it already existed.
 */
public static function add_competency_to_course_module($cmorid, $competencyid) {
    static::require_enabled();
    $cm = $cmorid;
    if (!is_object($cmorid)) {
        $cm = get_coursemodule_from_id('', $cmorid, 0, true, MUST_EXIST);
    }
    // Check the user has access to the course module.
    self::validate_course_module($cm);
    // First we do a permissions check.
    $context = context_module::instance($cm->id);
    require_capability('moodle/competency:coursecompetencymanage', $context);
    // Check that the competency belongs to the course.
    $incourse = course_competency::get_records(array('courseid' => $cm->course, 'competencyid' => $competencyid));
    if (!$incourse) {
        throw new coding_exception('Cannot add a competency to a module if it does not belong to the course');
    }
    // Nothing to do when the link already exists. (get_records() is a static
    // persistent method; previously it was confusingly called on an instance.)
    $existing = course_module_competency::get_records(array('cmid' => $cm->id, 'competencyid' => $competencyid));
    if ($existing) {
        return false;
    }
    $record = new stdClass();
    $record->cmid = $cm->id;
    $record->competencyid = $competencyid;
    $coursemodulecompetency = new course_module_competency(0, $record);
    return (bool) $coursemodulecompetency->create();
}
/**
 * Remove a competency from this course module.
 *
 * Requires moodle/competency:coursecompetencymanage capability at the course module context.
 *
 * @param mixed $cmorid The course module, or id of the course module
 * @param int $competencyid The id of the competency
 * @return bool True when an existing link was deleted, false when there was none.
 */
public static function remove_competency_from_course_module($cmorid, $competencyid) {
    static::require_enabled();
    $cm = $cmorid;
    if (!is_object($cmorid)) {
        $cm = get_coursemodule_from_id('', $cmorid, 0, true, MUST_EXIST);
    }
    // Check the user has access to the course module.
    self::validate_course_module($cm);
    // First we do a permissions check.
    $context = context_module::instance($cm->id);
    require_capability('moodle/competency:coursecompetencymanage', $context);
    // Kept from the original: loading the competency presumably validates that
    // the id exists before touching the link — TODO confirm persistent
    // constructor behaviour. (A dead, never-used stdClass $record was removed.)
    $competency = new competency($competencyid);
    $link = course_module_competency::get_record(array('cmid' => $cm->id, 'competencyid' => $competencyid));
    if ($link) {
        return $link->delete();
    }
    return false;
}
/**
 * Move the course module competency up or down in the display list.
 *
 * Requires moodle/competency:coursecompetencymanage capability at the course module context.
 *
 * @param mixed $cmorid The course module, or id of the course module
 * @param int $competencyidfrom The id of the competency we are moving.
 * @param int $competencyidto The id of the competency we are moving to.
 * @return boolean
 */
public static function reorder_course_module_competency($cmorid, $competencyidfrom, $competencyidto) {
    static::require_enabled();
    $cm = $cmorid;
    if (!is_object($cmorid)) {
        $cm = get_coursemodule_from_id('', $cmorid, 0, true, MUST_EXIST);
    }
    // Check the user has access to the course module.
    self::validate_course_module($cm);
    // First we do a permissions check.
    $context = context_module::instance($cm->id);
    require_capability('moodle/competency:coursecompetencymanage', $context);
    $down = true;
    // Locate both ends of the move; each must be linked to this module.
    $matches = course_module_competency::get_records(array('cmid' => $cm->id, 'competencyid' => $competencyidfrom));
    if (count($matches) == 0) {
        throw new coding_exception('The link does not exist');
    }
    $competencyfrom = array_pop($matches);
    $matches = course_module_competency::get_records(array('cmid' => $cm->id, 'competencyid' => $competencyidto));
    if (count($matches) == 0) {
        throw new coding_exception('The link does not exist');
    }
    $competencyto = array_pop($matches);
    $all = course_module_competency::get_records(array('cmid' => $cm->id), 'sortorder', 'ASC', 0, 0);
    if ($competencyfrom->get('sortorder') > $competencyto->get('sortorder')) {
        // We are moving up, so put it before the "to" item.
        $down = false;
    }
    // Shift every link between the two positions by one to make room, then
    // drop the moved link into the target slot.
    foreach ($all as $id => $coursemodulecompetency) {
        $sort = $coursemodulecompetency->get('sortorder');
        if ($down && $sort > $competencyfrom->get('sortorder') && $sort <= $competencyto->get('sortorder')) {
            // Moving down: everything in between shifts one step up.
            $coursemodulecompetency->set('sortorder', $coursemodulecompetency->get('sortorder') - 1);
            $coursemodulecompetency->update();
        } else if (!$down && $sort >= $competencyto->get('sortorder') && $sort < $competencyfrom->get('sortorder')) {
            // Moving up: everything in between shifts one step down.
            $coursemodulecompetency->set('sortorder', $coursemodulecompetency->get('sortorder') + 1);
            $coursemodulecompetency->update();
        }
    }
    $competencyfrom->set('sortorder', $competencyto->get('sortorder'));
    return $competencyfrom->update();
}
/**
* Update ruleoutcome value for a course module competency.
*
* @param int|course_module_competency $coursemodulecompetencyorid The course_module_competency, or its ID.
* @param int $ruleoutcome The value of ruleoutcome.
* @return bool True on success.
*/
public static function set_course_module_competency_ruleoutcome($coursemodulecompetencyorid, $ruleoutcome) {
static::require_enabled();
$coursemodulecompetency = $coursemodulecompetencyorid;
if (!is_object($coursemodulecompetency)) {
$coursemodulecompetency = new course_module_competency($coursemodulecompetencyorid);
}
$cm = get_coursemodule_from_id('', $coursemodulecompetency->get('cmid'), 0, true, MUST_EXIST);
self::validate_course_module($cm);
$context = context_module::instance($cm->id);
require_capability('moodle/competency:coursecompetencymanage', $context);
$coursemodulecompetency->set('ruleoutcome', $ruleoutcome);
return $coursemodulecompetency->update();
}
/**
* Add a competency to this course.
*
* @param int $courseid The id of the course
* @param int $competencyid The id of the competency
* @return bool
*/
public static function add_competency_to_course($courseid, $competencyid) {
static::require_enabled();
// Check the user have access to the course.
self::validate_course($courseid);
// First we do a permissions check.
$context = context_course::instance($courseid);
require_capability('moodle/competency:coursecompetencymanage', $context);
$record = new stdClass();
$record->courseid = $courseid;
$record->competencyid = $competencyid;
$competency = new competency($competencyid);
// Can not add a competency that belong to a hidden framework.
if ($competency->get_framework()->get('visible') == false) {
throw new coding_exception('A competency belonging to hidden framework can not be linked to course');
}
$coursecompetency = new course_competency();
$exists = $coursecompetency->get_records(array('courseid' => $courseid, 'competencyid' => $competencyid));
if (!$exists) {
$coursecompetency->from_record($record);
if ($coursecompetency->create()) {
return true;
}
}
return false;
}
/**
* Remove a competency from this course.
*
* @param int $courseid The id of the course
* @param int $competencyid The id of the competency
* @return bool
*/
public static function remove_competency_from_course($courseid, $competencyid) {
static::require_enabled();
// Check the user have access to the course.
self::validate_course($courseid);
// First we do a permissions check.
$context = context_course::instance($courseid);
require_capability('moodle/competency:coursecompetencymanage', $context);
$record = new stdClass();
$record->courseid = $courseid;
$record->competencyid = $competencyid;
$coursecompetency = new course_competency();
$exists = course_competency::get_record(array('courseid' => $courseid, 'competencyid' => $competencyid));
if ($exists) {
// Delete all course_module_competencies for this competency in this course.
$cmcs = course_module_competency::get_records_by_competencyid_in_course($competencyid, $courseid);
foreach ($cmcs as $cmc) {
$cmc->delete();
}
return $exists->delete();
}
return false;
}
    /**
     * Move the course competency up or down in the display list.
     *
     * Requires moodle/competency:coursecompetencymanage capability at the course context.
     *
     * @param int $courseid The course
     * @param int $competencyidfrom The id of the competency we are moving.
     * @param int $competencyidto The id of the competency we are moving to.
     * @return boolean True on success.
     * @throws coding_exception If either competency is not linked to the course.
     */
    public static function reorder_course_competency($courseid, $competencyidfrom, $competencyidto) {
        static::require_enabled();
        // Check the user have access to the course.
        self::validate_course($courseid);
        // First we do a permissions check.
        $context = context_course::instance($courseid);
        require_capability('moodle/competency:coursecompetencymanage', $context);
        $down = true;
        // Resolve both endpoints of the move; each must be an existing link on this course.
        $coursecompetency = new course_competency();
        $matches = $coursecompetency->get_records(array('courseid' => $courseid, 'competencyid' => $competencyidfrom));
        if (count($matches) == 0) {
            throw new coding_exception('The link does not exist');
        }
        $competencyfrom = array_pop($matches);
        $matches = $coursecompetency->get_records(array('courseid' => $courseid, 'competencyid' => $competencyidto));
        if (count($matches) == 0) {
            throw new coding_exception('The link does not exist');
        }
        $competencyto = array_pop($matches);
        $all = $coursecompetency->get_records(array('courseid' => $courseid), 'sortorder', 'ASC', 0, 0);
        if ($competencyfrom->get('sortorder') > $competencyto->get('sortorder')) {
            // We are moving up, so put it before the "to" item.
            $down = false;
        }
        // Shift every link strictly between "from" and "to" one slot towards the vacated
        // position, then drop "from" into the slot previously held by "to".
        foreach ($all as $id => $coursecompetency) {
            $sort = $coursecompetency->get('sortorder');
            if ($down && $sort > $competencyfrom->get('sortorder') && $sort <= $competencyto->get('sortorder')) {
                $coursecompetency->set('sortorder', $coursecompetency->get('sortorder') - 1);
                $coursecompetency->update();
            } else if (!$down && $sort >= $competencyto->get('sortorder') && $sort < $competencyfrom->get('sortorder')) {
                $coursecompetency->set('sortorder', $coursecompetency->get('sortorder') + 1);
                $coursecompetency->update();
            }
        }
        $competencyfrom->set('sortorder', $competencyto->get('sortorder'));
        return $competencyfrom->update();
    }
/**
* Update ruleoutcome value for a course competency.
*
* @param int|course_competency $coursecompetencyorid The course_competency, or its ID.
* @param int $ruleoutcome The value of ruleoutcome.
* @return bool True on success.
*/
public static function set_course_competency_ruleoutcome($coursecompetencyorid, $ruleoutcome) {
static::require_enabled();
$coursecompetency = $coursecompetencyorid;
if (!is_object($coursecompetency)) {
$coursecompetency = new course_competency($coursecompetencyorid);
}
$courseid = $coursecompetency->get('courseid');
self::validate_course($courseid);
$coursecontext = context_course::instance($courseid);
require_capability('moodle/competency:coursecompetencymanage', $coursecontext);
$coursecompetency->set('ruleoutcome', $ruleoutcome);
return $coursecompetency->update();
}
/**
* Create a learning plan template from a record containing all the data for the class.
*
* Requires moodle/competency:templatemanage capability.
*
* @param stdClass $record Record containing all the data for an instance of the class.
* @return template
*/
public static function create_template(stdClass $record) {
static::require_enabled();
$template = new template(0, $record);
// First we do a permissions check.
if (!$template->can_manage()) {
throw new required_capability_exception($template->get_context(), 'moodle/competency:templatemanage',
'nopermissions', '');
}
// OK - all set.
$template = $template->create();
// Trigger a template created event.
\core\event\competency_template_created::create_from_template($template)->trigger();
return $template;
}
    /**
     * Duplicate a learning plan template.
     *
     * Requires moodle/competency:templatemanage capability at the template context.
     *
     * @param int $id the template id.
     * @return template The duplicated template.
     */
    public static function duplicate_template($id) {
        static::require_enabled();
        $template = new template($id);
        // First we do a permissions check.
        if (!$template->can_manage()) {
            throw new required_capability_exception($template->get_context(), 'moodle/competency:templatemanage',
                'nopermissions', '');
        }
        // OK - all set.
        // Capture the competency links of the source before we mutate the instance.
        $competencies = template_competency::list_competencies($id, false);
        // Adding the suffix copy.
        $template->set('shortname', get_string('duplicateditemname', 'core_competency', $template->get('shortname')));
        // Resetting the id makes create() insert a new row rather than update the source.
        $template->set('id', 0);
        $duplicatedtemplate = $template->create();
        // Associate each competency for the duplicated template.
        foreach ($competencies as $competency) {
            self::add_competency_to_template($duplicatedtemplate->get('id'), $competency->get('id'));
        }
        // Trigger a template created event.
        \core\event\competency_template_created::create_from_template($duplicatedtemplate)->trigger();
        return $duplicatedtemplate;
    }
    /**
     * Delete a learning plan template by id.
     * If the learning plan template has associated cohorts they will be deleted.
     *
     * Requires moodle/competency:templatemanage capability.
     *
     * All the deletions run in a single delegated transaction: if any step fails,
     * everything is rolled back and a moodle_exception is raised.
     *
     * @param int $id The record to delete.
     * @param boolean $deleteplans True to delete plans associated to template, false to unlink them.
     * @return boolean True on success.
     */
    public static function delete_template($id, $deleteplans = true) {
        global $DB;
        static::require_enabled();
        $template = new template($id);
        // First we do a permissions check.
        if (!$template->can_manage()) {
            throw new required_capability_exception($template->get_context(), 'moodle/competency:templatemanage',
                'nopermissions', '');
        }
        $transaction = $DB->start_delegated_transaction();
        $success = true;
        // Check if there are cohorts associated.
        $templatecohorts = template_cohort::get_relations_by_templateid($template->get('id'));
        foreach ($templatecohorts as $templatecohort) {
            $success = $templatecohort->delete();
            if (!$success) {
                break;
            }
        }
        // Still OK, delete or unlink the plans from the template.
        if ($success) {
            $plans = plan::get_records(array('templateid' => $template->get('id')));
            foreach ($plans as $plan) {
                $success = $deleteplans ? self::delete_plan($plan->get('id')) : self::unlink_plan_from_template($plan);
                if (!$success) {
                    break;
                }
            }
        }
        // Still OK, delete the template competencies.
        if ($success) {
            $success = template_competency::delete_by_templateid($template->get('id'));
        }
        // OK - all set.
        if ($success) {
            // Create a template deleted event.
            // The event is built before the delete so it can still read the record.
            $event = \core\event\competency_template_deleted::create_from_template($template);
            $success = $template->delete();
        }
        if ($success) {
            // Trigger a template deleted event.
            $event->trigger();
            // Commit the transaction.
            $transaction->allow_commit();
        } else {
            $transaction->rollback(new moodle_exception('Error while deleting the template.'));
        }
        return $success;
    }
/**
* Update the details for a learning plan template.
*
* Requires moodle/competency:templatemanage capability.
*
* @param stdClass $record The new details for the template. Note - must contain an id that points to the template to update.
* @return boolean
*/
public static function update_template($record) {
global $DB;
static::require_enabled();
$template = new template($record->id);
// First we do a permissions check.
if (!$template->can_manage()) {
throw new required_capability_exception($template->get_context(), 'moodle/competency:templatemanage',
'nopermissions', '');
} else if (isset($record->contextid) && $record->contextid != $template->get('contextid')) {
// We can never change the context of a template.
throw new coding_exception('Changing the context of an existing tempalte is forbidden.');
}
$updateplans = false;
$before = $template->to_record();
$template->from_record($record);
$after = $template->to_record();
// Should we update the related plans?
if ($before->duedate != $after->duedate ||
$before->shortname != $after->shortname ||
$before->description != $after->description ||
$before->descriptionformat != $after->descriptionformat) {
$updateplans = true;
}
$transaction = $DB->start_delegated_transaction();
$success = $template->update();
if (!$success) {
$transaction->rollback(new moodle_exception('Error while updating the template.'));
return $success;
}
// Trigger a template updated event.
\core\event\competency_template_updated::create_from_template($template)->trigger();
if ($updateplans) {
plan::update_multiple_from_template($template);
}
$transaction->allow_commit();
return $success;
}
/**
* Read a the details for a single learning plan template and return a record.
*
* Requires moodle/competency:templateview capability at the system context.
*
* @param int $id The id of the template to read.
* @return template
*/
public static function read_template($id) {
static::require_enabled();
$template = new template($id);
$context = $template->get_context();
// First we do a permissions check.
if (!$template->can_read()) {
throw new required_capability_exception($template->get_context(), 'moodle/competency:templateview',
'nopermissions', '');
}
// OK - all set.
return $template;
}
/**
* Perform a search based on the provided filters and return a paginated list of records.
*
* Requires moodle/competency:templateview capability at the system context.
*
* @param string $sort The column to sort on
* @param string $order ('ASC' or 'DESC')
* @param int $skip Number of records to skip (pagination)
* @param int $limit Max of records to return (pagination)
* @param context $context The parent context of the frameworks.
* @param string $includes Defines what other contexts to fetch frameworks from.
* Accepted values are:
* - children: All descendants
* - parents: All parents, grand parents, etc...
* - self: Context passed only.
* @param bool $onlyvisible If should list only visible templates
* @return array of competency_framework
*/
public static function list_templates($sort, $order, $skip, $limit, $context, $includes = 'children', $onlyvisible = false) {
global $DB;
static::require_enabled();
// Get all the relevant contexts.
$contexts = self::get_related_contexts($context, $includes,
array('moodle/competency:templateview', 'moodle/competency:templatemanage'));
// First we do a permissions check.
if (empty($contexts)) {
throw new required_capability_exception($context, 'moodle/competency:templateview', 'nopermissions', '');
}
// Make the order by.
$orderby = '';
if (!empty($sort)) {
$orderby = $sort . ' ' . $order;
}
// OK - all set.
$template = new template();
list($insql, $params) = $DB->get_in_or_equal(array_keys($contexts), SQL_PARAMS_NAMED);
$select = "contextid $insql";
if ($onlyvisible) {
$select .= " AND visible = :visible";
$params['visible'] = 1;
}
return $template->get_records_select($select, $params, $orderby, '*', $skip, $limit);
}
/**
* Perform a search based on the provided filters and return how many results there are.
*
* Requires moodle/competency:templateview capability at the system context.
*
* @param context $context The parent context of the frameworks.
* @param string $includes Defines what other contexts to fetch frameworks from.
* Accepted values are:
* - children: All descendants
* - parents: All parents, grand parents, etc...
* - self: Context passed only.
* @return int
*/
public static function count_templates($context, $includes) {
global $DB;
static::require_enabled();
// First we do a permissions check.
$contexts = self::get_related_contexts($context, $includes,
array('moodle/competency:templateview', 'moodle/competency:templatemanage'));
if (empty($contexts)) {
throw new required_capability_exception($context, 'moodle/competency:templateview', 'nopermissions', '');
}
// OK - all set.
$template = new template();
list($insql, $inparams) = $DB->get_in_or_equal(array_keys($contexts), SQL_PARAMS_NAMED);
return $template->count_records_select("contextid $insql", $inparams);
}
/**
* Count all the templates using a competency.
*
* @param int $competencyid The id of the competency to check.
* @return int
*/
public static function count_templates_using_competency($competencyid) {
static::require_enabled();
// First we do a permissions check.
$context = context_system::instance();
$onlyvisible = 1;
$capabilities = array('moodle/competency:templateview', 'moodle/competency:templatemanage');
if (!has_any_capability($capabilities, $context)) {
throw new required_capability_exception($context, 'moodle/competency:templateview', 'nopermissions', '');
}
if (has_capability('moodle/competency:templatemanage', $context)) {
$onlyvisible = 0;
}
// OK - all set.
return template_competency::count_templates($competencyid, $onlyvisible);
}
/**
* List all the learning plan templatesd using a competency.
*
* @param int $competencyid The id of the competency to check.
* @return array[stdClass] Array of stdClass containing id and shortname.
*/
public static function list_templates_using_competency($competencyid) {
static::require_enabled();
// First we do a permissions check.
$context = context_system::instance();
$onlyvisible = 1;
$capabilities = array('moodle/competency:templateview', 'moodle/competency:templatemanage');
if (!has_any_capability($capabilities, $context)) {
throw new required_capability_exception($context, 'moodle/competency:templateview', 'nopermissions', '');
}
if (has_capability('moodle/competency:templatemanage', $context)) {
$onlyvisible = 0;
}
// OK - all set.
return template_competency::list_templates($competencyid, $onlyvisible);
}
/**
* Count all the competencies in a learning plan template.
*
* @param template|int $templateorid The template or its ID.
* @return int
*/
public static function count_competencies_in_template($templateorid) {
static::require_enabled();
// First we do a permissions check.
$template = $templateorid;
if (!is_object($template)) {
$template = new template($template);
}
if (!$template->can_read()) {
throw new required_capability_exception($template->get_context(), 'moodle/competency:templateview',
'nopermissions', '');
}
// OK - all set.
return template_competency::count_competencies($template->get('id'));
}
    /**
     * Count all the competencies in a learning plan template with no linked courses.
     *
     * @param template|int $templateorid The template or its ID.
     * @return int
     */
    public static function count_competencies_in_template_with_no_courses($templateorid) {
        // NOTE(review): unlike the sibling API methods, this one does not call
        // static::require_enabled() — confirm whether that omission is intentional.
        // First we do a permissions check.
        $template = $templateorid;
        if (!is_object($template)) {
            $template = new template($template);
        }
        if (!$template->can_read()) {
            throw new required_capability_exception($template->get_context(), 'moodle/competency:templateview',
                'nopermissions', '');
        }
        // OK - all set.
        return template_competency::count_competencies_with_no_courses($template->get('id'));
    }
/**
* List all the competencies in a template.
*
* @param template|int $templateorid The template or its ID.
* @return array of competencies
*/
public static function list_competencies_in_template($templateorid) {
static::require_enabled();
// First we do a permissions check.
$template = $templateorid;
if (!is_object($template)) {
$template = new template($template);
}
if (!$template->can_read()) {
throw new required_capability_exception($template->get_context(), 'moodle/competency:templateview',
'nopermissions', '');
}
// OK - all set.
return template_competency::list_competencies($template->get('id'));
}
/**
* Add a competency to this template.
*
* @param int $templateid The id of the template
* @param int $competencyid The id of the competency
* @return bool
*/
public static function add_competency_to_template($templateid, $competencyid) {
static::require_enabled();
// First we do a permissions check.
$template = new template($templateid);
if (!$template->can_manage()) {
throw new required_capability_exception($template->get_context(), 'moodle/competency:templatemanage',
'nopermissions', '');
}
$record = new stdClass();
$record->templateid = $templateid;
$record->competencyid = $competencyid;
$competency = new competency($competencyid);
// Can not add a competency that belong to a hidden framework.
if ($competency->get_framework()->get('visible') == false) {
throw new coding_exception('A competency belonging to hidden framework can not be added');
}
$exists = template_competency::get_records(array('templateid' => $templateid, 'competencyid' => $competencyid));
if (!$exists) {
$templatecompetency = new template_competency(0, $record);
$templatecompetency->create();
return true;
}
return false;
}
/**
* Remove a competency from this template.
*
* @param int $templateid The id of the template
* @param int $competencyid The id of the competency
* @return bool
*/
public static function remove_competency_from_template($templateid, $competencyid) {
static::require_enabled();
// First we do a permissions check.
$template = new template($templateid);
if (!$template->can_manage()) {
throw new required_capability_exception($template->get_context(), 'moodle/competency:templatemanage',
'nopermissions', '');
}
$record = new stdClass();
$record->templateid = $templateid;
$record->competencyid = $competencyid;
$competency = new competency($competencyid);
$exists = template_competency::get_records(array('templateid' => $templateid, 'competencyid' => $competencyid));
if ($exists) {
$link = array_pop($exists);
return $link->delete();
}
return false;
}
    /**
     * Move the template competency up or down in the display list.
     *
     * Requires moodle/competency:templatemanage capability at the system context.
     *
     * @param int $templateid The template id
     * @param int $competencyidfrom The id of the competency we are moving.
     * @param int $competencyidto The id of the competency we are moving to.
     * @return boolean True on success.
     * @throws coding_exception If either competency is not linked to the template.
     */
    public static function reorder_template_competency($templateid, $competencyidfrom, $competencyidto) {
        static::require_enabled();
        $template = new template($templateid);
        // First we do a permissions check.
        if (!$template->can_manage()) {
            throw new required_capability_exception($template->get_context(), 'moodle/competency:templatemanage',
                'nopermissions', '');
        }
        $down = true;
        // Resolve both endpoints of the move; each must be an existing link on this template.
        $matches = template_competency::get_records(array('templateid' => $templateid, 'competencyid' => $competencyidfrom));
        if (count($matches) == 0) {
            throw new coding_exception('The link does not exist');
        }
        $competencyfrom = array_pop($matches);
        $matches = template_competency::get_records(array('templateid' => $templateid, 'competencyid' => $competencyidto));
        if (count($matches) == 0) {
            throw new coding_exception('The link does not exist');
        }
        $competencyto = array_pop($matches);
        $all = template_competency::get_records(array('templateid' => $templateid), 'sortorder', 'ASC', 0, 0);
        if ($competencyfrom->get('sortorder') > $competencyto->get('sortorder')) {
            // We are moving up, so put it before the "to" item.
            $down = false;
        }
        // Shift every link strictly between "from" and "to" one slot towards the vacated
        // position, then drop "from" into the slot previously held by "to".
        foreach ($all as $id => $templatecompetency) {
            $sort = $templatecompetency->get('sortorder');
            if ($down && $sort > $competencyfrom->get('sortorder') && $sort <= $competencyto->get('sortorder')) {
                $templatecompetency->set('sortorder', $templatecompetency->get('sortorder') - 1);
                $templatecompetency->update();
            } else if (!$down && $sort >= $competencyto->get('sortorder') && $sort < $competencyfrom->get('sortorder')) {
                $templatecompetency->set('sortorder', $templatecompetency->get('sortorder') + 1);
                $templatecompetency->update();
            }
        }
        $competencyfrom->set('sortorder', $competencyto->get('sortorder'));
        return $competencyfrom->update();
    }
    /**
     * Create a relation between a template and a cohort.
     *
     * This silently ignores when the relation already existed.
     *
     * @param template|int $templateorid The template or its ID.
     * @param stdClass|int $cohortorid The cohort ot its ID.
     * @return template_cohort The relation, existing or newly created.
     */
    public static function create_template_cohort($templateorid, $cohortorid) {
        global $DB;
        static::require_enabled();
        // Accept either a template instance or its id.
        $template = $templateorid;
        if (!is_object($template)) {
            $template = new template($template);
        }
        require_capability('moodle/competency:templatemanage', $template->get_context());
        // Accept either a cohort record or its id.
        $cohort = $cohortorid;
        if (!is_object($cohort)) {
            $cohort = $DB->get_record('cohort', array('id' => $cohort), '*', MUST_EXIST);
        }
        // Replicate logic in cohort_can_view_cohort() because we can't use it directly as we don't have a course context.
        $cohortcontext = context::instance_by_id($cohort->contextid);
        if (!$cohort->visible && !has_capability('moodle/cohort:view', $cohortcontext)) {
            throw new required_capability_exception($cohortcontext, 'moodle/cohort:view', 'nopermissions', '');
        }
        // Only persist the relation when it does not exist yet (id of 0 means unsaved).
        $tplcohort = template_cohort::get_relation($template->get('id'), $cohort->id);
        if (!$tplcohort->get('id')) {
            $tplcohort->create();
        }
        return $tplcohort;
    }
/**
* Remove a relation between a template and a cohort.
*
* @param template|int $templateorid The template or its ID.
* @param stdClass|int $cohortorid The cohort ot its ID.
* @return boolean True on success or when the relation did not exist.
*/
public static function delete_template_cohort($templateorid, $cohortorid) {
global $DB;
static::require_enabled();
$template = $templateorid;
if (!is_object($template)) {
$template = new template($template);
}
require_capability('moodle/competency:templatemanage', $template->get_context());
$cohort = $cohortorid;
if (!is_object($cohort)) {
$cohort = $DB->get_record('cohort', array('id' => $cohort), '*', MUST_EXIST);
}
$tplcohort = template_cohort::get_relation($template->get('id'), $cohort->id);
if (!$tplcohort->get('id')) {
return true;
}
return $tplcohort->delete();
}
/**
* Lists user plans.
*
* @param int $userid
* @return \core_competency\plan[]
*/
public static function list_user_plans($userid) {
global $DB, $USER;
static::require_enabled();
$select = 'userid = :userid';
$params = array('userid' => $userid);
$context = context_user::instance($userid);
// Check that we can read something here.
if (!plan::can_read_user($userid) && !plan::can_read_user_draft($userid)) {
throw new required_capability_exception($context, 'moodle/competency:planview', 'nopermissions', '');
}
// The user cannot view the drafts.
if (!plan::can_read_user_draft($userid)) {
list($insql, $inparams) = $DB->get_in_or_equal(plan::get_draft_statuses(), SQL_PARAMS_NAMED, 'param', false);
$select .= " AND status $insql";
$params += $inparams;
}
// The user cannot view the non-drafts.
if (!plan::can_read_user($userid)) {
list($insql, $inparams) = $DB->get_in_or_equal(array(plan::STATUS_ACTIVE, plan::STATUS_COMPLETE),
SQL_PARAMS_NAMED, 'param', false);
$select .= " AND status $insql";
$params += $inparams;
}
return plan::get_records_select($select, $params, 'name ASC');
}
    /**
     * List the plans to review.
     *
     * The method returns values in this format:
     *
     * array(
     *     'plans' => array(
     *         (stdClass)(
     *             'plan' => (plan),
     *             'template' => (template),
     *             'owner' => (stdClass)
     *         )
     *     ),
     *     'count' => (int)
     * )
     *
     * @param int $skip The number of records to skip.
     * @param int $limit The number of results to return.
     * @param int $userid The user we're getting the plans to review for.
     * @return array Containing the keys 'count', and 'plans'. The 'plans' key contains an object
     *               which contains 'plan', 'template' and 'owner'.
     */
    public static function list_plans_to_review($skip = 0, $limit = 100, $userid = null) {
        global $DB, $USER;
        static::require_enabled();
        // Default to the current user.
        if ($userid === null) {
            $userid = $USER->id;
        }
        // Build the SELECT list: plan, template and owner columns, each with a
        // distinct prefix so they can be extracted from the joined row later.
        $planfields = plan::get_sql_fields('p', 'plan_');
        $tplfields = template::get_sql_fields('t', 'tpl_');
        $usercols = array('id') + get_user_fieldnames();
        $userfields = array();
        foreach ($usercols as $field) {
            $userfields[] = "u." . $field . " AS usr_" . $field;
        }
        $userfields = implode(',', $userfields);
        $select = "SELECT $planfields, $tplfields, $userfields";
        $countselect = "SELECT COUNT('x')";
        // Plans to review: waiting for review, or in review by this reviewer, and
        // never the reviewer's own plans.
        $sql = " FROM {" . plan::TABLE . "} p
                 JOIN {user} u
                   ON u.id = p.userid
            LEFT JOIN {" . template::TABLE . "} t
                   ON t.id = p.templateid
                WHERE (p.status = :waitingforreview
                   OR (p.status = :inreview AND p.reviewerid = :reviewerid))
                  AND p.userid != :userid";
        $params = array(
            'waitingforreview' => plan::STATUS_WAITING_FOR_REVIEW,
            'inreview' => plan::STATUS_IN_REVIEW,
            'reviewerid' => $userid,
            'userid' => $userid
        );
        // Primary check to avoid the hard work of getting the users in which the user has permission.
        $count = $DB->count_records_sql($countselect . $sql, $params);
        if ($count < 1) {
            return array('count' => 0, 'plans' => array());
        }
        // TODO MDL-52243 Use core function.
        // Restrict to plan owners on whom the reviewer holds the planreview capability.
        list($insql, $inparams) = self::filter_users_with_capability_on_user_context_sql('moodle/competency:planreview',
            $userid, SQL_PARAMS_NAMED);
        $sql .= " AND p.userid $insql";
        $params += $inparams;
        // Order by ID just to have some ordering in place.
        $ordersql = " ORDER BY p.id ASC";
        $plans = array();
        // Hydrate the persistents from the prefixed columns of each joined row.
        $records = $DB->get_recordset_sql($select . $sql . $ordersql, $params, $skip, $limit);
        foreach ($records as $record) {
            $plan = new plan(0, plan::extract_record($record, 'plan_'));
            $template = null;
            if ($plan->is_based_on_template()) {
                $template = new template(0, template::extract_record($record, 'tpl_'));
            }
            $plans[] = (object) array(
                'plan' => $plan,
                'template' => $template,
                'owner' => persistent::extract_record($record, 'usr_'),
            );
        }
        $records->close();
        return array(
            'count' => $DB->count_records_sql($countselect . $sql, $params),
            'plans' => $plans
        );
    }
/**
* Creates a learning plan based on the provided data.
*
* @param stdClass $record
* @return \core_competency\plan
*/
public static function create_plan(stdClass $record) {
global $USER;
static::require_enabled();
$plan = new plan(0, $record);
if ($plan->is_based_on_template()) {
throw new coding_exception('To create a plan from a template use api::create_plan_from_template().');
} else if ($plan->get('status') == plan::STATUS_COMPLETE) {
throw new coding_exception('A plan cannot be created as complete.');
}
if (!$plan->can_manage()) {
$context = context_user::instance($plan->get('userid'));
throw new required_capability_exception($context, 'moodle/competency:planmanage', 'nopermissions', '');
}
$plan->create();
// Trigger created event.
\core\event\competency_plan_created::create_from_plan($plan)->trigger();
return $plan;
}
    /**
     * Create a learning plan from a template.
     *
     * @param mixed $templateorid The template object or ID.
     * @param int $userid The id of the user the plan is created for.
     * @return false|\core_competency\plan Returns false when the plan already exists.
     * @throws required_capability_exception If the template cannot be read, or the plan cannot be managed.
     * @throws coding_exception If the template is hidden.
     */
    public static function create_plan_from_template($templateorid, $userid) {
        static::require_enabled();
        // Accept either a template instance or its id.
        $template = $templateorid;
        if (!is_object($template)) {
            $template = new template($template);
        }
        // The user must be able to view the template to use it as a base for a plan.
        if (!$template->can_read()) {
            throw new required_capability_exception($template->get_context(), 'moodle/competency:templateview',
                'nopermissions', '');
        }
        // Can not create plan from a hidden template.
        if ($template->get('visible') == false) {
            throw new coding_exception('A plan can not be created from a hidden template');
        }
        // Convert the template to a plan.
        $record = $template->to_record();
        $record->templateid = $record->id;
        $record->userid = $userid;
        $record->name = $record->shortname;
        $record->status = plan::STATUS_ACTIVE;
        // Drop the template's own metadata so the plan is created as a fresh record.
        unset($record->id);
        unset($record->timecreated);
        unset($record->timemodified);
        unset($record->usermodified);
        // Remove extra keys.
        // Only keys that exist in the plan persistent definition may be kept.
        $properties = plan::properties_definition();
        foreach ($record as $key => $value) {
            if (!array_key_exists($key, $properties)) {
                unset($record->$key);
            }
        }
        $plan = new plan(0, $record);
        if (!$plan->can_manage()) {
            throw new required_capability_exception($plan->get_context(), 'moodle/competency:planmanage',
                'nopermissions', '');
        }
        // We first apply the permission checks as we wouldn't want to leak information by returning early that
        // the plan already exists.
        if (plan::record_exists_select('templateid = :templateid AND userid = :userid', array(
                'templateid' => $template->get('id'), 'userid' => $userid))) {
            return false;
        }
        $plan->create();
        // Trigger created event.
        \core\event\competency_plan_created::create_from_plan($plan)->trigger();
        return $plan;
    }
/**
 * Create learning plans from a template and cohort.
 *
 * Cohort members whose plans cannot be managed by the current user are
 * silently skipped; the returned count only includes plans actually created.
 *
 * @param mixed $templateorid The template object or ID.
 * @param int $cohortid The cohort ID.
 * @param bool $recreateunlinked When true the plans that were unlinked from this template will be re-created.
 * @return int The number of plans created.
 * @throws required_capability_exception When the template or the cohort cannot be viewed.
 * @throws coding_exception When the template is hidden.
 */
public static function create_plans_from_template_cohort($templateorid, $cohortid, $recreateunlinked = false) {
    global $DB, $CFG;
    static::require_enabled();
    require_once($CFG->dirroot . '/cohort/lib.php');
    $template = $templateorid;
    if (!is_object($template)) {
        $template = new template($template);
    }
    // The user must be able to view the template to use it as a base for a plan.
    if (!$template->can_read()) {
        throw new required_capability_exception($template->get_context(), 'moodle/competency:templateview',
            'nopermissions', '');
    }
    // Can not create plan from a hidden template.
    if ($template->get('visible') == false) {
        throw new coding_exception('A plan can not be created from a hidden template');
    }
    // Replicate logic in cohort_can_view_cohort() because we can't use it directly as we don't have a course context.
    $cohort = $DB->get_record('cohort', array('id' => $cohortid), '*', MUST_EXIST);
    $cohortcontext = context::instance_by_id($cohort->contextid);
    if (!$cohort->visible && !has_capability('moodle/cohort:view', $cohortcontext)) {
        throw new required_capability_exception($cohortcontext, 'moodle/cohort:view', 'nopermissions', '');
    }
    // Convert the template to a plan.
    $recordbase = $template->to_record();
    $recordbase->templateid = $recordbase->id;
    $recordbase->name = $recordbase->shortname;
    $recordbase->status = plan::STATUS_ACTIVE;
    // Drop fields that belong to the template record, not to the new plans.
    unset($recordbase->id);
    unset($recordbase->timecreated);
    unset($recordbase->timemodified);
    unset($recordbase->usermodified);
    // Remove extra keys.
    $properties = plan::properties_definition();
    foreach ($recordbase as $key => $value) {
        if (!array_key_exists($key, $properties)) {
            unset($recordbase->$key);
        }
    }
    // Create the plans.
    $created = 0;
    $userids = template_cohort::get_missing_plans($template->get('id'), $cohortid, $recreateunlinked);
    foreach ($userids as $userid) {
        // Shallow clone of the base record so each iteration starts from a clean copy.
        $record = (object) (array) $recordbase;
        $record->userid = $userid;
        $plan = new plan(0, $record);
        if (!$plan->can_manage()) {
            // Silently skip members where permissions are lacking.
            continue;
        }
        $plan->create();
        // Trigger created event.
        \core\event\competency_plan_created::create_from_plan($plan)->trigger();
        $created++;
    }
    return $created;
}
/**
 * Unlink a plan from its template.
 *
 * The template competencies are copied into the plan before the link is
 * removed, so the plan keeps its content. The whole operation runs in a
 * DB transaction.
 *
 * @param \core_competency\plan|int $planorid The plan or its ID.
 * @return bool
 * @throws required_capability_exception When the plan cannot be managed.
 * @throws coding_exception When the plan is complete.
 */
public static function unlink_plan_from_template($planorid) {
    global $DB;
    static::require_enabled();
    $plan = $planorid;
    if (!is_object($planorid)) {
        $plan = new plan($planorid);
    }
    // The user must be allowed to manage the plans of the user, nothing about the template.
    if (!$plan->can_manage()) {
        throw new required_capability_exception($plan->get_context(), 'moodle/competency:planmanage', 'nopermissions', '');
    }
    // Only plans with status DRAFT or ACTIVE can be unlinked.
    if ($plan->get('status') == plan::STATUS_COMPLETE) {
        throw new coding_exception('Only draft or active plan can be unlinked from a template');
    }
    // Early exit, it's already done...
    if (!$plan->is_based_on_template()) {
        return true;
    }
    // Fetch the template.
    $template = new template($plan->get('templateid'));
    // Now, proceed by copying all competencies to the plan, then update the plan.
    $transaction = $DB->start_delegated_transaction();
    $competencies = template_competency::list_competencies($template->get('id'), false);
    $i = 0;
    foreach ($competencies as $competency) {
        $record = (object) array(
            'planid' => $plan->get('id'),
            'competencyid' => $competency->get('id'),
            'sortorder' => $i++
        );
        $pc = new plan_competency(null, $record);
        $pc->create();
    }
    // Keep a reference to the original template, then detach the plan from it.
    $plan->set('origtemplateid', $template->get('id'));
    $plan->set('templateid', null);
    $success = $plan->update();
    $transaction->allow_commit();
    // Trigger unlinked event.
    \core\event\competency_plan_unlinked::create_from_plan($plan)->trigger();
    return $success;
}
/**
 * Updates a plan.
 *
 * @param stdClass $record The plan data, must contain the plan ID.
 * @return \core_competency\plan
 * @throws required_capability_exception When the plan cannot be managed.
 * @throws coding_exception When the plan is complete or template-based, or the record
 *                          attempts to change the template, the owner, or the status.
 */
public static function update_plan(stdClass $record) {
    static::require_enabled();
    $plan = new plan($record->id);
    // Validate that the plan as it is can be managed.
    if (!$plan->can_manage()) {
        throw new required_capability_exception($plan->get_context(), 'moodle/competency:planmanage', 'nopermissions', '');
    } else if ($plan->get('status') == plan::STATUS_COMPLETE) {
        // A completed plan cannot be edited.
        throw new coding_exception('Completed plan cannot be edited.');
    } else if ($plan->is_based_on_template()) {
        // Prevent a plan based on a template to be edited.
        throw new coding_exception('Cannot update a plan that is based on a template.');
    } else if (isset($record->templateid) && $plan->get('templateid') != $record->templateid) {
        // Prevent a plan to be based on a template.
        throw new coding_exception('Cannot base a plan on a template.');
    } else if (isset($record->userid) && $plan->get('userid') != $record->userid) {
        // Prevent change of ownership as the capabilities are checked against that.
        throw new coding_exception('A plan cannot be transferred to another user');
    } else if (isset($record->status) && $plan->get('status') != $record->status) {
        // Prevent change of status.
        throw new coding_exception('To change the status of a plan use the appropriate methods.');
    }
    $plan->from_record($record);
    $plan->update();
    // Trigger updated event.
    \core\event\competency_plan_updated::create_from_plan($plan)->trigger();
    return $plan;
}
/**
 * Fetch a single learning plan, enforcing read permissions.
 *
 * @param int $id The plan ID.
 * @return \core_competency\plan
 * @throws required_capability_exception When the current user may not view the plan.
 */
public static function read_plan($id) {
    static::require_enabled();
    $planinstance = new plan($id);
    if ($planinstance->can_read()) {
        return $planinstance;
    }
    $usercontext = context_user::instance($planinstance->get('userid'));
    throw new required_capability_exception($usercontext, 'moodle/competency:planview', 'nopermissions', '');
}
/**
 * Log that a plan was viewed.
 *
 * @param mixed $planorid The id or the plan.
 * @return boolean Always true (throws on permission failure).
 * @throws required_capability_exception When the current user may not view the plan.
 */
public static function plan_viewed($planorid) {
    static::require_enabled();
    $planinstance = is_object($planorid) ? $planorid : new plan($planorid);
    // Permission check before logging anything.
    if (!$planinstance->can_read()) {
        $usercontext = context_user::instance($planinstance->get('userid'));
        throw new required_capability_exception($usercontext, 'moodle/competency:planview', 'nopermissions', '');
    }
    // Trigger a template viewed event.
    \core\event\competency_plan_viewed::create_from_plan($planinstance)->trigger();
    return true;
}
/**
 * Deletes a plan.
 *
 * Plans based on a template can be removed just like any other one.
 *
 * The plan competencies (and, for completed plans, the archived user
 * competencies) are removed in the same DB transaction.
 *
 * @param int $id
 * @return bool Success?
 * @throws required_capability_exception When the plan cannot be managed.
 */
public static function delete_plan($id) {
    global $DB;
    static::require_enabled();
    $plan = new plan($id);
    if (!$plan->can_manage()) {
        $context = context_user::instance($plan->get('userid'));
        throw new required_capability_exception($context, 'moodle/competency:planmanage', 'nopermissions', '');
    }
    // Wrap the suppression in a DB transaction.
    $transaction = $DB->start_delegated_transaction();
    // Delete plan competencies.
    $plancomps = plan_competency::get_records(array('planid' => $plan->get('id')));
    foreach ($plancomps as $plancomp) {
        $plancomp->delete();
    }
    // Delete archive user competencies if the status of the plan is complete.
    if ($plan->get('status') == plan::STATUS_COMPLETE) {
        self::remove_archived_user_competencies_in_plan($plan);
    }
    // Build the event before deleting, while the plan data is still available.
    $event = \core\event\competency_plan_deleted::create_from_plan($plan);
    $success = $plan->delete();
    $transaction->allow_commit();
    // Trigger deleted event.
    $event->trigger();
    return $success;
}
/**
 * Cancel the review of a plan, sending it back to draft.
 *
 * @param int|plan $planorid The plan, or its ID.
 * @return bool True when the plan was updated.
 * @throws required_capability_exception When the plan cannot be viewed or the review cannot be requested.
 * @throws coding_exception When the plan is template-based or not waiting for review.
 */
public static function plan_cancel_review_request($planorid) {
    static::require_enabled();
    $planobj = is_object($planorid) ? $planorid : new plan($planorid);
    // Viewing the plan is the minimum requirement.
    if (!$planobj->can_read()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planview', 'nopermissions', '');
    }
    if ($planobj->is_based_on_template()) {
        throw new coding_exception('Template plans cannot be reviewed.'); // This should never happen.
    }
    if ($planobj->get('status') != plan::STATUS_WAITING_FOR_REVIEW) {
        throw new coding_exception('The plan review cannot be cancelled at this stage.');
    }
    if (!$planobj->can_request_review()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planmanage', 'nopermissions', '');
    }
    $planobj->set('status', plan::STATUS_DRAFT);
    $updated = $planobj->update();
    // Trigger review request cancelled event.
    \core\event\competency_plan_review_request_cancelled::create_from_plan($planobj)->trigger();
    return $updated;
}
/**
 * Request the review of a plan, moving it from draft to waiting for review.
 *
 * @param int|plan $planorid The plan, or its ID.
 * @return bool True when the plan was updated.
 * @throws required_capability_exception When the plan cannot be viewed or the review cannot be requested.
 * @throws coding_exception When the plan is template-based or not a draft.
 */
public static function plan_request_review($planorid) {
    static::require_enabled();
    $planobj = is_object($planorid) ? $planorid : new plan($planorid);
    // Viewing the plan is the minimum requirement.
    if (!$planobj->can_read()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planview', 'nopermissions', '');
    }
    if ($planobj->is_based_on_template()) {
        throw new coding_exception('Template plans cannot be reviewed.'); // This should never happen.
    }
    if ($planobj->get('status') != plan::STATUS_DRAFT) {
        throw new coding_exception('The plan cannot be sent for review at this stage.');
    }
    if (!$planobj->can_request_review()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planmanage', 'nopermissions', '');
    }
    $planobj->set('status', plan::STATUS_WAITING_FOR_REVIEW);
    $updated = $planobj->update();
    // Trigger review requested event.
    \core\event\competency_plan_review_requested::create_from_plan($planobj)->trigger();
    return $updated;
}
/**
 * Start the review of a plan, recording the current user as the reviewer.
 *
 * @param int|plan $planorid The plan, or its ID.
 * @return bool True when the plan was updated.
 * @throws required_capability_exception When the plan cannot be viewed or reviewed.
 * @throws coding_exception When the plan is template-based or not waiting for review.
 */
public static function plan_start_review($planorid) {
    global $USER;
    static::require_enabled();
    $planobj = is_object($planorid) ? $planorid : new plan($planorid);
    // Viewing the plan is the minimum requirement.
    if (!$planobj->can_read()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planview', 'nopermissions', '');
    }
    if ($planobj->is_based_on_template()) {
        throw new coding_exception('Template plans cannot be reviewed.'); // This should never happen.
    }
    if ($planobj->get('status') != plan::STATUS_WAITING_FOR_REVIEW) {
        throw new coding_exception('The plan review cannot be started at this stage.');
    }
    if (!$planobj->can_review()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planmanage', 'nopermissions', '');
    }
    $planobj->set('status', plan::STATUS_IN_REVIEW);
    $planobj->set('reviewerid', $USER->id);
    $updated = $planobj->update();
    // Trigger review started event.
    \core\event\competency_plan_review_started::create_from_plan($planobj)->trigger();
    return $updated;
}
/**
 * Stop reviewing a plan, sending it back to draft and clearing the reviewer.
 *
 * @param int|plan $planorid The plan, or its ID.
 * @return bool True when the plan was updated.
 * @throws required_capability_exception When the plan cannot be viewed or reviewed.
 * @throws coding_exception When the plan is template-based or not in review.
 */
public static function plan_stop_review($planorid) {
    static::require_enabled();
    $planobj = is_object($planorid) ? $planorid : new plan($planorid);
    // Viewing the plan is the minimum requirement.
    if (!$planobj->can_read()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planview', 'nopermissions', '');
    }
    if ($planobj->is_based_on_template()) {
        throw new coding_exception('Template plans cannot be reviewed.'); // This should never happen.
    }
    if ($planobj->get('status') != plan::STATUS_IN_REVIEW) {
        throw new coding_exception('The plan review cannot be stopped at this stage.');
    }
    if (!$planobj->can_review()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planmanage', 'nopermissions', '');
    }
    $planobj->set('status', plan::STATUS_DRAFT);
    $planobj->set('reviewerid', null);
    $updated = $planobj->update();
    // Trigger review stopped event.
    \core\event\competency_plan_review_stopped::create_from_plan($planobj)->trigger();
    return $updated;
}
/**
 * Approve a plan.
 *
 * This makes the plan active. A plan can be approved from any of the
 * draft-like statuses (draft, waiting for review, in review).
 *
 * @param int|plan $planorid The plan, or its ID.
 * @return bool True when the plan was updated.
 * @throws required_capability_exception When the plan cannot be viewed or reviewed.
 * @throws coding_exception When the plan is template-based or not in a draft status.
 */
public static function approve_plan($planorid) {
    static::require_enabled();
    $planobj = is_object($planorid) ? $planorid : new plan($planorid);
    // Viewing the plan is the minimum requirement.
    if (!$planobj->can_read()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planview', 'nopermissions', '');
    }
    // We can approve a plan that is either a draft, in review, or waiting for review.
    if ($planobj->is_based_on_template()) {
        throw new coding_exception('Template plans are already approved.'); // This should never happen.
    }
    if (!$planobj->is_draft()) {
        throw new coding_exception('The plan cannot be approved at this stage.');
    }
    if (!$planobj->can_review()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planmanage', 'nopermissions', '');
    }
    $planobj->set('status', plan::STATUS_ACTIVE);
    $planobj->set('reviewerid', null);
    $updated = $planobj->update();
    // Trigger approved event.
    \core\event\competency_plan_approved::create_from_plan($planobj)->trigger();
    return $updated;
}
/**
 * Unapprove a plan.
 *
 * This sends an active plan back to draft.
 *
 * @param int|plan $planorid The plan, or its ID.
 * @return bool True when the plan was updated.
 * @throws required_capability_exception When the plan cannot be viewed or reviewed.
 * @throws coding_exception When the plan is template-based or not active.
 */
public static function unapprove_plan($planorid) {
    static::require_enabled();
    $planobj = is_object($planorid) ? $planorid : new plan($planorid);
    // Viewing the plan is the minimum requirement.
    if (!$planobj->can_read()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planview', 'nopermissions', '');
    }
    if ($planobj->is_based_on_template()) {
        throw new coding_exception('Template plans are always approved.'); // This should never happen.
    }
    if ($planobj->get('status') != plan::STATUS_ACTIVE) {
        throw new coding_exception('The plan cannot be sent back to draft at this stage.');
    }
    if (!$planobj->can_review()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planmanage', 'nopermissions', '');
    }
    $planobj->set('status', plan::STATUS_DRAFT);
    $updated = $planobj->update();
    // Trigger unapproved event.
    \core\event\competency_plan_unapproved::create_from_plan($planobj)->trigger();
    return $updated;
}
/**
 * Complete a plan.
 *
 * Archives the plan's user competencies and moves the plan to the
 * COMPLETE status inside a DB transaction.
 *
 * @param int|plan $planorid The plan, or its ID.
 * @return bool
 * @throws required_capability_exception When the plan cannot be managed, before or after completion.
 * @throws coding_exception When the plan is already complete.
 */
public static function complete_plan($planorid) {
    global $DB;
    static::require_enabled();
    $plan = $planorid;
    if (!is_object($planorid)) {
        $plan = new plan($planorid);
    }
    // Validate that the plan can be managed.
    if (!$plan->can_manage()) {
        throw new required_capability_exception($plan->get_context(), 'moodle/competency:planmanage', 'nopermissions', '');
    }
    // Check if the plan was already completed.
    if ($plan->get('status') == plan::STATUS_COMPLETE) {
        throw new coding_exception('The plan is already completed.');
    }
    $originalstatus = $plan->get('status');
    // Temporarily flip to COMPLETE so can_manage() is evaluated against the target status.
    $plan->set('status', plan::STATUS_COMPLETE);
    // The user should also be able to manage the plan when it's completed.
    if (!$plan->can_manage()) {
        throw new required_capability_exception($plan->get_context(), 'moodle/competency:planmanage', 'nopermissions', '');
    }
    // Put back original status because archive needs it to extract competencies from the right table.
    $plan->set('status', $originalstatus);
    // Do the things.
    $transaction = $DB->start_delegated_transaction();
    self::archive_user_competencies_in_plan($plan);
    $plan->set('status', plan::STATUS_COMPLETE);
    $success = $plan->update();
    if (!$success) {
        $transaction->rollback(new moodle_exception('The plan could not be updated.'));
        return $success;
    }
    $transaction->allow_commit();
    // Trigger updated event.
    \core\event\competency_plan_completed::create_from_plan($plan)->trigger();
    return $success;
}
/**
 * Reopen a plan.
 *
 * Moves a plan back to the ACTIVE status. When the plan was complete, its
 * archived user competencies are removed; a past (or nearly due) due date
 * is cleared.
 *
 * @param int|plan $planorid The plan, or its ID.
 * @return bool
 * @throws required_capability_exception When the plan cannot be managed, before or after the status change.
 */
public static function reopen_plan($planorid) {
    global $DB;
    static::require_enabled();
    $plan = $planorid;
    if (!is_object($planorid)) {
        $plan = new plan($planorid);
    }
    // Validate that the plan as it is can be managed.
    if (!$plan->can_manage()) {
        $context = context_user::instance($plan->get('userid'));
        throw new required_capability_exception($context, 'moodle/competency:planmanage', 'nopermissions', '');
    }
    $beforestatus = $plan->get('status');
    $plan->set('status', plan::STATUS_ACTIVE);
    // Validate if status can be changed.
    if (!$plan->can_manage()) {
        $context = context_user::instance($plan->get('userid'));
        throw new required_capability_exception($context, 'moodle/competency:planmanage', 'nopermissions', '');
    }
    // Wrap the updates in a DB transaction.
    $transaction = $DB->start_delegated_transaction();
    // Delete archived user competencies if the status of the plan is changed from complete to another status.
    $mustremovearchivedcompetencies = ($beforestatus == plan::STATUS_COMPLETE && $plan->get('status') != plan::STATUS_COMPLETE);
    if ($mustremovearchivedcompetencies) {
        self::remove_archived_user_competencies_in_plan($plan);
    }
    // If duedate less than or equal to duedate_threshold unset it.
    if ($plan->get('duedate') <= time() + plan::DUEDATE_THRESHOLD) {
        $plan->set('duedate', 0);
    }
    $success = $plan->update();
    if (!$success) {
        $transaction->rollback(new moodle_exception('The plan could not be updated.'));
        return $success;
    }
    $transaction->allow_commit();
    // Trigger reopened event.
    \core\event\competency_plan_reopened::create_from_plan($plan)->trigger();
    return $success;
}
/**
 * Get a single competency from the user plan.
 *
 * @param mixed $planorid The plan, or its ID.
 * @param int $competencyid The competency id.
 * @return (object) array(
 *             'competency' => \core_competency\competency,
 *             'usercompetency' => \core_competency\user_competency
 *             'usercompetencyplan' => \core_competency\user_competency_plan
 *         )
 *         The values of keys usercompetency and usercompetencyplan cannot be defined at the same time.
 * @throws required_capability_exception When the user's competencies cannot be read.
 * @throws coding_exception When the completed plan is missing its archived user competency.
 */
public static function get_plan_competency($planorid, $competencyid) {
    static::require_enabled();
    $plan = $planorid;
    if (!is_object($planorid)) {
        $plan = new plan($planorid);
    }
    if (!user_competency::can_read_user($plan->get('userid'))) {
        throw new required_capability_exception($plan->get_context(), 'moodle/competency:usercompetencyview',
            'nopermissions', '');
    }
    $competency = $plan->get_competency($competencyid);
    // Get user competencies from user_competency_plan if the plan status is set to complete.
    $iscompletedplan = $plan->get('status') == plan::STATUS_COMPLETE;
    if ($iscompletedplan) {
        $usercompetencies = user_competency_plan::get_multiple($plan->get('userid'), $plan->get('id'), array($competencyid));
        $ucresultkey = 'usercompetencyplan';
    } else {
        $usercompetencies = user_competency::get_multiple($plan->get('userid'), array($competencyid));
        $ucresultkey = 'usercompetency';
    }
    $found = count($usercompetencies);
    if ($found) {
        $uc = array_pop($usercompetencies);
    } else {
        if ($iscompletedplan) {
            // A completed plan must have archived records; a missing one is a data error.
            throw new coding_exception('A user competency plan is missing');
        } else {
            // Lazily create the user competency record when it does not exist yet.
            $uc = user_competency::create_relation($plan->get('userid'), $competency->get('id'));
            $uc->create();
        }
    }
    // Only one of the two user-competency keys is ever populated.
    $plancompetency = (object) array(
        'competency' => $competency,
        'usercompetency' => null,
        'usercompetencyplan' => null
    );
    $plancompetency->$ucresultkey = $uc;
    return $plancompetency;
}
/**
 * List the competencies in a user plan.
 *
 * @param mixed $planorid The plan, or its ID.
 * @return array((object) array(
 *             'competency' => \core_competency\competency,
 *             'usercompetency' => \core_competency\user_competency
 *             'usercompetencyplan' => \core_competency\user_competency_plan
 *         ))
 *         The values of keys usercompetency and usercompetencyplan cannot be defined at the same time.
 * @throws required_capability_exception When the plan cannot be read.
 * @throws coding_exception When a completed plan is missing an archived user competency.
 */
public static function list_plan_competencies($planorid) {
    static::require_enabled();
    $plan = $planorid;
    if (!is_object($planorid)) {
        $plan = new plan($planorid);
    }
    if (!$plan->can_read()) {
        $context = context_user::instance($plan->get('userid'));
        throw new required_capability_exception($context, 'moodle/competency:planview', 'nopermissions', '');
    }
    $result = array();
    $competencies = $plan->get_competencies();
    // Get user competencies from user_competency_plan if the plan status is set to complete.
    $iscompletedplan = $plan->get('status') == plan::STATUS_COMPLETE;
    if ($iscompletedplan) {
        $usercompetencies = user_competency_plan::get_multiple($plan->get('userid'), $plan->get('id'), $competencies);
        $ucresultkey = 'usercompetencyplan';
    } else {
        $usercompetencies = user_competency::get_multiple($plan->get('userid'), $competencies);
        $ucresultkey = 'usercompetency';
    }
    // Build the return values.
    foreach ($competencies as $key => $competency) {
        $found = false;
        // Match each competency with its user competency record, consuming matches as we go.
        foreach ($usercompetencies as $uckey => $uc) {
            if ($uc->get('competencyid') == $competency->get('id')) {
                $found = true;
                unset($usercompetencies[$uckey]);
                break;
            }
        }
        if (!$found) {
            if ($iscompletedplan) {
                throw new coding_exception('A user competency plan is missing');
            } else {
                // NOTE(review): the relation is built but not persisted here, unlike in
                // get_plan_competency() which calls create() — presumably intentional to
                // avoid DB writes while listing; confirm before changing.
                $uc = user_competency::create_relation($plan->get('userid'), $competency->get('id'));
            }
        }
        $plancompetency = (object) array(
            'competency' => $competency,
            'usercompetency' => null,
            'usercompetencyplan' => null
        );
        $plancompetency->$ucresultkey = $uc;
        $result[] = $plancompetency;
    }
    return $result;
}
/**
 * Add a competency to a plan.
 *
 * Adding an already-present competency is a no-op and still returns true.
 *
 * @param int $planid The id of the plan
 * @param int $competencyid The id of the competency
 * @return bool
 * @throws required_capability_exception When the plan cannot be managed.
 * @throws coding_exception When the plan is template-based, completed, or the framework is hidden.
 */
public static function add_competency_to_plan($planid, $competencyid) {
    static::require_enabled();
    $planobj = new plan($planid);
    // Permission check comes first.
    if (!$planobj->can_manage()) {
        throw new required_capability_exception($planobj->get_context(), 'moodle/competency:planmanage', 'nopermissions', '');
    }
    if ($planobj->is_based_on_template()) {
        throw new coding_exception('A competency can not be added to a learning plan based on a template');
    }
    if (!$planobj->can_be_edited()) {
        throw new coding_exception('A competency can not be added to a learning plan completed');
    }
    $competency = new competency($competencyid);
    // Can not add a competency that belong to a hidden framework.
    if ($competency->get_framework()->get('visible') == false) {
        throw new coding_exception('A competency belonging to hidden framework can not be added');
    }
    $existing = plan_competency::get_record(array('planid' => $planid, 'competencyid' => $competencyid));
    if (!$existing) {
        $linkrecord = new stdClass();
        $linkrecord->planid = $planid;
        $linkrecord->competencyid = $competencyid;
        $link = new plan_competency(0, $linkrecord);
        $link->create();
    }
    return true;
}
/**
 * Remove a competency from a plan.
 *
 * @param int $planid The plan id
 * @param int $competencyid The id of the competency
 * @return bool True when a link existed and was deleted, false otherwise.
 * @throws required_capability_exception When the plan cannot be managed.
 * @throws coding_exception When the plan is template-based or completed.
 */
public static function remove_competency_from_plan($planid, $competencyid) {
    static::require_enabled();
    $planobj = new plan($planid);
    // Permission check comes first.
    if (!$planobj->can_manage()) {
        $usercontext = context_user::instance($planobj->get('userid'));
        throw new required_capability_exception($usercontext, 'moodle/competency:planmanage', 'nopermissions', '');
    }
    if ($planobj->is_based_on_template()) {
        throw new coding_exception('A competency can not be removed from a learning plan based on a template');
    }
    if (!$planobj->can_be_edited()) {
        throw new coding_exception('A competency can not be removed from a learning plan completed');
    }
    $link = plan_competency::get_record(array('planid' => $planid, 'competencyid' => $competencyid));
    return $link ? $link->delete() : false;
}
/**
 * Move the plan competency up or down in the display list.
 *
 * Requires moodle/competency:planmanage capability at the system context.
 *
 * Every link whose sortorder falls between the "from" and "to" positions is
 * shifted by one, then the moved link takes the "to" sortorder.
 *
 * @param int $planid The plan id
 * @param int $competencyidfrom The id of the competency we are moving.
 * @param int $competencyidto The id of the competency we are moving to.
 * @return boolean
 * @throws required_capability_exception When the plan cannot be managed.
 * @throws coding_exception When the plan is template-based, completed, or a link is missing.
 */
public static function reorder_plan_competency($planid, $competencyidfrom, $competencyidto) {
    static::require_enabled();
    $plan = new plan($planid);
    // First we do a permissions check.
    if (!$plan->can_manage()) {
        $context = context_user::instance($plan->get('userid'));
        throw new required_capability_exception($context, 'moodle/competency:planmanage', 'nopermissions', '');
    } else if ($plan->is_based_on_template()) {
        throw new coding_exception('A competency can not be reordered in a learning plan based on a template');
    }
    if (!$plan->can_be_edited()) {
        throw new coding_exception('A competency can not be reordered in a learning plan completed');
    }
    $down = true;
    $matches = plan_competency::get_records(array('planid' => $planid, 'competencyid' => $competencyidfrom));
    if (count($matches) == 0) {
        throw new coding_exception('The link does not exist');
    }
    $competencyfrom = array_pop($matches);
    $matches = plan_competency::get_records(array('planid' => $planid, 'competencyid' => $competencyidto));
    if (count($matches) == 0) {
        throw new coding_exception('The link does not exist');
    }
    $competencyto = array_pop($matches);
    $all = plan_competency::get_records(array('planid' => $planid), 'sortorder', 'ASC', 0, 0);
    if ($competencyfrom->get('sortorder') > $competencyto->get('sortorder')) {
        // We are moving up, so put it before the "to" item.
        $down = false;
    }
    // Shift every link between the two positions by one, in the opposite direction of the move.
    foreach ($all as $id => $plancompetency) {
        $sort = $plancompetency->get('sortorder');
        if ($down && $sort > $competencyfrom->get('sortorder') && $sort <= $competencyto->get('sortorder')) {
            $plancompetency->set('sortorder', $plancompetency->get('sortorder') - 1);
            $plancompetency->update();
        } else if (!$down && $sort >= $competencyto->get('sortorder') && $sort < $competencyfrom->get('sortorder')) {
            $plancompetency->set('sortorder', $plancompetency->get('sortorder') + 1);
            $plancompetency->update();
        }
    }
    // Finally drop the moved link into the freed slot.
    $competencyfrom->set('sortorder', $competencyto->get('sortorder'));
    return $competencyfrom->update();
}
/**
 * Cancel a user competency review request.
 *
 * @param int $userid The user ID.
 * @param int $competencyid The competency ID.
 * @return bool True when the user competency was updated.
 * @throws required_capability_exception When the user competency cannot be viewed, or the review cannot be requested.
 * @throws coding_exception When the user competency is not waiting for review.
 */
public static function user_competency_cancel_review_request($userid, $competencyid) {
    static::require_enabled();
    $context = context_user::instance($userid);
    $uc = user_competency::get_record(array('userid' => $userid, 'competencyid' => $competencyid));
    if (!$uc || !$uc->can_read()) {
        throw new required_capability_exception($context, 'moodle/competency:usercompetencyview', 'nopermissions', '');
    } else if ($uc->get('status') != user_competency::STATUS_WAITING_FOR_REVIEW) {
        throw new coding_exception('The competency review request can not be cancelled at this stage.');
    } else if (!$uc->can_request_review()) {
        throw new required_capability_exception($context, 'moodle/competency:usercompetencyrequestreview', 'nopermissions', '');
    }
    $uc->set('status', user_competency::STATUS_IDLE);
    $result = $uc->update();
    // Only notify observers when the update actually went through.
    if ($result) {
        \core\event\competency_user_competency_review_request_cancelled::create_from_user_competency($uc)->trigger();
    }
    return $result;
}
/**
 * Request a user competency review.
 *
 * Creates the user competency record on the fly when it does not exist yet.
 *
 * @param int $userid The user ID.
 * @param int $competencyid The competency ID.
 * @return bool True when the user competency was updated.
 * @throws required_capability_exception When the record cannot be viewed, or the review cannot be requested.
 * @throws coding_exception When the user competency is not idle.
 */
public static function user_competency_request_review($userid, $competencyid) {
    static::require_enabled();
    $usercomp = user_competency::get_record(array('userid' => $userid, 'competencyid' => $competencyid));
    if (!$usercomp) {
        // No record yet: create the relation before requesting the review.
        $usercomp = user_competency::create_relation($userid, $competencyid);
        $usercomp->create();
    }
    if (!$usercomp->can_read()) {
        throw new required_capability_exception($usercomp->get_context(), 'moodle/competency:usercompetencyview',
            'nopermissions', '');
    }
    if ($usercomp->get('status') != user_competency::STATUS_IDLE) {
        throw new coding_exception('The competency can not be sent for review at this stage.');
    }
    if (!$usercomp->can_request_review()) {
        throw new required_capability_exception($usercomp->get_context(), 'moodle/competency:usercompetencyrequestreview',
            'nopermissions', '');
    }
    $usercomp->set('status', user_competency::STATUS_WAITING_FOR_REVIEW);
    $updated = $usercomp->update();
    if ($updated) {
        \core\event\competency_user_competency_review_requested::create_from_user_competency($usercomp)->trigger();
    }
    return $updated;
}
/**
 * Start a user competency review, recording the current user as reviewer.
 *
 * @param int $userid The user ID.
 * @param int $competencyid The competency ID.
 * @return bool True when the user competency was updated.
 * @throws required_capability_exception When the record cannot be viewed or reviewed.
 * @throws coding_exception When the user competency is not waiting for review.
 */
public static function user_competency_start_review($userid, $competencyid) {
    global $USER;
    static::require_enabled();
    $usercontext = context_user::instance($userid);
    $usercomp = user_competency::get_record(array('userid' => $userid, 'competencyid' => $competencyid));
    if (!$usercomp || !$usercomp->can_read()) {
        throw new required_capability_exception($usercontext, 'moodle/competency:usercompetencyview', 'nopermissions', '');
    }
    if ($usercomp->get('status') != user_competency::STATUS_WAITING_FOR_REVIEW) {
        throw new coding_exception('The competency review can not be started at this stage.');
    }
    if (!$usercomp->can_review()) {
        throw new required_capability_exception($usercontext, 'moodle/competency:usercompetencyreview', 'nopermissions', '');
    }
    $usercomp->set('status', user_competency::STATUS_IN_REVIEW);
    $usercomp->set('reviewerid', $USER->id);
    $updated = $usercomp->update();
    if ($updated) {
        \core\event\competency_user_competency_review_started::create_from_user_competency($usercomp)->trigger();
    }
    return $updated;
}
/**
 * Stop a user competency review, sending it back to idle.
 *
 * @param int $userid The user ID.
 * @param int $competencyid The competency ID.
 * @return bool True when the user competency was updated.
 * @throws required_capability_exception When the record cannot be viewed or reviewed.
 * @throws coding_exception When the user competency is not in review.
 */
public static function user_competency_stop_review($userid, $competencyid) {
    static::require_enabled();
    $usercontext = context_user::instance($userid);
    $usercomp = user_competency::get_record(array('userid' => $userid, 'competencyid' => $competencyid));
    if (!$usercomp || !$usercomp->can_read()) {
        throw new required_capability_exception($usercontext, 'moodle/competency:usercompetencyview', 'nopermissions', '');
    }
    if ($usercomp->get('status') != user_competency::STATUS_IN_REVIEW) {
        throw new coding_exception('The competency review can not be stopped at this stage.');
    }
    if (!$usercomp->can_review()) {
        throw new required_capability_exception($usercontext, 'moodle/competency:usercompetencyreview', 'nopermissions', '');
    }
    $usercomp->set('status', user_competency::STATUS_IDLE);
    $updated = $usercomp->update();
    if ($updated) {
        \core\event\competency_user_competency_review_stopped::create_from_user_competency($usercomp)->trigger();
    }
    return $updated;
}
/**
 * Log user competency viewed event.
 *
 * @param user_competency|int $usercompetencyorid The user competency object or user competency id
 * @return bool Always true (throws on permission failure).
 * @throws required_capability_exception When the user competency cannot be viewed.
 */
public static function user_competency_viewed($usercompetencyorid) {
    static::require_enabled();
    $usercomp = is_object($usercompetencyorid) ? $usercompetencyorid : new user_competency($usercompetencyorid);
    if (!$usercomp || !$usercomp->can_read()) {
        throw new required_capability_exception($usercomp->get_context(), 'moodle/competency:usercompetencyview',
            'nopermissions', '');
    }
    \core\event\competency_user_competency_viewed::create_from_user_competency_viewed($usercomp)->trigger();
    return true;
}
/**
 * Log user competency viewed in plan event.
 *
 * Must not be used for completed plans: use user_competency_plan_viewed() instead.
 *
 * @param user_competency|int $usercompetencyorid The user competency object or user competency id
 * @param int $planid The plan ID
 * @return bool Always true (throws on permission failure).
 * @throws required_capability_exception When the user competency cannot be viewed.
 * @throws coding_exception When the plan is complete.
 */
public static function user_competency_viewed_in_plan($usercompetencyorid, $planid) {
    static::require_enabled();
    $usercomp = is_object($usercompetencyorid) ? $usercompetencyorid : new user_competency($usercompetencyorid);
    if (!$usercomp || !$usercomp->can_read()) {
        throw new required_capability_exception($usercomp->get_context(), 'moodle/competency:usercompetencyview',
            'nopermissions', '');
    }
    $planobj = new plan($planid);
    if ($planobj->get('status') == plan::STATUS_COMPLETE) {
        throw new coding_exception('To log the user competency in completed plan use user_competency_plan_viewed method.');
    }
    \core\event\competency_user_competency_viewed_in_plan::create_from_user_competency_viewed_in_plan($usercomp, $planid)->trigger();
    return true;
}
/**
 * Log user competency viewed in course event.
 *
 * The course ID is taken from the user competency course record itself.
 *
 * @param user_competency_course|int $usercoursecompetencyorid The user competency course object or its ID.
 * @return bool Always true (throws on permission failure).
 * @throws required_capability_exception When the user competency cannot be viewed in the course.
 */
public static function user_competency_viewed_in_course($usercoursecompetencyorid) {
    static::require_enabled();
    $ucc = $usercoursecompetencyorid;
    if (!is_object($ucc)) {
        $ucc = new user_competency_course($ucc);
    }
    if (!$ucc || !user_competency::can_read_user_in_course($ucc->get('userid'), $ucc->get('courseid'))) {
        throw new required_capability_exception($ucc->get_context(), 'moodle/competency:usercompetencyview',
            'nopermissions', '');
    }
    // Validate the course, this will throw an exception if not valid.
    self::validate_course($ucc->get('courseid'));
    \core\event\competency_user_competency_viewed_in_course::create_from_user_competency_viewed_in_course($ucc)->trigger();
    return true;
}
/**
* Log user competency plan viewed event.
*
* @param user_competency_plan|int $usercompetencyplanorid The user competency plan object or user competency plan id
* @return bool
*/
public static function user_competency_plan_viewed($usercompetencyplanorid) {
    static::require_enabled();

    // Accept either an instance or an ID.
    $usercompetencyplan = $usercompetencyplanorid;
    if (!is_object($usercompetencyplan)) {
        $usercompetencyplan = new user_competency_plan($usercompetencyplan);
    }

    if (!$usercompetencyplan || !user_competency::can_read_user($usercompetencyplan->get('userid'))) {
        throw new required_capability_exception($usercompetencyplan->get_context(),
            'moodle/competency:usercompetencyview', 'nopermissions', '');
    }

    // Archived user competencies only exist for completed plans; anything else
    // must be logged through user_competency_viewed_in_plan().
    $plan = new plan($usercompetencyplan->get('planid'));
    if ($plan->get('status') != plan::STATUS_COMPLETE) {
        throw new coding_exception('To log the user competency in non-completed plan use '
            . 'user_competency_viewed_in_plan method.');
    }

    \core\event\competency_user_competency_plan_viewed::create_from_user_competency_plan($usercompetencyplan)->trigger();
    return true;
}
/**
* Check if template has related data.
*
* @param int $templateid The id of the template to check.
* @return boolean
*/
public static function template_has_related_data($templateid) {
    static::require_enabled();

    $template = new template($templateid);

    // Reading the template is required before revealing anything about it.
    if (!$template->can_read()) {
        throw new required_capability_exception($template->get_context(),
            'moodle/competency:templateview', 'nopermissions', '');
    }

    // A template with plans attached is considered to have related data.
    return $template->has_plans();
}
/**
* List all the related competencies.
*
* @param int $competencyid The id of the competency to check.
* @return competency[]
*/
public static function list_related_competencies($competencyid) {
    static::require_enabled();

    $competency = new competency($competencyid);
    $context = $competency->get_context();

    // Either of these capabilities grants read access to the related competencies.
    $caps = array('moodle/competency:competencyview', 'moodle/competency:competencymanage');
    if (!has_any_capability($caps, $context)) {
        throw new required_capability_exception($context, 'moodle/competency:competencyview', 'nopermissions', '');
    }

    return $competency->get_related_competencies();
}
/**
* Add a related competency.
*
* @param int $competencyid The id of the competency
* @param int $relatedcompetencyid The id of the related competency.
* @return bool False when create failed, true on success, or if the relation already existed.
*/
public static function add_related_competency($competencyid, $relatedcompetencyid) {
    static::require_enabled();

    $competency1 = new competency($competencyid);
    $competency2 = new competency($relatedcompetencyid);

    // Checking against the first competency is enough: related competencies
    // belong to the same framework and therefore share a context.
    require_capability('moodle/competency:competencymanage', $competency1->get_context());

    // Create the relation unless it already exists; both outcomes report success.
    $relation = related_competency::get_relation($competency1->get('id'), $competency2->get('id'));
    if (!$relation->get('id')) {
        $relation->create();
    }
    return true;
}
/**
* Remove a related competency.
*
* @param int $competencyid The id of the competency.
* @param int $relatedcompetencyid The id of the related competency.
* @return bool True when it was deleted, false when it wasn't or the relation doesn't exist.
*/
public static function remove_related_competency($competencyid, $relatedcompetencyid) {
    static::require_enabled();

    // Checking one competency is enough because both competencies in a relation
    // should belong to the same framework.
    $competency = new competency($competencyid);
    require_capability('moodle/competency:competencymanage', $competency->get_context());

    $relation = related_competency::get_relation($competencyid, $relatedcompetencyid);
    if (!$relation->get('id')) {
        // Nothing to remove, the relation does not exist.
        return false;
    }
    return $relation->delete();
}
/**
* Read a user evidence.
*
* @param int $id
* @return user_evidence
*/
public static function read_user_evidence($id) {
    static::require_enabled();

    $userevidence = new user_evidence($id);

    // Deny access unless the current user may read this piece of evidence.
    if (!$userevidence->can_read()) {
        throw new required_capability_exception($userevidence->get_context(),
            'moodle/competency:userevidenceview', 'nopermissions', '');
    }

    return $userevidence;
}
/**
* Create a new user evidence.
*
* @param object $data The data.
* @param int $draftitemid The draft ID in which files have been saved.
* @return user_evidence
*/
public static function create_user_evidence($data, $draftitemid = null) {
    static::require_enabled();

    $userevidence = new user_evidence(null, $data);
    $context = $userevidence->get_context();

    if (!$userevidence->can_manage()) {
        throw new required_capability_exception($context, 'moodle/competency:userevidencemanage', 'nopermissions', '');
    }

    $userevidence->create();

    // Move any attachments from the user's draft area into the permanent file area.
    if (!empty($draftitemid)) {
        $itemid = $userevidence->get('id');
        file_save_draft_area_files($draftitemid, $context->id, 'core_competency', 'userevidence', $itemid,
            array('subdirs' => true));
    }

    // Trigger an evidence of prior learning created event.
    \core\event\competency_user_evidence_created::create_from_user_evidence($userevidence)->trigger();

    return $userevidence;
}
/**
 * Update a user evidence.
*
* @param object $data The data.
* @param int $draftitemid The draft ID in which files have been saved.
* @return user_evidence
*/
public static function update_user_evidence($data, $draftitemid = null) {
    static::require_enabled();
    $userevidence = new user_evidence($data->id);
    $context = $userevidence->get_context();

    if (!$userevidence->can_manage()) {
        throw new required_capability_exception($context, 'moodle/competency:userevidencemanage', 'nopermissions', '');
    } else if (property_exists($data, 'userid') && $data->userid != $userevidence->get('userid')) {
        // The owner of a piece of evidence can never change. Note that property_exists()
        // replaces array_key_exists() on an object, which is deprecated since PHP 7.4
        // and a TypeError in PHP 8.
        throw new coding_exception('Can not change the userid of a user evidence.');
    }

    $userevidence->from_record($data);
    $userevidence->update();

    // Persist any files attached from the draft area.
    if (!empty($draftitemid)) {
        $fileareaoptions = array('subdirs' => true);
        $itemid = $userevidence->get('id');
        file_save_draft_area_files($draftitemid, $context->id, 'core_competency', 'userevidence', $itemid, $fileareaoptions);
    }

    // Trigger an evidence of prior learning updated event.
    \core\event\competency_user_evidence_updated::create_from_user_evidence($userevidence)->trigger();

    return $userevidence;
}
/**
* Delete a user evidence.
*
* @param int $id The user evidence ID.
* @return bool
*/
public static function delete_user_evidence($id) {
    static::require_enabled();
    $userevidence = new user_evidence($id);
    $context = $userevidence->get_context();
    // Only managers of the evidence may delete it.
    if (!$userevidence->can_manage()) {
        throw new required_capability_exception($context, 'moodle/competency:userevidencemanage', 'nopermissions', '');
    }
    // Delete the user evidence record itself; this clears the persistent's ID.
    $userevidence->delete();
    // Delete associated files.
    $fs = get_file_storage();
    $fs->delete_area_files($context->id, 'core_competency', 'userevidence', $id);
    // Delete relation between evidence and competencies. The ID is put back on the
    // (now deleted) persistent so that the unlinking helper and the event below can
    // still read it; it is reset to 0 at the end.
    $userevidence->set('id', $id); // Restore the ID to fully mock the object.
    $competencies = user_evidence_competency::get_competencies_by_userevidenceid($id);
    foreach ($competencies as $competency) {
        static::delete_user_evidence_competency($userevidence, $competency->get('id'));
    }
    // Trigger an evidence of prior learning deleted event.
    \core\event\competency_user_evidence_deleted::create_from_user_evidence($userevidence)->trigger();
    $userevidence->set('id', 0); // Restore the object.
    return true;
}
/**
* List the user evidence of a user.
*
* @param int $userid The user ID.
* @return user_evidence[]
*/
public static function list_user_evidence($userid) {
    static::require_enabled();

    // Permission check before revealing anything about the user's evidence.
    if (!user_evidence::can_read_user($userid)) {
        throw new required_capability_exception(context_user::instance($userid),
            'moodle/competency:userevidenceview', 'nopermissions', '');
    }

    // Return the user's evidence ordered alphabetically by name.
    return user_evidence::get_records(array('userid' => $userid), 'name');
}
/**
* Link a user evidence with a competency.
*
* @param user_evidence|int $userevidenceorid User evidence or its ID.
* @param int $competencyid Competency ID.
* @return user_evidence_competency
*/
public static function create_user_evidence_competency($userevidenceorid, $competencyid) {
    global $USER;
    static::require_enabled();

    // Accept either an instance or an ID; reading performs its own permission check.
    $userevidence = $userevidenceorid;
    if (!is_object($userevidence)) {
        $userevidence = self::read_user_evidence($userevidence);
    }

    // Perform user evidence capability checks.
    if (!$userevidence->can_manage()) {
        throw new required_capability_exception($userevidence->get_context(),
            'moodle/competency:userevidencemanage', 'nopermissions', '');
    }

    // Perform competency capability checks.
    $competency = self::read_competency($competencyid);

    // Create the relation if it does not exist yet, and leave an evidence trail.
    $relation = user_evidence_competency::get_relation($userevidence->get('id'), $competency->get('id'));
    if (!$relation->get('id')) {
        $relation->create();

        $link = url::user_evidence($userevidence->get('id'));
        self::add_evidence(
            $userevidence->get('userid'),
            $competency,
            $userevidence->get_context(),
            evidence::ACTION_LOG,
            'evidence_evidenceofpriorlearninglinked',
            'core_competency',
            $userevidence->get('name'),
            false,
            $link->out(false),
            null,
            $USER->id
        );
    }

    return $relation;
}
/**
* Delete a relationship between a user evidence and a competency.
*
* @param user_evidence|int $userevidenceorid User evidence or its ID.
* @param int $competencyid Competency ID.
* @return bool
*/
public static function delete_user_evidence_competency($userevidenceorid, $competencyid) {
    global $USER;
    static::require_enabled();

    // Accept either an instance or an ID; reading performs its own permission check.
    $userevidence = $userevidenceorid;
    if (!is_object($userevidence)) {
        $userevidence = self::read_user_evidence($userevidence);
    }

    // Perform user evidence capability checks.
    if (!$userevidence->can_manage()) {
        throw new required_capability_exception($userevidence->get_context(),
            'moodle/competency:userevidencemanage', 'nopermissions', '');
    }

    // A relation that does not exist is treated as already removed.
    $relation = user_evidence_competency::get_relation($userevidence->get('id'), $competencyid);
    if (!$relation->get('id')) {
        return true;
    }

    // Delete the relation, logging an evidence entry on success.
    $success = $relation->delete();
    if ($success) {
        self::add_evidence(
            $userevidence->get('userid'),
            $competencyid,
            $userevidence->get_context(),
            evidence::ACTION_LOG,
            'evidence_evidenceofpriorlearningunlinked',
            'core_competency',
            $userevidence->get('name'),
            false,
            null,
            null,
            $USER->id
        );
    }

    return $success;
}
/**
* Send request review for user evidence competencies.
*
* @param int $id The user evidence ID.
* @return bool
*/
public static function request_review_of_user_evidence_linked_competencies($id) {
    // Consistency fix: every other public API entry point guards with require_enabled().
    static::require_enabled();

    $userevidence = new user_evidence($id);
    $context = $userevidence->get_context();
    $userid = $userevidence->get('userid');

    if (!$userevidence->can_manage()) {
        throw new required_capability_exception($context, 'moodle/competency:userevidencemanage', 'nopermissions', '');
    }

    // Request a review for every linked user competency still in the idle state.
    $usercompetencies = user_evidence_competency::get_user_competencies_by_userevidenceid($id);
    foreach ($usercompetencies as $usercompetency) {
        if ($usercompetency->get('status') == user_competency::STATUS_IDLE) {
            static::user_competency_request_review($userid, $usercompetency->get('competencyid'));
        }
    }

    return true;
}
/**
* Recursively duplicate competencies from a tree, we start duplicating from parents to children to have a correct path.
* This method does not copy the related competencies.
*
* @param int $frameworkid - framework id
* @param competency[] $tree - array of competencies object
* @param int $oldparent - old parent id
* @param int $newparent - new parent id
* @return competency[] $matchids - List of old competencies ids matched with new competencies object.
*/
protected static function duplicate_competency_tree($frameworkid, $tree, $oldparent = 0, $newparent = 0) {
    $matchids = array();
    foreach ($tree as $node) {
        // Only process the nodes attached to the parent currently being duplicated;
        // children are handled by the recursive call below.
        if ($node->competency->get('parentid') == $oldparent) {
            $parentid = $node->competency->get('id');
            // Create the competency as a copy of the original record, re-homed into
            // the target framework. The path is cleared so the persistent rebuilds it,
            // and the id is zeroed so create() inserts a new row. Rules are reset here
            // and migrated separately (see migrate_competency_tree_rules).
            $competency = new competency(0, $node->competency->to_record());
            $competency->set('competencyframeworkid', $frameworkid);
            $competency->set('parentid', $newparent);
            $competency->set('path', '');
            $competency->set('id', 0);
            $competency->reset_rule();
            $competency->create();
            // Trigger the created event competency.
            \core\event\competency_created::create_from_competency($competency)->trigger();
            // Match the old id with the new one.
            $matchids[$parentid] = $competency;
            if (!empty($node->children)) {
                // Duplicate children competency.
                $childrenids = self::duplicate_competency_tree($frameworkid, $node->children, $parentid, $competency->get('id'));
                // Array_merge does not keep keys when merging so we use the + operator.
                $matchids = $matchids + $childrenids;
            }
        }
    }
    return $matchids;
}
/**
* Recursively migrate competency rules.
*
* @param competency[] $tree - array of competencies object
* @param competency[] $matchids - List of old competencies ids matched with new competencies object
*/
protected static function migrate_competency_tree_rules($tree, $matchids) {
    foreach ($tree as $node) {
        $oldcompid = $node->competency->get('id');
        // Only migrate rules for competencies that were actually duplicated.
        if ($node->competency->get('ruletype') && array_key_exists($oldcompid, $matchids)) {
            try {
                // Get the new competency.
                $competency = $matchids[$oldcompid];
                // The rule type is a class name; ask it to translate the old rule
                // config so it references the new competency IDs.
                $class = $node->competency->get('ruletype');
                $newruleconfig = $class::migrate_config($node->competency->get('ruleconfig'), $matchids);
                $competency->set('ruleconfig', $newruleconfig);
                $competency->set('ruletype', $class);
                $competency->set('ruleoutcome', $node->competency->get('ruleoutcome'));
                $competency->update();
            } catch (\Exception $e) {
                // A failed migration is not fatal: warn developers and leave the new
                // competency without a rule rather than with a broken one.
                debugging('Could not migrate competency rule from: ' . $oldcompid . ' to: ' . $competency->get('id') . '.' .
                    ' Exception: ' . $e->getMessage(), DEBUG_DEVELOPER);
                $competency->reset_rule();
            }
        }
        if (!empty($node->children)) {
            self::migrate_competency_tree_rules($node->children, $matchids);
        }
    }
}
/**
* Archive user competencies in a plan.
*
 * @param plan $plan The plan object.
* @return void
*/
protected static function archive_user_competencies_in_plan($plan) {
    // Check if the plan was already completed.
    if ($plan->get('status') == plan::STATUS_COMPLETE) {
        throw new coding_exception('The plan is already completed.');
    }
    $competencies = $plan->get_competencies();
    $usercompetencies = user_competency::get_multiple($plan->get('userid'), $competencies);
    // $i tracks the sort order of the archived records, mirroring the plan's
    // competency ordering.
    $i = 0;
    foreach ($competencies as $competency) {
        $found = false;
        foreach ($usercompetencies as $uckey => $uc) {
            if ($uc->get('competencyid') == $competency->get('id')) {
                $found = true;
                // Snapshot the live user competency into a user_competency_plan record.
                // id/status/reviewerid are dropped: the archive gets a fresh id and does
                // not carry review workflow state.
                $ucprecord = $uc->to_record();
                $ucprecord->planid = $plan->get('id');
                $ucprecord->sortorder = $i;
                unset($ucprecord->id);
                unset($ucprecord->status);
                unset($ucprecord->reviewerid);
                $usercompetencyplan = new user_competency_plan(0, $ucprecord);
                $usercompetencyplan->create();
                // Remove the matched entry so later iterations scan a shrinking list.
                unset($usercompetencies[$uckey]);
                break;
            }
        }
        // If the user competency doesn't exist, we create a new relation in user_competency_plan.
        if (!$found) {
            $usercompetencyplan = user_competency_plan::create_relation($plan->get('userid'), $competency->get('id'),
                $plan->get('id'));
            $usercompetencyplan->set('sortorder', $i);
            $usercompetencyplan->create();
        }
        $i++;
    }
}
/**
* Delete archived user competencies in a plan.
*
 * @param plan $plan The plan object.
* @return void
*/
protected static function remove_archived_user_competencies_in_plan($plan) {
    // Fetch the archived records matching this plan's competencies and drop them all.
    $competencies = $plan->get_competencies();
    $archivedrecords = user_competency_plan::get_multiple($plan->get('userid'), $plan->get('id'), $competencies);
    foreach ($archivedrecords as $archived) {
        $archived->delete();
    }
}
/**
* List all the evidence for a user competency.
*
* @param int $userid The user id - only used if usercompetencyid is 0.
 * @param int $competencyid The competency id - only used if usercompetencyid is 0.
* @param int $planid The plan id - not used yet - but can be used to only list archived evidence if a plan is completed.
* @param string $sort The field to sort the evidence by.
* @param string $order The ordering of the sorting.
* @param int $skip Number of records to skip.
* @param int $limit Number of records to return.
 * @return \core_competency\evidence[]
*/
public static function list_evidence($userid = 0, $competencyid = 0, $planid = 0, $sort = 'timecreated',
                                     $order = 'DESC', $skip = 0, $limit = 0) {
    static::require_enabled();

    if (!user_competency::can_read_user($userid)) {
        throw new required_capability_exception(context_user::instance($userid),
            'moodle/competency:usercompetencyview', 'nopermissions', '');
    }

    // Without a user competency there cannot be any evidence.
    $usercompetency = user_competency::get_record(array('userid' => $userid, 'competencyid' => $competencyid));
    if (!$usercompetency) {
        return array();
    }

    // When the plan is completed, only evidence created before its completion is listed.
    $plancompleted = false;
    if ($planid != 0) {
        $plan = new plan($planid);
        if ($plan->get('status') == plan::STATUS_COMPLETE) {
            $plancompleted = true;
        }
    }

    $select = 'usercompetencyid = :usercompetencyid';
    $params = array('usercompetencyid' => $usercompetency->get('id'));
    if ($plancompleted) {
        $select .= ' AND timecreated <= :timecompleted';
        $params['timecompleted'] = $plan->get('timemodified');
    }

    // Append a stable secondary sort to prevent random ordering.
    $orderby = $sort . ' ' . $order;
    if (!empty($orderby)) {
        $orderby .= ', id DESC';
    } else {
        $orderby = 'id DESC';
    }

    return evidence::get_records_select($select, $params, $orderby, '*', $skip, $limit);
}
/**
* List all the evidence for a user competency in a course.
*
* @param int $userid The user ID.
* @param int $courseid The course ID.
* @param int $competencyid The competency ID.
* @param string $sort The field to sort the evidence by.
* @param string $order The ordering of the sorting.
* @param int $skip Number of records to skip.
* @param int $limit Number of records to return.
* @return \core_competency\evidence[]
*/
public static function list_evidence_in_course($userid = 0, $courseid = 0, $competencyid = 0, $sort = 'timecreated',
                                               $order = 'DESC', $skip = 0, $limit = 0) {
    static::require_enabled();

    if (!user_competency::can_read_user_in_course($userid, $courseid)) {
        throw new required_capability_exception(context_user::instance($userid),
            'moodle/competency:usercompetencyview', 'nopermissions', '');
    }

    // No user competency means no evidence at all.
    $usercompetency = user_competency::get_record(array('userid' => $userid, 'competencyid' => $competencyid));
    if (!$usercompetency) {
        return array();
    }

    // Only evidence recorded within the course context is returned.
    $context = context_course::instance($courseid);
    return evidence::get_records_for_usercompetency($usercompetency->get('id'), $context, $sort, $order, $skip, $limit);
}
/**
* Create an evidence from a list of parameters.
*
* Requires no capability because evidence can be added in many situations under any user.
*
* @param int $userid The user id for which evidence is added.
* @param competency|int $competencyorid The competency, or its id for which evidence is added.
* @param context|int $contextorid The context in which the evidence took place.
* @param int $action The type of action to take on the competency. \core_competency\evidence::ACTION_*.
* @param string $descidentifier The strings identifier.
* @param string $desccomponent The strings component.
* @param mixed $desca Any arguments the string requires.
* @param bool $recommend When true, the user competency will be sent for review.
* @param string $url The url the evidence may link to.
* @param int $grade The grade, or scale ID item.
* @param int $actionuserid The ID of the user who took the action of adding the evidence. Null when system.
* This should be used when the action was taken by a real person, this will allow
* to keep track of all the evidence given by a certain person.
* @param string $note A note to attach to the evidence.
* @return evidence
* @throws coding_exception
* @throws invalid_persistent_exception
* @throws moodle_exception
*/
public static function add_evidence($userid, $competencyorid, $contextorid, $action, $descidentifier, $desccomponent,
                                    $desca = null, $recommend = false, $url = null, $grade = null, $actionuserid = null,
                                    $note = null) {
    global $DB;
    static::require_enabled();

    // Normalise the competency argument: accept either an instance or an ID.
    $competencyid = $competencyorid;
    $competency = null;
    if (is_object($competencyid)) {
        $competency = $competencyid;
        $competencyid = $competency->get('id');
    }

    // Normalise the context argument: accept either an instance or an ID.
    $contextid = $contextorid;
    $context = $contextorid;
    if (is_object($contextorid)) {
        $contextid = $contextorid->id;
    } else {
        $context = context::instance_by_id($contextorid);
    }

    // Grading state for the user competency; decided by the action handling below.
    $setucgrade = false;
    $ucgrade = null;
    $ucproficiency = null;
    $usercompetencycourse = null;

    // Fetch or create the user competency.
    $usercompetency = user_competency::get_record(array('userid' => $userid, 'competencyid' => $competencyid));
    if (!$usercompetency) {
        $usercompetency = user_competency::create_relation($userid, $competencyid);
        $usercompetency->create();
    }

    // What should we be doing?
    switch ($action) {

        // Completing a competency.
        case evidence::ACTION_COMPLETE:
            // The logic here goes like this:
            //
            // if rating outside a course
            // - set the default grade and proficiency ONLY if there is no current grade
            // else we are in a course
            // - set the default grade and proficiency in the course ONLY if there is no current grade in the course
            // - then check the course settings to see if we should push the rating outside the course
            // - if we should push it
            // --- push it only if the user_competency (outside the course) has no grade
            // Done.

            if ($grade !== null) {
                throw new coding_exception("The grade MUST NOT be set with a 'completing' evidence.");
            }

            // Fetch the default grade to attach to the evidence.
            if (empty($competency)) {
                $competency = new competency($competencyid);
            }
            list($grade, $proficiency) = $competency->get_default_grade();

            // Add user_competency_course record when in a course or module.
            if (in_array($context->contextlevel, array(CONTEXT_COURSE, CONTEXT_MODULE))) {
                $coursecontext = $context->get_course_context();
                $courseid = $coursecontext->instanceid;
                $filterparams = array(
                    'userid' => $userid,
                    'competencyid' => $competencyid,
                    'courseid' => $courseid
                );
                // Fetch or create user competency course.
                $usercompetencycourse = user_competency_course::get_record($filterparams);
                if (!$usercompetencycourse) {
                    $usercompetencycourse = user_competency_course::create_relation($userid, $competencyid, $courseid);
                    $usercompetencycourse->create();
                }
                // Only update the grade and proficiency if there is not already a grade.
                if ($usercompetencycourse->get('grade') === null) {
                    // Set grade.
                    $usercompetencycourse->set('grade', $grade);
                    // Set proficiency.
                    $usercompetencycourse->set('proficiency', $proficiency);
                }
                // Check the course settings to see if we should push to user plans.
                $coursesettings = course_competency_settings::get_by_courseid($courseid);
                $setucgrade = $coursesettings->get('pushratingstouserplans');

                if ($setucgrade) {
                    // Only push to user plans if there is not already a grade.
                    if ($usercompetency->get('grade') !== null) {
                        $setucgrade = false;
                    } else {
                        $ucgrade = $grade;
                        $ucproficiency = $proficiency;
                    }
                }
            } else {

                // When completing the competency we fetch the default grade from the competency. But we only mark
                // the user competency when a grade has not been set yet. Complete is an action to use with automated systems.
                if ($usercompetency->get('grade') === null) {
                    $setucgrade = true;
                    $ucgrade = $grade;
                    $ucproficiency = $proficiency;
                }
            }

            break;

        // We override the grade, even overriding back to not set.
        case evidence::ACTION_OVERRIDE:
            $setucgrade = true;
            $ucgrade = $grade;
            if (empty($competency)) {
                $competency = new competency($competencyid);
            }
            if ($ucgrade !== null) {
                $ucproficiency = $competency->get_proficiency_of_grade($ucgrade);
            }

            // Add user_competency_course record when in a course or module.
            if (in_array($context->contextlevel, array(CONTEXT_COURSE, CONTEXT_MODULE))) {
                $coursecontext = $context->get_course_context();
                $courseid = $coursecontext->instanceid;
                $filterparams = array(
                    'userid' => $userid,
                    'competencyid' => $competencyid,
                    'courseid' => $courseid
                );
                // Fetch or create user competency course.
                $usercompetencycourse = user_competency_course::get_record($filterparams);
                if (!$usercompetencycourse) {
                    $usercompetencycourse = user_competency_course::create_relation($userid, $competencyid, $courseid);
                    $usercompetencycourse->create();
                }
                // Get proficiency.
                $proficiency = $ucproficiency;
                if ($proficiency === null) {
                    if (empty($competency)) {
                        $competency = new competency($competencyid);
                    }
                    $proficiency = $competency->get_proficiency_of_grade($grade);
                }
                // Set grade.
                $usercompetencycourse->set('grade', $grade);
                // Set proficiency.
                $usercompetencycourse->set('proficiency', $proficiency);

                // Unless the course pushes ratings to user plans, the override stays in the course.
                $coursesettings = course_competency_settings::get_by_courseid($courseid);
                if (!$coursesettings->get('pushratingstouserplans')) {
                    $setucgrade = false;
                }
            }

            break;

        // Simply logging an evidence.
        case evidence::ACTION_LOG:
            if ($grade !== null) {
                throw new coding_exception("The grade MUST NOT be set when 'logging' an evidence.");
            }
            break;

        // Whoops, this is not expected.
        default:
            throw new coding_exception('Unexpected action parameter when registering an evidence.');
            break;
    }

    // Should we recommend?
    if ($recommend && $usercompetency->get('status') == user_competency::STATUS_IDLE) {
        $usercompetency->set('status', user_competency::STATUS_WAITING_FOR_REVIEW);
    }

    // Setting the grade and proficiency for the user competency.
    // $wascompleted is true when this evidence is what flipped proficiency on.
    $wascompleted = false;
    if ($setucgrade == true) {
        if (!$usercompetency->get('proficiency') && $ucproficiency) {
            $wascompleted = true;
        }
        $usercompetency->set('grade', $ucgrade);
        $usercompetency->set('proficiency', $ucproficiency);
    }

    // Prepare the evidence.
    $record = new stdClass();
    $record->usercompetencyid = $usercompetency->get('id');
    $record->contextid = $contextid;
    $record->action = $action;
    $record->descidentifier = $descidentifier;
    $record->desccomponent = $desccomponent;
    $record->grade = $grade;
    $record->actionuserid = $actionuserid;
    $record->note = $note;
    $evidence = new evidence(0, $record);
    $evidence->set('desca', $desca);
    $evidence->set('url', $url);

    // Validate both models, we should not operate on one if the other will not save.
    if (!$usercompetency->is_valid()) {
        throw new invalid_persistent_exception($usercompetency->get_errors());
    } else if (!$evidence->is_valid()) {
        throw new invalid_persistent_exception($evidence->get_errors());
    }

    // Save the user_competency_course record.
    if ($usercompetencycourse !== null) {
        // Validate and update.
        if (!$usercompetencycourse->is_valid()) {
            throw new invalid_persistent_exception($usercompetencycourse->get_errors());
        }
        $usercompetencycourse->update();
    }

    // Finally save. Pheww!
    $usercompetency->update();
    $evidence->create();

    // Trigger the evidence_created event.
    \core\event\competency_evidence_created::create_from_evidence($evidence, $usercompetency, $recommend)->trigger();

    // The competency was marked as completed, apply the rules.
    if ($wascompleted) {
        self::apply_competency_rules_from_usercompetency($usercompetency, $competency);
    }

    return $evidence;
}
/**
* Read an evidence.
* @param int $evidenceid The evidence ID.
* @return evidence
*/
public static function read_evidence($evidenceid) {
    static::require_enabled();

    $evidence = new evidence($evidenceid);

    // Permissions are carried by the user competency the evidence belongs to.
    $usercompetency = new user_competency($evidence->get('usercompetencyid'));
    if (!$usercompetency->can_read()) {
        throw new required_capability_exception($usercompetency->get_context(),
            'moodle/competency:usercompetencyview', 'nopermissions', '');
    }

    return $evidence;
}
/**
* Delete an evidence.
*
* @param evidence|int $evidenceorid The evidence, or its ID.
* @return bool
*/
public static function delete_evidence($evidenceorid) {
    // Consistency fix: every other public API entry point guards with require_enabled().
    static::require_enabled();

    // Accept either an evidence instance or its ID.
    $evidence = $evidenceorid;
    if (!is_object($evidence)) {
        $evidence = new evidence($evidenceorid);
    }

    // The permission is checked against the owner of the related user competency.
    $uc = new user_competency($evidence->get('usercompetencyid'));
    if (!evidence::can_delete_user($uc->get('userid'))) {
        throw new required_capability_exception($uc->get_context(), 'moodle/competency:evidencedelete', 'nopermissions', '');
    }

    return $evidence->delete();
}
/**
* Apply the competency rules from a user competency.
*
* The user competency passed should be one that was recently marked as complete.
* A user competency is considered 'complete' when it's proficiency value is true.
*
* This method will check if the parent of this usercompetency's competency has any
* rules and if so will see if they match. When matched it will take the required
* step to add evidence and trigger completion, etc...
*
* @param user_competency $usercompetency The user competency recently completed.
* @param competency|null $competency The competency of the user competency, useful to avoid unnecessary read.
* @return void
*/
protected static function apply_competency_rules_from_usercompetency(user_competency $usercompetency,
                                                                     competency $competency = null) {

    // Perform some basic checks.
    if (!$usercompetency->get('proficiency')) {
        throw new coding_exception('The user competency passed is not completed.');
    }
    if ($competency === null) {
        $competency = $usercompetency->get_competency();
    }
    if ($competency->get('id') != $usercompetency->get('competencyid')) {
        throw new coding_exception('Mismatch between user competency and competency.');
    }

    // Fetch the parent; a top-level competency has no rules to cascade into.
    $parent = $competency->get_parent();
    if ($parent === null) {
        return;
    }

    // The parent should have a rule, and a meaningful outcome.
    $ruleoutcome = $parent->get('ruleoutcome');
    if ($ruleoutcome == competency::OUTCOME_NONE) {
        return;
    }
    $rule = $parent->get_rule_object();
    if ($rule === null) {
        return;
    }

    // Fetch or create the user competency for the parent.
    $userid = $usercompetency->get('userid');
    $parentuc = user_competency::get_record(array('userid' => $userid, 'competencyid' => $parent->get('id')));
    if (!$parentuc) {
        $parentuc = user_competency::create_relation($userid, $parent->get('id'));
        $parentuc->create();
    }

    // Does the rule match?
    if (!$rule->matches($parentuc)) {
        return;
    }

    // Figuring out what to do.
    $recommend = false;
    if ($ruleoutcome == competency::OUTCOME_EVIDENCE) {
        $action = evidence::ACTION_LOG;
    } else if ($ruleoutcome == competency::OUTCOME_RECOMMEND) {
        $action = evidence::ACTION_LOG;
        $recommend = true;
    } else if ($ruleoutcome == competency::OUTCOME_COMPLETE) {
        $action = evidence::ACTION_COMPLETE;
    } else {
        // Bug fix: PHP concatenates strings with '.'; the original '+' raises a
        // TypeError in PHP 8 (and produced a numeric mess in PHP 7).
        throw new moodle_exception('Unexpected rule outcome: ' . $ruleoutcome);
    }

    // Finally add an evidence.
    static::add_evidence(
        $userid,
        $parent,
        $parent->get_context()->id,
        $action,
        'evidence_competencyrule',
        'core_competency',
        null,
        $recommend
    );
}
/**
* Observe when a course module is marked as completed.
*
* Note that the user being logged in while this happens may be anyone.
* Do not rely on capability checks here!
*
* @param \core\event\course_module_completion_updated $event
* @return void
*/
public static function observe_course_module_completion_updated(\core\event\course_module_completion_updated $event) {
    if (!static::is_enabled()) {
        return;
    }

    $eventdata = $event->get_record_snapshot('course_modules_completion', $event->objectid);

    // Only act on a successful completion (complete, or complete with a passing grade).
    if ($eventdata->completionstate == COMPLETION_COMPLETE
            || $eventdata->completionstate == COMPLETION_COMPLETE_PASS) {
        $coursemodulecompetencies = course_module_competency::list_course_module_competencies($eventdata->coursemoduleid);

        $cm = get_coursemodule_from_id(null, $eventdata->coursemoduleid);
        $fastmodinfo = get_fast_modinfo($cm->course)->cms[$cm->id];
        $cmname = $fastmodinfo->name;
        $url = $fastmodinfo->url;

        foreach ($coursemodulecompetencies as $coursemodulecompetency) {
            $outcome = $coursemodulecompetency->get('ruleoutcome');
            $action = null;
            $recommend = false;
            $strdesc = 'evidence_coursemodulecompleted';

            if ($outcome == course_module_competency::OUTCOME_EVIDENCE) {
                $action = evidence::ACTION_LOG;
            } else if ($outcome == course_module_competency::OUTCOME_RECOMMEND) {
                $action = evidence::ACTION_LOG;
                $recommend = true;
            } else if ($outcome == course_module_competency::OUTCOME_COMPLETE) {
                $action = evidence::ACTION_COMPLETE;
            } else {
                // Bug fix: PHP concatenates strings with '.'; the original '+' raises a
                // TypeError in PHP 8 (and produced a numeric mess in PHP 7).
                throw new moodle_exception('Unexpected rule outcome: ' . $outcome);
            }

            static::add_evidence(
                $event->relateduserid,
                $coursemodulecompetency->get('competencyid'),
                $event->contextid,
                $action,
                $strdesc,
                'core_competency',
                $cmname,
                $recommend,
                $url
            );
        }
    }
}
/**
 * Observe when a course is marked as completed.
 *
 * Creates a piece of competency evidence for every course competency whose
 * rule outcome is not OUTCOME_NONE.
 *
 * Note that the user being logged in while this happens may be anyone.
 * Do not rely on capability checks here!
 *
 * @param \core\event\course_completed $event
 * @return void
 */
public static function observe_course_completed(\core\event\course_completed $event) {
    if (!static::is_enabled()) {
        return;
    }
    $sql = 'courseid = :courseid AND ruleoutcome != :nooutcome';
    $params = array(
        'courseid' => $event->courseid,
        'nooutcome' => course_competency::OUTCOME_NONE
    );
    $coursecompetencies = course_competency::get_records_select($sql, $params);
    $course = get_course($event->courseid);
    $courseshortname = format_string($course->shortname, null, array('context' => $event->contextid));
    foreach ($coursecompetencies as $coursecompetency) {
        $outcome = $coursecompetency->get('ruleoutcome');
        $action = null;
        $recommend = false;
        $strdesc = 'evidence_coursecompleted';
        if ($outcome == course_competency::OUTCOME_EVIDENCE) {
            $action = evidence::ACTION_LOG;
        } else if ($outcome == course_competency::OUTCOME_RECOMMEND) {
            $action = evidence::ACTION_LOG;
            $recommend = true;
        } else if ($outcome == course_competency::OUTCOME_COMPLETE) {
            $action = evidence::ACTION_COMPLETE;
        } else {
            // Fixed: PHP string concatenation uses '.', not '+' ('+' would
            // evaluate numerically and drop the message text).
            throw new moodle_exception('Unexpected rule outcome: ' . $outcome);
        }
        static::add_evidence(
            $event->relateduserid,
            $coursecompetency->get('competencyid'),
            $event->contextid,
            $action,
            $strdesc,
            'core_competency',
            $courseshortname,
            $recommend,
            $event->get_url()
        );
    }
}
/**
 * Action to perform when a course module is deleted.
 *
 * Do not call this directly, this is reserved for core use.
 *
 * @param stdClass $cm The CM object.
 * @return void
 */
public static function hook_course_module_deleted(stdClass $cm) {
    global $DB;
    // Remove every competency link attached to the deleted module.
    $DB->delete_records(course_module_competency::TABLE, ['cmid' => $cm->id]);
}
/**
 * Action to perform when a course is deleted.
 *
 * Do not call this directly, this is reserved for core use.
 *
 * @param stdClass $course The course object.
 * @return void
 */
public static function hook_course_deleted(stdClass $course) {
    global $DB;
    // All three tables key their records on the same course id.
    $conditions = ['courseid' => $course->id];
    $DB->delete_records(course_competency::TABLE, $conditions);
    $DB->delete_records(course_competency_settings::TABLE, $conditions);
    $DB->delete_records(user_competency_course::TABLE, $conditions);
}
/**
 * Action to perform when a course is being reset.
 *
 * Do not call this directly, this is reserved for core use.
 *
 * @param int $courseid The course ID.
 * @return void
 */
public static function hook_course_reset_competency_ratings($courseid) {
    global $DB;
    // A course reset wipes all per-course competency ratings.
    $DB->delete_records(user_competency_course::TABLE, ['courseid' => $courseid]);
}
/**
 * Action to perform when a cohort is deleted.
 *
 * Do not call this directly, this is reserved for core use.
 *
 * @param \stdClass $cohort The cohort object.
 * @return void
 */
public static function hook_cohort_deleted(\stdClass $cohort) {
    global $DB;
    // Drop the template-cohort links referencing the removed cohort.
    $DB->delete_records(template_cohort::TABLE, ['cohortid' => $cohort->id]);
}
/**
 * Manually grade a user competency.
 *
 * Records the rating as overriding evidence and, on success, triggers a
 * user_competency_rated event based on the refreshed record.
 *
 * @param int $userid The user being rated.
 * @param int $competencyid The competency being rated.
 * @param int $grade The value of the grade in the competency's scale.
 * @param string $note A note to attach to the evidence
 * @return array of \core_competency\user_competency
 * @throws required_capability_exception If the grader lacks grade or view permission.
 */
public static function grade_competency($userid, $competencyid, $grade, $note = null) {
    global $USER;
    static::require_enabled();
    $uc = static::get_user_competency($userid, $competencyid);
    $context = $uc->get_context();
    // The current user must be allowed to grade this particular user.
    if (!user_competency::can_grade_user($uc->get('userid'))) {
        throw new required_capability_exception($context, 'moodle/competency:competencygrade', 'nopermissions', '');
    }
    // Throws exception if competency not in plan.
    $competency = $uc->get_competency();
    $competencycontext = $competency->get_context();
    // The competency itself must also be visible to the grader.
    if (!has_any_capability(array('moodle/competency:competencyview', 'moodle/competency:competencymanage'),
            $competencycontext)) {
        throw new required_capability_exception($competencycontext, 'moodle/competency:competencyview', 'nopermissions', '');
    }
    // A manual grade is stored as a piece of overriding evidence.
    $action = evidence::ACTION_OVERRIDE;
    $desckey = 'evidence_manualoverride';
    $result = self::add_evidence($uc->get('userid'),
            $competency,
            $context->id,
            $action,
            $desckey,
            'core_competency',
            null,
            false,
            null,
            $grade,
            $USER->id,
            $note);
    if ($result) {
        // Re-read so the event is built from the updated rating.
        $uc->read();
        $event = \core\event\competency_user_competency_rated::create_from_user_competency($uc);
        $event->trigger();
    }
    return $result;
}
/**
 * Manually grade a user competency from the plans page.
 *
 * Same as {@see self::grade_competency()} but resolves the competency via the
 * plan and records the plan name alongside the evidence.
 *
 * @param mixed $planorid The plan instance or its id.
 * @param int $competencyid The competency being rated.
 * @param int $grade The value of the grade in the competency's scale.
 * @param string $note A note to attach to the evidence
 * @return array of \core_competency\user_competency
 * @throws required_capability_exception If the grader lacks grade or view permission.
 */
public static function grade_competency_in_plan($planorid, $competencyid, $grade, $note = null) {
    global $USER;
    static::require_enabled();
    $plan = $planorid;
    if (!is_object($planorid)) {
        $plan = new plan($planorid);
    }
    $context = $plan->get_context();
    // The current user must be allowed to grade the plan's owner.
    if (!user_competency::can_grade_user($plan->get('userid'))) {
        throw new required_capability_exception($context, 'moodle/competency:competencygrade', 'nopermissions', '');
    }
    // Throws exception if competency not in plan.
    $competency = $plan->get_competency($competencyid);
    $competencycontext = $competency->get_context();
    // The competency itself must also be visible to the grader.
    if (!has_any_capability(array('moodle/competency:competencyview', 'moodle/competency:competencymanage'),
            $competencycontext)) {
        throw new required_capability_exception($competencycontext, 'moodle/competency:competencyview', 'nopermissions', '');
    }
    // A manual grade is stored as a piece of overriding evidence.
    $action = evidence::ACTION_OVERRIDE;
    $desckey = 'evidence_manualoverrideinplan';
    $result = self::add_evidence($plan->get('userid'),
            $competency,
            $context->id,
            $action,
            $desckey,
            'core_competency',
            $plan->get('name'),
            false,
            null,
            $grade,
            $USER->id,
            $note);
    if ($result) {
        $uc = static::get_user_competency($plan->get('userid'), $competency->get('id'));
        $event = \core\event\competency_user_competency_rated_in_plan::create_from_user_competency($uc, $plan->get('id'));
        $event->trigger();
    }
    return $result;
}
/**
 * Manually grade a user course competency from the course page.
 *
 * This may push the rating to the user competency
 * if the course is configured this way.
 *
 * @param mixed $courseorid The course instance or its id.
 * @param int $userid The user being rated.
 * @param int $competencyid The competency being rated.
 * @param int $grade The value of the grade in the competency's scale.
 * @param string $note A note to attach to the evidence
 * @return array of \core_competency\user_competency
 * @throws required_capability_exception If any of the permission checks fail.
 * @throws coding_exception If the user is not gradable in the course.
 */
public static function grade_competency_in_course($courseorid, $userid, $competencyid, $grade, $note = null) {
    global $USER, $DB;
    static::require_enabled();
    $course = $courseorid;
    if (!is_object($courseorid)) {
        $course = $DB->get_record('course', array('id' => $courseorid));
    }
    $context = context_course::instance($course->id);
    // Check that we can view the user competency details in the course.
    if (!user_competency::can_read_user_in_course($userid, $course->id)) {
        throw new required_capability_exception($context, 'moodle/competency:usercompetencyview', 'nopermissions', '');
    }
    // Validate the permission to grade.
    if (!user_competency::can_grade_user_in_course($userid, $course->id)) {
        throw new required_capability_exception($context, 'moodle/competency:competencygrade', 'nopermissions', '');
    }
    // Check that competency is in course and visible to the current user.
    $competency = course_competency::get_competency($course->id, $competencyid);
    $competencycontext = $competency->get_context();
    if (!has_any_capability(array('moodle/competency:competencyview', 'moodle/competency:competencymanage'),
            $competencycontext)) {
        throw new required_capability_exception($competencycontext, 'moodle/competency:competencyview', 'nopermissions', '');
    }
    // Check that the user is enrolled in the course, and is "gradable".
    if (!is_enrolled($context, $userid, 'moodle/competency:coursecompetencygradable')) {
        throw new coding_exception('The competency may not be rated at this time.');
    }
    // A manual grade is stored as a piece of overriding evidence, labelled
    // with the course context name.
    $action = evidence::ACTION_OVERRIDE;
    $desckey = 'evidence_manualoverrideincourse';
    $result = self::add_evidence($userid,
            $competency,
            $context->id,
            $action,
            $desckey,
            'core_competency',
            $context->get_context_name(),
            false,
            null,
            $grade,
            $USER->id,
            $note);
    if ($result) {
        // Fetch the per-course record the rating was written to for the event.
        $all = user_competency_course::get_multiple($userid, $course->id, array($competency->get('id')));
        $uc = reset($all);
        $event = \core\event\competency_user_competency_rated_in_course::create_from_user_competency_course($uc);
        $event->trigger();
    }
    return $result;
}
/**
 * Count the plans in the template, filtered by status.
 *
 * Requires moodle/competency:templateview capability at the system context.
 *
 * @param mixed $templateorid The id or the template.
 * @param int $status One of the plan status constants (or 0 for all plans).
 * @return int
 */
public static function count_plans_for_template($templateorid, $status = 0) {
    static::require_enabled();
    $template = is_object($templateorid) ? $templateorid : new template($templateorid);
    // Permission check before exposing any counts.
    if (!$template->can_read()) {
        throw new required_capability_exception($template->get_context(), 'moodle/competency:templateview',
            'nopermissions', '');
    }
    return plan::count_records_for_template($template->get('id'), $status);
}
/**
 * Count the user-completency-plans in the template, optionally filtered by proficiency.
 *
 * Requires moodle/competency:templateview capability at the system context.
 *
 * @param mixed $templateorid The id or the template.
 * @param mixed $proficiency If true, filter by proficiency, if false filter by not proficient, if null - no filter.
 * @return int
 */
public static function count_user_competency_plans_for_template($templateorid, $proficiency = null) {
    static::require_enabled();
    $template = is_object($templateorid) ? $templateorid : new template($templateorid);
    // Permission check before exposing any counts.
    if (!$template->can_read()) {
        throw new required_capability_exception($template->get_context(), 'moodle/competency:templateview',
            'nopermissions', '');
    }
    return user_competency_plan::count_records_for_template($template->get('id'), $proficiency);
}
/**
 * List the plans in the template, filtered by status.
 *
 * Requires moodle/competency:templateview capability at the system context.
 *
 * @param mixed $templateorid The id or the template.
 * @param int $status One of the plan status constants (or 0 for all plans).
 * @param int $skip The number of records to skip
 * @param int $limit The max number of records to return
 * @return plan[]
 */
public static function list_plans_for_template($templateorid, $status = 0, $skip = 0, $limit = 100) {
    // Consistency fix: every other public entry point of this API guards with
    // require_enabled() first; this method was the only one missing it.
    static::require_enabled();
    $template = $templateorid;
    if (!is_object($template)) {
        $template = new template($template);
    }
    // First we do a permissions check.
    if (!$template->can_read()) {
        throw new required_capability_exception($template->get_context(), 'moodle/competency:templateview',
            'nopermissions', '');
    }
    return plan::get_records_for_template($template->get('id'), $status, $skip, $limit);
}
/**
 * Get the most often not completed competency for this course.
 *
 * Requires moodle/competency:coursecompetencyview capability at the course context.
 *
 * @param int $courseid The course id
 * @param int $skip The number of records to skip
 * @param int $limit The max number of records to return
 * @return competency[]
 */
public static function get_least_proficient_competencies_for_course($courseid, $skip = 0, $limit = 100) {
    static::require_enabled();
    $coursecontext = context_course::instance($courseid);
    // Either viewing or managing competencies is enough to see this report.
    $requiredcaps = ['moodle/competency:competencyview', 'moodle/competency:competencymanage'];
    if (!has_any_capability($requiredcaps, $coursecontext)) {
        throw new required_capability_exception($coursecontext, 'moodle/competency:competencyview', 'nopermissions', '');
    }
    return user_competency_course::get_least_proficient_competencies_for_course($courseid, $skip, $limit);
}
/**
 * Get the most often not completed competency for this template.
 *
 * Requires moodle/competency:templateview capability at the system context.
 *
 * @param mixed $templateorid The id or the template.
 * @param int $skip The number of records to skip
 * @param int $limit The max number of records to return
 * @return competency[]
 */
public static function get_least_proficient_competencies_for_template($templateorid, $skip = 0, $limit = 100) {
    static::require_enabled();
    $template = is_object($templateorid) ? $templateorid : new template($templateorid);
    // Permission check before exposing any data.
    if (!$template->can_read()) {
        throw new required_capability_exception($template->get_context(), 'moodle/competency:templateview',
            'nopermissions', '');
    }
    return user_competency_plan::get_least_proficient_competencies_for_template($template->get('id'), $skip, $limit);
}
/**
 * Template event viewed.
 *
 * Requires moodle/competency:templateview capability at the system context.
 *
 * @param mixed $templateorid The id or the template.
 * @return boolean
 */
public static function template_viewed($templateorid) {
    static::require_enabled();
    $template = is_object($templateorid) ? $templateorid : new template($templateorid);
    // Permission check before logging the view.
    if (!$template->can_read()) {
        throw new required_capability_exception($template->get_context(), 'moodle/competency:templateview',
            'nopermissions', '');
    }
    // Trigger a template viewed event.
    \core\event\competency_template_viewed::create_from_template($template)->trigger();
    return true;
}
/**
 * Get the competency settings for a course.
 *
 * Requires moodle/competency:coursecompetencyview capability at the course context.
 *
 * @param int $courseid The course id
 * @return course_competency_settings
 */
public static function read_course_competency_settings($courseid) {
    static::require_enabled();
    // Permission check before returning the settings.
    if (!course_competency_settings::can_read($courseid)) {
        $coursecontext = context_course::instance($courseid);
        throw new required_capability_exception($coursecontext, 'moodle/competency:coursecompetencyview',
            'nopermissions', '');
    }
    return course_competency_settings::get_by_courseid($courseid);
}
/**
 * Update the competency settings for a course.
 *
 * Creates the settings record if the course does not have one yet.
 *
 * Requires moodle/competency:coursecompetencyconfigure capability at the course context.
 *
 * @param int $courseid The course id
 * @param stdClass $settings List of settings. The only valid setting ATM is pushratginstouserplans (boolean).
 * @return bool True on success.
 * @throws required_capability_exception If the user may not configure the course.
 */
public static function update_course_competency_settings($courseid, $settings) {
    static::require_enabled();
    $settings = (object) $settings;
    // Get all the valid settings; anything not listed here is ignored.
    $pushratingstouserplans = isset($settings->pushratingstouserplans) ? $settings->pushratingstouserplans : false;
    // First we do a permissions check.
    if (!course_competency_settings::can_manage_course($courseid)) {
        $context = context_course::instance($courseid);
        throw new required_capability_exception($context, 'moodle/competency:coursecompetencyconfigure', 'nopermissions', '');
    }
    $exists = course_competency_settings::get_record(array('courseid' => $courseid));
    // Now update or insert.
    if ($exists) {
        // Note: $settings is re-purposed here to hold the persistent record.
        $settings = $exists;
        $settings->set('pushratingstouserplans', $pushratingstouserplans);
        return $settings->update();
    } else {
        $data = (object) array('courseid' => $courseid, 'pushratingstouserplans' => $pushratingstouserplans);
        $settings = new course_competency_settings(0, $data);
        $result = $settings->create();
        return !empty($result);
    }
}
/**
 * Function used to return a list of users where the given user has a particular capability.
 *
 * This is used e.g. to find all the users where someone is able to manage their learning plans,
 * it also would be useful for mentees etc.
 *
 * The returned fragment is intended to filter a user-id column, e.g.
 * "WHERE u.id $sql" with $params.
 *
 * @param string $capability - The capability string we are filtering for. If '' is passed,
 *                             an always matching filter is returned.
 * @param int $userid - The user id we are using for the access checks. Defaults to current user.
 * @param int $type - The type of named params to return (passed to $DB->get_in_or_equal).
 * @param string $prefix - The type prefix for the db table (passed to $DB->get_in_or_equal).
 * @return list($sql, $params) Same as $DB->get_in_or_equal().
 * @todo MDL-52243 Move this function to lib/accesslib.php
 */
public static function filter_users_with_capability_on_user_context_sql($capability, $userid = 0, $type = SQL_PARAMS_QM,
                                                                        $prefix='param') {
    global $USER, $DB;
    // "Match everything" and "match nothing" SQL fragments used as shortcuts.
    $allresultsfilter = array('> 0', array());
    $noresultsfilter = array('= -1', array());
    if (empty($capability)) {
        return $allresultsfilter;
    }
    if (!$capinfo = get_capability_info($capability)) {
        throw new coding_exception('Capability does not exist: ' . $capability);
    }
    if (empty($userid)) {
        $userid = $USER->id;
    }
    // Make sure the guest account and not-logged-in users never get any risky caps no matter what the actual settings are.
    if (($capinfo->captype === 'write') or ($capinfo->riskbitmask & (RISK_XSS | RISK_CONFIG | RISK_DATALOSS))) {
        if (isguestuser($userid) or $userid == 0) {
            return $noresultsfilter;
        }
    }
    if (is_siteadmin($userid)) {
        // No filtering for site admins.
        return $allresultsfilter;
    }
    // Check capability on system level.
    $syscontext = context_system::instance();
    $hassystem = has_capability($capability, $syscontext, $userid);
    $access = get_user_roles_sitewide_accessdata($userid);
    // Build up a list of level 2 contexts (candidates to be user context).
    // Context paths look like /systemId/parentId/Id; a user context always
    // sits directly under the system context, i.e. has exactly 3 path parts.
    $filtercontexts = array();
    // Build list of roles to check overrides.
    $roles = array();
    foreach ($access['ra'] as $path => $role) {
        $parts = explode('/', $path);
        if (count($parts) == 3) {
            $filtercontexts[$parts[2]] = $parts[2];
        } else if (count($parts) > 3) {
            // We know this is not a user context because there is another path with more than 2 levels.
            unset($filtercontexts[$parts[2]]);
        }
        $roles = array_merge($roles, $role);
    }
    // Add all contexts in which a role may be overidden.
    $rdefs = get_role_definitions($roles);
    foreach ($rdefs as $roledef) {
        foreach ($roledef as $path => $caps) {
            if (!isset($caps[$capability])) {
                // The capability is not mentioned, we can ignore.
                continue;
            }
            $parts = explode('/', $path);
            if (count($parts) === 3) {
                // Only get potential user contexts, they only ever have 2 slashes /parentId/Id.
                $filtercontexts[$parts[2]] = $parts[2];
            }
        }
    }
    // No interesting contexts - return all or no results.
    if (empty($filtercontexts)) {
        if ($hassystem) {
            return $allresultsfilter;
        } else {
            return $noresultsfilter;
        }
    }
    // Fetch all interesting contexts for further examination.
    list($insql, $params) = $DB->get_in_or_equal($filtercontexts, SQL_PARAMS_NAMED);
    $params['level'] = CONTEXT_USER;
    $fields = context_helper::get_preload_record_columns_sql('ctx');
    $interestingcontexts = $DB->get_recordset_sql('SELECT ' . $fields . '
                                                    FROM {context} ctx
                                                   WHERE ctx.contextlevel = :level
                                                     AND ctx.id ' . $insql . '
                                                   ORDER BY ctx.id', $params);
    if ($hassystem) {
        // If allowed at system, search for exceptions prohibiting the capability at user context.
        $excludeusers = array();
        foreach ($interestingcontexts as $contextrecord) {
            $candidateuserid = $contextrecord->ctxinstance;
            context_helper::preload_from_record($contextrecord);
            $usercontext = context_user::instance($candidateuserid);
            // Has capability should use the data already preloaded.
            if (!has_capability($capability, $usercontext, $userid)) {
                $excludeusers[$candidateuserid] = $candidateuserid;
            }
        }
        // Construct SQL excluding users with this role assigned for this user.
        if (empty($excludeusers)) {
            $interestingcontexts->close();
            return $allresultsfilter;
        }
        // Note the fourth argument (false): NOT IN for the excluded users.
        list($sql, $params) = $DB->get_in_or_equal($excludeusers, $type, $prefix, false);
    } else {
        // If not allowed at system, search for exceptions allowing the capability at user context.
        $allowusers = array();
        foreach ($interestingcontexts as $contextrecord) {
            $candidateuserid = $contextrecord->ctxinstance;
            context_helper::preload_from_record($contextrecord);
            $usercontext = context_user::instance($candidateuserid);
            // Has capability should use the data already preloaded.
            if (has_capability($capability, $usercontext, $userid)) {
                $allowusers[$candidateuserid] = $candidateuserid;
            }
        }
        // Construct SQL excluding users with this role assigned for this user.
        if (empty($allowusers)) {
            $interestingcontexts->close();
            return $noresultsfilter;
        }
        list($sql, $params) = $DB->get_in_or_equal($allowusers, $type, $prefix);
    }
    $interestingcontexts->close();
    // Return the goods!.
    return array($sql, $params);
}
}
| crazyserver/moodle | competency/classes/api.php | PHP | gpl-3.0 | 204,934 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.ignite.cache;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.apache.ignite.IgniteCache;
import org.apache.nifi.annotation.lifecycle.OnShutdown;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.expression.AttributeExpression.ResultType;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;
import org.apache.nifi.processor.util.StandardValidators;
import org.apache.nifi.processors.ignite.AbstractIgniteProcessor;
/**
 * Base class of Ignite cache based processors.
 *
 * <p>Provides the cache name property, the cache entry key property, the shared
 * success/failure relationships, and the lifecycle helpers used to initialize
 * and close the underlying {@link IgniteCache}.</p>
 */
public abstract class AbstractIgniteCacheProcessor extends AbstractIgniteProcessor {

    /**
     * Ignite cache name
     */
    protected static final PropertyDescriptor CACHE_NAME = new PropertyDescriptor.Builder()
            .displayName("Ignite Cache Name")
            .name("ignite-cache-name")
            .description("The name of the ignite cache")
            .required(false)
            .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
            .build();

    /**
     * The Ignite cache key attribute
     */
    public static final PropertyDescriptor IGNITE_CACHE_ENTRY_KEY = new PropertyDescriptor.Builder()
            .displayName("Ignite Cache Entry Identifier")
            .name("ignite-cache-entry-identifier")
            .description("A FlowFile attribute, or attribute expression used " +
                    "for determining Ignite cache key for the Flow File content")
            .required(true)
            .addValidator(StandardValidators.createAttributeExpressionLanguageValidator(ResultType.STRING, true))
            .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
            .build();

    /**
     * Relationships shared by all Ignite cache processors (success/failure).
     */
    protected static Set<Relationship> relationships;

    static {
        final Set<Relationship> rels = new HashSet<>();
        rels.add(REL_SUCCESS);
        rels.add(REL_FAILURE);
        relationships = Collections.unmodifiableSet(rels);
    }

    /**
     * Ignite cache name, resolved from {@link #CACHE_NAME} at initialization time.
     */
    private String cacheName;

    /**
     * Get ignite cache instance.
     *
     * @return ignite cache instance, or {@code null} if Ignite has not been initialized
     */
    protected IgniteCache<String, byte[]> getIgniteCache() {
        // getOrCreateCache lazily creates the cache on first access.
        if (getIgnite() == null) {
            return null;
        }
        return getIgnite().getOrCreateCache(cacheName);
    }

    @Override
    public Set<Relationship> getRelationships() {
        return relationships;
    }

    /**
     * Initialize the ignite cache instance.
     *
     * @param context process context
     * @throws ProcessException if there is a problem while scheduling the processor
     */
    public void initializeIgniteCache(ProcessContext context) throws ProcessException {
        getLogger().info("Initializing Ignite cache");
        try {
            if (getIgnite() == null) {
                getLogger().info("Initializing ignite as client");
                super.initializeIgnite(context);
            }
            cacheName = context.getProperty(CACHE_NAME).getValue();
        } catch (Exception e) {
            getLogger().error("Failed to initialize ignite cache due to {}", new Object[] { e }, e);
            throw new ProcessException(e);
        }
    }

    /**
     * Close Ignite cache instance and call the base class closeIgnite.
     */
    @OnShutdown
    public void closeIgniteCache() {
        // Fix: fetch the cache once. The original called getIgniteCache() twice,
        // and each call goes through getOrCreateCache on the grid.
        final IgniteCache<String, byte[]> igniteCache = getIgniteCache();
        if (igniteCache != null) {
            getLogger().info("Closing ignite cache");
            igniteCache.close();
        }
        super.closeIgnite();
    }
}
| Wesley-Lawrence/nifi | nifi-nar-bundles/nifi-ignite-bundle/nifi-ignite-processors/src/main/java/org/apache/nifi/processors/ignite/cache/AbstractIgniteCacheProcessor.java | Java | apache-2.0 | 4,532 |
<?php
/*
* Copyright 2014 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
/**
 * Data model for a Cloud Storage bucket's retention policy.
 *
 * Generated API-client model: each public property maps to the JSON field of
 * the same name and is exposed through a plain getter/setter pair.
 */
class Google_Service_Storage_BucketRetentionPolicy extends Google_Model
{
  // Time from which the policy is effective — presumably an RFC 3339
  // timestamp string; confirm against the Cloud Storage API reference.
  public $effectiveTime;
  // Whether the retention policy is locked (cannot be removed/shortened).
  public $isLocked;
  // Retention duration — presumably in seconds; confirm against the API docs.
  public $retentionPeriod;

  public function setEffectiveTime($effectiveTime)
  {
    $this->effectiveTime = $effectiveTime;
  }
  public function getEffectiveTime()
  {
    return $this->effectiveTime;
  }
  public function setIsLocked($isLocked)
  {
    $this->isLocked = $isLocked;
  }
  public function getIsLocked()
  {
    return $this->isLocked;
  }
  public function setRetentionPeriod($retentionPeriod)
  {
    $this->retentionPeriod = $retentionPeriod;
  }
  public function getRetentionPeriod()
  {
    return $this->retentionPeriod;
  }
}
| drthomas21/WordPress_Tutorial | wordpress_htdocs/wp-content/plugins/swg-youtube-vids/vendor/google/apiclient-services/src/Google/Service/Storage/BucketRetentionPolicy.php | PHP | apache-2.0 | 1,269 |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.contrib.training.bucket."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import random
import numpy as np
from tensorflow.contrib.training.python.training import bucket_ops
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import data_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
from tensorflow.python.training import coordinator
from tensorflow.python.training import queue_runner_impl
def _which_bucket(bucket_edges, v):
"""Identify which bucket v falls into.
Args:
bucket_edges: int array, bucket edges
v: int scalar, index
Returns:
int scalar, the bucket.
If v < bucket_edges[0], return 0.
If bucket_edges[0] <= v < bucket_edges[1], return 1.
...
If bucket_edges[-2] <= v < bucket_edges[-1], return len(bucket_edges).
If v >= bucket_edges[-1], return len(bucket_edges) + 1
"""
v = np.asarray(v)
full = [0] + bucket_edges
found = np.where(np.logical_and(v >= full[:-1], v < full[1:]))[0]
if not found.size:
return len(full)
return found[0]
class BucketTest(test.TestCase):
def setUp(self):
    """Reset the graph and build a padded FIFO input queue plus feed tensors."""
    ops.reset_default_graph()

    # Feed placeholders: a scalar int, a variable-length int64 vector, and a
    # fixed-length (3,) string vector.
    self.scalar_int_feed = array_ops.placeholder(dtypes_lib.int32, ())
    self.unk_int64_feed = array_ops.placeholder(dtypes_lib.int64, (None,))
    self.vec3_str_feed = array_ops.placeholder(dtypes_lib.string, (3,))
    # A constant sparse tensor included in every bucketed batch.
    self.sparse_c = sparse_tensor.SparseTensor(
        indices=[[0]],
        values=[1.0],
        dense_shape=[1])

    self._coord = coordinator.Coordinator()
    # Make capacity very large so we can feed all the inputs in the
    # main thread without blocking
    input_queue = data_flow_ops.PaddingFIFOQueue(
        5000,
        dtypes=[dtypes_lib.int32, dtypes_lib.int64, dtypes_lib.string],
        shapes=[(), (None,), (3,)])

    self._input_enqueue_op = input_queue.enqueue(
        (self.scalar_int_feed, self.unk_int64_feed, self.vec3_str_feed))
    # Dequeued tensors used as inputs to bucket_ops.bucket in the tests.
    self.scalar_int, self.unk_int64, self.vec3_str = input_queue.dequeue()
    self._threads = None
    self._close_op = input_queue.close()
    self._sess = None
def enqueue_inputs(self, sess, feed_dict):
    """Feed one example into the input queue via the enqueue op."""
    sess.run(self._input_enqueue_op, feed_dict=feed_dict)
def start_queue_runners(self, sess):
    """Start the graph's queue runner threads under the shared coordinator."""
    # Store session to be able to close inputs later
    if self._sess is None:
        self._sess = sess
    self._threads = queue_runner_impl.start_queue_runners(coord=self._coord)
def tearDown(self):
    """Close the input queue, then stop and join the queue-runner threads."""
    if self._sess is not None:
        # Closing the queue unblocks any pending dequeues so threads can exit.
        self._sess.run(self._close_op)
    self._coord.request_stop()
    self._coord.join(self._threads)
def testSingleBucket(self):
    """Every example routes to bucket 0; expect one padded 32-row minibatch."""
    bucketed_dynamic = bucket_ops.bucket(
        tensors=[self.scalar_int, self.unk_int64, self.vec3_str, self.sparse_c],
        which_bucket=constant_op.constant(0),
        num_buckets=2,
        batch_size=32,
        num_threads=10,
        dynamic_pad=True)
    # Check shape inference on bucketing outputs
    self.assertAllEqual(
        [[32], [32, None], [32, 3], [None, None]],
        [out.get_shape().as_list() for out in bucketed_dynamic[1]])
    with self.cached_session() as sess:
        # Example v carries scalar v, v copies of v, and three strings "v".
        for v in range(32):
            self.enqueue_inputs(sess, {
                self.scalar_int_feed: v,
                self.unk_int64_feed: v * [v],
                self.vec3_str_feed: 3 * [str(v)]
            })
        self.start_queue_runners(sess)

        # Get a single minibatch
        bucketed_values = sess.run(bucketed_dynamic)

        # (which_bucket, bucket_tensors).
        self.assertEqual(2, len(bucketed_values))

        # Count number of bucket_tensors.
        self.assertEqual(4, len(bucketed_values[1]))

        # Ensure bucket 0 was used for all minibatch entries.
        self.assertAllEqual(0, bucketed_values[0])

        # Row i of the int64 feature is i copies of i, zero-padded to the
        # longest row (31 entries) by dynamic_pad.
        expected_scalar_int = np.arange(32)
        expected_unk_int64 = np.zeros((32, 31)).astype(np.int64)
        for i in range(32):
            expected_unk_int64[i, :i] = i
        expected_vec3_str = np.vstack(3 * [np.arange(32).astype(bytes)]).T

        # Must resort the output because num_threads > 1 leads to
        # sometimes-inconsistent insertion order.
        resort = np.argsort(bucketed_values[1][0])
        self.assertAllEqual(expected_scalar_int, bucketed_values[1][0][resort])
        self.assertAllEqual(expected_unk_int64, bucketed_values[1][1][resort])
        self.assertAllEqual(expected_vec3_str, bucketed_values[1][2][resort])
def testBatchSizePerBucket(self):
    """Each bucket may use its own batch size (5 for small, 10 for large)."""
    # Values < 5 go to bucket 0, the rest to bucket 1.
    which_bucket = control_flow_ops.cond(self.scalar_int < 5,
                                         lambda: constant_op.constant(0),
                                         lambda: constant_op.constant(1))
    batch_sizes = [5, 10]
    bucketed_dynamic = bucket_ops.bucket(
        tensors=[self.scalar_int, self.unk_int64, self.vec3_str, self.sparse_c],
        which_bucket=which_bucket,
        num_buckets=2,
        batch_size=batch_sizes,
        num_threads=1,
        dynamic_pad=True)
    # Check shape inference on bucketing outputs
    self.assertAllEqual(
        [[None], [None, None], [None, 3], [None, None]],
        [out.get_shape().as_list() for out in bucketed_dynamic[1]])
    with self.cached_session() as sess:
        # 15 examples: 5 land in bucket 0 (v < 5), 10 in bucket 1.
        for v in range(15):
            self.enqueue_inputs(sess, {
                self.scalar_int_feed: v,
                self.unk_int64_feed: v * [v],
                self.vec3_str_feed: 3 * [str(v)]
            })
        self.start_queue_runners(sess)

        # Get two minibatches (one with small values, one with large).
        bucketed_values_0 = sess.run(bucketed_dynamic)
        bucketed_values_1 = sess.run(bucketed_dynamic)

        # Figure out which output has the small values
        if bucketed_values_0[0] < 5:
            bucketed_values_large, bucketed_values_small = (bucketed_values_1,
                                                            bucketed_values_0)
        else:
            bucketed_values_small, bucketed_values_large = (bucketed_values_0,
                                                            bucketed_values_1)

        # Ensure bucket 0 was used for all minibatch entries.
        self.assertAllEqual(0, bucketed_values_small[0])
        self.assertAllEqual(1, bucketed_values_large[0])

        # Check that the batch sizes differ per bucket
        self.assertEqual(5, len(bucketed_values_small[1][0]))
        self.assertEqual(10, len(bucketed_values_large[1][0]))
def testEvenOddBuckets(self):
  """Buckets by parity with multiple threads and checks both minibatches.

  Enqueues 0..63, bucketing even values into bucket 0 and odd into bucket 1,
  then verifies the dequeued contents of each bucket exactly (after sorting,
  since multithreaded insertion order is nondeterministic).
  """
  which_bucket = (self.scalar_int % 2)
  bucketed_dynamic = bucket_ops.bucket(
      tensors=[self.scalar_int, self.unk_int64, self.vec3_str, self.sparse_c],
      which_bucket=which_bucket,
      num_buckets=2,
      batch_size=32,
      num_threads=10,
      dynamic_pad=True)
  # Check shape inference on bucketing outputs.
  self.assertAllEqual(
      [[32], [32, None], [32, 3], [None, None]],
      [out.get_shape().as_list() for out in bucketed_dynamic[1]])
  with self.cached_session() as sess:
    for v in range(64):
      self.enqueue_inputs(sess, {
          self.scalar_int_feed: v,
          self.unk_int64_feed: v * [v],
          self.vec3_str_feed: 3 * [str(v)]
      })
    self.start_queue_runners(sess)
    # Get two minibatches (one containing even values, one containing odds).
    bucketed_values_0 = sess.run(bucketed_dynamic)
    bucketed_values_1 = sess.run(bucketed_dynamic)
    # Each run returns a pair: (which_bucket, bucket_tensors).
    self.assertEqual(2, len(bucketed_values_0))
    self.assertEqual(2, len(bucketed_values_1))
    # Count number of bucket_tensors.
    self.assertEqual(4, len(bucketed_values_0[1]))
    self.assertEqual(4, len(bucketed_values_1[1]))
    # Figure out which output has the even values (there's
    # randomness due to the multithreaded nature of bucketing).
    if bucketed_values_0[0] % 2 == 1:
      bucketed_values_even, bucketed_values_odd = (bucketed_values_1,
                                                   bucketed_values_0)
    else:
      bucketed_values_even, bucketed_values_odd = (bucketed_values_0,
                                                   bucketed_values_1)
    # Ensure bucket 0 was used for all minibatch entries.
    self.assertAllEqual(0, bucketed_values_even[0])
    self.assertAllEqual(1, bucketed_values_odd[0])
    # Test the first bucket outputted, the evens starting at 0.
    expected_scalar_int = np.arange(0, 32 * 2, 2)
    expected_unk_int64 = np.zeros((32, 31 * 2)).astype(np.int64)
    for i in range(0, 32):
      expected_unk_int64[i, :2 * i] = 2 * i
    expected_vec3_str = np.vstack(3 *
                                  [np.arange(0, 32 * 2, 2).astype(bytes)]).T
    # Must resort the output because num_threads > 1 leads to
    # sometimes-inconsistent insertion order.
    resort = np.argsort(bucketed_values_even[1][0])
    self.assertAllEqual(expected_scalar_int,
                        bucketed_values_even[1][0][resort])
    self.assertAllEqual(expected_unk_int64,
                        bucketed_values_even[1][1][resort])
    self.assertAllEqual(expected_vec3_str, bucketed_values_even[1][2][resort])
    # Test the second bucket outputted, the odds starting at 1.
    expected_scalar_int = np.arange(1, 32 * 2 + 1, 2)
    expected_unk_int64 = np.zeros((32, 31 * 2 + 1)).astype(np.int64)
    for i in range(0, 32):
      expected_unk_int64[i, :2 * i + 1] = 2 * i + 1
    expected_vec3_str = np.vstack(
        3 * [np.arange(1, 32 * 2 + 1, 2).astype(bytes)]).T
    # Must resort the output because num_threads > 1 leads to
    # sometimes-inconsistent insertion order.
    resort = np.argsort(bucketed_values_odd[1][0])
    self.assertAllEqual(expected_scalar_int,
                        bucketed_values_odd[1][0][resort])
    self.assertAllEqual(expected_unk_int64, bucketed_values_odd[1][1][resort])
    self.assertAllEqual(expected_vec3_str, bucketed_values_odd[1][2][resort])
def testEvenOddBucketsFilterOutAllOdd(self):
  """keep_input drops all odd values so only bucket 0 ever emits batches."""
  which_bucket = (self.scalar_int % 2)
  # Keep only inputs routed to bucket 0 (the evens).
  keep_input = math_ops.equal(which_bucket, 0)
  bucketed_dynamic = bucket_ops.bucket(
      tensors=[self.scalar_int, self.unk_int64, self.vec3_str],
      which_bucket=which_bucket,
      num_buckets=2,
      batch_size=32,
      num_threads=10,
      keep_input=keep_input,
      dynamic_pad=True)
  # Check shape inference on bucketing outputs.
  self.assertAllEqual(
      [[32], [32, None], [32, 3]],
      [out.get_shape().as_list() for out in bucketed_dynamic[1]])
  with self.cached_session() as sess:
    for v in range(128):
      self.enqueue_inputs(sess, {
          self.scalar_int_feed: v,
          self.unk_int64_feed: v * [v],
          self.vec3_str_feed: 3 * [str(v)]
      })
    self.start_queue_runners(sess)
    # Get two minibatches ([0, 2, ...] and [64, 66, ...]).
    bucketed_values_even0 = sess.run(bucketed_dynamic)
    bucketed_values_even1 = sess.run(bucketed_dynamic)
    # Ensure that bucket 1 was completely filtered out.
    self.assertAllEqual(0, bucketed_values_even0[0])
    self.assertAllEqual(0, bucketed_values_even1[0])
    # Merge their output for sorting and comparison.
    bucketed_values_all_elem0 = np.concatenate((bucketed_values_even0[1][0],
                                                bucketed_values_even1[1][0]))
    self.assertAllEqual(
        np.arange(0, 128, 2), sorted(bucketed_values_all_elem0))
def testFailOnWrongBucketCapacities(self):
  """bucket_capacities whose length != num_buckets must raise ValueError."""
  with self.assertRaisesRegexp(ValueError, r"must have exactly num_buckets"):
    bucket_ops.bucket(  # 2 buckets and 3 capacities raises ValueError.
        tensors=[self.scalar_int, self.unk_int64, self.vec3_str],
        which_bucket=constant_op.constant(0), num_buckets=2,
        batch_size=32, bucket_capacities=[3, 4, 5])
class BucketBySequenceLengthTest(test.TestCase):
  """Tests for bucket_ops.bucket_by_sequence_length."""

  def _testBucketBySequenceLength(self,
                                  allow_small_batch,
                                  bucket_capacities=None,
                                  drain_entire_queue=True):
    """Shared driver: feeds fixed-length pairs and validates bucketed output.

    Args:
      allow_small_batch: passed through as allow_smaller_final_batch; also
        switches the shape/count assertions between exact and upper-bound.
      bucket_capacities: optional per-bucket capacities forwarded verbatim.
      drain_entire_queue: if False, stop reading after 40 * batch_size pairs
        instead of draining to OutOfRangeError (exercises shutdown).
    """
    ops.reset_default_graph()
    # All inputs must be identical lengths across tuple index.
    # The input reader will get input_length from the first tuple
    # entry.
    data_len = 4
    labels_len = 3
    input_pairs = [(length, ([np.int64(length)] * data_len,
                             [str(length).encode("ascii")] * labels_len))
                   for length in (1, 3, 4, 5, 6, 10)]
    lengths = array_ops.placeholder(dtypes_lib.int32, ())
    data = array_ops.placeholder(dtypes_lib.int64, (data_len,))
    labels = array_ops.placeholder(dtypes_lib.string, (labels_len,))
    batch_size = 8
    bucket_boundaries = [3, 4, 5, 10]
    num_pairs_to_enqueue = 50 * batch_size + 100
    # Make capacity very large so we can feed all the inputs in the
    # main thread without blocking
    input_queue = data_flow_ops.FIFOQueue(
        5000, (dtypes_lib.int32, dtypes_lib.int64, dtypes_lib.string), (
            (), (data_len,), (labels_len,)))
    input_enqueue_op = input_queue.enqueue((lengths, data, labels))
    lengths_t, data_t, labels_t = input_queue.dequeue()
    close_input_op = input_queue.close()
    (out_lengths_t, data_and_labels_t) = (bucket_ops.bucket_by_sequence_length(
        input_length=lengths_t,
        tensors=[data_t, labels_t],
        batch_size=batch_size,
        bucket_boundaries=bucket_boundaries,
        bucket_capacities=bucket_capacities,
        allow_smaller_final_batch=allow_small_batch,
        num_threads=10))
    # Batch dim is unknown when smaller final batches are allowed.
    expected_batch_size = None if allow_small_batch else batch_size
    self.assertEqual(out_lengths_t.get_shape().as_list(), [expected_batch_size])
    self.assertEqual(data_and_labels_t[0].get_shape().as_list(),
                     [expected_batch_size, data_len])
    self.assertEqual(data_and_labels_t[1].get_shape().as_list(),
                     [expected_batch_size, labels_len])

    def _read_test(sess):
      # Dequeues minibatches and checks per-row invariants until the queue
      # is exhausted (or a fixed count when not draining).
      num_pairs_dequeued = 0
      try:
        while drain_entire_queue or num_pairs_dequeued < 40 * batch_size:
          (out_lengths, (data, labels)) = sess.run(
              (out_lengths_t, data_and_labels_t))
          num_pairs_dequeued += out_lengths.shape[0]
          if allow_small_batch:
            self.assertEqual(data_len, data.shape[1])
            self.assertEqual(labels_len, labels.shape[1])
            self.assertGreaterEqual(batch_size, out_lengths.shape[0])
            self.assertGreaterEqual(batch_size, data.shape[0])
            self.assertGreaterEqual(batch_size, labels.shape[0])
          else:
            self.assertEqual((batch_size, data_len), data.shape)
            self.assertEqual((batch_size, labels_len), labels.shape)
            self.assertEqual((batch_size,), out_lengths.shape)
          for (lr, dr, tr) in zip(out_lengths, data, labels):
            # Make sure length matches data (here it's the same value).
            self.assertEqual(dr[0], lr)
            # Make sure data & labels match.
            self.assertEqual(dr[0], int(tr[0].decode("ascii")))
            # Make sure for each row, data came from the same bucket.
            self.assertEqual(
                _which_bucket(bucket_boundaries, dr[0]),
                _which_bucket(bucket_boundaries, dr[1]))
      except errors.OutOfRangeError:
        if allow_small_batch:
          self.assertEqual(num_pairs_to_enqueue, num_pairs_dequeued)
        else:
          # Maximum left over in the queues should be at most one less than the
          # batch_size, for every bucket.
          num_buckets = len(bucket_boundaries) + 2
          self.assertLessEqual(
              num_pairs_to_enqueue - (batch_size - 1) * num_buckets,
              num_pairs_dequeued)

    with self.cached_session() as sess:
      coord = coordinator.Coordinator()
      # Feed the inputs, then close the input thread.
      for _ in range(num_pairs_to_enqueue):
        which = random.randint(0, len(input_pairs) - 1)
        length, pair = input_pairs[which]
        sess.run(input_enqueue_op,
                 feed_dict={lengths: length,
                            data: pair[0],
                            labels: pair[1]})
      sess.run(close_input_op)
      # Start the queue runners
      threads = queue_runner_impl.start_queue_runners(coord=coord)
      # Read off the top of the bucket and ensure correctness of output
      _read_test(sess)
      coord.request_stop()
      coord.join(threads)

  def testBucketBySequenceLength(self):
    """Fixed-size batches; queue drained to completion."""
    self._testBucketBySequenceLength(allow_small_batch=False)

  def testBucketBySequenceLengthAllow(self):
    """Smaller final batches allowed; every enqueued pair must come back."""
    self._testBucketBySequenceLength(allow_small_batch=True)

  def testBucketBySequenceLengthBucketCapacities(self):
    # Above bucket_boundaries = [3, 4, 5, 10] so we need 5 capacities.
    with self.assertRaisesRegexp(ValueError, r"must have exactly num_buckets"):
      self._testBucketBySequenceLength(allow_small_batch=False,
                                       bucket_capacities=[32, 32, 32, 32])
    # Test with different capacities.
    capacities = [48, 40, 32, 24, 16]
    self._testBucketBySequenceLength(allow_small_batch=True,
                                     bucket_capacities=capacities)

  def testBucketBySequenceLengthShutdown(self):
    """Stops reading early to exercise clean shutdown without draining."""
    self._testBucketBySequenceLength(allow_small_batch=True,
                                     drain_entire_queue=False)
# Standard test entry point.
if __name__ == "__main__":
  test.main()
| kevin-coder/tensorflow-fork | tensorflow/contrib/training/python/training/bucket_ops_test.py | Python | apache-2.0 | 18,461 |
package com.shunwang.api.request.recharge;
import com.shunwang.api.SWApiException;
import com.shunwang.api.SWConstants;
import com.shunwang.api.SWObject;
import com.shunwang.api.SWRequest;
import com.shunwang.api.commons.codec.SWSignature;
import com.shunwang.api.commons.lang.StringUtils;
import com.shunwang.api.mapping.SWApiDocs;
import com.shunwang.api.mapping.SWApiField;
import com.shunwang.api.response.AbstractSWResponse;
import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
/**
* 请求抽象
*
* @author min.da
* @since 2015-12-10
*/
/**
 * Base class for recharge API requests: holds the common parameters
 * (companyMemberName, time, sign) and implements parameter collection,
 * signature plaintext construction and MD5 signing.
 *
 * @param <T> the response type paired with this request
 */
public abstract class AbstractSWRequest<T extends AbstractSWResponse> extends SWObject implements SWRequest {
    private static final long serialVersionUID = -1091344518656971762L;
    public static final String SERVER_URL = "http://recharge.kedou.com";
    /**
     * Secret key used to build the MD5 "sign" parameter.
     */
    private String secretKey;
    @SWApiField(max = 32, desc = "调用方商户通行证用户名", orderby = 999)
    private String companyMemberName;
    @SWApiField(max = 18, desc = "当前时间,精确到秒,格式如:20090202080403", orderby = 888)
    private String time;
    @SWApiField(max = 32, desc = "签名", orderby = -999)
    private String sign;
    public AbstractSWRequest(String secretKey) {
        this.secretKey = secretKey;
    }
    /**
     * Collects the common request parameters into a map.
     * Subclasses are expected to extend this with their own fields.
     */
    @Override
    public Map<String, String> buildRequestParams() {
        Map<String, String> params = new HashMap<String, String>();
        params.put("companyMemberName", companyMemberName);
        params.put("time", time);
        params.put("sign", sign);
        return params;
    }
    /**
     * Builds the parameters used for signing, sorted by key
     * (TreeMap gives deterministic natural-order iteration).
     * @return signature parameter map
     */
    @Override
    public Map<String, String> buildSignatureParams() {
        return new TreeMap<String, String>(buildRequestParams());
    }
    /**
     * Builds the signature plaintext by concatenating the values from
     * {@code buildSignatureParams()} (in key order), each followed by
     * {@code SWConstants.SIGN_SEPARATOR}, and appending the secret key.
     * NOTE(review): a separator is appended even when a value is skipped,
     * so null values still contribute a separator — confirm this matches
     * the server-side signing spec.
     * NOTE(review): when {@code isNullToEmpty()} returns false, a null value
     * is appended by StringBuilder as the literal string "null" — verify
     * this is intended.
     * @return signature plaintext
     */
    @Override
    public String buildPlainText() {
        Map<String, String> originParams = buildSignatureParams();
        Set<String> keys = originParams.keySet();
        StringBuilder plainText = new StringBuilder();
        for(String key : keys) {
            String value = originParams.get(key);
            if (!isNullToEmpty() || value != null) {
                plainText.append(value);
            }
            plainText.append(SWConstants.SIGN_SEPARATOR);
        }
        plainText.append(secretKey);
        return plainText.toString();
    }
    /**
     * Builds the request signature: URL-encodes the plaintext, upper-cases
     * it, MD5-hashes it and upper-cases the hex digest.
     * @return request signature
     */
    @Override
    public String buildSignature() {
        String plainText = buildPlainText();
        if (StringUtils.isBlank(plainText)) {
            throw new IllegalArgumentException("签名明文不能为空");
        }
        return SWSignature.DigestUtils.md5Hex(encode(plainText).toUpperCase()).toUpperCase();
    }
    /**
     * Validates the common request parameters (time and sign must be set).
     * @throws IllegalArgumentException if a required parameter is blank
     */
    @Override
    public void validateRequestParams() {
        if (StringUtils.isBlank(time)) {
            throw new IllegalArgumentException("参数[time]不能为空");
        }
        if (StringUtils.isBlank(sign)) {
            throw new IllegalArgumentException("参数[sign]不能为空");
        }
    }
    /**
     * Translates a string into <code>application/x-www-form-urlencoded</code>
     * format using a specific encoding scheme. This method uses the
     * supplied encoding scheme to obtain the bytes for unsafe
     * characters.
     *
     * @param original original <code>String</code> to be translated.
     * @return the translated <code>String</code>.
     */
    protected String encode(String original) {
        try {
            return URLEncoder.encode(original, SWConstants.CHARSET_UTF8);
        } catch (UnsupportedEncodingException e) {
            throw new SWApiException(e);
        }
    }
    /** Full endpoint URL: server base plus the service path from @SWApiDocs. */
    @Override
    public String getUrl() {
        return SERVER_URL + getClass().getAnnotation(SWApiDocs.class).service();
    }
    /** HTTP methods flag as declared on the subclass's @SWApiDocs annotation. */
    @Override
    public int getMethods() {
        return getClass().getAnnotation(SWApiDocs.class).methods();
    }
    public abstract Class<T> getResponseClass();
    /**
     * Whether null values are excluded from the signature plaintext;
     * the default is to exclude them.
     *
     * @return true to exclude nulls, false to include them
     */
    public boolean isNullToEmpty() {
        return true;
    }
    public String getCompanyMemberName() {
        return companyMemberName;
    }
    public void setCompanyMemberName(String companyMemberName) {
        this.companyMemberName = companyMemberName;
    }
    public String getTime() {
        return time;
    }
    public void setTime(String time) {
        this.time = time;
    }
    public String getSign() {
        return sign;
    }
    public void setSign(String sign) {
        this.sign = sign;
    }
    public String getSecretKey() {
        return secretKey;
    }
}
| waitttttttttttttttttttttttting/apitools | apitools-web/src/main/config/apidemos/recharge/updateDisburseConvertState-UTF-8/src/main/java/com/shunwang/api/request/recharge/AbstractSWRequest.java | Java | apache-2.0 | 5,055 |
// Code generated by protoc-gen-gogo. DO NOT EDIT.
// source: vanity.proto
/*
Package vanity is a generated protocol buffer package.
It is generated from these files:
vanity.proto
It has these top-level messages:
A
*/
package vanity
import proto "github.com/gogo/protobuf/proto"
import fmt "fmt"
import math "math"
import strings "strings"
import reflect "reflect"
import io "io"
import github_com_gogo_protobuf_proto "github.com/gogo/protobuf/proto"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.GoGoProtoPackageIsVersion2 // please upgrade the proto package
// A is the generated message type for vanity.proto's message A.
// Strings is optional (bytes,1,opt); Int is required (varint,2,req).
type A struct {
	Strings string `protobuf:"bytes,1,opt,name=Strings" json:"Strings"`
	Int     int64  `protobuf:"varint,2,req,name=Int" json:"Int"`
}
// Generated proto.Message plumbing and nil-safe getters. DO NOT EDIT by hand;
// regenerate from vanity.proto instead.
func (m *A) Reset()                    { *m = A{} }
func (*A) ProtoMessage()               {}
func (*A) Descriptor() ([]byte, []int) { return fileDescriptorVanity, []int{0} }

// GetStrings returns Strings, or "" if the receiver is nil.
func (m *A) GetStrings() string {
	if m != nil {
		return m.Strings
	}
	return ""
}

// GetInt returns Int, or 0 if the receiver is nil.
func (m *A) GetInt() int64 {
	if m != nil {
		return m.Int
	}
	return 0
}

func init() {
	proto.RegisterType((*A)(nil), "vanity.A")
}
// Equal reports field-wise equality with another *A or A value.
// Generated by protoc-gen-gogo's equal plugin — DO NOT EDIT by hand.
func (this *A) Equal(that interface{}) bool {
	if that == nil {
		if this == nil {
			return true
		}
		return false
	}
	// Accept either *A or A.
	that1, ok := that.(*A)
	if !ok {
		that2, ok := that.(A)
		if ok {
			that1 = &that2
		} else {
			return false
		}
	}
	if that1 == nil {
		if this == nil {
			return true
		}
		return false
	} else if this == nil {
		return false
	}
	if this.Strings != that1.Strings {
		return false
	}
	if this.Int != that1.Int {
		return false
	}
	return true
}
// GoString renders the message as Go source. Generated — DO NOT EDIT by hand.
func (this *A) GoString() string {
	if this == nil {
		return "nil"
	}
	s := make([]string, 0, 6)
	s = append(s, "&vanity.A{")
	s = append(s, "Strings: "+fmt.Sprintf("%#v", this.Strings)+",\n")
	s = append(s, "Int: "+fmt.Sprintf("%#v", this.Int)+",\n")
	s = append(s, "}")
	return strings.Join(s, "")
}

// valueToGoStringVanity formats a (possibly nil) pointer value as Go source.
func valueToGoStringVanity(v interface{}, typ string) string {
	rv := reflect.ValueOf(v)
	if rv.IsNil() {
		return "nil"
	}
	pv := reflect.Indirect(rv).Interface()
	return fmt.Sprintf("func(v %v) *%v { return &v } ( %#v )", typ, typ, pv)
}
// Marshal serializes the message to the proto wire format.
// Generated — DO NOT EDIT by hand.
func (m *A) Marshal() (dAtA []byte, err error) {
	size := m.Size()
	dAtA = make([]byte, size)
	n, err := m.MarshalTo(dAtA)
	if err != nil {
		return nil, err
	}
	return dAtA[:n], nil
}

// MarshalTo writes the wire encoding into dAtA, which must be large enough
// (callers use Size()). Returns the number of bytes written.
func (m *A) MarshalTo(dAtA []byte) (int, error) {
	var i int
	_ = i
	var l int
	_ = l
	// Field 1 (Strings): tag 0xa = (1<<3)|2, length-delimited.
	dAtA[i] = 0xa
	i++
	i = encodeVarintVanity(dAtA, i, uint64(len(m.Strings)))
	i += copy(dAtA[i:], m.Strings)
	// Field 2 (Int): tag 0x10 = (2<<3)|0, varint.
	dAtA[i] = 0x10
	i++
	i = encodeVarintVanity(dAtA, i, uint64(m.Int))
	return i, nil
}
// encodeVarintVanity writes v as a base-128 varint starting at offset and
// returns the offset just past the last byte written. Generated helper.
func encodeVarintVanity(dAtA []byte, offset int, v uint64) int {
	for v >= 1<<7 {
		dAtA[offset] = uint8(v&0x7f | 0x80)
		v >>= 7
		offset++
	}
	dAtA[offset] = uint8(v)
	return offset + 1
}
// Size returns the wire-format size in bytes (1-byte tag per field plus
// payload). Generated — DO NOT EDIT by hand.
func (m *A) Size() (n int) {
	var l int
	_ = l
	l = len(m.Strings)
	n += 1 + l + sovVanity(uint64(l))
	n += 1 + sovVanity(uint64(m.Int))
	return n
}

// sovVanity returns the number of bytes needed to varint-encode x.
func sovVanity(x uint64) (n int) {
	for {
		n++
		x >>= 7
		if x == 0 {
			break
		}
	}
	return n
}

// sozVanity returns the varint size of x after zig-zag encoding.
func sozVanity(x uint64) (n int) {
	return sovVanity(uint64((x << 1) ^ uint64((int64(x) >> 63))))
}
// String renders the message for debugging. Generated — DO NOT EDIT by hand.
func (this *A) String() string {
	if this == nil {
		return "nil"
	}
	s := strings.Join([]string{`&A{`,
		`Strings:` + fmt.Sprintf("%v", this.Strings) + `,`,
		`Int:` + fmt.Sprintf("%v", this.Int) + `,`,
		`}`,
	}, "")
	return s
}

// valueToStringVanity formats a (possibly nil) pointer value for String().
func valueToStringVanity(v interface{}) string {
	rv := reflect.ValueOf(v)
	if rv.IsNil() {
		return "nil"
	}
	pv := reflect.Indirect(rv).Interface()
	return fmt.Sprintf("*%v", pv)
}
// Unmarshal decodes the proto wire format into m, skipping unknown fields
// and enforcing that the required field Int (bit 0 of hasFields) was seen.
// Generated — DO NOT EDIT by hand.
func (m *A) Unmarshal(dAtA []byte) error {
	var hasFields [1]uint64
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		preIndex := iNdEx
		// Decode the field tag (field number << 3 | wire type) as a varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return ErrIntOverflowVanity
			}
			if iNdEx >= l {
				return io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		fieldNum := int32(wire >> 3)
		wireType := int(wire & 0x7)
		if wireType == 4 {
			return fmt.Errorf("proto: A: wiretype end group for non-group")
		}
		if fieldNum <= 0 {
			return fmt.Errorf("proto: A: illegal tag %d (wire type %d)", fieldNum, wire)
		}
		switch fieldNum {
		case 1:
			// Strings: length-delimited (wire type 2).
			if wireType != 2 {
				return fmt.Errorf("proto: wrong wireType = %d for field Strings", wireType)
			}
			var stringLen uint64
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowVanity
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				stringLen |= (uint64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			intStringLen := int(stringLen)
			if intStringLen < 0 {
				return ErrInvalidLengthVanity
			}
			postIndex := iNdEx + intStringLen
			if postIndex > l {
				return io.ErrUnexpectedEOF
			}
			m.Strings = string(dAtA[iNdEx:postIndex])
			iNdEx = postIndex
		case 2:
			// Int: varint (wire type 0); required, so record it in hasFields.
			if wireType != 0 {
				return fmt.Errorf("proto: wrong wireType = %d for field Int", wireType)
			}
			m.Int = 0
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return ErrIntOverflowVanity
				}
				if iNdEx >= l {
					return io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				m.Int |= (int64(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			hasFields[0] |= uint64(0x00000001)
		default:
			// Unknown field: rewind to the tag and skip the whole field.
			iNdEx = preIndex
			skippy, err := skipVanity(dAtA[iNdEx:])
			if err != nil {
				return err
			}
			if skippy < 0 {
				return ErrInvalidLengthVanity
			}
			if (iNdEx + skippy) > l {
				return io.ErrUnexpectedEOF
			}
			iNdEx += skippy
		}
	}
	if hasFields[0]&uint64(0x00000001) == 0 {
		return github_com_gogo_protobuf_proto.NewRequiredNotSetError("Int")
	}
	if iNdEx > l {
		return io.ErrUnexpectedEOF
	}
	return nil
}
// skipVanity returns the number of bytes occupied by the next complete field
// (tag + payload) in dAtA, so unknown fields can be skipped.
// Generated — DO NOT EDIT by hand.
func skipVanity(dAtA []byte) (n int, err error) {
	l := len(dAtA)
	iNdEx := 0
	for iNdEx < l {
		// Decode the field tag varint.
		var wire uint64
		for shift := uint(0); ; shift += 7 {
			if shift >= 64 {
				return 0, ErrIntOverflowVanity
			}
			if iNdEx >= l {
				return 0, io.ErrUnexpectedEOF
			}
			b := dAtA[iNdEx]
			iNdEx++
			wire |= (uint64(b) & 0x7F) << shift
			if b < 0x80 {
				break
			}
		}
		wireType := int(wire & 0x7)
		switch wireType {
		case 0:
			// Varint payload: consume bytes until the continuation bit clears.
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowVanity
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				iNdEx++
				if dAtA[iNdEx-1] < 0x80 {
					break
				}
			}
			return iNdEx, nil
		case 1:
			// Fixed64 payload.
			iNdEx += 8
			return iNdEx, nil
		case 2:
			// Length-delimited payload: varint length, then that many bytes.
			var length int
			for shift := uint(0); ; shift += 7 {
				if shift >= 64 {
					return 0, ErrIntOverflowVanity
				}
				if iNdEx >= l {
					return 0, io.ErrUnexpectedEOF
				}
				b := dAtA[iNdEx]
				iNdEx++
				length |= (int(b) & 0x7F) << shift
				if b < 0x80 {
					break
				}
			}
			iNdEx += length
			if length < 0 {
				return 0, ErrInvalidLengthVanity
			}
			return iNdEx, nil
		case 3:
			// Start-group: skip nested fields until the matching end-group tag.
			for {
				var innerWire uint64
				var start int = iNdEx
				for shift := uint(0); ; shift += 7 {
					if shift >= 64 {
						return 0, ErrIntOverflowVanity
					}
					if iNdEx >= l {
						return 0, io.ErrUnexpectedEOF
					}
					b := dAtA[iNdEx]
					iNdEx++
					innerWire |= (uint64(b) & 0x7F) << shift
					if b < 0x80 {
						break
					}
				}
				innerWireType := int(innerWire & 0x7)
				if innerWireType == 4 {
					break
				}
				next, err := skipVanity(dAtA[start:])
				if err != nil {
					return 0, err
				}
				iNdEx = start + next
			}
			return iNdEx, nil
		case 4:
			// End-group tag: zero-length payload.
			return iNdEx, nil
		case 5:
			// Fixed32 payload.
			iNdEx += 4
			return iNdEx, nil
		default:
			return 0, fmt.Errorf("proto: illegal wireType %d", wireType)
		}
	}
	panic("unreachable")
}
var (
ErrInvalidLengthVanity = fmt.Errorf("proto: negative length found during unmarshaling")
ErrIntOverflowVanity = fmt.Errorf("proto: integer overflow")
)
func init() { proto.RegisterFile("vanity.proto", fileDescriptorVanity) }
var fileDescriptorVanity = []byte{
// 138 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x29, 0x4b, 0xcc, 0xcb,
0x2c, 0xa9, 0xd4, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0x62, 0x83, 0xf0, 0x94, 0xac, 0xb9, 0x18,
0x1d, 0x85, 0xe4, 0xb8, 0xd8, 0x83, 0x4b, 0x8a, 0x32, 0xf3, 0xd2, 0x8b, 0x25, 0x18, 0x15, 0x18,
0x35, 0x38, 0x9d, 0x58, 0x4e, 0xdc, 0x93, 0x67, 0x08, 0x82, 0x09, 0x0a, 0x89, 0x71, 0x31, 0x7b,
0xe6, 0x95, 0x48, 0x30, 0x29, 0x30, 0x69, 0x30, 0x43, 0xe5, 0x40, 0x02, 0x4e, 0x3a, 0x17, 0x1e,
0xca, 0x31, 0xdc, 0x78, 0x28, 0xc7, 0xf0, 0xe1, 0xa1, 0x1c, 0x63, 0xc3, 0x23, 0x39, 0xc6, 0x15,
0x8f, 0xe4, 0x18, 0x4f, 0x3c, 0x92, 0x63, 0xbc, 0xf0, 0x48, 0x8e, 0xf1, 0xc1, 0x23, 0x39, 0xc6,
0x17, 0x8f, 0xe4, 0x18, 0x3e, 0x3c, 0x92, 0x63, 0x9c, 0xf0, 0x58, 0x8e, 0x01, 0x10, 0x00, 0x00,
0xff, 0xff, 0x4d, 0xd9, 0xba, 0x18, 0x81, 0x00, 0x00, 0x00,
}
| vasili-v/themis | vendor/github.com/gogo/protobuf/vanity/test/slick/vanity.pb.go | GO | apache-2.0 | 9,206 |
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cache
import (
"errors"
"sync"
"k8s.io/apimachinery/pkg/util/sets"
)
// PopProcessFunc is passed to Pop() method of Queue interface.
// It is supposed to process the element popped from the queue.
type PopProcessFunc func(interface{}) error
// ErrRequeue may be returned by a PopProcessFunc to safely requeue
// the current item. The value of Err will be returned from Pop.
type ErrRequeue struct {
	// Err is returned by the Pop function
	Err error
}

// FIFOClosedError is returned by Pop when the queue has been closed.
var FIFOClosedError error = errors.New("DeltaFIFO: manipulating with closed queue")
// Error returns the wrapped error's message, or a fixed explanation when
// the requeue carries no underlying error.
func (e ErrRequeue) Error() string {
	if e.Err != nil {
		return e.Err.Error()
	}
	return "the popped item should be requeued without returning an error"
}
// Queue is exactly like a Store, but has a Pop() method too.
type Queue interface {
	Store
	// Pop blocks until it has something to process.
	// It returns the object that was process and the result of processing.
	// The PopProcessFunc may return an ErrRequeue{...} to indicate the item
	// should be requeued before releasing the lock on the queue.
	Pop(PopProcessFunc) (interface{}, error)
	// AddIfNotPresent adds a value previously
	// returned by Pop back into the queue as long
	// as nothing else (presumably more recent)
	// has since been added.
	AddIfNotPresent(interface{}) error
	// HasSynced returns true if the first batch of items has been popped
	HasSynced() bool
	// Close queue
	Close()
}
// Pop is a helper function for popping from Queue: it discards the process
// error and returns only the popped object.
// WARNING: Do NOT use this function in non-test code to avoid races
// unless you really really really really know what you are doing.
func Pop(queue Queue) interface{} {
	var result interface{}
	queue.Pop(func(obj interface{}) error {
		result = obj
		return nil
	})
	return result
}
// FIFO receives adds and updates from a Reflector, and puts them in a queue for
// FIFO order processing. If multiple adds/updates of a single item happen while
// an item is in the queue before it has been processed, it will only be
// processed once, and when it is processed, the most recent version will be
// processed. This can't be done with a channel.
//
// FIFO solves this use case:
//  * You want to process every object (exactly) once.
//  * You want to process the most recent version of the object when you process it.
//  * You do not want to process deleted objects, they should be removed from the queue.
//  * You do not want to periodically reprocess objects.
// Compare with DeltaFIFO for other use cases.
type FIFO struct {
	lock sync.RWMutex
	// cond signals waiters in Pop when the queue gains items or is closed.
	cond sync.Cond
	// We depend on the property that items in the set are in the queue and vice versa.
	items        map[string]interface{}
	queue        []string
	itemsInQueue sets.String
	// keepCache allows resync-ing to work by keeping a history of items
	keepCache bool
	// populated is true if the first batch of items inserted by Replace() has been populated
	// or Delete/Add/Update was called first.
	populated bool
	// initialPopulationCount is the number of items inserted by the first call of Replace()
	initialPopulationCount int
	// keyFunc is used to make the key used for queued item insertion and retrieval, and
	// should be deterministic.
	keyFunc KeyFunc
	// Indication the queue is closed.
	// Used to indicate a queue is closed so a control loop can exit when a queue is empty.
	// Currently, not used to gate any of CRED operations.
	closed     bool
	closedLock sync.Mutex
}
var (
_ = Queue(&FIFO{}) // FIFO is a Queue
)
// Close the queue. Wakes all goroutines blocked in Pop so they can observe
// the closed flag and return FIFOClosedError.
func (f *FIFO) Close() {
	f.closedLock.Lock()
	defer f.closedLock.Unlock()
	f.closed = true
	f.cond.Broadcast()
}
// HasSynced returns true if an Add/Update/Delete/AddIfNotPresent was called
// first, or the first batch of items inserted by Replace() has been popped.
func (f *FIFO) HasSynced() bool {
	f.lock.Lock()
	defer f.lock.Unlock()
	return f.populated && f.initialPopulationCount == 0
}
// Add inserts an item, and puts it in the queue. The item is only enqueued
// if it doesn't already exist in the set; the stored value is always updated
// to the latest object.
func (f *FIFO) Add(obj interface{}) error {
	id, err := f.keyFunc(obj)
	if err != nil {
		return KeyError{obj, err}
	}
	f.lock.Lock()
	defer f.lock.Unlock()
	f.populated = true
	if !f.itemsInQueue.Has(id) {
		f.queue = append(f.queue, id)
	}
	f.items[id] = obj
	f.itemsInQueue.Insert(id)
	// Wake any goroutine blocked in Pop.
	f.cond.Broadcast()
	return nil
}
// AddIfNotPresent inserts an item, and puts it in the queue. If the item is already
// present in the set, it is neither enqueued nor added to the set.
//
// This is useful in a single producer/consumer scenario so that the consumer can
// safely retry items without contending with the producer and potentially enqueueing
// stale items.
func (f *FIFO) AddIfNotPresent(obj interface{}) error {
	id, err := f.keyFunc(obj)
	if err != nil {
		return KeyError{obj, err}
	}
	f.lock.Lock()
	defer f.lock.Unlock()
	f.addIfNotPresent(id, obj)
	return nil
}
// addIfNotPresent assumes the fifo lock is already held and adds the provided
// item to the queue under id if it does not already exist.
func (f *FIFO) addIfNotPresent(id string, obj interface{}) {
	f.populated = true
	if f.itemsInQueue.Has(id) {
		return
	}
	f.queue = append(f.queue, id)
	f.items[id] = obj
	f.itemsInQueue.Insert(id)
	// Wake any goroutine blocked in Pop.
	f.cond.Broadcast()
}
// Update is the same as Add in this implementation.
func (f *FIFO) Update(obj interface{}) error {
	return f.Add(obj)
}
// Delete removes an item from the store and from the pending set. It does
// not enqueue anything, because this implementation assumes the consumer
// only cares about the objects, not the order in which they were
// created/added.
func (f *FIFO) Delete(obj interface{}) error {
	key, err := f.keyFunc(obj)
	if err != nil {
		return KeyError{obj, err}
	}
	f.lock.Lock()
	defer f.lock.Unlock()
	f.populated = true
	f.itemsInQueue.Delete(key)
	delete(f.items, key)
	return nil
}
// List returns a snapshot slice of every stored item, in no particular order.
func (f *FIFO) List() []interface{} {
	f.lock.RLock()
	defer f.lock.RUnlock()
	result := make([]interface{}, 0, len(f.items))
	for _, obj := range f.items {
		result = append(result, obj)
	}
	return result
}
// ListKeys returns a snapshot slice of the keys of all objects currently
// in the FIFO, in no particular order.
func (f *FIFO) ListKeys() []string {
	f.lock.RLock()
	defer f.lock.RUnlock()
	keys := make([]string, 0, len(f.items))
	for k := range f.items {
		keys = append(keys, k)
	}
	return keys
}
// Get returns the requested item, or sets exists=false.
// The lookup key is derived from obj via keyFunc.
func (f *FIFO) Get(obj interface{}) (item interface{}, exists bool, err error) {
	key, err := f.keyFunc(obj)
	if err != nil {
		return nil, false, KeyError{obj, err}
	}
	return f.GetByKey(key)
}
// GetByKey returns the requested item, or sets exists=false.
func (f *FIFO) GetByKey(key string) (item interface{}, exists bool, err error) {
	f.lock.RLock()
	defer f.lock.RUnlock()
	item, exists = f.items[key]
	return item, exists, nil
}
// IsClosed reports whether the queue has been closed.
// Idiom fix: return the boolean directly instead of the redundant
// `if closed { return true }; return false` branch — behavior unchanged.
func (f *FIFO) IsClosed() bool {
	f.closedLock.Lock()
	defer f.closedLock.Unlock()
	return f.closed
}
// Pop waits until an item is ready and processes it. If multiple items are
// ready, they are returned in the order in which they were added/updated.
// The item is removed from the queue (and the store) before it is processed,
// so if you don't successfully process it, it should be added back with
// AddIfNotPresent(). process function is called under lock, so it is safe
// update data structures in it that need to be in sync with the queue.
func (f *FIFO) Pop(process PopProcessFunc) (interface{}, error) {
	f.lock.Lock()
	defer f.lock.Unlock()
	for {
		for len(f.queue) == 0 {
			// When the queue is empty, invocation of Pop() is blocked until new item is enqueued.
			// When Close() is called, the f.closed is set and the condition is broadcasted.
			// Which causes this loop to continue and return from the Pop().
			if f.IsClosed() {
				return nil, FIFOClosedError
			}
			f.cond.Wait()
		}
		// Dequeue the oldest key.
		id := f.queue[0]
		f.queue = f.queue[1:]
		if f.initialPopulationCount > 0 {
			f.initialPopulationCount--
		}
		item, ok := f.items[id]
		if !ok {
			// Item may have been deleted subsequently.
			continue
		}
		f.itemsInQueue.Delete(id)
		// When keepCache is set (resyncable FIFO) the item stays in the store
		// so Resync can re-enqueue it later.
		if !f.keepCache {
			delete(f.items, id)
		}
		err := process(item)
		if e, ok := err.(ErrRequeue); ok {
			// Processor asked for a retry: put the item back (still under lock)
			// and surface only the wrapped error.
			f.addIfNotPresent(id, item)
			err = e.Err
		}
		return item, err
	}
}
// Replace will delete the contents of 'f', using instead the given map.
// 'f' takes ownership of the map, you should not reference the map again
// after calling this function. f's queue is reset, too; upon return, it
// will contain the items in the map, in no particular order.
func (f *FIFO) Replace(list []interface{}, resourceVersion string) error {
	items := map[string]interface{}{}
	for _, item := range list {
		key, err := f.keyFunc(item)
		if err != nil {
			return KeyError{item, err}
		}
		items[key] = item
	}
	f.lock.Lock()
	defer f.lock.Unlock()
	// The first Replace establishes the initial population used by HasSynced.
	if !f.populated {
		f.populated = true
		f.initialPopulationCount = len(items)
	}
	f.items = items
	f.queue = f.queue[:0]
	for id := range items {
		f.queue = append(f.queue, id)
		f.itemsInQueue.Insert(id)
	}
	if len(f.queue) > 0 {
		f.cond.Broadcast()
	}
	return nil
}
// Resync will touch all objects to put them into the processing queue:
// every stored item not already pending is re-enqueued.
func (f *FIFO) Resync() error {
	f.lock.Lock()
	defer f.lock.Unlock()
	for id := range f.items {
		if !f.itemsInQueue.Has(id) {
			f.queue = append(f.queue, id)
		}
	}
	if len(f.queue) > 0 {
		f.cond.Broadcast()
	}
	return nil
}
// NewFIFO returns a Store which can be used to queue up items to
// process. keyFunc derives the queue/storage key for each object.
func NewFIFO(keyFunc KeyFunc) *FIFO {
	f := &FIFO{
		items:        map[string]interface{}{},
		queue:        []string{},
		keyFunc:      keyFunc,
		itemsInQueue: sets.String{},
	}
	// The condition variable shares the FIFO's main lock.
	f.cond.L = &f.lock
	return f
}
// NewResyncableFIFO returns a FIFO that keeps popped items in its cache
// (keepCache), so Resync can re-queue previously seen items.
func NewResyncableFIFO(keyFunc KeyFunc) *FIFO {
	f := NewFIFO(keyFunc)
	f.keepCache = true
	return f
}
| wjiangjay/origin | vendor/k8s.io/kubernetes/staging/src/k8s.io/client-go/tools/cache/fifo.go | GO | apache-2.0 | 10,440 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.binary.datastreaming;
import java.util.Collection;
import java.util.Map;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.binary.BinaryObject;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.processors.datastreamer.DataStreamProcessorSelfTest;
import org.apache.ignite.internal.binary.BinaryMarshaller;
import org.apache.ignite.stream.StreamReceiver;
/**
 * Runs the parent data-streamer scenarios with the binary marshaller
 * configured and values kept in binary (serialized) form at the receiver.
 */
public class DataStreamProcessorBinarySelfTest extends DataStreamProcessorSelfTest {
    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        cfg.setMarshaller(new BinaryMarshaller());

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected StreamReceiver<String, TestObject> getStreamReceiver() {
        return new TestDataReceiver();
    }

    /** {@inheritDoc} */
    @Override protected boolean customKeepBinary() {
        return true;
    }

    /**
     * Receiver that verifies entries arrive as {@link BinaryObject}s, then
     * deserializes each value and stores it back incremented by one.
     */
    private static class TestDataReceiver implements StreamReceiver<String, TestObject> {
        /** {@inheritDoc} */
        @Override public void receive(IgniteCache<String, TestObject> cache,
            Collection<Map.Entry<String, TestObject>> entries) {
            for (Map.Entry<String, TestObject> entry : entries) {
                assertTrue(entry.getKey() instanceof String);
                assertTrue(String.valueOf(entry.getValue()), entry.getValue() instanceof BinaryObject);

                TestObject obj = ((BinaryObject)entry.getValue()).deserialize();

                cache.put(entry.getKey(), new TestObject(obj.val + 1));
            }
        }
    }
}
| irudyak/ignite | modules/core/src/test/java/org/apache/ignite/internal/processors/cache/binary/datastreaming/DataStreamProcessorBinarySelfTest.java | Java | apache-2.0 | 2,633 |
// { dg-do "run" }
// { dg-options "-std=gnu++0x" }
// Three distinct empty tag types used as the exception list below.
struct A {};
struct B {};
struct C {};
// Throws a default-constructed instance of the idx-th listed type.
// The variadic dynamic exception specification `throw(Exceptions...)`
// is the C++0x feature this testcase exercises; do not modernize it.
template<typename... Exceptions> void f(int idx) throw(Exceptions...) {
  if (idx == 0) throw A();
  else if (idx == 1) throw B();
  else if (idx == 2) throw C();
}
extern "C" void abort();

// Each call must throw the expected pack element and be caught by the
// matching handler; falling through to abort() means the wrong type
// escaped the exception specification.
int main()
{
  try {
    f<A, B, C>(0);
    abort();
  } catch (A) {
  }
  try {
    f<A, B, C>(1);
    abort();
  } catch (B) {
  }
  try {
    f<A, B, C>(2);
    abort();
  } catch (C) {
  }
  return 0;
}
| efortuna/AndroidSDKClone | ndk_experimental/tests/device/test-stlport_static-exception/jni/variadic73.cpp | C++ | apache-2.0 | 491 |
<?php
/**
* Copyright 2016 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
namespace LINE\LINEBot\Event\MessageEvent;
use LINE\LINEBot\Event\MessageEvent;
/**
 * A class that represents the message event of image.
 *
 * @package LINE\LINEBot\Event\MessageEvent
 */
class ImageMessage extends MessageEvent
{
    /**
     * ImageMessage constructor.
     *
     * This subclass adds no state of its own; it exists to narrow the event
     * type. All payload access goes through the parent {@link MessageEvent}.
     *
     * @param array $event Decoded webhook event payload.
     */
    public function __construct($event)
    {
        parent::__construct($event);
    }
}
| rsps950551/LineBotTemplate | vendor/vendor/linecorp/line-bot-sdk/src/LINEBot/Event/MessageEvent/ImageMessage.php | PHP | apache-2.0 | 1,051 |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Dojo
* @subpackage UnitTests
* @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id$
*/
// Call Zend_Dojo_Form_Element_ButtonTest::main() if this source file is executed directly.
if (!defined("PHPUnit_MAIN_METHOD")) {
define("PHPUnit_MAIN_METHOD", "Zend_Dojo_Form_Element_ButtonTest::main");
}
/** Zend_Dojo_Form_Element_Button */
require_once 'Zend/Dojo/Form/Element/Button.php';
/** Zend_View */
require_once 'Zend/View.php';
/** Zend_Registry */
require_once 'Zend/Registry.php';
/** Zend_Translate */
require_once 'Zend/Translate.php';
/** Zend_Dojo_View_Helper_Dojo */
require_once 'Zend/Dojo/View/Helper/Dojo.php';
/**
* Test class for Zend_Dojo_Form_Element_Button.
*
* @category Zend
* @package Zend_Dojo
* @subpackage UnitTests
* @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @group Zend_Dojo
* @group Zend_Dojo_Form
*/
class Zend_Dojo_Form_Element_ButtonTest extends PHPUnit_Framework_TestCase
{
    /**
     * Runs the test methods of this class.
     *
     * @return void
     */
    public static function main()
    {
        $suite = new PHPUnit_Framework_TestSuite("Zend_Dojo_Form_Element_ButtonTest");
        $result = PHPUnit_TextUI_TestRunner::run($suite);
    }

    /**
     * Sets up the fixture, for example, open a network connection.
     * This method is called before a test is executed.
     *
     * Resets global registry state and builds a fresh view + button element
     * ($this->view, $this->element) for every test.
     *
     * @return void
     */
    public function setUp()
    {
        Zend_Registry::_unsetInstance();
        Zend_Dojo_View_Helper_Dojo::setUseDeclarative();
        $this->view = $this->getView();
        $this->element = $this->getElement();
        $this->element->setView($this->view);
    }

    /**
     * Tears down the fixture, for example, close a network connection.
     * This method is called after a test is executed.
     *
     * @return void
     */
    public function tearDown()
    {
    }

    /**
     * Builds a Zend_View wired with the Dojo view helpers.
     *
     * @return Zend_View
     */
    public function getView()
    {
        require_once 'Zend/View.php';
        $view = new Zend_View();
        $view->addHelperPath('Zend/Dojo/View/Helper/', 'Zend_Dojo_View_Helper');
        return $view;
    }

    /**
     * Builds the button element under test (name 'foo', no explicit label).
     *
     * @return Zend_Dojo_Form_Element_Button
     */
    public function getElement()
    {
        $element = new Zend_Dojo_Form_Element_Button('foo');
        return $element;
    }

    public function testGetLabelReturnsNameIfNoValuePresent()
    {
        $this->assertEquals($this->element->getName(), $this->element->getLabel());
    }

    public function testGetLabelReturnsTranslatedLabelIfTranslatorIsRegistered()
    {
        // Translation table fixture lives next to the tests.
        $translations = include dirname(__FILE__) . '/_files/locale/array.php';
        $translate = new Zend_Translate('array', $translations, 'en');
        $this->element->setTranslator($translate)
            ->setLabel('submit');
        $test = $this->element->getLabel();
        $this->assertEquals($translations['submit'], $test);
    }

    public function testTranslatedLabelIsRendered()
    {
        // Reuse the previous test to put the element into a translated state.
        $this->testGetLabelReturnsTranslatedLabelIfTranslatorIsRegistered();
        $this->element->setView($this->getView());
        $decorator = $this->element->getDecorator('DijitElement');
        $decorator->setElement($this->element);
        $html = $decorator->render('');
        $this->assertRegexp('/<(input|button)[^>]*?>Submit Button/', $html, 'Label: ' . $this->element->getLabel() . "\nHTML: " . $html);
    }

    public function testConstructorSetsLabelToNameIfNoLabelProvided()
    {
        $button = new Zend_Dojo_Form_Element_Button('foo');
        $this->assertEquals('foo', $button->getName());
        $this->assertEquals('foo', $button->getLabel());
    }

    public function testCanPassLabelAsParameterToConstructor()
    {
        $button = new Zend_Dojo_Form_Element_Button('foo', 'Label');
        $this->assertEquals('Label', $button->getLabel());
    }

    public function testLabelIsTranslatedWhenTranslationAvailable()
    {
        require_once 'Zend/Translate.php';
        $translations = array('Label' => 'This is the Submit Label');
        $translate = new Zend_Translate('array', $translations);
        $button = new Zend_Dojo_Form_Element_Button('foo', 'Label');
        $button->setTranslator($translate);
        $this->assertEquals($translations['Label'], $button->getLabel());
    }

    public function testIsCheckedReturnsFalseWhenNoValuePresent()
    {
        $this->assertFalse($this->element->isChecked());
    }

    public function testIsCheckedReturnsFalseWhenValuePresentButDoesNotMatchLabel()
    {
        $this->assertFalse($this->element->isChecked());
        $this->element->setValue('bar');
        $this->assertFalse($this->element->isChecked());
    }

    public function testIsCheckedReturnsTrueWhenValuePresentAndMatchesLabel()
    {
        // isChecked() compares the submitted value against the label ('foo').
        $this->testIsCheckedReturnsFalseWhenNoValuePresent();
        $this->element->setValue('foo');
        $this->assertTrue($this->element->isChecked());
    }

    public function testShouldRenderButtonDijit()
    {
        $html = $this->element->render();
        $this->assertContains('dojoType="dijit.form.Button"', $html);
    }

    /**
     * @group ZF-3961
     */
    public function testValuePropertyShouldNotBeRendered()
    {
        $this->element->setLabel('Button Label')
            ->setView($this->getView());
        $html = $this->element->render();
        $this->assertContains('Button Label', $html, $html);
        $this->assertNotContains('value="', $html);
    }
}
// Call Zend_Dojo_Form_Element_ButtonTest::main() if this source file is executed directly.
// (PHPUnit_MAIN_METHOD is defined by the guard at the top of this file.)
if (PHPUnit_MAIN_METHOD == "Zend_Dojo_Form_Element_ButtonTest::main") {
    Zend_Dojo_Form_Element_ButtonTest::main();
}
| axot/zf1 | tests/Zend/Dojo/Form/Element/ButtonTest.php | PHP | bsd-3-clause | 6,367 |
// Type definitions for base64id 2.0
// Project: https://github.com/faeldt/base64id
// Definitions by: Shadman Kolahzary <https://github.com/Kolahzary>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// TypeScript Version: 4.0
/// <reference types="node" />
/**
 * Generates a base64 id
 */
export function generateId(): string;

/**
 * Get random bytes
 *
 * Uses a buffer if available, falls back to crypto.randomBytes
 */
export function getRandomBytes(bytes: number): Buffer;

// NOTE(review): the exports below appear to be internal buffering state of the
// implementation (pre-fetched random bytes and bookkeeping) that the module
// happens to expose — confirm against the base64id source before relying on them.
export let bytesBuffer: Buffer;
export let bytesBufferIndex: number;
export let isGeneratingBytes: boolean;
export let sequenceNumber: number;
| georgemarshall/DefinitelyTyped | types/base64id/index.d.ts | TypeScript | mit | 644 |
"""Tests for the NumpyVersion class.
"""
from __future__ import division, absolute_import, print_function
from numpy.testing import assert_, assert_raises
from numpy.lib import NumpyVersion
def test_main_versions():
    """Plain release strings compare correctly against NumpyVersion('1.8.0')."""
    base = NumpyVersion('1.8.0')
    assert_(base == '1.8.0')

    for newer in ('1.9.0', '2.0.0', '1.8.1'):
        assert_(base < newer)

    for older in ('1.7.0', '1.7.1', '0.9.9'):
        assert_(base > older)
def test_version_1_point_10():
    """Regression test for gh-2998: multi-digit components compare numerically."""
    ordered_pairs = [
        ('1.9.0', '1.10.0'),
        ('1.11.0', '1.11.1'),
        ('1.99.11', '1.99.12'),
    ]
    for lower, higher in ordered_pairs:
        assert_(NumpyVersion(lower) < higher)
    assert_(NumpyVersion('1.11.0') == '1.11.0')
def test_alpha_beta_rc():
    """Alpha < beta < release-candidate < final ordering of suffixes."""
    rc1 = NumpyVersion('1.8.0rc1')
    assert_(rc1 == '1.8.0rc1')

    for later in ('1.8.0', '1.8.0rc2'):
        assert_(rc1 < later)

    for earlier in ('1.8.0a2', '1.8.0b3', '1.7.2rc4'):
        assert_(rc1 > earlier)

    assert_(NumpyVersion('1.8.0b1') > '1.8.0a2')
def test_dev_version():
    """Old-style '.dev-<hash>' builds sort below releases; hash is ignored."""
    dev = NumpyVersion('1.9.0.dev-f16acvda')
    assert_(NumpyVersion('1.9.0.dev-Unknown') < '1.9.0')
    for other in ('1.9.0', '1.9.0a1', '1.9.0b2', '1.9.0b2.dev-ffffffff'):
        assert_(dev < other)
    assert_(dev == '1.9.0.dev-11111111')
def test_dev_a_b_rc_mixed():
    """dev suffixes on prereleases: the hash is ignored for equality, and any
    dev build sorts below the corresponding prerelease."""
    assert_(NumpyVersion('1.9.0a2.dev-f16acvda') == '1.9.0a2.dev-11111111')
    assert_(NumpyVersion('1.9.0a2.dev-6acvda54') < '1.9.0a2')
def test_dev0_version():
    """PEP 440 style '.dev0+<hash>' builds sort below releases; hash ignored."""
    dev0 = NumpyVersion('1.9.0.dev0+f16acvda')
    assert_(NumpyVersion('1.9.0.dev0+Unknown') < '1.9.0')
    for other in ('1.9.0', '1.9.0a1', '1.9.0b2', '1.9.0b2.dev0+ffffffff'):
        assert_(dev0 < other)
    assert_(dev0 == '1.9.0.dev0+11111111')
def test_dev0_a_b_rc_mixed():
    """PEP 440 dev suffixes on prereleases: equal regardless of local hash,
    and below the corresponding prerelease."""
    assert_(NumpyVersion('1.9.0a2.dev0+f16acvda') == '1.9.0a2.dev0+11111111')
    assert_(NumpyVersion('1.9.0a2.dev0+6acvda54') < '1.9.0a2')
def test_raises():
    """Malformed version strings are rejected with ValueError."""
    for bad in ('1.9', '1,9.0', '1.7.x'):
        assert_raises(ValueError, NumpyVersion, bad)
| kubaszostak/gdal-dragndrop | osgeo/apps/Python27/Lib/site-packages/numpy/lib/tests/test__version.py | Python | mit | 2,055 |
// Copyright Oliver Kowalke 2013.
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
#ifndef BOOST_FIBERS_DETAIL_CONTEXT_SPMC_QUEUE_H
#define BOOST_FIBERS_DETAIL_CONTEXT_SPMC_QUEUE_H
#include <atomic>
#include <cstddef>
#include <cstdint>
#include <memory>
#include <type_traits>
#include <utility>
#include <vector>

#include <boost/assert.hpp>
#include <boost/config.hpp>

#include <boost/fiber/detail/config.hpp>
#include <boost/fiber/context.hpp>
// David Chase and Yossi Lev. Dynamic circular work-stealing deque.
// In SPAA ’05: Proceedings of the seventeenth annual ACM symposium
// on Parallelism in algorithms and architectures, pages 21–28,
// New York, NY, USA, 2005. ACM.
//
// Nhat Minh Lê, Antoniu Pop, Albert Cohen, and Francesco Zappa Nardelli. 2013.
// Correct and efficient work-stealing for weak memory models.
// In Proceedings of the 18th ACM SIGPLAN symposium on Principles and practice
// of parallel programming (PPoPP '13). ACM, New York, NY, USA, 69-80.
#if BOOST_COMP_CLANG
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunused-private-field"
#endif
namespace boost {
namespace fibers {
namespace detail {
// Chase-Lev work-stealing deque specialised for context pointers: a single
// owner thread push()es and pop()s at the bottom while other threads steal()
// from the top. The atomic memory orderings follow Lê/Pop/Cohen/Zappa
// Nardelli (PPoPP'13, cited above); do not weaken or reorder them without
// re-verifying against that paper.
class context_spmc_queue {
private:
    // Fixed-capacity ring buffer of atomic slots. Indices grow monotonically
    // and wrap via modulo; the deque grows by allocating a doubled array.
    class array {
    private:
        typedef std::atomic< context * > atomic_type;
        typedef atomic_type storage_type;

        std::size_t capacity_;
        storage_type * storage_;

    public:
        // Allocates 'capacity' slots and placement-constructs each atomic
        // cell to nullptr.
        array( std::size_t capacity) :
            capacity_{ capacity },
            storage_{ new storage_type[capacity_] } {
            for ( std::size_t i = 0; i < capacity_; ++i) {
                ::new ( static_cast< void * >( std::addressof( storage_[i]) ) ) atomic_type{ nullptr };
            }
        }

        ~array() {
            // Destroy the placement-constructed atomics before freeing storage.
            for ( std::size_t i = 0; i < capacity_; ++i) {
                reinterpret_cast< atomic_type * >( std::addressof( storage_[i]) )->~atomic_type();
            }
            delete [] storage_;
        }

        std::size_t capacity() const noexcept {
            return capacity_;
        }

        void push( std::size_t bottom, context * ctx) noexcept {
            reinterpret_cast< atomic_type * >(
                std::addressof( storage_[bottom % capacity_]) )
                    ->store( ctx, std::memory_order_relaxed);
        }

        context * pop( std::size_t top) noexcept {
            return reinterpret_cast< atomic_type * >(
                std::addressof( storage_[top % capacity_]) )
                    ->load( std::memory_order_relaxed);
        }

        // Returns a new array of twice the capacity holding the live range
        // [top, bottom); ownership of 'this' stays with the caller.
        array * resize( std::size_t bottom, std::size_t top) {
            std::unique_ptr< array > tmp{ new array{ 2 * capacity_ } };
            for ( std::size_t i = top; i != bottom; ++i) {
                tmp->push( i, pop( i) );
            }
            return tmp.release();
        }
    };

    std::atomic< std::size_t > top_{ 0 };
    std::atomic< std::size_t > bottom_{ 0 };
    std::atomic< array * > array_;
    // Retired arrays are kept until destruction because a concurrent steal()
    // may still be reading from an old array.
    std::vector< array * > old_arrays_{};
    // Pads the queue out to a cache line (see cacheline_length in
    // detail/config.hpp), presumably to reduce false sharing.
    char padding_[cacheline_length];

public:
    context_spmc_queue( std::size_t capacity = 4096) :
        array_{ new array{ capacity } } {
        old_arrays_.reserve( 32);
    }

    ~context_spmc_queue() {
        for ( array * a : old_arrays_) {
            delete a;
        }
        delete array_.load();
    }

    context_spmc_queue( context_spmc_queue const&) = delete;
    context_spmc_queue & operator=( context_spmc_queue const&) = delete;

    // Snapshot only: under concurrent use the answer may be stale by the
    // time the caller acts on it.
    bool empty() const noexcept {
        std::size_t bottom = bottom_.load( std::memory_order_relaxed);
        std::size_t top = top_.load( std::memory_order_relaxed);
        return bottom <= top;
    }

    // Owner-thread only: append ctx at the bottom, doubling the ring first
    // if it is full.
    void push( context * ctx) {
        std::size_t bottom = bottom_.load( std::memory_order_relaxed);
        std::size_t top = top_.load( std::memory_order_acquire);
        array * a = array_.load( std::memory_order_relaxed);
        if ( (a->capacity() - 1) < (bottom - top) ) {
            // queue is full
            // resize
            array * tmp = a->resize( bottom, top);
            old_arrays_.push_back( a);
            std::swap( a, tmp);
            array_.store( a, std::memory_order_relaxed);
        }
        a->push( bottom, ctx);
        // Release fence publishes the slot before the new bottom is visible.
        std::atomic_thread_fence( std::memory_order_release);
        bottom_.store( bottom + 1, std::memory_order_relaxed);
    }

    // Owner-thread only: take from the bottom; races with steal() on the
    // last remaining element via the CAS on top_.
    context * pop() {
        std::size_t bottom = bottom_.load( std::memory_order_relaxed) - 1;
        array * a = array_.load( std::memory_order_relaxed);
        bottom_.store( bottom, std::memory_order_relaxed);
        std::atomic_thread_fence( std::memory_order_seq_cst);
        std::size_t top = top_.load( std::memory_order_relaxed);
        context * ctx = nullptr;
        if ( top <= bottom) {
            // queue is not empty
            ctx = a->pop( bottom);
            BOOST_ASSERT( nullptr != ctx);
            if ( top == bottom) {
                // last element dequeued
                if ( ! top_.compare_exchange_strong( top, top + 1,
                                                     std::memory_order_seq_cst,
                                                     std::memory_order_relaxed) ) {
                    // lose the race
                    ctx = nullptr;
                }
                bottom_.store( bottom + 1, std::memory_order_relaxed);
            }
        } else {
            // queue is empty
            bottom_.store( bottom + 1, std::memory_order_relaxed);
        }
        return ctx;
    }

    // Any thread: take from the top; returns nullptr when empty, when the
    // CAS race is lost, or when the candidate is a pinned context.
    context * steal() {
        std::size_t top = top_.load( std::memory_order_acquire);
        std::atomic_thread_fence( std::memory_order_seq_cst);
        std::size_t bottom = bottom_.load( std::memory_order_acquire);
        context * ctx = nullptr;
        if ( top < bottom) {
            // queue is not empty
            array * a = array_.load( std::memory_order_consume);
            ctx = a->pop( top);
            BOOST_ASSERT( nullptr != ctx);
            // do not steal pinned context (e.g. main-/dispatcher-context)
            if ( ctx->is_context( type::pinned_context) ) {
                return nullptr;
            }
            if ( ! top_.compare_exchange_strong( top, top + 1,
                                                 std::memory_order_seq_cst,
                                                 std::memory_order_relaxed) ) {
                // lose the race
                return nullptr;
            }
        }
        return ctx;
    }
};
}}}
#if BOOST_COMP_CLANG
#pragma clang diagnostic pop
#endif
#endif // BOOST_FIBERS_DETAIL_CONTEXT_SPMC_QUEUE_H
| kumakoko/KumaGL | third_lib/boost/1.75.0/boost/fiber/detail/context_spmc_queue.hpp | C++ | mit | 6,848 |
package org.robolectric.shadows;
import android.app.PendingIntent;
import android.content.Context;
import android.graphics.Bitmap;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import android.widget.RemoteViews;
import android.widget.TextView;
import org.robolectric.annotation.Implementation;
import org.robolectric.annotation.Implements;
import java.util.ArrayList;
import java.util.List;
/**
 * Shadow for {@link android.widget.RemoteViews}.
 *
 * <p>Instead of serialising view operations the way the real RemoteViews does,
 * this shadow records each mutation as a {@link ViewUpdater} and replays the
 * recorded list, in call order, against a concrete view hierarchy in
 * {@link #reapply}.</p>
 */
@Implements(RemoteViews.class)
public class ShadowRemoteViews {
  private String packageName;
  private int layoutId;
  // Recorded mutations, replayed in insertion order by reapply().
  private List<ViewUpdater> viewUpdaters = new ArrayList<>();

  // Robolectric shadow constructor; mirrors RemoteViews(String, int).
  public void __constructor__(String packageName, int layoutId) {
    this.packageName = packageName;
    this.layoutId = layoutId;
  }

  @Implementation
  public String getPackage() {
    return packageName;
  }

  @Implementation
  public int getLayoutId() {
    return layoutId;
  }

  @Implementation
  public void setTextViewText(int viewId, final CharSequence text) {
    viewUpdaters.add(new ViewUpdater(viewId) {
      @Override
      public void doUpdate(View view) {
        ((TextView) view).setText(text);
      }
    });
  }

  @Implementation
  public void setOnClickPendingIntent(int viewId, final PendingIntent pendingIntent) {
    viewUpdaters.add(new ViewUpdater(viewId) {
      @Override void doUpdate(final View view) {
        // Clicking the view fires the recorded PendingIntent immediately.
        view.setOnClickListener(new View.OnClickListener() {
          @Override
          public void onClick(View v) {
            try {
              pendingIntent.send(view.getContext(), 0, null);
            } catch (PendingIntent.CanceledException e) {
              throw new RuntimeException(e);
            }
          }
        });
      }
    });
  }

  @Implementation
  public void setViewVisibility(int viewId, final int visibility) {
    viewUpdaters.add(new ViewUpdater(viewId) {
      @Override
      public void doUpdate(View view) {
        view.setVisibility(visibility);
      }
    });
  }

  @Implementation
  public void setImageViewResource(int viewId, final int resourceId) {
    viewUpdaters.add(new ViewUpdater(viewId) {
      @Override
      public void doUpdate(View view) {
        ((ImageView) view).setImageResource(resourceId);
      }
    });
  }

  @Implementation
  public void setImageViewBitmap(int viewId, final Bitmap bitmap) {
    viewUpdaters.add(new ViewUpdater(viewId) {
      @Override
      public void doUpdate(View view) {
        ((ImageView) view).setImageBitmap(bitmap);
      }
    });
  }

  // Inflates the recorded layout into 'parent' and replays all mutations.
  @Implementation
  public View apply(Context context, ViewGroup parent) {
    LayoutInflater inflater = LayoutInflater.from(context);
    View inflated = inflater.inflate(layoutId, parent);
    reapply(context, inflated);
    return inflated;
  }

  // Replays every recorded mutation against an already-inflated hierarchy.
  @Implementation
  public void reapply(Context context, View v) {
    for (ViewUpdater viewUpdater : viewUpdaters) {
      viewUpdater.update(v);
    }
  }

  /**
   * One deferred mutation targeting the child view with {@code viewId};
   * {@link #update} resolves the id and fails loudly if it is absent.
   */
  private abstract class ViewUpdater {
    private int viewId;

    public ViewUpdater(int viewId) {
      this.viewId = viewId;
    }

    final void update(View parent) {
      View view = parent.findViewById(viewId);
      if (view == null) {
        throw new NullPointerException("couldn't find view " + viewId
            + " (" + ShadowApplication.getInstance().getResourceLoader().getNameForId(viewId) + ")");
      }
      doUpdate(view);
    }

    abstract void doUpdate(View view);
  }
}
| lexs/robolectric | robolectric-shadows/shadows-core/src/main/java/org/robolectric/shadows/ShadowRemoteViews.java | Java | mit | 3,505 |
ej.addCulture( "ug", {
name: "ug",
englishName: "Uyghur",
nativeName: "ئۇيغۇرچە",
language: "ug",
isRTL: true,
numberFormat: {
"NaN": "سان ئەمەس",
negativeInfinity: "مەنپىي چەكسىزلىك",
positiveInfinity: "مۇسبەت چەكسىزلىك",
percent: {
pattern: ["-n%","n%"]
},
currency: {
pattern: ["$-n","$n"],
symbol: "¥"
}
},
calendars: {
standard: {
"/": "-",
firstDay: 1,
days: {
names: ["يەكشەنبە","دۈشەنبە","سەيشەنبە","چارشەنبە","پەيشەنبە","جۈمە","شەنبە"],
namesAbbr: ["يە","دۈ","سە","چا","پە","جۈ","شە"],
namesShort: ["ي","د","س","چ","پ","ج","ش"]
},
months: {
names: ["يانۋار","فېۋرال","مارت","ئاپرېل","ماي","ئىيۇن","ئىيۇل","ئاۋغۇست","سېنتەبىر","ئۆكتەبىر","نويابىر","دېكابىر",""],
namesAbbr: ["1-ئاي","2-ئاي","3-ئاي","4-ئاي","5-ئاي","6-ئاي","7-ئاي","8-ئاي","9-ئاي","10-ئاي","11-ئاي","12-ئاي",""]
},
AM: ["چۈشتىن بۇرۇن","چۈشتىن بۇرۇن","چۈشتىن بۇرۇن"],
PM: ["چۈشتىن كېيىن","چۈشتىن كېيىن","چۈشتىن كېيىن"],
patterns: {
d: "yyyy-M-d",
D: "yyyy-'يىل' d-MMMM",
t: "H:mm",
T: "H:mm:ss",
f: "yyyy-'يىل' d-MMMM H:mm",
F: "yyyy-'يىل' d-MMMM H:mm:ss",
M: "d-MMMM",
Y: "yyyy-'يىلى' MMMM"
}
},
Hijri: {
name: "Hijri",
"/": "-",
firstDay: 1,
days: {
names: ["يەكشەنبە","دۈشەنبە","سەيشەنبە","چارشەنبە","پەيشەنبە","جۈمە","شەنبە"],
namesAbbr: ["يە","دۈ","سە","چا","پە","جۈ","شە"],
namesShort: ["ي","د","س","چ","پ","ج","ش"]
},
months: {
names: ["مۇھەررەم","سەپەر","رەبىئۇلئەۋۋەل","رەبىئۇلئاخىر","جەمادىيەلئەۋۋەل","جەمادىيەلئاخىر","رەجەب","شەئبان","رامىزان","شەۋۋال","زۇلقەئدە","زۇلھەججە",""],
namesAbbr: ["مۇھەررەم","سەپەر","رەبىئۇلئەۋۋەل","رەبىئۇلئاخىر","جەمادىيەلئەۋۋەل","جەمادىيەلئاخىر","رەجەب","شەئبان","رامىزان","شەۋۋال","زۇلقەئدە","زۇلھەججە",""]
},
AM: ["چۈشتىن بۇرۇن","چۈشتىن بۇرۇن","چۈشتىن بۇرۇن"],
PM: ["چۈشتىن كېيىن","چۈشتىن كېيىن","چۈشتىن كېيىن"],
twoDigitYearMax: 1451,
patterns: {
d: "yyyy-M-d",
D: "yyyy-'يىل' d-MMMM",
t: "H:mm",
T: "H:mm:ss",
f: "yyyy-'يىل' d-MMMM H:mm",
F: "yyyy-'يىل' d-MMMM H:mm:ss",
M: "d-MMMM",
Y: "yyyy-'يىلى' MMMM"
},
convert: {
    // Adapted to Script from System.Globalization.HijriCalendar
    // 'ticks' here are JavaScript milliseconds (86400000 per day, see below).
    ticks1970: 62135596800000,
    // number of days leading up to each month
    monthDays: [0, 30, 59, 89, 118, 148, 177, 207, 236, 266, 295, 325, 355],
    minDate: -42521673600000,
    maxDate: 253402300799999,
    // The number of days to add or subtract from the calendar to accommodate the variances
    // in the start and the end of Ramadan and to accommodate the date difference between
    // countries/regions. May be dynamically adjusted based on user preference, but should
    // remain in the range of -2 to 2, inclusive.
    hijriAdjustment: 0,
    // Converts a Hijri (year, 0-based month, day) triple to a JS Date.
    toGregorian: function(hyear, hmonth, hday) {
        var daysSinceJan0101 = this.daysToYear(hyear) + this.monthDays[hmonth] + hday - 1 - this.hijriAdjustment;
        // 86400000 = ticks per day
        var gdate = new Date(daysSinceJan0101 * 86400000 - this.ticks1970);
        // adjust for timezone, because we are interested in the gregorian date for the same timezone
        // but ticks in javascript is always from GMT, unlike the server were ticks counts from the base
        // date in the current timezone.
        gdate.setMinutes(gdate.getMinutes() + gdate.getTimezoneOffset());
        return gdate;
    },
    // Converts a JS Date to [hyear, hmonth (0-based), hday]; returns null
    // when the date falls outside [minDate, maxDate].
    fromGregorian: function(gdate) {
        if ((gdate < this.minDate) || (gdate > this.maxDate)) return null;
        var ticks = this.ticks1970 + (gdate-0) - gdate.getTimezoneOffset() * 60000,
            daysSinceJan0101 = Math.floor(ticks / 86400000) + 1 + this.hijriAdjustment;
        // very particular formula determined by someone smart, adapted from the server-side implementation.
        // it approximates the hijri year.
        var hday, hmonth, hyear = Math.floor(((daysSinceJan0101 - 227013) * 30) / 10631) + 1,
            absDays = this.daysToYear(hyear),
            daysInYear = this.isLeapYear(hyear) ? 355 : 354;
        // hyear is just approximate, it may need adjustment up or down by 1.
        if (daysSinceJan0101 < absDays) {
            hyear--;
            absDays -= daysInYear;
        }
        else if (daysSinceJan0101 === absDays) {
            hyear--;
            absDays = this.daysToYear(hyear);
        }
        else {
            if (daysSinceJan0101 > (absDays + daysInYear)) {
                absDays += daysInYear;
                hyear++;
            }
        }
        // determine month by looking at how many days into the hyear we are
        // monthDays contains the number of days up to each month.
        hmonth = 0;
        var daysIntoYear = daysSinceJan0101 - absDays;
        while (hmonth <= 11 && daysIntoYear > this.monthDays[hmonth]) {
            hmonth++;
        }
        hmonth--;
        hday = daysIntoYear - this.monthDays[hmonth];
        return [hyear, hmonth, hday];
    },
    daysToYear: function(year) {
        // calculates how many days since Jan 1, 0001
        // (Hijri years repeat on a 30-year leap cycle of 10631 days.)
        var yearsToYear30 = Math.floor((year - 1) / 30) * 30,
            yearsInto30 = year - yearsToYear30 - 1,
            days = Math.floor((yearsToYear30 * 10631) / 30) + 227013;
        while (yearsInto30 > 0) {
            days += (this.isLeapYear(yearsInto30) ? 355 : 354);
            yearsInto30--;
        }
        return days;
    },
    // Leap years are the 11 years of each 30-year cycle selected by this rule.
    isLeapYear: function(year) {
        return ((((year * 11) + 14) % 30) < 11);
    }
}
}
}
});
| Asaf-S/jsdelivr | files/syncfusion-ej-global/15.1.41/i18n/ej.culture.ug.js | JavaScript | mit | 7,278 |
module Docs
  class Less
    # Post-processes scraped lesscss.org pages into the lean markup devdocs
    # serves: fixes anchors, strips chrome, unwraps layout containers and
    # normalizes headings.
    class CleanHtmlFilter < Filter
      def call
        # Hoist each anchor target's id onto its parent so in-page links
        # keep working after the helper node is removed.
        css('.anchor-target').each do |node|
          node.parent['id'] = node['id']
          node.remove
        end

        css('.source-link', 'a[id$="md"]', 'br').remove

        # Drop the whole section that only holds the functions overview.
        css('#functions-overview').each do |node|
          node.ancestors('.docs-section').remove
        end

        # Unwrap layout containers: move children in front of the wrapper,
        # then delete the now-empty wrapper itself.
        css('.docs-content', '.docs-section', '.section-content', 'blockquote').each do |node|
          node.before(node.children).remove
        end

        # Page headers keep only their first child element.
        css('.page-header').each do |node|
          node.before(node.first_element_child).remove
        end

        # Demote headings one level, clamping to h3 (h1->h2, h2->h3, h3/h4->h3).
        css('h1, h2, h3, h4').each do |node|
          node.name = node.name.sub(/\d/) { |i| [i.to_i + 1, 3].min }
        end

        # Not a no-op: assigning Node#content replaces the <pre>'s children
        # with a single text node, stripping syntax-highlighting markup.
        css('pre').each do |node|
          node.content = node.content
        end

        doc
      end
    end
  end
end
| zerin108/devdocs | lib/docs/filters/less/clean_html.rb | Ruby | mpl-2.0 | 881 |
// Code generated by private/model/cli/gen-api/main.go. DO NOT EDIT.
package elasticsearchservice
import (
"fmt"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awsutil"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/private/protocol"
"github.com/aws/aws-sdk-go/private/protocol/restjson"
)
const opAddTags = "AddTags"

// AddTagsRequest generates a "aws/request.Request" representing the
// client's request for the AddTags operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See AddTags for more information on using the AddTags
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
//    // Example sending a request using the AddTagsRequest method.
//    req, resp := client.AddTagsRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/AddTags
func (c *ElasticsearchService) AddTagsRequest(input *AddTagsInput) (req *request.Request, output *AddTagsOutput) {
	op := &request.Operation{
		Name:       opAddTags,
		HTTPMethod: "POST",
		HTTPPath:   "/2015-01-01/tags",
	}

	if input == nil {
		input = &AddTagsInput{}
	}

	output = &AddTagsOutput{}
	req = c.newRequest(op, input, output)
	// AddTags has no modeled response payload, so discard the body instead
	// of unmarshaling it.
	req.Handlers.Unmarshal.Remove(restjson.UnmarshalHandler)
	req.Handlers.Unmarshal.PushBackNamed(protocol.UnmarshalDiscardBodyHandler)
	return
}
// AddTags API operation for Amazon Elasticsearch Service.
//
// Attaches tags to an existing Elasticsearch domain. Tags are a set of case-sensitive
// key value pairs. An Elasticsearch domain may have up to 10 tags. See Tagging
// Amazon Elasticsearch Service Domains for more information. (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-managedomains.html#es-managedomains-awsresorcetagging)
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation AddTags for usage and error information.
//
// Returned Error Codes:
//   * ErrCodeBaseException "BaseException"
//   An error occurred while processing the request.
//
//   * ErrCodeLimitExceededException "LimitExceededException"
//   An exception for trying to create more than allowed resources or sub-resources.
//   Gives http status code of 409.
//
//   * ErrCodeValidationException "ValidationException"
//   An exception for missing / invalid input fields. Gives http status code of
//   400.
//
//   * ErrCodeInternalException "InternalException"
//   The request processing has failed because of an unknown error, exception
//   or failure (the failure is internal to the service) . Gives http status code
//   of 500.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/AddTags
func (c *ElasticsearchService) AddTags(input *AddTagsInput) (*AddTagsOutput, error) {
	// Equivalent to AddTagsRequest followed by a synchronous Send.
	req, out := c.AddTagsRequest(input)
	return out, req.Send()
}
// AddTagsWithContext is the same as AddTags with the addition of
// the ability to pass a context and additional request options.
//
// See AddTags for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) AddTagsWithContext(ctx aws.Context, input *AddTagsInput, opts ...request.Option) (*AddTagsOutput, error) {
	req, out := c.AddTagsRequest(input)
	// SetContext panics on a nil ctx (see the contract documented above).
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	return out, req.Send()
}
const opCreateElasticsearchDomain = "CreateElasticsearchDomain"
// CreateElasticsearchDomainRequest generates a "aws/request.Request" representing the
// client's request for the CreateElasticsearchDomain operation. The "output" return
// value will be populated with the request's response once the request complets
// successfuly.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See CreateElasticsearchDomain for more information on using the CreateElasticsearchDomain
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
// // Example sending a request using the CreateElasticsearchDomainRequest method.
// req, resp := client.CreateElasticsearchDomainRequest(params)
//
// err := req.Send()
// if err == nil { // resp is now filled
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/CreateElasticsearchDomain
func (c *ElasticsearchService) CreateElasticsearchDomainRequest(input *CreateElasticsearchDomainInput) (req *request.Request, output *CreateElasticsearchDomainOutput) {
	// A nil input is replaced with an empty value so the request can still
	// be constructed.
	if input == nil {
		input = &CreateElasticsearchDomainInput{}
	}
	output = &CreateElasticsearchDomainOutput{}
	req = c.newRequest(&request.Operation{
		Name:       opCreateElasticsearchDomain,
		HTTPMethod: "POST",
		HTTPPath:   "/2015-01-01/es/domain",
	}, input, output)
	return req, output
}
// CreateElasticsearchDomain API operation for Amazon Elasticsearch Service.
//
// Creates a new Elasticsearch domain. For more information, see Creating Elasticsearch
// Domains (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomains)
// in the Amazon Elasticsearch Service Developer Guide.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation CreateElasticsearchDomain for usage and error information.
//
// Returned Error Codes:
// * ErrCodeBaseException "BaseException"
// An error occurred while processing the request.
//
// * ErrCodeDisabledOperationException "DisabledOperationException"
// An error occurred because the client wanted to access a not supported operation.
// Gives http status code of 409.
//
// * ErrCodeInternalException "InternalException"
// The request processing has failed because of an unknown error, exception
// or failure (the failure is internal to the service) . Gives http status code
// of 500.
//
// * ErrCodeInvalidTypeException "InvalidTypeException"
// An exception for trying to create or access sub-resource that is either invalid
// or not supported. Gives http status code of 409.
//
// * ErrCodeLimitExceededException "LimitExceededException"
// An exception for trying to create more than allowed resources or sub-resources.
// Gives http status code of 409.
//
// * ErrCodeResourceAlreadyExistsException "ResourceAlreadyExistsException"
// An exception for creating a resource that already exists. Gives http status
// code of 400.
//
// * ErrCodeValidationException "ValidationException"
// An exception for missing / invalid input fields. Gives http status code of
// 400.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/CreateElasticsearchDomain
func (c *ElasticsearchService) CreateElasticsearchDomain(input *CreateElasticsearchDomainInput) (*CreateElasticsearchDomainOutput, error) {
	// Construct the request, then execute it synchronously.
	r, resp := c.CreateElasticsearchDomainRequest(input)
	err := r.Send()
	return resp, err
}
// CreateElasticsearchDomainWithContext is the same as CreateElasticsearchDomain with the addition of
// the ability to pass a context and additional request options.
//
// See CreateElasticsearchDomain for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) CreateElasticsearchDomainWithContext(ctx aws.Context, input *CreateElasticsearchDomainInput, opts ...request.Option) (*CreateElasticsearchDomainOutput, error) {
	// Attach the caller's context for cancellation and apply any extra
	// request options before sending.
	r, resp := c.CreateElasticsearchDomainRequest(input)
	r.SetContext(ctx)
	r.ApplyOptions(opts...)
	err := r.Send()
	return resp, err
}
const opDeleteElasticsearchDomain = "DeleteElasticsearchDomain"
// DeleteElasticsearchDomainRequest generates a "aws/request.Request" representing the
// client's request for the DeleteElasticsearchDomain operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See DeleteElasticsearchDomain for more information on using the DeleteElasticsearchDomain
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
// // Example sending a request using the DeleteElasticsearchDomainRequest method.
// req, resp := client.DeleteElasticsearchDomainRequest(params)
//
// err := req.Send()
// if err == nil { // resp is now filled
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DeleteElasticsearchDomain
func (c *ElasticsearchService) DeleteElasticsearchDomainRequest(input *DeleteElasticsearchDomainInput) (req *request.Request, output *DeleteElasticsearchDomainOutput) {
	// A nil input is replaced with an empty value so the request can still
	// be constructed.
	if input == nil {
		input = &DeleteElasticsearchDomainInput{}
	}
	output = &DeleteElasticsearchDomainOutput{}
	req = c.newRequest(&request.Operation{
		Name:       opDeleteElasticsearchDomain,
		HTTPMethod: "DELETE",
		HTTPPath:   "/2015-01-01/es/domain/{DomainName}",
	}, input, output)
	return req, output
}
// DeleteElasticsearchDomain API operation for Amazon Elasticsearch Service.
//
// Permanently deletes the specified Elasticsearch domain and all of its data.
// Once a domain is deleted, it cannot be recovered.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation DeleteElasticsearchDomain for usage and error information.
//
// Returned Error Codes:
// * ErrCodeBaseException "BaseException"
// An error occurred while processing the request.
//
// * ErrCodeInternalException "InternalException"
// The request processing has failed because of an unknown error, exception
// or failure (the failure is internal to the service) . Gives http status code
// of 500.
//
// * ErrCodeResourceNotFoundException "ResourceNotFoundException"
// An exception for accessing or deleting a resource that does not exist. Gives
// http status code of 400.
//
// * ErrCodeValidationException "ValidationException"
// An exception for missing / invalid input fields. Gives http status code of
// 400.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DeleteElasticsearchDomain
func (c *ElasticsearchService) DeleteElasticsearchDomain(input *DeleteElasticsearchDomainInput) (*DeleteElasticsearchDomainOutput, error) {
	// Construct the request, then execute it synchronously.
	r, resp := c.DeleteElasticsearchDomainRequest(input)
	err := r.Send()
	return resp, err
}
// DeleteElasticsearchDomainWithContext is the same as DeleteElasticsearchDomain with the addition of
// the ability to pass a context and additional request options.
//
// See DeleteElasticsearchDomain for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) DeleteElasticsearchDomainWithContext(ctx aws.Context, input *DeleteElasticsearchDomainInput, opts ...request.Option) (*DeleteElasticsearchDomainOutput, error) {
	// Attach the caller's context for cancellation and apply any extra
	// request options before sending.
	r, resp := c.DeleteElasticsearchDomainRequest(input)
	r.SetContext(ctx)
	r.ApplyOptions(opts...)
	err := r.Send()
	return resp, err
}
const opDescribeElasticsearchDomain = "DescribeElasticsearchDomain"
// DescribeElasticsearchDomainRequest generates a "aws/request.Request" representing the
// client's request for the DescribeElasticsearchDomain operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See DescribeElasticsearchDomain for more information on using the DescribeElasticsearchDomain
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
// // Example sending a request using the DescribeElasticsearchDomainRequest method.
// req, resp := client.DescribeElasticsearchDomainRequest(params)
//
// err := req.Send()
// if err == nil { // resp is now filled
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchDomain
func (c *ElasticsearchService) DescribeElasticsearchDomainRequest(input *DescribeElasticsearchDomainInput) (req *request.Request, output *DescribeElasticsearchDomainOutput) {
	// A nil input is replaced with an empty value so the request can still
	// be constructed.
	if input == nil {
		input = &DescribeElasticsearchDomainInput{}
	}
	output = &DescribeElasticsearchDomainOutput{}
	req = c.newRequest(&request.Operation{
		Name:       opDescribeElasticsearchDomain,
		HTTPMethod: "GET",
		HTTPPath:   "/2015-01-01/es/domain/{DomainName}",
	}, input, output)
	return req, output
}
// DescribeElasticsearchDomain API operation for Amazon Elasticsearch Service.
//
// Returns domain configuration information about the specified Elasticsearch
// domain, including the domain ID, domain endpoint, and domain ARN.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation DescribeElasticsearchDomain for usage and error information.
//
// Returned Error Codes:
// * ErrCodeBaseException "BaseException"
// An error occurred while processing the request.
//
// * ErrCodeInternalException "InternalException"
// The request processing has failed because of an unknown error, exception
// or failure (the failure is internal to the service) . Gives http status code
// of 500.
//
// * ErrCodeResourceNotFoundException "ResourceNotFoundException"
// An exception for accessing or deleting a resource that does not exist. Gives
// http status code of 400.
//
// * ErrCodeValidationException "ValidationException"
// An exception for missing / invalid input fields. Gives http status code of
// 400.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchDomain
func (c *ElasticsearchService) DescribeElasticsearchDomain(input *DescribeElasticsearchDomainInput) (*DescribeElasticsearchDomainOutput, error) {
	// Construct the request, then execute it synchronously.
	r, resp := c.DescribeElasticsearchDomainRequest(input)
	err := r.Send()
	return resp, err
}
// DescribeElasticsearchDomainWithContext is the same as DescribeElasticsearchDomain with the addition of
// the ability to pass a context and additional request options.
//
// See DescribeElasticsearchDomain for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) DescribeElasticsearchDomainWithContext(ctx aws.Context, input *DescribeElasticsearchDomainInput, opts ...request.Option) (*DescribeElasticsearchDomainOutput, error) {
	// Attach the caller's context for cancellation and apply any extra
	// request options before sending.
	r, resp := c.DescribeElasticsearchDomainRequest(input)
	r.SetContext(ctx)
	r.ApplyOptions(opts...)
	err := r.Send()
	return resp, err
}
const opDescribeElasticsearchDomainConfig = "DescribeElasticsearchDomainConfig"
// DescribeElasticsearchDomainConfigRequest generates a "aws/request.Request" representing the
// client's request for the DescribeElasticsearchDomainConfig operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See DescribeElasticsearchDomainConfig for more information on using the DescribeElasticsearchDomainConfig
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
// // Example sending a request using the DescribeElasticsearchDomainConfigRequest method.
// req, resp := client.DescribeElasticsearchDomainConfigRequest(params)
//
// err := req.Send()
// if err == nil { // resp is now filled
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchDomainConfig
func (c *ElasticsearchService) DescribeElasticsearchDomainConfigRequest(input *DescribeElasticsearchDomainConfigInput) (req *request.Request, output *DescribeElasticsearchDomainConfigOutput) {
	// A nil input is replaced with an empty value so the request can still
	// be constructed.
	if input == nil {
		input = &DescribeElasticsearchDomainConfigInput{}
	}
	output = &DescribeElasticsearchDomainConfigOutput{}
	req = c.newRequest(&request.Operation{
		Name:       opDescribeElasticsearchDomainConfig,
		HTTPMethod: "GET",
		HTTPPath:   "/2015-01-01/es/domain/{DomainName}/config",
	}, input, output)
	return req, output
}
// DescribeElasticsearchDomainConfig API operation for Amazon Elasticsearch Service.
//
// Provides cluster configuration information about the specified Elasticsearch
// domain, such as the state, creation date, update version, and update date
// for cluster options.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation DescribeElasticsearchDomainConfig for usage and error information.
//
// Returned Error Codes:
// * ErrCodeBaseException "BaseException"
// An error occurred while processing the request.
//
// * ErrCodeInternalException "InternalException"
// The request processing has failed because of an unknown error, exception
// or failure (the failure is internal to the service) . Gives http status code
// of 500.
//
// * ErrCodeResourceNotFoundException "ResourceNotFoundException"
// An exception for accessing or deleting a resource that does not exist. Gives
// http status code of 400.
//
// * ErrCodeValidationException "ValidationException"
// An exception for missing / invalid input fields. Gives http status code of
// 400.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchDomainConfig
func (c *ElasticsearchService) DescribeElasticsearchDomainConfig(input *DescribeElasticsearchDomainConfigInput) (*DescribeElasticsearchDomainConfigOutput, error) {
	// Construct the request, then execute it synchronously.
	r, resp := c.DescribeElasticsearchDomainConfigRequest(input)
	err := r.Send()
	return resp, err
}
// DescribeElasticsearchDomainConfigWithContext is the same as DescribeElasticsearchDomainConfig with the addition of
// the ability to pass a context and additional request options.
//
// See DescribeElasticsearchDomainConfig for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) DescribeElasticsearchDomainConfigWithContext(ctx aws.Context, input *DescribeElasticsearchDomainConfigInput, opts ...request.Option) (*DescribeElasticsearchDomainConfigOutput, error) {
	// Attach the caller's context for cancellation and apply any extra
	// request options before sending.
	r, resp := c.DescribeElasticsearchDomainConfigRequest(input)
	r.SetContext(ctx)
	r.ApplyOptions(opts...)
	err := r.Send()
	return resp, err
}
const opDescribeElasticsearchDomains = "DescribeElasticsearchDomains"
// DescribeElasticsearchDomainsRequest generates a "aws/request.Request" representing the
// client's request for the DescribeElasticsearchDomains operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See DescribeElasticsearchDomains for more information on using the DescribeElasticsearchDomains
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
// // Example sending a request using the DescribeElasticsearchDomainsRequest method.
// req, resp := client.DescribeElasticsearchDomainsRequest(params)
//
// err := req.Send()
// if err == nil { // resp is now filled
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchDomains
func (c *ElasticsearchService) DescribeElasticsearchDomainsRequest(input *DescribeElasticsearchDomainsInput) (req *request.Request, output *DescribeElasticsearchDomainsOutput) {
	// A nil input is replaced with an empty value so the request can still
	// be constructed.
	if input == nil {
		input = &DescribeElasticsearchDomainsInput{}
	}
	output = &DescribeElasticsearchDomainsOutput{}
	req = c.newRequest(&request.Operation{
		Name:       opDescribeElasticsearchDomains,
		HTTPMethod: "POST",
		HTTPPath:   "/2015-01-01/es/domain-info",
	}, input, output)
	return req, output
}
// DescribeElasticsearchDomains API operation for Amazon Elasticsearch Service.
//
// Returns domain configuration information about the specified Elasticsearch
// domains, including the domain ID, domain endpoint, and domain ARN.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation DescribeElasticsearchDomains for usage and error information.
//
// Returned Error Codes:
// * ErrCodeBaseException "BaseException"
// An error occurred while processing the request.
//
// * ErrCodeInternalException "InternalException"
// The request processing has failed because of an unknown error, exception
// or failure (the failure is internal to the service) . Gives http status code
// of 500.
//
// * ErrCodeValidationException "ValidationException"
// An exception for missing / invalid input fields. Gives http status code of
// 400.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchDomains
func (c *ElasticsearchService) DescribeElasticsearchDomains(input *DescribeElasticsearchDomainsInput) (*DescribeElasticsearchDomainsOutput, error) {
	// Construct the request, then execute it synchronously.
	r, resp := c.DescribeElasticsearchDomainsRequest(input)
	err := r.Send()
	return resp, err
}
// DescribeElasticsearchDomainsWithContext is the same as DescribeElasticsearchDomains with the addition of
// the ability to pass a context and additional request options.
//
// See DescribeElasticsearchDomains for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) DescribeElasticsearchDomainsWithContext(ctx aws.Context, input *DescribeElasticsearchDomainsInput, opts ...request.Option) (*DescribeElasticsearchDomainsOutput, error) {
	// Attach the caller's context for cancellation and apply any extra
	// request options before sending.
	r, resp := c.DescribeElasticsearchDomainsRequest(input)
	r.SetContext(ctx)
	r.ApplyOptions(opts...)
	err := r.Send()
	return resp, err
}
const opDescribeElasticsearchInstanceTypeLimits = "DescribeElasticsearchInstanceTypeLimits"
// DescribeElasticsearchInstanceTypeLimitsRequest generates a "aws/request.Request" representing the
// client's request for the DescribeElasticsearchInstanceTypeLimits operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See DescribeElasticsearchInstanceTypeLimits for more information on using the DescribeElasticsearchInstanceTypeLimits
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
// // Example sending a request using the DescribeElasticsearchInstanceTypeLimitsRequest method.
// req, resp := client.DescribeElasticsearchInstanceTypeLimitsRequest(params)
//
// err := req.Send()
// if err == nil { // resp is now filled
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchInstanceTypeLimits
func (c *ElasticsearchService) DescribeElasticsearchInstanceTypeLimitsRequest(input *DescribeElasticsearchInstanceTypeLimitsInput) (req *request.Request, output *DescribeElasticsearchInstanceTypeLimitsOutput) {
	// A nil input is replaced with an empty value so the request can still
	// be constructed.
	if input == nil {
		input = &DescribeElasticsearchInstanceTypeLimitsInput{}
	}
	output = &DescribeElasticsearchInstanceTypeLimitsOutput{}
	req = c.newRequest(&request.Operation{
		Name:       opDescribeElasticsearchInstanceTypeLimits,
		HTTPMethod: "GET",
		HTTPPath:   "/2015-01-01/es/instanceTypeLimits/{ElasticsearchVersion}/{InstanceType}",
	}, input, output)
	return req, output
}
// DescribeElasticsearchInstanceTypeLimits API operation for Amazon Elasticsearch Service.
//
// Describe Elasticsearch Limits for a given InstanceType and ElasticsearchVersion.
// When modifying existing Domain, specify the DomainName to know what Limits
// are supported for modifying.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation DescribeElasticsearchInstanceTypeLimits for usage and error information.
//
// Returned Error Codes:
// * ErrCodeBaseException "BaseException"
// An error occurred while processing the request.
//
// * ErrCodeInternalException "InternalException"
// The request processing has failed because of an unknown error, exception
// or failure (the failure is internal to the service) . Gives http status code
// of 500.
//
// * ErrCodeInvalidTypeException "InvalidTypeException"
// An exception for trying to create or access sub-resource that is either invalid
// or not supported. Gives http status code of 409.
//
// * ErrCodeLimitExceededException "LimitExceededException"
// An exception for trying to create more than allowed resources or sub-resources.
// Gives http status code of 409.
//
// * ErrCodeResourceNotFoundException "ResourceNotFoundException"
// An exception for accessing or deleting a resource that does not exist. Gives
// http status code of 400.
//
// * ErrCodeValidationException "ValidationException"
// An exception for missing / invalid input fields. Gives http status code of
// 400.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchInstanceTypeLimits
func (c *ElasticsearchService) DescribeElasticsearchInstanceTypeLimits(input *DescribeElasticsearchInstanceTypeLimitsInput) (*DescribeElasticsearchInstanceTypeLimitsOutput, error) {
	// Construct the request, then execute it synchronously.
	r, resp := c.DescribeElasticsearchInstanceTypeLimitsRequest(input)
	err := r.Send()
	return resp, err
}
// DescribeElasticsearchInstanceTypeLimitsWithContext is the same as DescribeElasticsearchInstanceTypeLimits with the addition of
// the ability to pass a context and additional request options.
//
// See DescribeElasticsearchInstanceTypeLimits for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) DescribeElasticsearchInstanceTypeLimitsWithContext(ctx aws.Context, input *DescribeElasticsearchInstanceTypeLimitsInput, opts ...request.Option) (*DescribeElasticsearchInstanceTypeLimitsOutput, error) {
	// Attach the caller's context for cancellation and apply any extra
	// request options before sending.
	r, resp := c.DescribeElasticsearchInstanceTypeLimitsRequest(input)
	r.SetContext(ctx)
	r.ApplyOptions(opts...)
	err := r.Send()
	return resp, err
}
const opListDomainNames = "ListDomainNames"
// ListDomainNamesRequest generates a "aws/request.Request" representing the
// client's request for the ListDomainNames operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See ListDomainNames for more information on using the ListDomainNames
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
// // Example sending a request using the ListDomainNamesRequest method.
// req, resp := client.ListDomainNamesRequest(params)
//
// err := req.Send()
// if err == nil { // resp is now filled
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListDomainNames
func (c *ElasticsearchService) ListDomainNamesRequest(input *ListDomainNamesInput) (req *request.Request, output *ListDomainNamesOutput) {
	// A nil input is replaced with an empty value so the request can still
	// be constructed.
	if input == nil {
		input = &ListDomainNamesInput{}
	}
	output = &ListDomainNamesOutput{}
	req = c.newRequest(&request.Operation{
		Name:       opListDomainNames,
		HTTPMethod: "GET",
		HTTPPath:   "/2015-01-01/domain",
	}, input, output)
	return req, output
}
// ListDomainNames API operation for Amazon Elasticsearch Service.
//
// Returns the name of all Elasticsearch domains owned by the current user's
// account.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation ListDomainNames for usage and error information.
//
// Returned Error Codes:
// * ErrCodeBaseException "BaseException"
// An error occurred while processing the request.
//
// * ErrCodeValidationException "ValidationException"
// An exception for missing / invalid input fields. Gives http status code of
// 400.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListDomainNames
func (c *ElasticsearchService) ListDomainNames(input *ListDomainNamesInput) (*ListDomainNamesOutput, error) {
	// Construct the request, then execute it synchronously.
	r, resp := c.ListDomainNamesRequest(input)
	err := r.Send()
	return resp, err
}
// ListDomainNamesWithContext is the same as ListDomainNames with the addition of
// the ability to pass a context and additional request options.
//
// See ListDomainNames for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) ListDomainNamesWithContext(ctx aws.Context, input *ListDomainNamesInput, opts ...request.Option) (*ListDomainNamesOutput, error) {
	// Attach the caller's context for cancellation and apply any extra
	// request options before sending.
	r, resp := c.ListDomainNamesRequest(input)
	r.SetContext(ctx)
	r.ApplyOptions(opts...)
	err := r.Send()
	return resp, err
}
const opListElasticsearchInstanceTypes = "ListElasticsearchInstanceTypes"
// ListElasticsearchInstanceTypesRequest generates a "aws/request.Request" representing the
// client's request for the ListElasticsearchInstanceTypes operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See ListElasticsearchInstanceTypes for more information on using the ListElasticsearchInstanceTypes
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
// // Example sending a request using the ListElasticsearchInstanceTypesRequest method.
// req, resp := client.ListElasticsearchInstanceTypesRequest(params)
//
// err := req.Send()
// if err == nil { // resp is now filled
// fmt.Println(resp)
// }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListElasticsearchInstanceTypes
func (c *ElasticsearchService) ListElasticsearchInstanceTypesRequest(input *ListElasticsearchInstanceTypesInput) (req *request.Request, output *ListElasticsearchInstanceTypesOutput) {
	// A nil input is replaced with an empty value so the request can still
	// be constructed.
	if input == nil {
		input = &ListElasticsearchInstanceTypesInput{}
	}
	output = &ListElasticsearchInstanceTypesOutput{}
	// This operation is paginated: NextToken threads the pages together and
	// MaxResults bounds the page size.
	op := &request.Operation{
		Name:       opListElasticsearchInstanceTypes,
		HTTPMethod: "GET",
		HTTPPath:   "/2015-01-01/es/instanceTypes/{ElasticsearchVersion}",
		Paginator: &request.Paginator{
			InputTokens:     []string{"NextToken"},
			OutputTokens:    []string{"NextToken"},
			LimitToken:      "MaxResults",
			TruncationToken: "",
		},
	}
	req = c.newRequest(op, input, output)
	return req, output
}
// ListElasticsearchInstanceTypes API operation for Amazon Elasticsearch Service.
//
// List all Elasticsearch instance types that are supported for given ElasticsearchVersion
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation ListElasticsearchInstanceTypes for usage and error information.
//
// Returned Error Codes:
// * ErrCodeBaseException "BaseException"
// An error occurred while processing the request.
//
// * ErrCodeInternalException "InternalException"
// The request processing has failed because of an unknown error, exception
// or failure (the failure is internal to the service) . Gives http status code
// of 500.
//
// * ErrCodeResourceNotFoundException "ResourceNotFoundException"
// An exception for accessing or deleting a resource that does not exist. Gives
// http status code of 400.
//
// * ErrCodeValidationException "ValidationException"
// An exception for missing / invalid input fields. Gives http status code of
// 400.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListElasticsearchInstanceTypes
func (c *ElasticsearchService) ListElasticsearchInstanceTypes(input *ListElasticsearchInstanceTypesInput) (*ListElasticsearchInstanceTypesOutput, error) {
	// Construct the request, then execute it synchronously.
	r, resp := c.ListElasticsearchInstanceTypesRequest(input)
	err := r.Send()
	return resp, err
}
// ListElasticsearchInstanceTypesWithContext is the same as ListElasticsearchInstanceTypes with the addition of
// the ability to pass a context and additional request options.
//
// See ListElasticsearchInstanceTypes for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) ListElasticsearchInstanceTypesWithContext(ctx aws.Context, input *ListElasticsearchInstanceTypesInput, opts ...request.Option) (*ListElasticsearchInstanceTypesOutput, error) {
	// Attach the caller's context for cancellation and apply any extra
	// request options before sending.
	r, resp := c.ListElasticsearchInstanceTypesRequest(input)
	r.SetContext(ctx)
	r.ApplyOptions(opts...)
	err := r.Send()
	return resp, err
}
// ListElasticsearchInstanceTypesPages iterates over the pages of a ListElasticsearchInstanceTypes operation,
// calling the "fn" function with the response data for each page. To stop
// iterating, return false from the fn function.
//
// See ListElasticsearchInstanceTypes method for more information on how to use this operation.
//
// Note: This operation can generate multiple requests to a service.
//
// // Example iterating over at most 3 pages of a ListElasticsearchInstanceTypes operation.
// pageNum := 0
// err := client.ListElasticsearchInstanceTypesPages(params,
// func(page *ListElasticsearchInstanceTypesOutput, lastPage bool) bool {
// pageNum++
// fmt.Println(page)
// return pageNum <= 3
// })
//
func (c *ElasticsearchService) ListElasticsearchInstanceTypesPages(input *ListElasticsearchInstanceTypesInput, fn func(*ListElasticsearchInstanceTypesOutput, bool) bool) error {
	// Delegates to the context-aware variant with a background
	// (never-canceled) context.
	return c.ListElasticsearchInstanceTypesPagesWithContext(aws.BackgroundContext(), input, fn)
}
// ListElasticsearchInstanceTypesPagesWithContext same as ListElasticsearchInstanceTypesPages except
// it takes a Context and allows setting request options on the pages.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) ListElasticsearchInstanceTypesPagesWithContext(ctx aws.Context, input *ListElasticsearchInstanceTypesInput, fn func(*ListElasticsearchInstanceTypesOutput, bool) bool, opts ...request.Option) error {
	p := request.Pagination{
		// NewRequest builds a fresh request for each page.
		NewRequest: func() (*request.Request, error) {
			var inCpy *ListElasticsearchInstanceTypesInput
			if input != nil {
				// Shallow-copy the input so pagination never mutates
				// the caller's struct while threading NextToken through.
				tmp := *input
				inCpy = &tmp
			}
			req, _ := c.ListElasticsearchInstanceTypesRequest(inCpy)
			req.SetContext(ctx)
			req.ApplyOptions(opts...)
			return req, nil
		},
	}

	// Break out of the loop as soon as fn returns false. The previous
	// form, `for p.Next() && cont`, evaluated p.Next() — which issues
	// another HTTP request — before checking cont, sending one extra,
	// discarded page request after the caller asked to stop.
	for p.Next() {
		if !fn(p.Page().(*ListElasticsearchInstanceTypesOutput), !p.HasNextPage()) {
			break
		}
	}

	return p.Err()
}
const opListElasticsearchVersions = "ListElasticsearchVersions"

// ListElasticsearchVersionsRequest generates a "aws/request.Request" representing the
// client's request for the ListElasticsearchVersions operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See ListElasticsearchVersions for more information on using the ListElasticsearchVersions
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
//    // Example sending a request using the ListElasticsearchVersionsRequest method.
//    req, resp := client.ListElasticsearchVersionsRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListElasticsearchVersions
func (c *ElasticsearchService) ListElasticsearchVersionsRequest(input *ListElasticsearchVersionsInput) (req *request.Request, output *ListElasticsearchVersionsOutput) {
	op := &request.Operation{
		Name:       opListElasticsearchVersions,
		HTTPMethod: "GET",
		HTTPPath:   "/2015-01-01/es/versions",
		// Pagination metadata: NextToken threads through successive pages;
		// MaxResults caps the page size.
		Paginator: &request.Paginator{
			InputTokens:     []string{"NextToken"},
			OutputTokens:    []string{"NextToken"},
			LimitToken:      "MaxResults",
			TruncationToken: "",
		},
	}

	if input == nil {
		input = &ListElasticsearchVersionsInput{}
	}

	output = &ListElasticsearchVersionsOutput{}
	req = c.newRequest(op, input, output)
	return
}
// ListElasticsearchVersions API operation for Amazon Elasticsearch Service.
//
// List all supported Elasticsearch versions
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation ListElasticsearchVersions for usage and error information.
//
// Returned Error Codes:
// * ErrCodeBaseException "BaseException"
// An error occurred while processing the request.
//
// * ErrCodeInternalException "InternalException"
// The request processing has failed because of an unknown error, exception
// or failure (the failure is internal to the service) . Gives http status code
// of 500.
//
// * ErrCodeResourceNotFoundException "ResourceNotFoundException"
// An exception for accessing or deleting a resource that does not exist. Gives
// http status code of 400.
//
// * ErrCodeValidationException "ValidationException"
// An exception for missing / invalid input fields. Gives http status code of
// 400.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListElasticsearchVersions
func (c *ElasticsearchService) ListElasticsearchVersions(input *ListElasticsearchVersionsInput) (*ListElasticsearchVersionsOutput, error) {
	// Build the request, then execute it synchronously.
	req, out := c.ListElasticsearchVersionsRequest(input)
	err := req.Send()
	return out, err
}
// ListElasticsearchVersionsWithContext is the same as ListElasticsearchVersions with the addition of
// the ability to pass a context and additional request options.
//
// See ListElasticsearchVersions for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) ListElasticsearchVersionsWithContext(ctx aws.Context, input *ListElasticsearchVersionsInput, opts ...request.Option) (*ListElasticsearchVersionsOutput, error) {
	// Same as ListElasticsearchVersions, but honors ctx for cancellation
	// and applies any per-request options before sending.
	req, out := c.ListElasticsearchVersionsRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	err := req.Send()
	return out, err
}
// ListElasticsearchVersionsPages iterates over the pages of a ListElasticsearchVersions operation,
// calling the "fn" function with the response data for each page. To stop
// iterating, return false from the fn function.
//
// See ListElasticsearchVersions method for more information on how to use this operation.
//
// Note: This operation can generate multiple requests to a service.
//
// // Example iterating over at most 3 pages of a ListElasticsearchVersions operation.
// pageNum := 0
// err := client.ListElasticsearchVersionsPages(params,
// func(page *ListElasticsearchVersionsOutput, lastPage bool) bool {
// pageNum++
// fmt.Println(page)
// return pageNum <= 3
// })
//
func (c *ElasticsearchService) ListElasticsearchVersionsPages(input *ListElasticsearchVersionsInput, fn func(*ListElasticsearchVersionsOutput, bool) bool) error {
	// Delegates to the context-aware variant with a background
	// (never-canceled) context.
	return c.ListElasticsearchVersionsPagesWithContext(aws.BackgroundContext(), input, fn)
}
// ListElasticsearchVersionsPagesWithContext same as ListElasticsearchVersionsPages except
// it takes a Context and allows setting request options on the pages.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) ListElasticsearchVersionsPagesWithContext(ctx aws.Context, input *ListElasticsearchVersionsInput, fn func(*ListElasticsearchVersionsOutput, bool) bool, opts ...request.Option) error {
	p := request.Pagination{
		// NewRequest builds a fresh request for each page.
		NewRequest: func() (*request.Request, error) {
			var inCpy *ListElasticsearchVersionsInput
			if input != nil {
				// Shallow-copy the input so pagination never mutates
				// the caller's struct while threading NextToken through.
				tmp := *input
				inCpy = &tmp
			}
			req, _ := c.ListElasticsearchVersionsRequest(inCpy)
			req.SetContext(ctx)
			req.ApplyOptions(opts...)
			return req, nil
		},
	}

	// Break out of the loop as soon as fn returns false. The previous
	// form, `for p.Next() && cont`, evaluated p.Next() — which issues
	// another HTTP request — before checking cont, sending one extra,
	// discarded page request after the caller asked to stop.
	for p.Next() {
		if !fn(p.Page().(*ListElasticsearchVersionsOutput), !p.HasNextPage()) {
			break
		}
	}

	return p.Err()
}
const opListTags = "ListTags"

// ListTagsRequest generates a "aws/request.Request" representing the
// client's request for the ListTags operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See ListTags for more information on using the ListTags
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
//    // Example sending a request using the ListTagsRequest method.
//    req, resp := client.ListTagsRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListTags
func (c *ElasticsearchService) ListTagsRequest(input *ListTagsInput) (req *request.Request, output *ListTagsOutput) {
	op := &request.Operation{
		Name:       opListTags,
		HTTPMethod: "GET",
		HTTPPath:   "/2015-01-01/tags/",
	}

	if input == nil {
		input = &ListTagsInput{}
	}

	output = &ListTagsOutput{}
	req = c.newRequest(op, input, output)
	return
}
// ListTags API operation for Amazon Elasticsearch Service.
//
// Returns all tags for the given Elasticsearch domain.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation ListTags for usage and error information.
//
// Returned Error Codes:
// * ErrCodeBaseException "BaseException"
// An error occurred while processing the request.
//
// * ErrCodeResourceNotFoundException "ResourceNotFoundException"
// An exception for accessing or deleting a resource that does not exist. Gives
// http status code of 400.
//
// * ErrCodeValidationException "ValidationException"
// An exception for missing / invalid input fields. Gives http status code of
// 400.
//
// * ErrCodeInternalException "InternalException"
// The request processing has failed because of an unknown error, exception
// or failure (the failure is internal to the service) . Gives http status code
// of 500.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListTags
func (c *ElasticsearchService) ListTags(input *ListTagsInput) (*ListTagsOutput, error) {
	// Build the request, then execute it synchronously.
	req, out := c.ListTagsRequest(input)
	err := req.Send()
	return out, err
}
// ListTagsWithContext is the same as ListTags with the addition of
// the ability to pass a context and additional request options.
//
// See ListTags for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) ListTagsWithContext(ctx aws.Context, input *ListTagsInput, opts ...request.Option) (*ListTagsOutput, error) {
	// Same as ListTags, but honors ctx for cancellation and applies any
	// per-request options before sending.
	req, out := c.ListTagsRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	err := req.Send()
	return out, err
}
const opRemoveTags = "RemoveTags"

// RemoveTagsRequest generates a "aws/request.Request" representing the
// client's request for the RemoveTags operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See RemoveTags for more information on using the RemoveTags
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
//    // Example sending a request using the RemoveTagsRequest method.
//    req, resp := client.RemoveTagsRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/RemoveTags
func (c *ElasticsearchService) RemoveTagsRequest(input *RemoveTagsInput) (req *request.Request, output *RemoveTagsOutput) {
	op := &request.Operation{
		Name:       opRemoveTags,
		HTTPMethod: "POST",
		HTTPPath:   "/2015-01-01/tags-removal",
	}

	if input == nil {
		input = &RemoveTagsInput{}
	}

	output = &RemoveTagsOutput{}
	req = c.newRequest(op, input, output)
	// RemoveTags returns no response body, so swap the REST-JSON
	// unmarshaler for one that discards the payload.
	req.Handlers.Unmarshal.Remove(restjson.UnmarshalHandler)
	req.Handlers.Unmarshal.PushBackNamed(protocol.UnmarshalDiscardBodyHandler)
	return
}
// RemoveTags API operation for Amazon Elasticsearch Service.
//
// Removes the specified set of tags from the specified Elasticsearch domain.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation RemoveTags for usage and error information.
//
// Returned Error Codes:
// * ErrCodeBaseException "BaseException"
// An error occurred while processing the request.
//
// * ErrCodeValidationException "ValidationException"
// An exception for missing / invalid input fields. Gives http status code of
// 400.
//
// * ErrCodeInternalException "InternalException"
// The request processing has failed because of an unknown error, exception
// or failure (the failure is internal to the service) . Gives http status code
// of 500.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/RemoveTags
func (c *ElasticsearchService) RemoveTags(input *RemoveTagsInput) (*RemoveTagsOutput, error) {
	// Build the request, then execute it synchronously.
	req, out := c.RemoveTagsRequest(input)
	err := req.Send()
	return out, err
}
// RemoveTagsWithContext is the same as RemoveTags with the addition of
// the ability to pass a context and additional request options.
//
// See RemoveTags for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) RemoveTagsWithContext(ctx aws.Context, input *RemoveTagsInput, opts ...request.Option) (*RemoveTagsOutput, error) {
	// Same as RemoveTags, but honors ctx for cancellation and applies any
	// per-request options before sending.
	req, out := c.RemoveTagsRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	err := req.Send()
	return out, err
}
const opUpdateElasticsearchDomainConfig = "UpdateElasticsearchDomainConfig"

// UpdateElasticsearchDomainConfigRequest generates a "aws/request.Request" representing the
// client's request for the UpdateElasticsearchDomainConfig operation. The "output" return
// value will be populated with the request's response once the request completes
// successfully.
//
// Use "Send" method on the returned Request to send the API call to the service.
// the "output" return value is not valid until after Send returns without error.
//
// See UpdateElasticsearchDomainConfig for more information on using the UpdateElasticsearchDomainConfig
// API call, and error handling.
//
// This method is useful when you want to inject custom logic or configuration
// into the SDK's request lifecycle. Such as custom headers, or retry logic.
//
//
//    // Example sending a request using the UpdateElasticsearchDomainConfigRequest method.
//    req, resp := client.UpdateElasticsearchDomainConfigRequest(params)
//
//    err := req.Send()
//    if err == nil { // resp is now filled
//        fmt.Println(resp)
//    }
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/UpdateElasticsearchDomainConfig
func (c *ElasticsearchService) UpdateElasticsearchDomainConfigRequest(input *UpdateElasticsearchDomainConfigInput) (req *request.Request, output *UpdateElasticsearchDomainConfigOutput) {
	op := &request.Operation{
		Name:       opUpdateElasticsearchDomainConfig,
		HTTPMethod: "POST",
		// {DomainName} is bound from the input's URI-located DomainName field.
		HTTPPath: "/2015-01-01/es/domain/{DomainName}/config",
	}

	if input == nil {
		input = &UpdateElasticsearchDomainConfigInput{}
	}

	output = &UpdateElasticsearchDomainConfigOutput{}
	req = c.newRequest(op, input, output)
	return
}
// UpdateElasticsearchDomainConfig API operation for Amazon Elasticsearch Service.
//
// Modifies the cluster configuration of the specified Elasticsearch domain,
// such as setting the instance type and the number of instances.
//
// Returns awserr.Error for service API and SDK errors. Use runtime type assertions
// with awserr.Error's Code and Message methods to get detailed information about
// the error.
//
// See the AWS API reference guide for Amazon Elasticsearch Service's
// API operation UpdateElasticsearchDomainConfig for usage and error information.
//
// Returned Error Codes:
//   * ErrCodeBaseException "BaseException"
//   An error occurred while processing the request.
//
//   * ErrCodeInternalException "InternalException"
//   The request processing has failed because of an unknown error, exception
//   or failure (the failure is internal to the service) . Gives http status code
//   of 500.
//
//   * ErrCodeInvalidTypeException "InvalidTypeException"
//   An exception for trying to create or access sub-resource that is either invalid
//   or not supported. Gives http status code of 409.
//
//   * ErrCodeLimitExceededException "LimitExceededException"
//   An exception for trying to create more than allowed resources or sub-resources.
//   Gives http status code of 409.
//
//   * ErrCodeResourceNotFoundException "ResourceNotFoundException"
//   An exception for accessing or deleting a resource that does not exist. Gives
//   http status code of 400.
//
//   * ErrCodeValidationException "ValidationException"
//   An exception for missing / invalid input fields. Gives http status code of
//   400.
//
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/UpdateElasticsearchDomainConfig
func (c *ElasticsearchService) UpdateElasticsearchDomainConfig(input *UpdateElasticsearchDomainConfigInput) (*UpdateElasticsearchDomainConfigOutput, error) {
	req, out := c.UpdateElasticsearchDomainConfigRequest(input)
	return out, req.Send()
}
// UpdateElasticsearchDomainConfigWithContext is the same as UpdateElasticsearchDomainConfig with the addition of
// the ability to pass a context and additional request options.
//
// See UpdateElasticsearchDomainConfig for details on how to use this API operation.
//
// The context must be non-nil and will be used for request cancellation. If
// the context is nil a panic will occur. In the future the SDK may create
// sub-contexts for http.Requests. See https://golang.org/pkg/context/
// for more information on using Contexts.
func (c *ElasticsearchService) UpdateElasticsearchDomainConfigWithContext(ctx aws.Context, input *UpdateElasticsearchDomainConfigInput, opts ...request.Option) (*UpdateElasticsearchDomainConfigOutput, error) {
	// Same as UpdateElasticsearchDomainConfig, but honors ctx for
	// cancellation and applies any per-request options before sending.
	req, out := c.UpdateElasticsearchDomainConfigRequest(input)
	req.SetContext(ctx)
	req.ApplyOptions(opts...)
	err := req.Send()
	return out, err
}
// The configured access rules for the domain's document and search endpoints,
// and the current status of those rules.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/AccessPoliciesStatus
type AccessPoliciesStatus struct {
	_ struct{} `type:"structure"`

	// The access policy configured for the Elasticsearch domain. Access policies
	// may be resource-based, IP-based, or IAM-based. See Configuring Access Policies
	// (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-access-policies)
	// for more information.
	//
	// Options is a required field
	Options *string `type:"string" required:"true"`

	// The status of the access policy for the Elasticsearch domain. See OptionStatus
	// for the status information that's included.
	//
	// Status is a required field
	Status *OptionStatus `type:"structure" required:"true"`
}

// String returns the string representation
func (s AccessPoliciesStatus) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s AccessPoliciesStatus) GoString() string {
	return s.String()
}

// SetOptions sets the Options field's value. Returns the receiver to
// allow call chaining.
func (s *AccessPoliciesStatus) SetOptions(v string) *AccessPoliciesStatus {
	s.Options = &v
	return s
}

// SetStatus sets the Status field's value. Returns the receiver to
// allow call chaining.
func (s *AccessPoliciesStatus) SetStatus(v *OptionStatus) *AccessPoliciesStatus {
	s.Status = v
	return s
}
// Container for the parameters to the AddTags operation. Specify the tags that
// you want to attach to the Elasticsearch domain.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/AddTagsRequest
type AddTagsInput struct {
	_ struct{} `type:"structure"`

	// Specify the ARN for which you want to add the tags.
	//
	// ARN is a required field
	ARN *string `type:"string" required:"true"`

	// List of Tag that need to be added for the Elasticsearch domain.
	//
	// TagList is a required field
	TagList []*Tag `type:"list" required:"true"`
}

// String returns the string representation
func (s AddTagsInput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s AddTagsInput) GoString() string {
	return s.String()
}
// Validate inspects the fields of the type to determine if they are valid.
// It accumulates every violated constraint before reporting, so the caller
// sees all problems at once rather than one per call.
func (s *AddTagsInput) Validate() error {
	invalidParams := request.ErrInvalidParams{Context: "AddTagsInput"}
	if s.ARN == nil {
		invalidParams.Add(request.NewErrParamRequired("ARN"))
	}
	if s.TagList == nil {
		invalidParams.Add(request.NewErrParamRequired("TagList"))
	}
	// Ranging over a nil slice iterates zero times, so no nil guard is
	// needed around the loop.
	for i, tag := range s.TagList {
		if tag == nil {
			continue
		}
		if err := tag.Validate(); err != nil {
			invalidParams.AddNested(fmt.Sprintf("%s[%v]", "TagList", i), err.(request.ErrInvalidParams))
		}
	}

	if invalidParams.Len() == 0 {
		return nil
	}
	return invalidParams
}
// SetARN sets the ARN field's value. Returns the receiver to allow
// call chaining.
func (s *AddTagsInput) SetARN(v string) *AddTagsInput {
	s.ARN = &v
	return s
}

// SetTagList sets the TagList field's value. Returns the receiver to
// allow call chaining.
func (s *AddTagsInput) SetTagList(v []*Tag) *AddTagsInput {
	s.TagList = v
	return s
}
// AddTagsOutput is the (empty) response shape for AddTags; the operation
// returns no data on success.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/AddTagsOutput
type AddTagsOutput struct {
	_ struct{} `type:"structure"`
}

// String returns the string representation
func (s AddTagsOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s AddTagsOutput) GoString() string {
	return s.String()
}
// List of limits that are specific to a given InstanceType and for each of
// its InstanceRole.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/AdditionalLimit
type AdditionalLimit struct {
	_ struct{} `type:"structure"`

	// Name of an additional limit, specific to a given InstanceType and to each
	// of its InstanceRole. Known attribute names (per the upstream service docs):
	//
	//   - MaximumNumberOfDataNodesSupported: present for master-node limits only;
	//     how many data nodes the given ESPartitionInstanceType can support when
	//     acting as a master node.
	//   - MaximumNumberOfDataNodesWithoutMasterNode: present for data-node limits
	//     only; how many data nodes of the given ESPartitionInstanceType a domain
	//     can have without a dedicated master node.
	LimitName *string `type:"string"`

	// Value for given AdditionalLimit$LimitName .
	LimitValues []*string `type:"list"`
}

// String returns the string representation
func (s AdditionalLimit) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s AdditionalLimit) GoString() string {
	return s.String()
}

// SetLimitName sets the LimitName field's value. Returns the receiver
// to allow call chaining.
func (s *AdditionalLimit) SetLimitName(v string) *AdditionalLimit {
	s.LimitName = &v
	return s
}

// SetLimitValues sets the LimitValues field's value. Returns the receiver
// to allow call chaining.
func (s *AdditionalLimit) SetLimitValues(v []*string) *AdditionalLimit {
	s.LimitValues = v
	return s
}
// Status of the advanced options for the specified Elasticsearch domain. Currently,
// the following advanced options are available:
//
//    * Option to allow references to indices in an HTTP request body. Must
//    be false when configuring access to individual sub-resources. By default,
//    the value is true. See Configuration Advanced Options (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-advanced-options)
//    for more information.
//
//    * Option to specify the percentage of heap space that is allocated to
//    field data. By default, this setting is unbounded.
//
// For more information, see Configuring Advanced Options (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-advanced-options).
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/AdvancedOptionsStatus
type AdvancedOptionsStatus struct {
	_ struct{} `type:"structure"`

	// Specifies the status of advanced options for the specified Elasticsearch
	// domain.
	//
	// Options is a required field
	Options map[string]*string `type:"map" required:"true"`

	// Specifies the status of OptionStatus for advanced options for the specified
	// Elasticsearch domain.
	//
	// Status is a required field
	Status *OptionStatus `type:"structure" required:"true"`
}

// String returns the string representation
func (s AdvancedOptionsStatus) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s AdvancedOptionsStatus) GoString() string {
	return s.String()
}

// SetOptions sets the Options field's value. Returns the receiver to
// allow call chaining.
func (s *AdvancedOptionsStatus) SetOptions(v map[string]*string) *AdvancedOptionsStatus {
	s.Options = v
	return s
}

// SetStatus sets the Status field's value. Returns the receiver to
// allow call chaining.
func (s *AdvancedOptionsStatus) SetStatus(v *OptionStatus) *AdvancedOptionsStatus {
	s.Status = v
	return s
}
// CreateElasticsearchDomainInput carries the parameters for the
// CreateElasticsearchDomain operation.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/CreateElasticsearchDomainRequest
type CreateElasticsearchDomainInput struct {
	_ struct{} `type:"structure"`

	// IAM access policy as a JSON-formatted string.
	AccessPolicies *string `type:"string"`

	// Option to allow references to indices in an HTTP request body. Must be false
	// when configuring access to individual sub-resources. By default, the value
	// is true. See Configuration Advanced Options (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-advanced-options)
	// for more information.
	AdvancedOptions map[string]*string `type:"map"`

	// The name of the Elasticsearch domain that you are creating. Domain names
	// are unique across the domains owned by an account within an AWS region. Domain
	// names must start with a letter or number and can contain the following characters:
	// a-z (lowercase), 0-9, and - (hyphen).
	//
	// DomainName is a required field
	DomainName *string `min:"3" type:"string" required:"true"`

	// Options to enable, disable and specify the type and size of EBS storage volumes.
	EBSOptions *EBSOptions `type:"structure"`

	// Configuration options for an Elasticsearch domain. Specifies the instance
	// type and number of instances in the domain cluster.
	ElasticsearchClusterConfig *ElasticsearchClusterConfig `type:"structure"`

	// String of format X.Y to specify version for the Elasticsearch domain eg.
	// "1.5" or "2.3". For more information, see Creating Elasticsearch Domains
	// (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomains)
	// in the Amazon Elasticsearch Service Developer Guide.
	ElasticsearchVersion *string `type:"string"`

	// Option to set time, in UTC format, of the daily automated snapshot. Default
	// value is 0 hours.
	SnapshotOptions *SnapshotOptions `type:"structure"`
}

// String returns the string representation
func (s CreateElasticsearchDomainInput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s CreateElasticsearchDomainInput) GoString() string {
	return s.String()
}
// Validate inspects the fields of the type to determine if they are valid.
// The two DomainName checks are mutually exclusive (required vs. minimum
// length), so they are expressed as a switch.
func (s *CreateElasticsearchDomainInput) Validate() error {
	ip := request.ErrInvalidParams{Context: "CreateElasticsearchDomainInput"}
	switch {
	case s.DomainName == nil:
		ip.Add(request.NewErrParamRequired("DomainName"))
	case len(*s.DomainName) < 3:
		ip.Add(request.NewErrParamMinLen("DomainName", 3))
	}

	if ip.Len() == 0 {
		return nil
	}
	return ip
}
// SetAccessPolicies sets the AccessPolicies field's value. Each setter
// below returns the receiver to allow call chaining.
func (s *CreateElasticsearchDomainInput) SetAccessPolicies(v string) *CreateElasticsearchDomainInput {
	s.AccessPolicies = &v
	return s
}

// SetAdvancedOptions sets the AdvancedOptions field's value.
func (s *CreateElasticsearchDomainInput) SetAdvancedOptions(v map[string]*string) *CreateElasticsearchDomainInput {
	s.AdvancedOptions = v
	return s
}

// SetDomainName sets the DomainName field's value.
func (s *CreateElasticsearchDomainInput) SetDomainName(v string) *CreateElasticsearchDomainInput {
	s.DomainName = &v
	return s
}

// SetEBSOptions sets the EBSOptions field's value.
func (s *CreateElasticsearchDomainInput) SetEBSOptions(v *EBSOptions) *CreateElasticsearchDomainInput {
	s.EBSOptions = v
	return s
}

// SetElasticsearchClusterConfig sets the ElasticsearchClusterConfig field's value.
func (s *CreateElasticsearchDomainInput) SetElasticsearchClusterConfig(v *ElasticsearchClusterConfig) *CreateElasticsearchDomainInput {
	s.ElasticsearchClusterConfig = v
	return s
}

// SetElasticsearchVersion sets the ElasticsearchVersion field's value.
func (s *CreateElasticsearchDomainInput) SetElasticsearchVersion(v string) *CreateElasticsearchDomainInput {
	s.ElasticsearchVersion = &v
	return s
}

// SetSnapshotOptions sets the SnapshotOptions field's value.
func (s *CreateElasticsearchDomainInput) SetSnapshotOptions(v *SnapshotOptions) *CreateElasticsearchDomainInput {
	s.SnapshotOptions = v
	return s
}
// The result of a CreateElasticsearchDomain operation. Contains the status
// of the newly created Elasticsearch domain.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/CreateElasticsearchDomainResponse
type CreateElasticsearchDomainOutput struct {
	_ struct{} `type:"structure"`

	// The status of the newly created Elasticsearch domain.
	DomainStatus *ElasticsearchDomainStatus `type:"structure"`
}

// String returns the string representation
func (s CreateElasticsearchDomainOutput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s CreateElasticsearchDomainOutput) GoString() string {
	return s.String()
}

// SetDomainStatus sets the DomainStatus field's value. Returns the
// receiver to allow call chaining.
func (s *CreateElasticsearchDomainOutput) SetDomainStatus(v *ElasticsearchDomainStatus) *CreateElasticsearchDomainOutput {
	s.DomainStatus = v
	return s
}
// Container for the parameters to the DeleteElasticsearchDomain operation.
// Specifies the name of the Elasticsearch domain that you want to delete.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DeleteElasticsearchDomainRequest
type DeleteElasticsearchDomainInput struct {
	_ struct{} `type:"structure"`

	// The name of the Elasticsearch domain that you want to permanently delete.
	//
	// DomainName is a required field
	DomainName *string `location:"uri" locationName:"DomainName" min:"3" type:"string" required:"true"`
}

// String returns the string representation
func (s DeleteElasticsearchDomainInput) String() string {
	return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s DeleteElasticsearchDomainInput) GoString() string {
	return s.String()
}
// Validate inspects the fields of the type to determine if they are valid.
// The two DomainName checks are mutually exclusive (required vs. minimum
// length), so they are expressed as a switch.
func (s *DeleteElasticsearchDomainInput) Validate() error {
	ip := request.ErrInvalidParams{Context: "DeleteElasticsearchDomainInput"}
	switch {
	case s.DomainName == nil:
		ip.Add(request.NewErrParamRequired("DomainName"))
	case len(*s.DomainName) < 3:
		ip.Add(request.NewErrParamMinLen("DomainName", 3))
	}

	if ip.Len() == 0 {
		return nil
	}
	return ip
}
// SetDomainName sets the DomainName field's value.
func (s *DeleteElasticsearchDomainInput) SetDomainName(v string) *DeleteElasticsearchDomainInput {
s.DomainName = &v
return s
}
// The result of a DeleteElasticsearchDomain request. Contains the status of
// the pending deletion, or no status if the domain and all of its resources
// have been deleted.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DeleteElasticsearchDomainResponse
type DeleteElasticsearchDomainOutput struct {
_ struct{} `type:"structure"`
// The status of the Elasticsearch domain being deleted.
DomainStatus *ElasticsearchDomainStatus `type:"structure"`
}
// String returns the string representation
func (s DeleteElasticsearchDomainOutput) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s DeleteElasticsearchDomainOutput) GoString() string {
return s.String()
}
// SetDomainStatus sets the DomainStatus field's value. The receiver is
// returned so that setter calls can be chained.
func (s *DeleteElasticsearchDomainOutput) SetDomainStatus(v *ElasticsearchDomainStatus) *DeleteElasticsearchDomainOutput {
s.DomainStatus = v
return s
}
// Container for the parameters to the DescribeElasticsearchDomainConfig operation.
// Specifies the domain name for which you want configuration information.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchDomainConfigRequest
type DescribeElasticsearchDomainConfigInput struct {
_ struct{} `type:"structure"`
// The Elasticsearch domain that you want to get information about.
//
// DomainName is a required field
DomainName *string `location:"uri" locationName:"DomainName" min:"3" type:"string" required:"true"`
}
// String returns a human-readable rendering of the input.
func (s DescribeElasticsearchDomainConfigInput) String() string {
return awsutil.Prettify(s)
}
// GoString returns the same representation as String.
func (s DescribeElasticsearchDomainConfigInput) GoString() string {
return s.String()
}
// Validate reports an error when DomainName is missing or shorter than the
// three-character minimum required by the API; it returns nil when the input
// is well formed.
func (s *DescribeElasticsearchDomainConfigInput) Validate() error {
errs := request.ErrInvalidParams{Context: "DescribeElasticsearchDomainConfigInput"}
switch {
case s.DomainName == nil:
errs.Add(request.NewErrParamRequired("DomainName"))
case len(*s.DomainName) < 3:
errs.Add(request.NewErrParamMinLen("DomainName", 3))
}
if errs.Len() == 0 {
return nil
}
return errs
}
// SetDomainName stores v in the DomainName field and returns the receiver
// so that setter calls can be chained.
func (s *DescribeElasticsearchDomainConfigInput) SetDomainName(v string) *DescribeElasticsearchDomainConfigInput {
name := v
s.DomainName = &name
return s
}
// The result of a DescribeElasticsearchDomainConfig request. Contains the configuration
// information of the requested domain.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchDomainConfigResponse
type DescribeElasticsearchDomainConfigOutput struct {
_ struct{} `type:"structure"`
// The configuration information of the domain requested in the DescribeElasticsearchDomainConfig
// request.
//
// DomainConfig is a required field
DomainConfig *ElasticsearchDomainConfig `type:"structure" required:"true"`
}
// String returns the string representation
func (s DescribeElasticsearchDomainConfigOutput) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s DescribeElasticsearchDomainConfigOutput) GoString() string {
return s.String()
}
// SetDomainConfig sets the DomainConfig field's value. The receiver is
// returned so that setter calls can be chained.
func (s *DescribeElasticsearchDomainConfigOutput) SetDomainConfig(v *ElasticsearchDomainConfig) *DescribeElasticsearchDomainConfigOutput {
s.DomainConfig = v
return s
}
// Container for the parameters to the DescribeElasticsearchDomain operation.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchDomainRequest
type DescribeElasticsearchDomainInput struct {
_ struct{} `type:"structure"`
// The name of the Elasticsearch domain for which you want information.
//
// DomainName is a required field
DomainName *string `location:"uri" locationName:"DomainName" min:"3" type:"string" required:"true"`
}
// String returns a human-readable rendering of the input.
func (s DescribeElasticsearchDomainInput) String() string {
return awsutil.Prettify(s)
}
// GoString returns the same representation as String.
func (s DescribeElasticsearchDomainInput) GoString() string {
return s.String()
}
// Validate reports an error when DomainName is missing or shorter than the
// three-character minimum required by the API; it returns nil when the input
// is well formed.
func (s *DescribeElasticsearchDomainInput) Validate() error {
errs := request.ErrInvalidParams{Context: "DescribeElasticsearchDomainInput"}
switch {
case s.DomainName == nil:
errs.Add(request.NewErrParamRequired("DomainName"))
case len(*s.DomainName) < 3:
errs.Add(request.NewErrParamMinLen("DomainName", 3))
}
if errs.Len() == 0 {
return nil
}
return errs
}
// SetDomainName stores v in the DomainName field and returns the receiver
// so that setter calls can be chained.
func (s *DescribeElasticsearchDomainInput) SetDomainName(v string) *DescribeElasticsearchDomainInput {
name := v
s.DomainName = &name
return s
}
// The result of a DescribeElasticsearchDomain request. Contains the status
// of the domain specified in the request.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchDomainResponse
type DescribeElasticsearchDomainOutput struct {
_ struct{} `type:"structure"`
// The current status of the Elasticsearch domain.
//
// DomainStatus is a required field
DomainStatus *ElasticsearchDomainStatus `type:"structure" required:"true"`
}
// String returns the string representation
func (s DescribeElasticsearchDomainOutput) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s DescribeElasticsearchDomainOutput) GoString() string {
return s.String()
}
// SetDomainStatus sets the DomainStatus field's value. The receiver is
// returned so that setter calls can be chained.
func (s *DescribeElasticsearchDomainOutput) SetDomainStatus(v *ElasticsearchDomainStatus) *DescribeElasticsearchDomainOutput {
s.DomainStatus = v
return s
}
// Container for the parameters to the DescribeElasticsearchDomains operation.
// By default, the API returns the status of all Elasticsearch domains.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchDomainsRequest
type DescribeElasticsearchDomainsInput struct {
_ struct{} `type:"structure"`
// The Elasticsearch domains for which you want information.
//
// DomainNames is a required field
DomainNames []*string `type:"list" required:"true"`
}
// String returns the string representation
func (s DescribeElasticsearchDomainsInput) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s DescribeElasticsearchDomainsInput) GoString() string {
return s.String()
}
// Validate inspects the fields of the type to determine if they are valid.
// Only the presence of DomainNames is checked; the individual list elements
// are not validated client-side.
func (s *DescribeElasticsearchDomainsInput) Validate() error {
invalidParams := request.ErrInvalidParams{Context: "DescribeElasticsearchDomainsInput"}
if s.DomainNames == nil {
invalidParams.Add(request.NewErrParamRequired("DomainNames"))
}
if invalidParams.Len() > 0 {
return invalidParams
}
return nil
}
// SetDomainNames sets the DomainNames field's value. The receiver is
// returned so that setter calls can be chained.
func (s *DescribeElasticsearchDomainsInput) SetDomainNames(v []*string) *DescribeElasticsearchDomainsInput {
s.DomainNames = v
return s
}
// The result of a DescribeElasticsearchDomains request. Contains the status
// of the specified domains or all domains owned by the account.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchDomainsResponse
type DescribeElasticsearchDomainsOutput struct {
_ struct{} `type:"structure"`
// The status of the domains requested in the DescribeElasticsearchDomains request.
//
// DomainStatusList is a required field
DomainStatusList []*ElasticsearchDomainStatus `type:"list" required:"true"`
}
// String returns the string representation
func (s DescribeElasticsearchDomainsOutput) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s DescribeElasticsearchDomainsOutput) GoString() string {
return s.String()
}
// SetDomainStatusList sets the DomainStatusList field's value. The receiver
// is returned so that setter calls can be chained.
func (s *DescribeElasticsearchDomainsOutput) SetDomainStatusList(v []*ElasticsearchDomainStatus) *DescribeElasticsearchDomainsOutput {
s.DomainStatusList = v
return s
}
// Container for the parameters to DescribeElasticsearchInstanceTypeLimits operation.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchInstanceTypeLimitsRequest
type DescribeElasticsearchInstanceTypeLimitsInput struct {
_ struct{} `type:"structure"`
// DomainName represents the name of the Domain that we are trying to modify.
// This should be present only if we are querying for Elasticsearch Limits for
// an existing domain.
DomainName *string `location:"querystring" locationName:"domainName" min:"3" type:"string"`
// Version of Elasticsearch for which Limits are needed.
//
// ElasticsearchVersion is a required field
ElasticsearchVersion *string `location:"uri" locationName:"ElasticsearchVersion" type:"string" required:"true"`
// The instance type for an Elasticsearch cluster for which Elasticsearch Limits
// are needed.
//
// InstanceType is a required field
InstanceType *string `location:"uri" locationName:"InstanceType" type:"string" required:"true" enum:"ESPartitionInstanceType"`
}
// String returns the string representation
func (s DescribeElasticsearchInstanceTypeLimitsInput) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s DescribeElasticsearchInstanceTypeLimitsInput) GoString() string {
return s.String()
}
// Validate inspects the fields of the type to determine if they are valid.
// DomainName is optional, but when present it must meet the three-character
// minimum; ElasticsearchVersion and InstanceType are required.
func (s *DescribeElasticsearchInstanceTypeLimitsInput) Validate() error {
invalidParams := request.ErrInvalidParams{Context: "DescribeElasticsearchInstanceTypeLimitsInput"}
if s.DomainName != nil && len(*s.DomainName) < 3 {
invalidParams.Add(request.NewErrParamMinLen("DomainName", 3))
}
if s.ElasticsearchVersion == nil {
invalidParams.Add(request.NewErrParamRequired("ElasticsearchVersion"))
}
if s.InstanceType == nil {
invalidParams.Add(request.NewErrParamRequired("InstanceType"))
}
if invalidParams.Len() > 0 {
return invalidParams
}
return nil
}
// SetDomainName sets the DomainName field's value. The receiver is returned
// so that setter calls can be chained.
func (s *DescribeElasticsearchInstanceTypeLimitsInput) SetDomainName(v string) *DescribeElasticsearchInstanceTypeLimitsInput {
s.DomainName = &v
return s
}
// SetElasticsearchVersion sets the ElasticsearchVersion field's value.
func (s *DescribeElasticsearchInstanceTypeLimitsInput) SetElasticsearchVersion(v string) *DescribeElasticsearchInstanceTypeLimitsInput {
s.ElasticsearchVersion = &v
return s
}
// SetInstanceType sets the InstanceType field's value.
func (s *DescribeElasticsearchInstanceTypeLimitsInput) SetInstanceType(v string) *DescribeElasticsearchInstanceTypeLimitsInput {
s.InstanceType = &v
return s
}
// Container for the result returned by the DescribeElasticsearchInstanceTypeLimits
// operation.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DescribeElasticsearchInstanceTypeLimitsResponse
type DescribeElasticsearchInstanceTypeLimitsOutput struct {
_ struct{} `type:"structure"`
// Map of Role of the Instance and Limits that are applicable. Role performed
// by given Instance in Elasticsearch can be one of the following: Data: If
// the given InstanceType is used as Data node
// Master: If the given InstanceType is used as Master node
LimitsByRole map[string]*Limits `type:"map"`
}
// String returns the string representation
func (s DescribeElasticsearchInstanceTypeLimitsOutput) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s DescribeElasticsearchInstanceTypeLimitsOutput) GoString() string {
return s.String()
}
// SetLimitsByRole sets the LimitsByRole field's value. The receiver is
// returned so that setter calls can be chained.
func (s *DescribeElasticsearchInstanceTypeLimitsOutput) SetLimitsByRole(v map[string]*Limits) *DescribeElasticsearchInstanceTypeLimitsOutput {
s.LimitsByRole = v
return s
}
// DomainInfo identifies a single Elasticsearch domain by name.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/DomainInfo
type DomainInfo struct {
_ struct{} `type:"structure"`
// Specifies the DomainName.
DomainName *string `min:"3" type:"string"`
}
// String returns a human-readable rendering of the value.
func (s DomainInfo) String() string {
return awsutil.Prettify(s)
}
// GoString returns the same representation as String.
func (s DomainInfo) GoString() string {
return s.String()
}
// SetDomainName stores v in the DomainName field and returns the receiver
// so that setter calls can be chained.
func (s *DomainInfo) SetDomainName(v string) *DomainInfo {
name := v
s.DomainName = &name
return s
}
// Options to enable, disable, and specify the properties of EBS storage volumes.
// For more information, see Configuring EBS-based Storage (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-ebs).
// Setter methods return the receiver so calls can be chained.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/EBSOptions
type EBSOptions struct {
_ struct{} `type:"structure"`
// Specifies whether EBS-based storage is enabled.
EBSEnabled *bool `type:"boolean"`
// Specifies the IOPS for a Provisioned IOPS EBS volume (SSD).
Iops *int64 `type:"integer"`
// Integer to specify the size of an EBS volume.
VolumeSize *int64 `type:"integer"`
// Specifies the volume type for EBS-based storage.
VolumeType *string `type:"string" enum:"VolumeType"`
}
// String returns the string representation
func (s EBSOptions) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s EBSOptions) GoString() string {
return s.String()
}
// SetEBSEnabled sets the EBSEnabled field's value.
func (s *EBSOptions) SetEBSEnabled(v bool) *EBSOptions {
s.EBSEnabled = &v
return s
}
// SetIops sets the Iops field's value.
func (s *EBSOptions) SetIops(v int64) *EBSOptions {
s.Iops = &v
return s
}
// SetVolumeSize sets the VolumeSize field's value.
func (s *EBSOptions) SetVolumeSize(v int64) *EBSOptions {
s.VolumeSize = &v
return s
}
// SetVolumeType sets the VolumeType field's value.
func (s *EBSOptions) SetVolumeType(v string) *EBSOptions {
s.VolumeType = &v
return s
}
// Status of the EBS options for the specified Elasticsearch domain.
// Setter methods return the receiver so calls can be chained.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/EBSOptionsStatus
type EBSOptionsStatus struct {
_ struct{} `type:"structure"`
// Specifies the EBS options for the specified Elasticsearch domain.
//
// Options is a required field
Options *EBSOptions `type:"structure" required:"true"`
// Specifies the status of the EBS options for the specified Elasticsearch domain.
//
// Status is a required field
Status *OptionStatus `type:"structure" required:"true"`
}
// String returns the string representation
func (s EBSOptionsStatus) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s EBSOptionsStatus) GoString() string {
return s.String()
}
// SetOptions sets the Options field's value.
func (s *EBSOptionsStatus) SetOptions(v *EBSOptions) *EBSOptionsStatus {
s.Options = v
return s
}
// SetStatus sets the Status field's value.
func (s *EBSOptionsStatus) SetStatus(v *OptionStatus) *EBSOptionsStatus {
s.Status = v
return s
}
// Specifies the configuration for the domain cluster, such as the type and
// number of instances.
// Setter methods return the receiver so calls can be chained.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ElasticsearchClusterConfig
type ElasticsearchClusterConfig struct {
_ struct{} `type:"structure"`
// Total number of dedicated master nodes, active and on standby, for the cluster.
DedicatedMasterCount *int64 `type:"integer"`
// A boolean value to indicate whether a dedicated master node is enabled. See
// About Dedicated Master Nodes (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-managedomains.html#es-managedomains-dedicatedmasternodes)
// for more information.
DedicatedMasterEnabled *bool `type:"boolean"`
// The instance type for a dedicated master node.
DedicatedMasterType *string `type:"string" enum:"ESPartitionInstanceType"`
// The number of instances in the specified domain cluster.
InstanceCount *int64 `type:"integer"`
// The instance type for an Elasticsearch cluster.
InstanceType *string `type:"string" enum:"ESPartitionInstanceType"`
// A boolean value to indicate whether zone awareness is enabled. See About
// Zone Awareness (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-managedomains.html#es-managedomains-zoneawareness)
// for more information.
ZoneAwarenessEnabled *bool `type:"boolean"`
}
// String returns the string representation
func (s ElasticsearchClusterConfig) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s ElasticsearchClusterConfig) GoString() string {
return s.String()
}
// SetDedicatedMasterCount sets the DedicatedMasterCount field's value.
func (s *ElasticsearchClusterConfig) SetDedicatedMasterCount(v int64) *ElasticsearchClusterConfig {
s.DedicatedMasterCount = &v
return s
}
// SetDedicatedMasterEnabled sets the DedicatedMasterEnabled field's value.
func (s *ElasticsearchClusterConfig) SetDedicatedMasterEnabled(v bool) *ElasticsearchClusterConfig {
s.DedicatedMasterEnabled = &v
return s
}
// SetDedicatedMasterType sets the DedicatedMasterType field's value.
func (s *ElasticsearchClusterConfig) SetDedicatedMasterType(v string) *ElasticsearchClusterConfig {
s.DedicatedMasterType = &v
return s
}
// SetInstanceCount sets the InstanceCount field's value.
func (s *ElasticsearchClusterConfig) SetInstanceCount(v int64) *ElasticsearchClusterConfig {
s.InstanceCount = &v
return s
}
// SetInstanceType sets the InstanceType field's value.
func (s *ElasticsearchClusterConfig) SetInstanceType(v string) *ElasticsearchClusterConfig {
s.InstanceType = &v
return s
}
// SetZoneAwarenessEnabled sets the ZoneAwarenessEnabled field's value.
func (s *ElasticsearchClusterConfig) SetZoneAwarenessEnabled(v bool) *ElasticsearchClusterConfig {
s.ZoneAwarenessEnabled = &v
return s
}
// Specifies the configuration status for the specified Elasticsearch domain.
// Setter methods return the receiver so calls can be chained.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ElasticsearchClusterConfigStatus
type ElasticsearchClusterConfigStatus struct {
_ struct{} `type:"structure"`
// Specifies the cluster configuration for the specified Elasticsearch domain.
//
// Options is a required field
Options *ElasticsearchClusterConfig `type:"structure" required:"true"`
// Specifies the status of the configuration for the specified Elasticsearch
// domain.
//
// Status is a required field
Status *OptionStatus `type:"structure" required:"true"`
}
// String returns the string representation
func (s ElasticsearchClusterConfigStatus) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s ElasticsearchClusterConfigStatus) GoString() string {
return s.String()
}
// SetOptions sets the Options field's value.
func (s *ElasticsearchClusterConfigStatus) SetOptions(v *ElasticsearchClusterConfig) *ElasticsearchClusterConfigStatus {
s.Options = v
return s
}
// SetStatus sets the Status field's value.
func (s *ElasticsearchClusterConfigStatus) SetStatus(v *OptionStatus) *ElasticsearchClusterConfigStatus {
s.Status = v
return s
}
// The configuration of an Elasticsearch domain. Each field wraps one option
// group together with its deployment status.
// Setter methods return the receiver so calls can be chained.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ElasticsearchDomainConfig
type ElasticsearchDomainConfig struct {
_ struct{} `type:"structure"`
// IAM access policy as a JSON-formatted string.
AccessPolicies *AccessPoliciesStatus `type:"structure"`
// Specifies the AdvancedOptions for the domain. See Configuring Advanced Options
// (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-advanced-options)
// for more information.
AdvancedOptions *AdvancedOptionsStatus `type:"structure"`
// Specifies the EBSOptions for the Elasticsearch domain.
EBSOptions *EBSOptionsStatus `type:"structure"`
// Specifies the ElasticsearchClusterConfig for the Elasticsearch domain.
ElasticsearchClusterConfig *ElasticsearchClusterConfigStatus `type:"structure"`
// String of format X.Y to specify version for the Elasticsearch domain.
ElasticsearchVersion *ElasticsearchVersionStatus `type:"structure"`
// Specifies the SnapshotOptions for the Elasticsearch domain.
SnapshotOptions *SnapshotOptionsStatus `type:"structure"`
}
// String returns the string representation
func (s ElasticsearchDomainConfig) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s ElasticsearchDomainConfig) GoString() string {
return s.String()
}
// SetAccessPolicies sets the AccessPolicies field's value.
func (s *ElasticsearchDomainConfig) SetAccessPolicies(v *AccessPoliciesStatus) *ElasticsearchDomainConfig {
s.AccessPolicies = v
return s
}
// SetAdvancedOptions sets the AdvancedOptions field's value.
func (s *ElasticsearchDomainConfig) SetAdvancedOptions(v *AdvancedOptionsStatus) *ElasticsearchDomainConfig {
s.AdvancedOptions = v
return s
}
// SetEBSOptions sets the EBSOptions field's value.
func (s *ElasticsearchDomainConfig) SetEBSOptions(v *EBSOptionsStatus) *ElasticsearchDomainConfig {
s.EBSOptions = v
return s
}
// SetElasticsearchClusterConfig sets the ElasticsearchClusterConfig field's value.
func (s *ElasticsearchDomainConfig) SetElasticsearchClusterConfig(v *ElasticsearchClusterConfigStatus) *ElasticsearchDomainConfig {
s.ElasticsearchClusterConfig = v
return s
}
// SetElasticsearchVersion sets the ElasticsearchVersion field's value.
func (s *ElasticsearchDomainConfig) SetElasticsearchVersion(v *ElasticsearchVersionStatus) *ElasticsearchDomainConfig {
s.ElasticsearchVersion = v
return s
}
// SetSnapshotOptions sets the SnapshotOptions field's value.
func (s *ElasticsearchDomainConfig) SetSnapshotOptions(v *SnapshotOptionsStatus) *ElasticsearchDomainConfig {
s.SnapshotOptions = v
return s
}
// The current status of an Elasticsearch domain.
// Setter methods return the receiver so calls can be chained.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ElasticsearchDomainStatus
type ElasticsearchDomainStatus struct {
_ struct{} `type:"structure"`
// The Amazon resource name (ARN) of an Elasticsearch domain. See Identifiers
// for IAM Entities (http://docs.aws.amazon.com/IAM/latest/UserGuide/index.html?Using_Identifiers.html)
// in Using AWS Identity and Access Management for more information.
//
// ARN is a required field
ARN *string `type:"string" required:"true"`
// IAM access policy as a JSON-formatted string.
AccessPolicies *string `type:"string"`
// Specifies the status of the AdvancedOptions
AdvancedOptions map[string]*string `type:"map"`
// The domain creation status. True if the creation of an Elasticsearch domain
// is complete. False if domain creation is still in progress.
Created *bool `type:"boolean"`
// The domain deletion status. True if a delete request has been received for
// the domain but resource cleanup is still in progress. False if the domain
// has not been deleted. Once domain deletion is complete, the status of the
// domain is no longer returned.
Deleted *bool `type:"boolean"`
// The unique identifier for the specified Elasticsearch domain.
//
// DomainId is a required field
DomainId *string `min:"1" type:"string" required:"true"`
// The name of an Elasticsearch domain. Domain names are unique across the domains
// owned by an account within an AWS region. Domain names start with a letter
// or number and can contain the following characters: a-z (lowercase), 0-9,
// and - (hyphen).
//
// DomainName is a required field
DomainName *string `min:"3" type:"string" required:"true"`
// The EBSOptions for the specified domain. See Configuring EBS-based Storage
// (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-ebs)
// for more information.
EBSOptions *EBSOptions `type:"structure"`
// The type and number of instances in the domain cluster.
//
// ElasticsearchClusterConfig is a required field
ElasticsearchClusterConfig *ElasticsearchClusterConfig `type:"structure" required:"true"`
// The version of Elasticsearch deployed on the domain.
ElasticsearchVersion *string `type:"string"`
// The Elasticsearch domain endpoint that you use to submit index and search
// requests.
Endpoint *string `type:"string"`
// The status of the Elasticsearch domain configuration. True if Amazon Elasticsearch
// Service is processing configuration changes. False if the configuration is
// active.
Processing *bool `type:"boolean"`
// Specifies the status of the SnapshotOptions
SnapshotOptions *SnapshotOptions `type:"structure"`
}
// String returns the string representation
func (s ElasticsearchDomainStatus) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s ElasticsearchDomainStatus) GoString() string {
return s.String()
}
// SetARN sets the ARN field's value.
func (s *ElasticsearchDomainStatus) SetARN(v string) *ElasticsearchDomainStatus {
s.ARN = &v
return s
}
// SetAccessPolicies sets the AccessPolicies field's value.
func (s *ElasticsearchDomainStatus) SetAccessPolicies(v string) *ElasticsearchDomainStatus {
s.AccessPolicies = &v
return s
}
// SetAdvancedOptions sets the AdvancedOptions field's value.
func (s *ElasticsearchDomainStatus) SetAdvancedOptions(v map[string]*string) *ElasticsearchDomainStatus {
s.AdvancedOptions = v
return s
}
// SetCreated sets the Created field's value.
func (s *ElasticsearchDomainStatus) SetCreated(v bool) *ElasticsearchDomainStatus {
s.Created = &v
return s
}
// SetDeleted sets the Deleted field's value.
func (s *ElasticsearchDomainStatus) SetDeleted(v bool) *ElasticsearchDomainStatus {
s.Deleted = &v
return s
}
// SetDomainId sets the DomainId field's value.
func (s *ElasticsearchDomainStatus) SetDomainId(v string) *ElasticsearchDomainStatus {
s.DomainId = &v
return s
}
// SetDomainName sets the DomainName field's value.
func (s *ElasticsearchDomainStatus) SetDomainName(v string) *ElasticsearchDomainStatus {
s.DomainName = &v
return s
}
// SetEBSOptions sets the EBSOptions field's value.
func (s *ElasticsearchDomainStatus) SetEBSOptions(v *EBSOptions) *ElasticsearchDomainStatus {
s.EBSOptions = v
return s
}
// SetElasticsearchClusterConfig sets the ElasticsearchClusterConfig field's value.
func (s *ElasticsearchDomainStatus) SetElasticsearchClusterConfig(v *ElasticsearchClusterConfig) *ElasticsearchDomainStatus {
s.ElasticsearchClusterConfig = v
return s
}
// SetElasticsearchVersion sets the ElasticsearchVersion field's value.
func (s *ElasticsearchDomainStatus) SetElasticsearchVersion(v string) *ElasticsearchDomainStatus {
s.ElasticsearchVersion = &v
return s
}
// SetEndpoint sets the Endpoint field's value.
func (s *ElasticsearchDomainStatus) SetEndpoint(v string) *ElasticsearchDomainStatus {
s.Endpoint = &v
return s
}
// SetProcessing sets the Processing field's value.
func (s *ElasticsearchDomainStatus) SetProcessing(v bool) *ElasticsearchDomainStatus {
s.Processing = &v
return s
}
// SetSnapshotOptions sets the SnapshotOptions field's value.
func (s *ElasticsearchDomainStatus) SetSnapshotOptions(v *SnapshotOptions) *ElasticsearchDomainStatus {
s.SnapshotOptions = v
return s
}
// Status of the Elasticsearch version options for the specified Elasticsearch
// domain.
// Setter methods return the receiver so calls can be chained.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ElasticsearchVersionStatus
type ElasticsearchVersionStatus struct {
_ struct{} `type:"structure"`
// Specifies the Elasticsearch version for the specified Elasticsearch domain.
//
// Options is a required field
Options *string `type:"string" required:"true"`
// Specifies the status of the Elasticsearch version options for the specified
// Elasticsearch domain.
//
// Status is a required field
Status *OptionStatus `type:"structure" required:"true"`
}
// String returns the string representation
func (s ElasticsearchVersionStatus) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s ElasticsearchVersionStatus) GoString() string {
return s.String()
}
// SetOptions sets the Options field's value.
func (s *ElasticsearchVersionStatus) SetOptions(v string) *ElasticsearchVersionStatus {
s.Options = &v
return s
}
// SetStatus sets the Status field's value.
func (s *ElasticsearchVersionStatus) SetStatus(v *OptionStatus) *ElasticsearchVersionStatus {
s.Status = v
return s
}
// InstanceCountLimits represents the limits on the number of instances that
// can be created in Amazon Elasticsearch for a given InstanceType.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/InstanceCountLimits
type InstanceCountLimits struct {
_ struct{} `type:"structure"`
// Maximum number of Instances that can be instantiated for given InstanceType.
MaximumInstanceCount *int64 `type:"integer"`
// Minimum number of Instances that can be instantiated for given InstanceType.
MinimumInstanceCount *int64 `type:"integer"`
}
// String returns the string representation
func (s InstanceCountLimits) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s InstanceCountLimits) GoString() string {
return s.String()
}
// SetMaximumInstanceCount sets the MaximumInstanceCount field's value. The
// receiver is returned so that setter calls can be chained.
func (s *InstanceCountLimits) SetMaximumInstanceCount(v int64) *InstanceCountLimits {
s.MaximumInstanceCount = &v
return s
}
// SetMinimumInstanceCount sets the MinimumInstanceCount field's value. The
// receiver is returned so that setter calls can be chained.
func (s *InstanceCountLimits) SetMinimumInstanceCount(v int64) *InstanceCountLimits {
s.MinimumInstanceCount = &v
return s
}
// InstanceLimits represents the list of instance related attributes that are
// available for given InstanceType.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/InstanceLimits
type InstanceLimits struct {
_ struct{} `type:"structure"`
// InstanceCountLimits represents the limits on the number of instances that
// can be created in Amazon Elasticsearch for a given InstanceType.
InstanceCountLimits *InstanceCountLimits `type:"structure"`
}
// String returns the string representation
func (s InstanceLimits) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s InstanceLimits) GoString() string {
return s.String()
}
// SetInstanceCountLimits sets the InstanceCountLimits field's value. The
// receiver is returned so that setter calls can be chained.
func (s *InstanceLimits) SetInstanceCountLimits(v *InstanceCountLimits) *InstanceLimits {
s.InstanceCountLimits = v
return s
}
// Limits for a given InstanceType and for each of its roles. Limits contains
// the following: StorageTypes, InstanceLimits, and AdditionalLimits.
// Setter methods return the receiver so calls can be chained.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/Limits
type Limits struct {
_ struct{} `type:"structure"`
// List of additional limits that are specific to a given InstanceType and for
// each of its InstanceRoles.
AdditionalLimits []*AdditionalLimit `type:"list"`
// InstanceLimits represents the list of instance related attributes that are
// available for given InstanceType.
InstanceLimits *InstanceLimits `type:"structure"`
// StorageType represents the list of storage related types and attributes that
// are available for given InstanceType.
StorageTypes []*StorageType `type:"list"`
}
// String returns the string representation
func (s Limits) String() string {
return awsutil.Prettify(s)
}
// GoString returns the string representation
func (s Limits) GoString() string {
return s.String()
}
// SetAdditionalLimits sets the AdditionalLimits field's value.
func (s *Limits) SetAdditionalLimits(v []*AdditionalLimit) *Limits {
s.AdditionalLimits = v
return s
}
// SetInstanceLimits sets the InstanceLimits field's value.
func (s *Limits) SetInstanceLimits(v *InstanceLimits) *Limits {
s.InstanceLimits = v
return s
}
// SetStorageTypes sets the StorageTypes field's value.
func (s *Limits) SetStorageTypes(v []*StorageType) *Limits {
s.StorageTypes = v
return s
}
// ListDomainNamesInput carries no parameters; the ListDomainNames operation
// takes no arguments beyond the caller's credentials.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListDomainNamesInput
type ListDomainNamesInput struct {
    _ struct{} `type:"structure"`
}

// String returns the string representation
func (in ListDomainNamesInput) String() string {
    return awsutil.Prettify(in)
}

// GoString returns the string representation
func (in ListDomainNamesInput) GoString() string {
    return in.String()
}
// The result of a ListDomainNames operation. Contains the names of all Elasticsearch
// domains owned by this account.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListDomainNamesResponse
type ListDomainNamesOutput struct {
    _ struct{} `type:"structure"`

    // List of Elasticsearch domain names.
    DomainNames []*DomainInfo `type:"list"`
}

// String returns the string representation
func (s ListDomainNamesOutput) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ListDomainNamesOutput) GoString() string {
    return s.String()
}

// SetDomainNames sets the DomainNames field's value.
func (s *ListDomainNamesOutput) SetDomainNames(v []*DomainInfo) *ListDomainNamesOutput {
    s.DomainNames = v
    return s
}
// Container for the parameters to the ListElasticsearchInstanceTypes operation.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListElasticsearchInstanceTypesRequest
type ListElasticsearchInstanceTypesInput struct {
    _ struct{} `type:"structure"`

    // DomainName represents the name of the Domain that we are trying to modify.
    // This should be present only if we are querying for the list of available
    // Elasticsearch instance types when modifying an existing domain.
    DomainName *string `location:"querystring" locationName:"domainName" min:"3" type:"string"`

    // Version of Elasticsearch for which the list of supported Elasticsearch
    // instance types is needed.
    //
    // ElasticsearchVersion is a required field
    ElasticsearchVersion *string `location:"uri" locationName:"ElasticsearchVersion" type:"string" required:"true"`

    // Set this value to limit the number of results returned. Value provided must
    // be greater than 30 else it won't be honored.
    MaxResults *int64 `location:"querystring" locationName:"maxResults" type:"integer"`

    // NextToken should be sent if an earlier API call produced a result containing
    // NextToken. It is used for pagination.
    NextToken *string `location:"querystring" locationName:"nextToken" type:"string"`
}

// String returns the string representation
func (s ListElasticsearchInstanceTypesInput) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ListElasticsearchInstanceTypesInput) GoString() string {
    return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
func (s *ListElasticsearchInstanceTypesInput) Validate() error {
    invalidParams := request.ErrInvalidParams{Context: "ListElasticsearchInstanceTypesInput"}
    // DomainName is optional, but when present it must satisfy min length 3.
    if s.DomainName != nil && len(*s.DomainName) < 3 {
        invalidParams.Add(request.NewErrParamMinLen("DomainName", 3))
    }
    if s.ElasticsearchVersion == nil {
        invalidParams.Add(request.NewErrParamRequired("ElasticsearchVersion"))
    }

    if invalidParams.Len() > 0 {
        return invalidParams
    }
    return nil
}

// SetDomainName sets the DomainName field's value.
func (s *ListElasticsearchInstanceTypesInput) SetDomainName(v string) *ListElasticsearchInstanceTypesInput {
    s.DomainName = &v
    return s
}

// SetElasticsearchVersion sets the ElasticsearchVersion field's value.
func (s *ListElasticsearchInstanceTypesInput) SetElasticsearchVersion(v string) *ListElasticsearchInstanceTypesInput {
    s.ElasticsearchVersion = &v
    return s
}

// SetMaxResults sets the MaxResults field's value.
func (s *ListElasticsearchInstanceTypesInput) SetMaxResults(v int64) *ListElasticsearchInstanceTypesInput {
    s.MaxResults = &v
    return s
}

// SetNextToken sets the NextToken field's value.
func (s *ListElasticsearchInstanceTypesInput) SetNextToken(v string) *ListElasticsearchInstanceTypesInput {
    s.NextToken = &v
    return s
}
// Container for the parameters returned by the ListElasticsearchInstanceTypes
// operation.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListElasticsearchInstanceTypesResponse
type ListElasticsearchInstanceTypesOutput struct {
    _ struct{} `type:"structure"`

    // List of instance types supported by the Amazon Elasticsearch service for
    // the given ElasticsearchVersion.
    ElasticsearchInstanceTypes []*string `type:"list"`

    // If there are more results available, NextToken is present; make a further
    // request to the same API with the received NextToken to paginate the remaining
    // results.
    NextToken *string `type:"string"`
}

// String returns the string representation
func (s ListElasticsearchInstanceTypesOutput) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ListElasticsearchInstanceTypesOutput) GoString() string {
    return s.String()
}

// SetElasticsearchInstanceTypes sets the ElasticsearchInstanceTypes field's value.
func (s *ListElasticsearchInstanceTypesOutput) SetElasticsearchInstanceTypes(v []*string) *ListElasticsearchInstanceTypesOutput {
    s.ElasticsearchInstanceTypes = v
    return s
}

// SetNextToken sets the NextToken field's value.
func (s *ListElasticsearchInstanceTypesOutput) SetNextToken(v string) *ListElasticsearchInstanceTypesOutput {
    s.NextToken = &v
    return s
}
// Container for the parameters to the ListElasticsearchVersions operation.
// Use MaxResults to control the maximum number of results to retrieve in a
// single call.
//
// Use NextToken in response to retrieve more results. If the received response
// does not contain a NextToken, then there are no more results to retrieve.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListElasticsearchVersionsRequest
type ListElasticsearchVersionsInput struct {
    _ struct{} `type:"structure"`

    // Set this value to limit the number of results returned. Value provided must
    // be greater than 10 else it won't be honored.
    MaxResults *int64 `location:"querystring" locationName:"maxResults" type:"integer"`

    // Paginated APIs accept a NextToken input to return next-page results and provide
    // a NextToken output in the response, which can be used by the client to retrieve
    // more results.
    NextToken *string `location:"querystring" locationName:"nextToken" type:"string"`
}

// String returns the string representation
func (s ListElasticsearchVersionsInput) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ListElasticsearchVersionsInput) GoString() string {
    return s.String()
}

// SetMaxResults sets the MaxResults field's value.
func (s *ListElasticsearchVersionsInput) SetMaxResults(v int64) *ListElasticsearchVersionsInput {
    s.MaxResults = &v
    return s
}

// SetNextToken sets the NextToken field's value.
func (s *ListElasticsearchVersionsInput) SetNextToken(v string) *ListElasticsearchVersionsInput {
    s.NextToken = &v
    return s
}
// Container for the parameters of the response received from the ListElasticsearchVersions
// operation.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListElasticsearchVersionsResponse
type ListElasticsearchVersionsOutput struct {
    _ struct{} `type:"structure"`

    // List of supported Elasticsearch versions.
    ElasticsearchVersions []*string `type:"list"`

    // Paginated APIs accept a NextToken input to return next-page results and provide
    // a NextToken output in the response, which can be used by the client to retrieve
    // more results.
    NextToken *string `type:"string"`
}

// String returns the string representation
func (s ListElasticsearchVersionsOutput) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ListElasticsearchVersionsOutput) GoString() string {
    return s.String()
}

// SetElasticsearchVersions sets the ElasticsearchVersions field's value.
func (s *ListElasticsearchVersionsOutput) SetElasticsearchVersions(v []*string) *ListElasticsearchVersionsOutput {
    s.ElasticsearchVersions = v
    return s
}

// SetNextToken sets the NextToken field's value.
func (s *ListElasticsearchVersionsOutput) SetNextToken(v string) *ListElasticsearchVersionsOutput {
    s.NextToken = &v
    return s
}
// Container for the parameters to the ListTags operation. Specify the ARN of
// the Elasticsearch domain whose attached tags you want to view.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListTagsRequest
type ListTagsInput struct {
    _ struct{} `type:"structure"`

    // Specify the ARN for the Elasticsearch domain to which the tags are attached
    // that you want to view.
    //
    // ARN is a required field
    ARN *string `location:"querystring" locationName:"arn" type:"string" required:"true"`
}

// String returns the string representation
func (s ListTagsInput) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ListTagsInput) GoString() string {
    return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
func (s *ListTagsInput) Validate() error {
    invalidParams := request.ErrInvalidParams{Context: "ListTagsInput"}
    if s.ARN == nil {
        invalidParams.Add(request.NewErrParamRequired("ARN"))
    }

    if invalidParams.Len() > 0 {
        return invalidParams
    }
    return nil
}

// SetARN sets the ARN field's value.
func (s *ListTagsInput) SetARN(v string) *ListTagsInput {
    s.ARN = &v
    return s
}
// The result of a ListTags operation. Contains tags for all requested Elasticsearch
// domains.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/ListTagsResponse
type ListTagsOutput struct {
    _ struct{} `type:"structure"`

    // List of Tags for the requested Elasticsearch domain.
    TagList []*Tag `type:"list"`
}

// String returns the string representation
func (s ListTagsOutput) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s ListTagsOutput) GoString() string {
    return s.String()
}

// SetTagList sets the TagList field's value.
func (s *ListTagsOutput) SetTagList(v []*Tag) *ListTagsOutput {
    s.TagList = v
    return s
}
// Provides the current status of the entity.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/OptionStatus
type OptionStatus struct {
    _ struct{} `type:"structure"`

    // Timestamp which tells the creation date for the entity.
    // Serialized as a Unix epoch timestamp (see timestampFormat tag).
    //
    // CreationDate is a required field
    CreationDate *time.Time `type:"timestamp" timestampFormat:"unix" required:"true"`

    // Indicates whether the Elasticsearch domain is being deleted.
    PendingDeletion *bool `type:"boolean"`

    // Provides the OptionState for the Elasticsearch domain.
    //
    // State is a required field
    State *string `type:"string" required:"true" enum:"OptionState"`

    // Timestamp which tells the last updated time for the entity.
    //
    // UpdateDate is a required field
    UpdateDate *time.Time `type:"timestamp" timestampFormat:"unix" required:"true"`

    // Specifies the latest version for the entity.
    UpdateVersion *int64 `type:"integer"`
}

// String returns the string representation
func (s OptionStatus) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s OptionStatus) GoString() string {
    return s.String()
}

// SetCreationDate sets the CreationDate field's value.
func (s *OptionStatus) SetCreationDate(v time.Time) *OptionStatus {
    s.CreationDate = &v
    return s
}

// SetPendingDeletion sets the PendingDeletion field's value.
func (s *OptionStatus) SetPendingDeletion(v bool) *OptionStatus {
    s.PendingDeletion = &v
    return s
}

// SetState sets the State field's value.
func (s *OptionStatus) SetState(v string) *OptionStatus {
    s.State = &v
    return s
}

// SetUpdateDate sets the UpdateDate field's value.
func (s *OptionStatus) SetUpdateDate(v time.Time) *OptionStatus {
    s.UpdateDate = &v
    return s
}

// SetUpdateVersion sets the UpdateVersion field's value.
func (s *OptionStatus) SetUpdateVersion(v int64) *OptionStatus {
    s.UpdateVersion = &v
    return s
}
// Container for the parameters to the RemoveTags operation. Specify the ARN
// for the Elasticsearch domain from which you want to remove the specified
// TagKey.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/RemoveTagsRequest
type RemoveTagsInput struct {
    _ struct{} `type:"structure"`

    // Specifies the ARN for the Elasticsearch domain from which you want to delete
    // the specified tags.
    //
    // ARN is a required field
    ARN *string `type:"string" required:"true"`

    // Specifies the TagKey list which you want to remove from the Elasticsearch
    // domain.
    //
    // TagKeys is a required field
    TagKeys []*string `type:"list" required:"true"`
}

// String returns the string representation
func (s RemoveTagsInput) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s RemoveTagsInput) GoString() string {
    return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
func (s *RemoveTagsInput) Validate() error {
    invalidParams := request.ErrInvalidParams{Context: "RemoveTagsInput"}
    if s.ARN == nil {
        invalidParams.Add(request.NewErrParamRequired("ARN"))
    }
    if s.TagKeys == nil {
        invalidParams.Add(request.NewErrParamRequired("TagKeys"))
    }

    if invalidParams.Len() > 0 {
        return invalidParams
    }
    return nil
}

// SetARN sets the ARN field's value.
func (s *RemoveTagsInput) SetARN(v string) *RemoveTagsInput {
    s.ARN = &v
    return s
}

// SetTagKeys sets the TagKeys field's value.
func (s *RemoveTagsInput) SetTagKeys(v []*string) *RemoveTagsInput {
    s.TagKeys = v
    return s
}
// RemoveTagsOutput is an empty response shape: RemoveTags returns no data
// on success.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/RemoveTagsOutput
type RemoveTagsOutput struct {
    _ struct{} `type:"structure"`
}

// String returns the string representation
func (out RemoveTagsOutput) String() string {
    return awsutil.Prettify(out)
}

// GoString returns the string representation
func (out RemoveTagsOutput) GoString() string {
    return out.String()
}
// Specifies the time, in UTC format, when the service takes a daily automated
// snapshot of the specified Elasticsearch domain. Default value is 0 hours.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/SnapshotOptions
type SnapshotOptions struct {
    _ struct{} `type:"structure"`

    // Specifies the hour of the day, in UTC, when the service takes a daily automated
    // snapshot of the specified Elasticsearch domain. Default value is 0 hours.
    AutomatedSnapshotStartHour *int64 `type:"integer"`
}

// String returns the string representation
func (s SnapshotOptions) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s SnapshotOptions) GoString() string {
    return s.String()
}

// SetAutomatedSnapshotStartHour sets the AutomatedSnapshotStartHour field's value.
func (s *SnapshotOptions) SetAutomatedSnapshotStartHour(v int64) *SnapshotOptions {
    s.AutomatedSnapshotStartHour = &v
    return s
}
// Status of a daily automated snapshot.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/SnapshotOptionsStatus
type SnapshotOptionsStatus struct {
    _ struct{} `type:"structure"`

    // Specifies the daily snapshot options specified for the Elasticsearch domain.
    //
    // Options is a required field
    Options *SnapshotOptions `type:"structure" required:"true"`

    // Specifies the status of a daily automated snapshot.
    //
    // Status is a required field
    Status *OptionStatus `type:"structure" required:"true"`
}

// String returns the string representation
func (s SnapshotOptionsStatus) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s SnapshotOptionsStatus) GoString() string {
    return s.String()
}

// SetOptions sets the Options field's value.
func (s *SnapshotOptionsStatus) SetOptions(v *SnapshotOptions) *SnapshotOptionsStatus {
    s.Options = v
    return s
}

// SetStatus sets the Status field's value.
func (s *SnapshotOptionsStatus) SetStatus(v *OptionStatus) *SnapshotOptionsStatus {
    s.Status = v
    return s
}
// StorageTypes represents the list of storage-related types and their attributes
// that are available for a given InstanceType.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/StorageType
type StorageType struct {
    _ struct{} `type:"structure"`

    // SubType of the given storage type. For the "instance" storageType there is
    // no storageSubType; for the "ebs" storageType the valid storageSubTypes are
    // "standard", "gp2" and "io1".
    // Refer to VolumeType for more information regarding the above EBS storage options.
    StorageSubTypeName *string `type:"string"`

    // List of limits that are applicable for the given storage type.
    StorageTypeLimits []*StorageTypeLimit `type:"list"`

    // Type of the storage. Available storage options: "instance" (inbuilt storage
    // available for the given Instance) and "ebs" (Elastic Block Storage that would
    // be attached to the given Instance).
    StorageTypeName *string `type:"string"`
}

// String returns the string representation
func (s StorageType) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s StorageType) GoString() string {
    return s.String()
}

// SetStorageSubTypeName sets the StorageSubTypeName field's value.
func (s *StorageType) SetStorageSubTypeName(v string) *StorageType {
    s.StorageSubTypeName = &v
    return s
}

// SetStorageTypeLimits sets the StorageTypeLimits field's value.
func (s *StorageType) SetStorageTypeLimits(v []*StorageTypeLimit) *StorageType {
    s.StorageTypeLimits = v
    return s
}

// SetStorageTypeName sets the StorageTypeName field's value.
func (s *StorageType) SetStorageTypeName(v string) *StorageType {
    s.StorageTypeName = &v
    return s
}
// Limits that are applicable for a given storage type.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/StorageTypeLimit
type StorageTypeLimit struct {
    _ struct{} `type:"structure"`

    // Name of a storage limit that is applicable for the given storage type. If
    // StorageType is "ebs", the following storage options are applicable:
    //   MinimumVolumeSize - minimum volume size applicable for the given storage
    //   type. It can be empty if it is not applicable.
    //   MaximumVolumeSize - maximum volume size applicable for the given storage
    //   type. It can be empty if it is not applicable.
    //   MaximumIops - maximum amount of IOPS applicable for the given storage
    //   type. It can be empty if it is not applicable.
    //   MinimumIops - minimum amount of IOPS applicable for the given storage
    //   type. It can be empty if it is not applicable.
    LimitName *string `type:"string"`

    // Values for the StorageTypeLimit$LimitName.
    LimitValues []*string `type:"list"`
}

// String returns the string representation
func (s StorageTypeLimit) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s StorageTypeLimit) GoString() string {
    return s.String()
}

// SetLimitName sets the LimitName field's value.
func (s *StorageTypeLimit) SetLimitName(v string) *StorageTypeLimit {
    s.LimitName = &v
    return s
}

// SetLimitValues sets the LimitValues field's value.
func (s *StorageTypeLimit) SetLimitValues(v []*string) *StorageTypeLimit {
    s.LimitValues = v
    return s
}
// Specifies a key value pair for a resource tag.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/Tag
type Tag struct {
    _ struct{} `type:"structure"`

    // Specifies the TagKey, the name of the tag. Tag keys must be unique for the
    // Elasticsearch domain to which they are attached.
    //
    // Key is a required field
    Key *string `min:"1" type:"string" required:"true"`

    // Specifies the TagValue, the value assigned to the corresponding tag key.
    // Tag values can be null and do not have to be unique in a tag set. For example,
    // you can have a key value pair in a tag set of project : Trinity and cost-center
    // : Trinity
    //
    // Value is a required field
    Value *string `type:"string" required:"true"`
}

// String returns the string representation
func (s Tag) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s Tag) GoString() string {
    return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
func (s *Tag) Validate() error {
    invalidParams := request.ErrInvalidParams{Context: "Tag"}
    if s.Key == nil {
        invalidParams.Add(request.NewErrParamRequired("Key"))
    }
    // Key must be non-empty when present (min length 1).
    if s.Key != nil && len(*s.Key) < 1 {
        invalidParams.Add(request.NewErrParamMinLen("Key", 1))
    }
    if s.Value == nil {
        invalidParams.Add(request.NewErrParamRequired("Value"))
    }

    if invalidParams.Len() > 0 {
        return invalidParams
    }
    return nil
}

// SetKey sets the Key field's value.
func (s *Tag) SetKey(v string) *Tag {
    s.Key = &v
    return s
}

// SetValue sets the Value field's value.
func (s *Tag) SetValue(v string) *Tag {
    s.Value = &v
    return s
}
// Container for the parameters to the UpdateElasticsearchDomain operation.
// Specifies the type and number of instances in the domain cluster.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/UpdateElasticsearchDomainConfigRequest
type UpdateElasticsearchDomainConfigInput struct {
    _ struct{} `type:"structure"`

    // IAM access policy as a JSON-formatted string.
    AccessPolicies *string `type:"string"`

    // Modifies the advanced option to allow references to indices in an HTTP request
    // body. Must be false when configuring access to individual sub-resources.
    // By default, the value is true. See Configuration Advanced Options (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-advanced-options)
    // for more information.
    AdvancedOptions map[string]*string `type:"map"`

    // The name of the Elasticsearch domain that you are updating.
    //
    // DomainName is a required field
    DomainName *string `location:"uri" locationName:"DomainName" min:"3" type:"string" required:"true"`

    // Specify the type and size of the EBS volume that you want to use.
    EBSOptions *EBSOptions `type:"structure"`

    // The type and number of instances to instantiate for the domain cluster.
    ElasticsearchClusterConfig *ElasticsearchClusterConfig `type:"structure"`

    // Option to set the time, in UTC format, for the daily automated snapshot.
    // Default value is 0 hours.
    SnapshotOptions *SnapshotOptions `type:"structure"`
}

// String returns the string representation
func (s UpdateElasticsearchDomainConfigInput) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s UpdateElasticsearchDomainConfigInput) GoString() string {
    return s.String()
}

// Validate inspects the fields of the type to determine if they are valid.
func (s *UpdateElasticsearchDomainConfigInput) Validate() error {
    invalidParams := request.ErrInvalidParams{Context: "UpdateElasticsearchDomainConfigInput"}
    if s.DomainName == nil {
        invalidParams.Add(request.NewErrParamRequired("DomainName"))
    }
    if s.DomainName != nil && len(*s.DomainName) < 3 {
        invalidParams.Add(request.NewErrParamMinLen("DomainName", 3))
    }

    if invalidParams.Len() > 0 {
        return invalidParams
    }
    return nil
}

// SetAccessPolicies sets the AccessPolicies field's value.
func (s *UpdateElasticsearchDomainConfigInput) SetAccessPolicies(v string) *UpdateElasticsearchDomainConfigInput {
    s.AccessPolicies = &v
    return s
}

// SetAdvancedOptions sets the AdvancedOptions field's value.
func (s *UpdateElasticsearchDomainConfigInput) SetAdvancedOptions(v map[string]*string) *UpdateElasticsearchDomainConfigInput {
    s.AdvancedOptions = v
    return s
}

// SetDomainName sets the DomainName field's value.
func (s *UpdateElasticsearchDomainConfigInput) SetDomainName(v string) *UpdateElasticsearchDomainConfigInput {
    s.DomainName = &v
    return s
}

// SetEBSOptions sets the EBSOptions field's value.
func (s *UpdateElasticsearchDomainConfigInput) SetEBSOptions(v *EBSOptions) *UpdateElasticsearchDomainConfigInput {
    s.EBSOptions = v
    return s
}

// SetElasticsearchClusterConfig sets the ElasticsearchClusterConfig field's value.
func (s *UpdateElasticsearchDomainConfigInput) SetElasticsearchClusterConfig(v *ElasticsearchClusterConfig) *UpdateElasticsearchDomainConfigInput {
    s.ElasticsearchClusterConfig = v
    return s
}

// SetSnapshotOptions sets the SnapshotOptions field's value.
func (s *UpdateElasticsearchDomainConfigInput) SetSnapshotOptions(v *SnapshotOptions) *UpdateElasticsearchDomainConfigInput {
    s.SnapshotOptions = v
    return s
}
// The result of an UpdateElasticsearchDomain request. Contains the status of
// the Elasticsearch domain being updated.
// Please also see https://docs.aws.amazon.com/goto/WebAPI/es-2015-01-01/UpdateElasticsearchDomainConfigResponse
type UpdateElasticsearchDomainConfigOutput struct {
    _ struct{} `type:"structure"`

    // The status of the updated Elasticsearch domain.
    //
    // DomainConfig is a required field
    DomainConfig *ElasticsearchDomainConfig `type:"structure" required:"true"`
}

// String returns the string representation
func (s UpdateElasticsearchDomainConfigOutput) String() string {
    return awsutil.Prettify(s)
}

// GoString returns the string representation
func (s UpdateElasticsearchDomainConfigOutput) GoString() string {
    return s.String()
}

// SetDomainConfig sets the DomainConfig field's value.
func (s *UpdateElasticsearchDomainConfigOutput) SetDomainConfig(v *ElasticsearchDomainConfig) *UpdateElasticsearchDomainConfigOutput {
    s.DomainConfig = v
    return s
}
// Set of ESPartitionInstanceType enum values: the EC2 instance types that
// Amazon Elasticsearch Service supports for domain clusters.
const (
    // ESPartitionInstanceTypeM3MediumElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeM3MediumElasticsearch = "m3.medium.elasticsearch"

    // ESPartitionInstanceTypeM3LargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeM3LargeElasticsearch = "m3.large.elasticsearch"

    // ESPartitionInstanceTypeM3XlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeM3XlargeElasticsearch = "m3.xlarge.elasticsearch"

    // ESPartitionInstanceTypeM32xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeM32xlargeElasticsearch = "m3.2xlarge.elasticsearch"

    // ESPartitionInstanceTypeM4LargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeM4LargeElasticsearch = "m4.large.elasticsearch"

    // ESPartitionInstanceTypeM4XlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeM4XlargeElasticsearch = "m4.xlarge.elasticsearch"

    // ESPartitionInstanceTypeM42xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeM42xlargeElasticsearch = "m4.2xlarge.elasticsearch"

    // ESPartitionInstanceTypeM44xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeM44xlargeElasticsearch = "m4.4xlarge.elasticsearch"

    // ESPartitionInstanceTypeM410xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeM410xlargeElasticsearch = "m4.10xlarge.elasticsearch"

    // ESPartitionInstanceTypeT2MicroElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeT2MicroElasticsearch = "t2.micro.elasticsearch"

    // ESPartitionInstanceTypeT2SmallElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeT2SmallElasticsearch = "t2.small.elasticsearch"

    // ESPartitionInstanceTypeT2MediumElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeT2MediumElasticsearch = "t2.medium.elasticsearch"

    // ESPartitionInstanceTypeR3LargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeR3LargeElasticsearch = "r3.large.elasticsearch"

    // ESPartitionInstanceTypeR3XlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeR3XlargeElasticsearch = "r3.xlarge.elasticsearch"

    // ESPartitionInstanceTypeR32xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeR32xlargeElasticsearch = "r3.2xlarge.elasticsearch"

    // ESPartitionInstanceTypeR34xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeR34xlargeElasticsearch = "r3.4xlarge.elasticsearch"

    // ESPartitionInstanceTypeR38xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeR38xlargeElasticsearch = "r3.8xlarge.elasticsearch"

    // ESPartitionInstanceTypeI2XlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeI2XlargeElasticsearch = "i2.xlarge.elasticsearch"

    // ESPartitionInstanceTypeI22xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeI22xlargeElasticsearch = "i2.2xlarge.elasticsearch"

    // ESPartitionInstanceTypeD2XlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeD2XlargeElasticsearch = "d2.xlarge.elasticsearch"

    // ESPartitionInstanceTypeD22xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeD22xlargeElasticsearch = "d2.2xlarge.elasticsearch"

    // ESPartitionInstanceTypeD24xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeD24xlargeElasticsearch = "d2.4xlarge.elasticsearch"

    // ESPartitionInstanceTypeD28xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeD28xlargeElasticsearch = "d2.8xlarge.elasticsearch"

    // ESPartitionInstanceTypeC4LargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeC4LargeElasticsearch = "c4.large.elasticsearch"

    // ESPartitionInstanceTypeC4XlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeC4XlargeElasticsearch = "c4.xlarge.elasticsearch"

    // ESPartitionInstanceTypeC42xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeC42xlargeElasticsearch = "c4.2xlarge.elasticsearch"

    // ESPartitionInstanceTypeC44xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeC44xlargeElasticsearch = "c4.4xlarge.elasticsearch"

    // ESPartitionInstanceTypeC48xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeC48xlargeElasticsearch = "c4.8xlarge.elasticsearch"

    // ESPartitionInstanceTypeR4LargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeR4LargeElasticsearch = "r4.large.elasticsearch"

    // ESPartitionInstanceTypeR4XlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeR4XlargeElasticsearch = "r4.xlarge.elasticsearch"

    // ESPartitionInstanceTypeR42xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeR42xlargeElasticsearch = "r4.2xlarge.elasticsearch"

    // ESPartitionInstanceTypeR44xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeR44xlargeElasticsearch = "r4.4xlarge.elasticsearch"

    // ESPartitionInstanceTypeR48xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeR48xlargeElasticsearch = "r4.8xlarge.elasticsearch"

    // ESPartitionInstanceTypeR416xlargeElasticsearch is a ESPartitionInstanceType enum value
    ESPartitionInstanceTypeR416xlargeElasticsearch = "r4.16xlarge.elasticsearch"
)
// The state of a requested change. One of the following:
//
//    * RequiresIndexDocuments: The request requires documents to be reindexed
//      before it can take effect.
//    * Processing: The requested change is still in-process.
//    * Active: The requested change is processed and deployed to the Elasticsearch
//      domain.
const (
    // OptionStateRequiresIndexDocuments is a OptionState enum value
    OptionStateRequiresIndexDocuments = "RequiresIndexDocuments"

    // OptionStateProcessing is a OptionState enum value
    OptionStateProcessing = "Processing"

    // OptionStateActive is a OptionState enum value
    OptionStateActive = "Active"
)
// The type of EBS volume: standard, gp2, or io1. See Configuring EBS-based
// Storage (http://docs.aws.amazon.com/elasticsearch-service/latest/developerguide/es-createupdatedomains.html#es-createdomain-configure-ebs)
// for more information.
const (
    // VolumeTypeStandard is a VolumeType enum value
    VolumeTypeStandard = "standard"

    // VolumeTypeGp2 is a VolumeType enum value
    VolumeTypeGp2 = "gp2"

    // VolumeTypeIo1 is a VolumeType enum value
    VolumeTypeIo1 = "io1"
)
| markpeek/terraform | vendor/github.com/aws/aws-sdk-go/service/elasticsearchservice/api.go | GO | mpl-2.0 | 136,217 |
// Copyright 2015 The appc Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package discovery
import (
"bytes"
"io/ioutil"
"net/http"
"os"
"testing"
"github.com/coreos/rkt/Godeps/_workspace/src/github.com/appc/spec/schema/types"
)
// fakeHTTPGet returns a stand-in HTTP get function that serves the contents
// of the named file with a "200 OK" response. The first `failures`
// invocations instead return "404 Not Found" with an empty body, letting
// tests exercise discovery retry behavior.
func fakeHTTPGet(filename string, failures int) func(uri string) (*http.Response, error) {
	attempts := 0
	return func(uri string) (*http.Response, error) {
		f, err := os.Open(filename)
		if err != nil {
			return nil, err
		}

		var resp *http.Response
		switch {
		case attempts < failures:
			// Simulated failure: the response body is an empty buffer, so
			// the opened file is never handed to the caller and must be
			// closed here to avoid leaking the descriptor (the previous
			// version leaked it on every failure response).
			f.Close()
			resp = &http.Response{
				Status:     "404 Not Found",
				StatusCode: http.StatusNotFound,
				Proto:      "HTTP/1.1",
				ProtoMajor: 1,
				ProtoMinor: 1,
				Header: http.Header{
					"Content-Type": []string{"text/html"},
				},
				Body: ioutil.NopCloser(bytes.NewBufferString("")),
			}
		default:
			// Success: the file becomes the response body; per net/http
			// convention the consumer is responsible for closing it.
			resp = &http.Response{
				Status:     "200 OK",
				StatusCode: http.StatusOK,
				Proto:      "HTTP/1.1",
				ProtoMajor: 1,
				ProtoMinor: 1,
				Header: http.Header{
					"Content-Type": []string{"text/html"},
				},
				Body: f,
			}
		}

		attempts++
		return resp, nil
	}
}
// httpgetter is the signature of the pluggable HTTP fetch function used by
// the discovery code; tests substitute fakes with this shape.
type httpgetter func(uri string) (*http.Response, error)
// TestDiscoverEndpoints runs DiscoverEndpoints against a table of fake HTTP
// responses, covering retry behavior, multi-segment app names, missing labels
// and the reserved "name" label.
func TestDiscoverEndpoints(t *testing.T) {
	tests := []struct {
		get                    httpgetter
		expectDiscoverySuccess bool
		app                    App
		expectedACIEndpoints   []ACIEndpoint
		expectedKeys           []string
	}{
		{
			fakeHTTPGet("myapp.html", 0),
			true,
			App{
				Name: "example.com/myapp",
				Labels: map[types.ACIdentifier]string{
					"version": "1.0.0",
					"os":      "linux",
					"arch":    "amd64",
				},
			},
			[]ACIEndpoint{
				ACIEndpoint{
					ACI: "https://storage.example.com/example.com/myapp-1.0.0.aci?torrent",
					ASC: "https://storage.example.com/example.com/myapp-1.0.0.aci.asc?torrent",
				},
				ACIEndpoint{
					ACI: "hdfs://storage.example.com/example.com/myapp-1.0.0.aci",
					ASC: "hdfs://storage.example.com/example.com/myapp-1.0.0.aci.asc",
				},
			},
			[]string{"https://example.com/pubkeys.gpg"},
		},
		{
			fakeHTTPGet("myapp.html", 1),
			true,
			App{
				Name: "example.com/myapp/foobar",
				Labels: map[types.ACIdentifier]string{
					"version": "1.0.0",
					"os":      "linux",
					"arch":    "amd64",
				},
			},
			[]ACIEndpoint{
				ACIEndpoint{
					ACI: "https://storage.example.com/example.com/myapp/foobar-1.0.0.aci?torrent",
					ASC: "https://storage.example.com/example.com/myapp/foobar-1.0.0.aci.asc?torrent",
				},
				ACIEndpoint{
					ACI: "hdfs://storage.example.com/example.com/myapp/foobar-1.0.0.aci",
					ASC: "hdfs://storage.example.com/example.com/myapp/foobar-1.0.0.aci.asc",
				},
			},
			[]string{"https://example.com/pubkeys.gpg"},
		},
		{
			fakeHTTPGet("myapp.html", 20),
			false,
			App{
				Name: "example.com/myapp/foobar/bazzer",
				Labels: map[types.ACIdentifier]string{
					"version": "1.0.0",
					"os":      "linux",
					"arch":    "amd64",
				},
			},
			[]ACIEndpoint{},
			[]string{},
		},
		// Test missing label. Only one ac-discovery template should be
		// returned as the other one cannot be completely rendered due to
		// missing labels.
		{
			fakeHTTPGet("myapp2.html", 0),
			true,
			App{
				Name: "example.com/myapp",
				Labels: map[types.ACIdentifier]string{
					"version": "1.0.0",
				},
			},
			[]ACIEndpoint{
				ACIEndpoint{
					ACI: "https://storage.example.com/example.com/myapp-1.0.0.aci",
					ASC: "https://storage.example.com/example.com/myapp-1.0.0.aci.asc",
				},
			},
			[]string{"https://example.com/pubkeys.gpg"},
		},
		// Test missing labels. version label should default to
		// "latest" and the first template should be rendered
		{
			fakeHTTPGet("myapp2.html", 0),
			false,
			App{
				Name:   "example.com/myapp",
				Labels: map[types.ACIdentifier]string{},
			},
			[]ACIEndpoint{
				ACIEndpoint{
					ACI: "https://storage.example.com/example.com/myapp-latest.aci",
					ASC: "https://storage.example.com/example.com/myapp-latest.aci.asc",
				},
			},
			[]string{"https://example.com/pubkeys.gpg"},
		},
		// Test with a label called "name". It should be ignored.
		{
			fakeHTTPGet("myapp2.html", 0),
			false,
			App{
				Name: "example.com/myapp",
				Labels: map[types.ACIdentifier]string{
					"name":    "labelcalledname",
					"version": "1.0.0",
				},
			},
			[]ACIEndpoint{
				ACIEndpoint{
					ACI: "https://storage.example.com/example.com/myapp-1.0.0.aci",
					ASC: "https://storage.example.com/example.com/myapp-1.0.0.aci.asc",
				},
			},
			[]string{"https://example.com/pubkeys.gpg"},
		},
	}

	for i, tt := range tests {
		httpGet = &mockHttpGetter{getter: tt.get}
		de, _, err := DiscoverEndpoints(tt.app, true)
		if err != nil && !tt.expectDiscoverySuccess {
			// Expected failure; move on to the next case.
			continue
		}
		if err != nil {
			t.Fatalf("#%d DiscoverEndpoints failed: %v", i, err)
		}

		if len(de.ACIEndpoints) != len(tt.expectedACIEndpoints) {
			// Include the case index for consistency with the other messages.
			t.Errorf("#%d ACIEndpoints array is wrong length want %d got %d", i, len(tt.expectedACIEndpoints), len(de.ACIEndpoints))
		} else {
			// `for n := range` rather than `for n, _ := range` (vet/gofmt idiom).
			for n := range de.ACIEndpoints {
				if de.ACIEndpoints[n] != tt.expectedACIEndpoints[n] {
					t.Errorf("#%d ACIEndpoints[%d] mismatch: want %v got %v", i, n, tt.expectedACIEndpoints[n], de.ACIEndpoints[n])
				}
			}
		}

		if len(de.Keys) != len(tt.expectedKeys) {
			t.Errorf("#%d Keys array is wrong length want %d got %d", i, len(tt.expectedKeys), len(de.Keys))
		} else {
			for n := range de.Keys {
				if de.Keys[n] != tt.expectedKeys[n] {
					t.Errorf("#%d sig[%d] mismatch: want %v got %v", i, n, tt.expectedKeys[n], de.Keys[n])
				}
			}
		}
	}
}
| tixxdz/rkt | Godeps/_workspace/src/github.com/appc/spec/discovery/discovery_test.go | GO | apache-2.0 | 6,128 |
package json
import (
"fmt"
"github.com/hashicorp/hcl2/hcl/hclsyntax"
"github.com/hashicorp/hcl2/hcl"
"github.com/zclconf/go-cty/cty"
)
// body is the implementation of "Body" used for files processed with the JSON
// parser.
type body struct {
	// obj is the JSON object this body wraps; its properties become the
	// body's attributes and blocks.
	obj *objectVal

	// If non-nil, the keys of this map cause the corresponding attributes to
	// be treated as non-existing. This is used when Body.PartialContent is
	// called, to produce the "remaining content" Body.
	hiddenAttrs map[string]struct{}

	// If set, string values are turned into expressions using HIL's template
	// language, rather than the native zcl language. This is intended to
	// allow applications moving from HCL to zcl to continue to parse the
	// JSON variant of their config that HCL handled previously.
	useHIL bool
}
// expression is the implementation of "Expression" used for files processed
// with the JSON parser.
type expression struct {
	// src is the underlying JSON syntax node that this expression evaluates.
	src node
}
// Content decodes this body against the given schema, additionally producing
// an error diagnostic for every JSON object property the schema does not
// mention (with a "did you mean" hint where a close name exists).
func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) {
	content, remain, diags := b.PartialContent(schema)

	consumed := remain.(*body).hiddenAttrs

	// Collect candidate names to offer as suggestions for unknown keys.
	var candidates []string
	for _, attrS := range schema.Attributes {
		// Only suggest an attribute name if we didn't use it already.
		if _, taken := consumed[attrS.Name]; !taken {
			candidates = append(candidates, attrS.Name)
		}
	}
	for _, blockS := range schema.Blocks {
		// Blocks can appear multiple times, so we'll suggest their type
		// names regardless of whether they've already been used.
		candidates = append(candidates, blockS.Type)
	}

	for name, attr := range b.obj.Attrs {
		if name == "//" {
			// "//" keys in objects representing bodies are comments; skip.
			continue
		}
		if _, taken := consumed[name]; taken {
			continue
		}

		hint := ""
		if suggestion := nameSuggestion(name, candidates); suggestion != "" {
			hint = fmt.Sprintf(" Did you mean %q?", suggestion)
		}

		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Extraneous JSON object property",
			Detail:   fmt.Sprintf("No attribute or block type is named %q.%s", name, hint),
			Subject:  &attr.NameRange,
			Context:  attr.Range().Ptr(),
		})
	}

	return content, diags
}
// PartialContent decodes the attributes and blocks named in schema and
// returns them along with a "remaining" body that hides everything consumed
// here, so a later call can decode the rest.
func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
	obj := b.obj
	jsonAttrs := obj.Attrs

	// usedNames starts from any attributes hidden by a previous
	// PartialContent call and accumulates everything consumed here.
	usedNames := map[string]struct{}{}
	if b.hiddenAttrs != nil {
		for k := range b.hiddenAttrs {
			usedNames[k] = struct{}{}
		}
	}
	var diags hcl.Diagnostics

	content := &hcl.BodyContent{
		Attributes:       map[string]*hcl.Attribute{},
		Blocks:           nil,
		MissingItemRange: b.MissingItemRange(),
	}

	for _, attrS := range schema.Attributes {
		jsonAttr, exists := jsonAttrs[attrS.Name]
		_, used := usedNames[attrS.Name]
		if used || !exists {
			// Absent (or already consumed): error only for required attributes.
			if attrS.Required {
				diags = diags.Append(&hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Missing required attribute",
					Detail:   fmt.Sprintf("The attribute %q is required, so a JSON object property must be present with this name.", attrS.Name),
					Subject:  &obj.OpenRange,
				})
			}
			continue
		}
		content.Attributes[attrS.Name] = &hcl.Attribute{
			Name:      attrS.Name,
			Expr:      &expression{src: jsonAttr.Value},
			Range:     hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
			NameRange: jsonAttr.NameRange,
		}
		usedNames[attrS.Name] = struct{}{}
	}

	for _, blockS := range schema.Blocks {
		jsonAttr, exists := jsonAttrs[blockS.Type]
		_, used := usedNames[blockS.Type]
		if used || !exists {
			usedNames[blockS.Type] = struct{}{}
			continue
		}
		v := jsonAttr.Value
		// unpackBlock recursively peels off label levels and appends the
		// resulting blocks to content.Blocks.
		diags = append(diags, b.unpackBlock(v, blockS.Type, &jsonAttr.NameRange, blockS.LabelNames, nil, nil, &content.Blocks)...)
		usedNames[blockS.Type] = struct{}{}
	}

	// The remaining body shares the same object but hides what was used here.
	unusedBody := &body{
		obj:         b.obj,
		hiddenAttrs: usedNames,
		useHIL:      b.useHIL,
	}

	return content, unusedBody, diags
}
// JustAttributes for JSON bodies interprets all properties of the wrapped
// JSON object as attributes and returns them.
func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
	attrs := make(map[string]*hcl.Attribute)

	for name, attr := range b.obj.Attrs {
		// Skip "//" comment properties and anything hidden by an earlier
		// PartialContent call.
		if _, hidden := b.hiddenAttrs[name]; hidden || name == "//" {
			continue
		}

		attrs[name] = &hcl.Attribute{
			Name:      name,
			Expr:      &expression{src: attr.Value},
			Range:     hcl.RangeBetween(attr.NameRange, attr.Value.Range()),
			NameRange: attr.NameRange,
		}
	}

	// No diagnostics possible here, since the parser already took care of
	// finding duplicates and every JSON value can be a valid attribute value.
	return attrs, nil
}
// MissingItemRange returns the position used to report items absent from
// this body: the wrapped object's closing brace.
func (b *body) MissingItemRange() hcl.Range {
	return b.obj.CloseRange
}
// unpackBlock translates the JSON value v into zero or more hcl.Blocks of the
// given type, appending them to *blocks. labelsLeft names the label levels
// still to be consumed; each level is represented in JSON as an object whose
// property names are the label values. labelsUsed/labelRanges carry labels
// collected so far during recursion and are mutated in place.
func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labelsLeft []string, labelsUsed []string, labelRanges []hcl.Range, blocks *hcl.Blocks) (diags hcl.Diagnostics) {
	if len(labelsLeft) > 0 {
		labelName := labelsLeft[0]
		ov, ok := v.(*objectVal)
		if !ok {
			diags = diags.Append(&hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Incorrect JSON value type",
				Detail:   fmt.Sprintf("A JSON object is required, whose keys represent the %s block's %s.", typeName, labelName),
				Subject:  v.StartRange().Ptr(),
			})
			return
		}
		if len(ov.Attrs) == 0 {
			diags = diags.Append(&hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Missing block label",
				Detail:   fmt.Sprintf("At least one object property is required, whose name represents the %s block's %s.", typeName, labelName),
				Subject:  v.StartRange().Ptr(),
			})
			return
		}
		// Reserve one slot for this label level, then fill it in for each
		// property before recursing into that property's value.
		labelsUsed := append(labelsUsed, "")
		labelRanges := append(labelRanges, hcl.Range{})
		for pk, p := range ov.Attrs {
			labelsUsed[len(labelsUsed)-1] = pk
			labelRanges[len(labelRanges)-1] = p.NameRange
			diags = append(diags, b.unpackBlock(p.Value, typeName, typeRange, labelsLeft[1:], labelsUsed, labelRanges, blocks)...)
		}
		return
	}

	// By the time we get here, we've peeled off all the labels and we're ready
	// to deal with the block's actual content.

	// need to copy the label slices because their underlying arrays will
	// continue to be mutated after we return.
	labels := make([]string, len(labelsUsed))
	copy(labels, labelsUsed)
	labelR := make([]hcl.Range, len(labelRanges))
	copy(labelR, labelRanges)

	switch tv := v.(type) {
	case *objectVal:
		// Single instance of the block
		*blocks = append(*blocks, &hcl.Block{
			Type:   typeName,
			Labels: labels,
			Body: &body{
				obj:    tv,
				useHIL: b.useHIL,
			},

			DefRange:    tv.OpenRange,
			TypeRange:   *typeRange,
			LabelRanges: labelR,
		})
	case *arrayVal:
		// Multiple instances of the block
		for _, av := range tv.Values {
			ov, ok := av.(*objectVal)
			if !ok {
				diags = diags.Append(&hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Incorrect JSON value type",
					Detail:   fmt.Sprintf("A JSON object is required, representing the contents of a %q block.", typeName),
					Subject:  v.StartRange().Ptr(),
				})
				continue
			}
			// NOTE(review): DefRange below uses the enclosing array's
			// OpenRange rather than ov.OpenRange — confirm this is intentional.
			*blocks = append(*blocks, &hcl.Block{
				Type:   typeName,
				Labels: labels,
				Body: &body{
					obj:    ov,
					useHIL: b.useHIL,
				},

				DefRange:    tv.OpenRange,
				TypeRange:   *typeRange,
				LabelRanges: labelR,
			})
		}
	default:
		diags = diags.Append(&hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "Incorrect JSON value type",
			Detail:   fmt.Sprintf("Either a JSON object or a JSON array is required, representing the contents of one or more %q blocks.", typeName),
			Subject:  v.StartRange().Ptr(),
		})
	}

	return
}
// Value evaluates this JSON expression. When an EvalContext is provided,
// string values are parsed and evaluated as zcl native-language templates;
// arrays and objects recurse into their elements, producing tuple and object
// values respectively.
func (e *expression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
	switch v := e.src.(type) {
	case *stringVal:
		if ctx != nil {
			// Parse string contents as a zcl native language expression.
			// We only do this if we have a context, so passing a nil context
			// is how the caller specifies that interpolations are not allowed
			// and that the string should just be returned verbatim.
			templateSrc := v.Value
			expr, diags := hclsyntax.ParseTemplate(
				[]byte(templateSrc),
				v.SrcRange.Filename,

				// This won't produce _exactly_ the right result, since
				// the zclsyntax parser can't "see" any escapes we removed
				// while parsing JSON, but it's better than nothing.
				hcl.Pos{
					Line: v.SrcRange.Start.Line,

					// skip over the opening quote mark
					Byte:   v.SrcRange.Start.Byte + 1,
					Column: v.SrcRange.Start.Column + 1,
				},
			)
			if diags.HasErrors() {
				return cty.DynamicVal, diags
			}
			val, evalDiags := expr.Value(ctx)
			diags = append(diags, evalDiags...)
			return val, diags
		}

		// FIXME: Once the native zcl template language parser is implemented,
		// parse string values as templates and evaluate them.
		return cty.StringVal(v.Value), nil
	case *numberVal:
		return cty.NumberVal(v.Value), nil
	case *booleanVal:
		return cty.BoolVal(v.Value), nil
	case *arrayVal:
		vals := []cty.Value{}
		for _, jsonVal := range v.Values {
			// NOTE(review): diagnostics from element evaluation are discarded
			// here — confirm this is intentional.
			val, _ := (&expression{src: jsonVal}).Value(ctx)
			vals = append(vals, val)
		}
		return cty.TupleVal(vals), nil
	case *objectVal:
		attrs := map[string]cty.Value{}
		for name, jsonAttr := range v.Attrs {
			// NOTE(review): attribute diagnostics are likewise discarded.
			val, _ := (&expression{src: jsonAttr.Value}).Value(ctx)
			attrs[name] = val
		}
		return cty.ObjectVal(attrs), nil
	default:
		// Default to DynamicVal so that ASTs containing invalid nodes can
		// still be partially-evaluated.
		return cty.DynamicVal, nil
	}
}
// Variables returns the traversals for all variables referenced by this
// expression, descending recursively into arrays and objects.
func (e *expression) Variables() []hcl.Traversal {
	var vars []hcl.Traversal

	switch v := e.src.(type) {
	case *stringVal:
		// FIXME: Once the native zcl template language parser is implemented,
		// parse with that and look for variables in there too.
	case *arrayVal:
		for _, elem := range v.Values {
			elemExpr := &expression{src: elem}
			vars = append(vars, elemExpr.Variables()...)
		}
	case *objectVal:
		for _, attr := range v.Attrs {
			attrExpr := &expression{src: attr.Value}
			vars = append(vars, attrExpr.Variables()...)
		}
	}

	return vars
}
// Range returns the full source range of the underlying JSON value.
func (e *expression) Range() hcl.Range {
	return e.src.Range()
}
// StartRange returns the source range of the start of the underlying JSON value.
func (e *expression) StartRange() hcl.Range {
	return e.src.StartRange()
}
| xiaozhu36/terraform-provider | vendor/github.com/hashicorp/terraform/vendor/github.com/hashicorp/hcl2/hcl/json/structure.go | GO | apache-2.0 | 10,389 |
package broker_builder_test
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"testing"
)
// TestBrokerBuilder is the Ginkgo entry point: it registers Gomega's Fail as
// the failure handler and runs every spec in the BrokerBuilder suite.
func TestBrokerBuilder(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "BrokerBuilder Suite")
}
| cf-routing/cli | cf/actors/broker_builder/broker_builder_suite_test.go | GO | apache-2.0 | 213 |
/**
* Copyright (c) Microsoft. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
var util = require('util');
var validate = require('../validation');
/**
 * Credentials object that signs requests using a pre-acquired access token.
 *
 * @constructor
 * @param {object} accessToken    Token provider; must expose an
 *                                authenticateRequest(callback) method.
 * @param {string} subscriptionId Subscription identifier attached to these
 *                                credentials.
 */
function AccessTokenCloudCredentials(accessToken, subscriptionId) {
  // Fail fast if the token provider does not have the expected shape.
  validate.validateArgs('AccessTokenCloudCredentials', function (v) {
    v.object(accessToken, 'accessToken');
    v.function(accessToken.authenticateRequest, 'accessToken.authenticateRequest');
  });
  this.accessToken = accessToken;
  this.subscriptionId = subscriptionId;
}
/**
 * Signs an outgoing request by setting its Authorization header from the
 * wrapped access token.
 *
 * @param {object}   webResource Request object whose headers are updated.
 * @param {function} callback    Invoked with (err) once signing completes.
 */
AccessTokenCloudCredentials.prototype.signRequest = function (webResource, callback) {
  var applyAuthorizationHeader = function (err, scheme, token) {
    if (err) {
      return callback(err);
    }
    webResource.headers['Authorization'] = util.format('%s %s', scheme, token);
    callback(null);
  };

  this.accessToken.authenticateRequest(applyAuthorizationHeader);
};
module.exports = AccessTokenCloudCredentials;
| oaastest/azure-xplat-cli | lib/util/authentication/accessTokenCloudCredentials.js | JavaScript | apache-2.0 | 1,385 |
# Specs for ManageIQ::Providers::BaseManager.default_blacklisted_event_names,
# which reads per-provider event blacklists from Settings.
describe ManageIQ::Providers::BaseManager do
  context ".default_blacklisted_event_names" do
    it 'returns an empty array for the base class' do
      # The base class has no provider-specific settings entry.
      expect(described_class.default_blacklisted_event_names).to eq([])
    end
    it 'returns the provider event if configured' do
      # Populate ems.ems_some_provider.blacklisted_event_names in Settings;
      # the key appears to be derived from the stubbed provider_name below —
      # verify against BaseManager's implementation.
      stub_settings_merge(
        :ems => {
          :ems_some_provider => {
            :blacklisted_event_names => %w(ev1 ev2)
          }
        }
      )
      allow(described_class).to receive(:provider_name).and_return('SomeProvider')
      expect(described_class.default_blacklisted_event_names).to eq(%w(ev1 ev2))
    end
  end
end
| tzumainn/manageiq | spec/models/manageiq/providers/base_manager_spec.rb | Ruby | apache-2.0 | 624 |
//// [optionalPropertiesInClasses.ts]
// ifoo requires 'y'; 'x' is optional and may be omitted by implementers.
interface ifoo {
    x?:number;
    y:number;
}
// Satisfies ifoo: required 'y' present; optional 'x' omitted.
class C1 implements ifoo {
    public y:number;
}
class C2 implements ifoo { // ERROR - still need 'y'; the optional 'x' alone does not satisfy the interface
    public x:number;
}
// Satisfies ifoo: provides both optional 'x' and required 'y'.
class C3 implements ifoo {
    public x:number;
    public y:number;
}
//// [optionalPropertiesInClasses.js]
// Emitted ES5 form of class C1 (IIFE returning the constructor).
var C1 = (function () {
    function C1() {
    }
    return C1;
}());
// Emitted ES5 form of class C2 (IIFE returning the constructor).
var C2 = (function () {
    function C2() {
    }
    return C2;
}());
// Emitted ES5 form of class C3 (IIFE returning the constructor).
var C3 = (function () {
    function C3() {
    }
    return C3;
}());
| AbubakerB/TypeScript | tests/baselines/reference/optionalPropertiesInClasses.js | JavaScript | apache-2.0 | 539 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.reef.runtime.hdinsight.client.yarnrest;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.annotate.JsonProperty;
import org.codehaus.jackson.map.ObjectMapper;
import java.io.IOException;
import java.io.StringWriter;
import java.util.List;
import java.util.Map;
/**
 * The data structure used to deserialize the REST response
 * from a call to the Resource Manager to list applications.
 * For detailed information, please refer to
 * https://hadoop.apache.org/docs/r2.6.0/hadoop-yarn/hadoop-yarn-site/ResourceManagerRest.html
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public final class ListApplicationResponse {

  private static final String LIST_APPLICATION_RESPONSE = "listApplicationResponse";
  private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

  /** Raw "apps" wrapper object mapped from the JSON response. */
  private Map<String, List<ApplicationState>> apps;

  /**
   * @return the raw "apps" wrapper map as deserialized from JSON.
   */
  @JsonProperty(Constants.APPS)
  public Map<String, List<ApplicationState>> getApps() {
    return this.apps;
  }

  /**
   * @param apps the raw "apps" wrapper map; invoked by Jackson during deserialization.
   */
  public void setApps(final Map<String, List<ApplicationState>> apps) {
    this.apps = apps;
  }

  /**
   * @return the list of application states, or {@code null} when the response
   * contained no "app" entry; callers must handle the null case.
   */
  public List<ApplicationState> getApplicationStates() {
    if (!this.apps.containsKey(Constants.APP)) {
      return null;
    }
    return this.apps.get(Constants.APP);
  }

  @Override
  public String toString() {
    final StringWriter writer = new StringWriter();
    final String objectString;
    try {
      OBJECT_MAPPER.writeValue(writer, this);
      objectString = writer.toString();
    } catch (final IOException e) {
      // Chain the original exception as the cause so its stack trace is not
      // lost (the previous version concatenated it into the message instead).
      throw new RuntimeException("Exception while serializing ListApplicationResponse", e);
    }
    return LIST_APPLICATION_RESPONSE + objectString;
  }
}
| markusweimer/incubator-reef | lang/java/reef-runtime-hdinsight/src/main/java/org/apache/reef/runtime/hdinsight/client/yarnrest/ListApplicationResponse.java | Java | apache-2.0 | 2,497 |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.wso2.andes.management.ui.views.type;
import java.util.List;
import static org.wso2.andes.management.ui.Constants.CONNECTION;
import org.wso2.andes.management.ui.ManagedBean;
import org.wso2.andes.management.ui.ManagedServer;
import org.eclipse.swt.widgets.TabFolder;
/**
 * Controller class, which takes care of displaying appropriate information and widgets for Connections.
 * This allows user to select Connections and add those to the navigation view.
 */
public class ConnectionTypeTabControl extends MBeanTypeTabControl
{
    /**
     * Creates the Connections tab control.
     *
     * @param tabFolder   parent SWT tab folder hosting this control
     * @param server      managed server whose connections are displayed
     * @param virtualHost name of the virtual host to list connections for
     */
    public ConnectionTypeTabControl(TabFolder tabFolder, ManagedServer server, String virtualHost)
    {
        super(tabFolder, server, virtualHost, CONNECTION);
    }

    /**
     * {@inheritDoc}
     *
     * @return the connection MBeans registered for this control's virtual host
     */
    @Override
    protected List<ManagedBean> getMbeans()
    {
        return _serverRegistry.getConnections(_virtualHost);
    }
}
| wso2/andes | modules/andes-core/management/eclipse-plugin/src/main/java/org/wso2/andes/management/ui/views/type/ConnectionTypeTabControl.java | Java | apache-2.0 | 1,664 |
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
class TraceResult(object):
  """Thin wrapper around a backend-specific trace result implementation.

  Every public method delegates directly to the wrapped implementation
  object, which must provide Serialize(f) and AsTimelineModel().
  """

  def __init__(self, impl):
    # The backend implementation all calls are forwarded to.
    self._impl = impl

  def Serialize(self, f):
    """Serializes the trace result to a file-like object."""
    return self._impl.Serialize(f)

  def AsTimelineModel(self):
    """Parses the trace result into a timeline model for in-memory
    manipulation."""
    return self._impl.AsTimelineModel()
| DirtyUnicorns/android_external_chromium-org | tools/telemetry/telemetry/core/trace_result.py | Python | bsd-3-clause | 520 |
/*
Copyright (c) 2015-2016 VMware, Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package object
import (
"fmt"
"io"
"math/rand"
"os"
"path"
"strings"
"context"
"net/http"
"net/url"
"github.com/vmware/govmomi/property"
"github.com/vmware/govmomi/session"
"github.com/vmware/govmomi/vim25"
"github.com/vmware/govmomi/vim25/mo"
"github.com/vmware/govmomi/vim25/soap"
"github.com/vmware/govmomi/vim25/types"
)
// DatastoreNoSuchDirectoryError is returned when a directory could not be found.
type DatastoreNoSuchDirectoryError struct {
	// verb is the operation that failed (e.g. "stat").
	verb string
	// subject is the datastore path that could not be found.
	subject string
}
// Error implements the error interface, naming the failed operation and the
// directory path that was not found.
func (e DatastoreNoSuchDirectoryError) Error() string {
	return "cannot " + e.verb + " '" + e.subject + "': No such directory"
}
// DatastoreNoSuchFileError is returned when a file could not be found.
type DatastoreNoSuchFileError struct {
	// verb is the operation that failed (e.g. "stat").
	verb string
	// subject is the datastore path of the missing file.
	subject string
}
// Error implements the error interface, naming the failed operation and the
// file path that was not found.
func (e DatastoreNoSuchFileError) Error() string {
	return "cannot " + e.verb + " '" + e.subject + "': No such file"
}
// Datastore is a wrapper for the Datastore managed object type.
type Datastore struct {
	Common

	// DatacenterPath is the inventory path of the owning datacenter; it is
	// sent as the dcPath query parameter for datastore HTTP access.
	DatacenterPath string
}
// NewDatastore constructs a Datastore for the given managed object reference.
// DatacenterPath is left empty; callers that need datastore HTTP access
// should populate it.
func NewDatastore(c *vim25.Client, ref types.ManagedObjectReference) *Datastore {
	return &Datastore{
		Common: NewCommon(c, ref),
	}
}
// Path returns the given datastore-relative path in "[datastore] path" form.
func (d Datastore) Path(path string) string {
	dsPath := DatastorePath{
		Datastore: d.Name(),
		Path:      path,
	}
	return dsPath.String()
}
// NewURL constructs a url.URL with the given file path for datastore access over HTTP.
func (d Datastore) NewURL(path string) *url.URL {
	base := d.c.URL()

	// dcPath/dsName identify the datastore to the /folder endpoint.
	query := url.Values{
		"dcPath": []string{d.DatacenterPath},
		"dsName": []string{d.Name()},
	}

	return &url.URL{
		Scheme:   base.Scheme,
		Host:     base.Host,
		Path:     fmt.Sprintf("/folder/%s", path),
		RawQuery: query.Encode(),
	}
}
// URL is deprecated, use NewURL instead.
//
// The ctx and dc parameters are unused and the error result is always nil.
func (d Datastore) URL(ctx context.Context, dc *Datacenter, path string) (*url.URL, error) {
	return d.NewURL(path), nil
}
// Browser returns this datastore's HostDatastoreBrowser, resolved via the
// datastore's "browser" property.
func (d Datastore) Browser(ctx context.Context) (*HostDatastoreBrowser, error) {
	var ds mo.Datastore

	if err := d.Properties(ctx, d.Reference(), []string{"browser"}, &ds); err != nil {
		return nil, err
	}

	return NewHostDatastoreBrowser(d.c, ds.Browser), nil
}
// useServiceTicket reports whether datastore HTTP access should go through a
// host service ticket, opt-in via the GOVMOMI_USE_SERVICE_TICKET client URL
// query parameter or environment variable.
func (d Datastore) useServiceTicket() bool {
	// If connected to workstation, service ticketing not supported.
	// If connected to ESX, service ticketing not needed.
	if !d.c.IsVC() {
		return false
	}

	// The client URL query takes precedence over the environment.
	key := "GOVMOMI_USE_SERVICE_TICKET"
	val := d.c.URL().Query().Get(key)
	if val == "" {
		val = os.Getenv(key)
	}

	return val == "1" || val == "true"
}
// useServiceTicketHostName reports whether the ticket's HostName should
// replace the URL host, opt-in via GOVMOMI_USE_SERVICE_TICKET_HOSTNAME.
func (d Datastore) useServiceTicketHostName(name string) bool {
	// No need if talking directly to ESX.
	if !d.c.IsVC() {
		return false
	}

	// If version happens to be < 5.1, no host name is provided.
	if name == "" {
		return false
	}

	// If the HostSystem is using DHCP on a network without dynamic DNS,
	// HostSystem.Config.Network.DnsConfig.HostName is set to "localhost" by default.
	// This resolves to "localhost.localdomain" by default via /etc/hosts on ESX.
	// In that case, we will stick with the HostSystem.Name which is the IP address that
	// was used to connect the host to VC.
	if name == "localhost.localdomain" {
		return false
	}

	// Still possible to have a HostName that doesn't resolve via DNS,
	// so this defaults to off unless explicitly enabled.
	key := "GOVMOMI_USE_SERVICE_TICKET_HOSTNAME"
	val := d.c.URL().Query().Get(key)
	if val == "" {
		val = os.Getenv(key)
	}

	return val == "1" || val == "true"
}
// datastoreServiceTicketHostKey is the unexported context key used by
// HostContext to carry a specific *HostSystem for datastore HTTP access.
type datastoreServiceTicketHostKey struct{}
// HostContext returns a Context where the given host will be used for datastore HTTP access
// via the ServiceTicket method.
func (d Datastore) HostContext(ctx context.Context, host *HostSystem) context.Context {
	// ServiceTicket retrieves the host again via the unexported key type.
	return context.WithValue(ctx, datastoreServiceTicketHostKey{}, host)
}
// ServiceTicket obtains a ticket via AcquireGenericServiceTicket and returns it in an http.Cookie,
// together with the url.URL that can be used along with the ticket cookie to access the given path.
// A host is chosen at random unless the given Context was created with a specific host via the
// HostContext method.
func (d Datastore) ServiceTicket(ctx context.Context, path string, method string) (*url.URL, *http.Cookie, error) {
	u := d.NewURL(path)

	host, ok := ctx.Value(datastoreServiceTicketHostKey{}).(*HostSystem)

	if !ok {
		if !d.useServiceTicket() {
			// Ticketing not enabled: access the path through vCenter directly.
			return u, nil, nil
		}

		hosts, err := d.AttachedHosts(ctx)
		if err != nil {
			return nil, nil, err
		}

		if len(hosts) == 0 {
			// Fallback to letting vCenter choose a host
			return u, nil, nil
		}

		// Pick a random attached host
		host = hosts[rand.Intn(len(hosts))]
	}

	ips, err := host.ManagementIPs(ctx)
	if err != nil {
		return nil, nil, err
	}

	if len(ips) > 0 {
		// prefer a ManagementIP
		u.Host = ips[0].String()
	} else {
		// fallback to inventory name
		u.Host, err = host.ObjectName(ctx)
		if err != nil {
			return nil, nil, err
		}
	}

	// VC datacenter path will not be valid against ESX
	q := u.Query()
	delete(q, "dcPath")
	u.RawQuery = q.Encode()

	spec := types.SessionManagerHttpServiceRequestSpec{
		Url: u.String(),
		// See SessionManagerHttpServiceRequestSpecMethod enum
		Method: fmt.Sprintf("http%s%s", method[0:1], strings.ToLower(method[1:])),
	}

	sm := session.NewManager(d.Client())

	ticket, err := sm.AcquireGenericServiceTicket(ctx, &spec)
	if err != nil {
		return nil, nil, err
	}

	cookie := &http.Cookie{
		Name:  "vmware_cgi_ticket",
		Value: ticket.Id,
	}

	if d.useServiceTicketHostName(ticket.HostName) {
		u.Host = ticket.HostName
	}

	// Pin the ticket's SSL thumbprint for the chosen host so the follow-up
	// HTTP request can verify its certificate.
	d.Client().SetThumbprint(u.Host, ticket.SslThumbprint)

	return u, cookie, nil
}
// uploadTicket acquires a service ticket for an HTTP upload of path and
// returns the resolved URL together with a copy of the upload parameters
// carrying that ticket.
func (d Datastore) uploadTicket(ctx context.Context, path string, param *soap.Upload) (*url.URL, *soap.Upload, error) {
	var p soap.Upload
	if param == nil {
		p = soap.DefaultUpload
	} else {
		// Copy the caller's parameters so setting the ticket doesn't mutate them.
		p = *param
	}

	u, ticket, err := d.ServiceTicket(ctx, path, p.Method)
	if err != nil {
		return nil, nil, err
	}

	p.Ticket = ticket

	return u, &p, nil
}
// downloadTicket acquires a service ticket for an HTTP download of path and
// returns the resolved URL together with a copy of the download parameters
// carrying that ticket.
func (d Datastore) downloadTicket(ctx context.Context, path string, param *soap.Download) (*url.URL, *soap.Download, error) {
	var p soap.Download
	if param == nil {
		p = soap.DefaultDownload
	} else {
		// Copy the caller's parameters so setting the ticket doesn't mutate them.
		p = *param
	}

	u, ticket, err := d.ServiceTicket(ctx, path, p.Method)
	if err != nil {
		return nil, nil, err
	}

	p.Ticket = ticket

	return u, &p, nil
}
// Upload via soap.Upload with an http service ticket
func (d Datastore) Upload(ctx context.Context, f io.Reader, path string, param *soap.Upload) error {
	// The returned parameters carry the service ticket cookie, if any.
	u, p, err := d.uploadTicket(ctx, path, param)
	if err != nil {
		return err
	}
	return d.Client().Upload(f, u, p)
}
// UploadFile via soap.Upload with an http service ticket
func (d Datastore) UploadFile(ctx context.Context, file string, path string, param *soap.Upload) error {
	// The returned parameters carry the service ticket cookie, if any.
	u, p, err := d.uploadTicket(ctx, path, param)
	if err != nil {
		return err
	}
	return d.Client().UploadFile(file, u, p)
}
// Download via soap.Download with an http service ticket
func (d Datastore) Download(ctx context.Context, path string, param *soap.Download) (io.ReadCloser, int64, error) {
	// The returned parameters carry the service ticket cookie, if any.
	u, p, err := d.downloadTicket(ctx, path, param)
	if err != nil {
		return nil, 0, err
	}
	return d.Client().Download(u, p)
}
// DownloadFile via soap.Download with an http service ticket
func (d Datastore) DownloadFile(ctx context.Context, path string, file string, param *soap.Download) error {
	// The returned parameters carry the service ticket cookie, if any.
	u, p, err := d.downloadTicket(ctx, path, param)
	if err != nil {
		return err
	}
	return d.Client().DownloadFile(file, u, p)
}
// AttachedHosts returns hosts that have this Datastore attached, accessible and writable.
func (d Datastore) AttachedHosts(ctx context.Context) ([]*HostSystem, error) {
	var ds mo.Datastore
	var hosts []*HostSystem

	pc := property.DefaultCollector(d.Client())
	err := pc.RetrieveOne(ctx, d.Reference(), []string{"host"}, &ds)
	if err != nil {
		return nil, err
	}

	// Index mount info by host reference so it can be looked up after
	// filtering hosts on connection and power state.
	mounts := make(map[types.ManagedObjectReference]types.DatastoreHostMount)
	var refs []types.ManagedObjectReference
	for _, host := range ds.Host {
		refs = append(refs, host.Key)
		mounts[host.Key] = host
	}

	var hs []mo.HostSystem
	err = pc.Retrieve(ctx, refs, []string{"runtime.connectionState", "runtime.powerState"}, &hs)
	if err != nil {
		return nil, err
	}

	for _, host := range hs {
		if host.Runtime.ConnectionState == types.HostSystemConnectionStateConnected &&
			host.Runtime.PowerState == types.HostSystemPowerStatePoweredOn {
			mount := mounts[host.Reference()]
			info := mount.MountInfo
			// NOTE(review): info.Mounted and info.Accessible are pointers and
			// are dereferenced unconditionally — confirm they are always
			// populated for connected, powered-on hosts.
			if *info.Mounted && *info.Accessible && info.AccessMode == string(types.HostMountModeReadWrite) {
				hosts = append(hosts, NewHostSystem(d.Client(), mount.Key))
			}
		}
	}

	return hosts, nil
}
// AttachedClusterHosts returns hosts that have this Datastore attached, accessible and writable and are members of the given cluster.
func (d Datastore) AttachedClusterHosts(ctx context.Context, cluster *ComputeResource) ([]*HostSystem, error) {
	clusterHosts, err := cluster.Hosts(ctx)
	if err != nil {
		return nil, err
	}

	attachedHosts, err := d.AttachedHosts(ctx)
	if err != nil {
		return nil, err
	}

	// Build a membership set of attached host references, then keep only the
	// cluster members present in that set.
	attached := make(map[types.ManagedObjectReference]bool)
	for _, host := range attachedHosts {
		attached[host.Reference()] = true
	}

	var hosts []*HostSystem
	for _, host := range clusterHosts {
		if attached[host.Reference()] {
			hosts = append(hosts, host)
		}
	}

	return hosts, nil
}
// Stat searches the datastore for the named file and returns its FileInfo.
// It returns DatastoreNoSuchDirectoryError if the containing directory does
// not exist, and DatastoreNoSuchFileError if the directory exists but the
// file does not.
func (d Datastore) Stat(ctx context.Context, file string) (types.BaseFileInfo, error) {
	b, err := d.Browser(ctx)
	if err != nil {
		return nil, err
	}

	// Ask the browser for type, size, modification time and owner of entries
	// matching exactly the file's base name within its parent directory.
	spec := types.HostDatastoreBrowserSearchSpec{
		Details: &types.FileQueryFlags{
			FileType:     true,
			FileSize:     true,
			Modification: true,
			FileOwner:    types.NewBool(true),
		},
		MatchPattern: []string{path.Base(file)},
	}

	dsPath := d.Path(path.Dir(file))
	task, err := b.SearchDatastore(ctx, dsPath, &spec)
	if err != nil {
		return nil, err
	}

	info, err := task.WaitForResult(ctx, nil)
	if err != nil {
		// Fixed: the original condition was `info == nil || info.Error != nil`,
		// which dereferenced a nil `info` on the next line and panicked. Only
		// inspect the fault when task info is actually present.
		if info != nil && info.Error != nil {
			if _, ok := info.Error.Fault.(*types.FileNotFound); ok {
				// FileNotFound means the base path doesn't exist.
				return nil, DatastoreNoSuchDirectoryError{"stat", dsPath}
			}
		}

		return nil, err
	}

	res := info.Result.(types.HostDatastoreBrowserSearchResults)
	if len(res.File) == 0 {
		// File doesn't exist
		return nil, DatastoreNoSuchFileError{"stat", d.Path(file)}
	}

	return res.File[0], nil
}
// Type returns the type of file system volume.
func (d Datastore) Type(ctx context.Context) (types.HostFileSystemVolumeFileSystemType, error) {
	// Fetch only the summary.type property of this datastore.
	var props mo.Datastore

	err := d.Properties(ctx, d.Reference(), []string{"summary.type"}, &props)
	if err != nil {
		return types.HostFileSystemVolumeFileSystemType(""), err
	}

	return types.HostFileSystemVolumeFileSystemType(props.Summary.Type), nil
}
| aleksandra-malinowska/autoscaler | vertical-pod-autoscaler/vendor/github.com/vmware/govmomi/object/datastore.go | GO | apache-2.0 | 11,224 |
# Legacy (Rails 2.x) route map for the blog application.
ActionController::Routing::Routes.draw do |map|
  # Friendly aliases for session creation / destruction.
  map.login "login", :controller => "user_sessions", :action => "new"
  map.logout "logout", :controller => "user_sessions", :action => "destroy"

  map.resources :user_sessions
  map.resources :users
  map.resources :articles
  map.resources :comments

  # Root of the site shows the article listing.
  # Fixed: `map.root :articles` passed a bare Symbol where Rails 2's `root`
  # expects an options hash, which raises when the routes are drawn.
  map.root :controller => "articles"
end
| avinashsivaraman/railscasts-episodes | episode-170/blog/config/routes.rb | Ruby | mit | 328 |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Xunit;
namespace System.Globalization.Tests
{
    /// <summary>
    /// Tests covering the <c>CompareInfo.GetStringComparer</c> extension, which
    /// builds culture-aware <see cref="StringComparer"/> instances from a
    /// <see cref="CompareInfo"/> and a set of <see cref="CompareOptions"/>.
    /// </summary>
    public class GlobalizationExtensionsTests
    {
        [Fact]
        public static void CompareInfoThrows()
        {
            // A null receiver, invalid option bits, and conflicting option
            // combinations must all be rejected with the documented parameter
            // names; hashing null must also throw.
            Assert.Throws<ArgumentNullException>("compareInfo", () => { CompareInfo info = null; info.GetStringComparer(CompareOptions.None); });
            Assert.Throws<ArgumentException>("options", () => new CultureInfo("tr-TR").CompareInfo.GetStringComparer((CompareOptions)0xFFFF));
            Assert.Throws<ArgumentException>("options", () => new CultureInfo("tr-TR").CompareInfo.GetStringComparer(CompareOptions.Ordinal | CompareOptions.IgnoreCase));
            Assert.Throws<ArgumentException>("options", () => new CultureInfo("tr-TR").CompareInfo.GetStringComparer(CompareOptions.OrdinalIgnoreCase | CompareOptions.IgnoreCase));
            Assert.Throws<ArgumentNullException>("obj", () => new CultureInfo("tr-TR").CompareInfo.GetStringComparer(CompareOptions.None).GetHashCode(null));
        }

        [Fact]
        [ActiveIssue(810, PlatformID.AnyUnix)]
        public static void CompareInfoBasicTests()
        {
            string one = "A test string";
            string aCopyOfOne = one;

            StringComparer comp = new CultureInfo("fr-FR").CompareInfo.GetStringComparer(CompareOptions.IgnoreCase);

            // A string compares equal to itself (same reference).
            Assert.Equal(0, comp.Compare(one, aCopyOfOne));
            Assert.True(comp.Equals(one, aCopyOfOne));

            // Null-ordering contract: null sorts before any non-null string,
            // and two nulls are equal.
            Assert.Equal(-1, comp.Compare(null, one));
            Assert.Equal(0, comp.Compare(null, null));
            Assert.Equal(1, comp.Compare(one, null));

            Assert.False(comp.Equals(null, one));
            Assert.True(comp.Equals(null, null));
            Assert.False(comp.Equals(one, null));

            // A case-insensitive comparer must hash case variants identically.
            Assert.Equal(comp.GetHashCode("abc"), comp.GetHashCode("ABC"));
        }

        // Each row: left operand, right operand, expected Compare result,
        // culture name, comparison options.
        [Theory]
        [ActiveIssue(810, PlatformID.AnyUnix)]
        [InlineData("abc", "def", -1, "fr-FR", CompareOptions.IgnoreCase)]
        [InlineData("abc", "ABC", 0, "fr-FR", CompareOptions.IgnoreCase)]
        [InlineData("def", "ABC", 1, "fr-FR", CompareOptions.IgnoreCase)]
        [InlineData("abc", "ABC", 32, "en-US", CompareOptions.Ordinal)] // this test generates a 32 for some reason
        [InlineData("abc", "ABC", 0, "en-US", CompareOptions.OrdinalIgnoreCase)]
        [InlineData("Cot\u00E9", "cot\u00E9", 0, "fr-FR", CompareOptions.IgnoreCase)]
        [InlineData("cot\u00E9", "c\u00F4te", 1, "fr-FR", CompareOptions.None)]
        public static void CompareVarying(string one, string two, int compareValue, string culture, CompareOptions compareOptions)
        {
            StringComparer comp = new CultureInfo(culture).CompareInfo.GetStringComparer(compareOptions);

            // Compare and Equals must agree: equal strings (result 0) are
            // Equals-true, everything else Equals-false.
            Assert.Equal(compareValue, comp.Compare(one, two));

            if (compareValue == 0)
            {
                Assert.True(comp.Equals(one, two));
            }
            else
            {
                Assert.False(comp.Equals(one, two));
            }
        }

        [Fact]
        [ActiveIssue(810, PlatformID.AnyUnix)]
        public static void CompareInfoIdentityTests()
        {
            // Comparers built from the same culture + options must be equal and
            // hash alike; differing culture or options must break equality.
            StringComparer us = new CultureInfo("en-US").CompareInfo.GetStringComparer(CompareOptions.IgnoreCase);
            StringComparer us2 = new CultureInfo("en-US").CompareInfo.GetStringComparer(CompareOptions.IgnoreCase);
            StringComparer usNoSym = new CultureInfo("en-US").CompareInfo.GetStringComparer(CompareOptions.IgnoreSymbols);
            StringComparer fr = new CultureInfo("fr-FR").CompareInfo.GetStringComparer(CompareOptions.IgnoreCase);
            StringComparer frOrdinal = new CultureInfo("fr-FR").CompareInfo.GetStringComparer(CompareOptions.Ordinal);

            Assert.True(us.Equals(us2));
            Assert.False(us.Equals(usNoSym));
            Assert.False(us.Equals(fr));
            Assert.False(us.Equals(frOrdinal));

            Assert.Equal(us.GetHashCode(), us2.GetHashCode());
            Assert.NotEqual(us.GetHashCode(), usNoSym.GetHashCode());
            Assert.NotEqual(us.GetHashCode(), fr.GetHashCode());
            Assert.NotEqual(frOrdinal.GetHashCode(), fr.GetHashCode());
        }
    }
}
| Yanjing123/corefx | src/System.Globalization.Extensions/tests/GlobalizationExtensionsTests.cs | C# | mit | 4,354 |
/***********************************************************************
filename: CEGUIGeometryBuffer.cpp
created: Wed Jan 13 2010
author: Paul D Turner <paul@cegui.org.uk>
*************************************************************************/
/***************************************************************************
* Copyright (C) 2004 - 2010 Paul D Turner & The CEGUI Development Team
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
***************************************************************************/
#include "CEGUIGeometryBuffer.h"
// Start of CEGUI namespace section
namespace CEGUI
{
//---------------------------------------------------------------------------//
GeometryBuffer::GeometryBuffer() :
    d_blendMode(BM_NORMAL)
{
    // Newly constructed buffers default to normal alpha blending.
}
//---------------------------------------------------------------------------//
GeometryBuffer::~GeometryBuffer()
{
    // Intentionally empty; this base class owns no resources.
}
//---------------------------------------------------------------------------//
void GeometryBuffer::setBlendMode(const BlendMode mode)
{
    // Record the blend mode to use when this buffer's geometry is rendered.
    d_blendMode = mode;
}
//---------------------------------------------------------------------------//
BlendMode GeometryBuffer::getBlendMode() const
{
    // Return the currently configured blending mode.
    return d_blendMode;
}
//---------------------------------------------------------------------------//
} // End of CEGUI namespace section
| gorkinovich/DefendersOfMankind | dependencies/CEGUI/cegui/src/CEGUIGeometryBuffer.cpp | C++ | gpl-3.0 | 2,382 |
/***********************************************************************
filename: CEGUIItemListBase.cpp
created: 31/3/2005
author: Tomas Lindquist Olsen (based on original Listbox code by Paul D Turner)
purpose: Implementation of ItemListBase widget base class
*************************************************************************/
/***************************************************************************
* Copyright (C) 2004 - 2006 Paul D Turner & The CEGUI Development Team
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
***************************************************************************/
#include "CEGUIExceptions.h"
#include "CEGUIWindowManager.h"
#include "elements/CEGUIItemListBase.h"
#include "elements/CEGUIItemEntry.h"
#include <algorithm>
// Start of CEGUI namespace section
namespace CEGUI
{
const String ItemListBase::EventNamespace("ItemListBase");
/*************************************************************************
ItemListBaseWindowRenderer
*************************************************************************/
ItemListBaseWindowRenderer::ItemListBaseWindowRenderer(const String& name) :
    WindowRenderer(name, ItemListBase::EventNamespace)
{
    // Registers this renderer under the ItemListBase event namespace
    // ("ItemListBase") via the WindowRenderer base constructor.
}
/************************************************************************/
/*************************************************************************
used for < comparisons between ItemEntry pointers
*************************************************************************/
static bool ItemEntry_less(const ItemEntry* a, const ItemEntry* b)
{
return a->getText() < b->getText();
}
/*************************************************************************
used for > comparisons between ItemEntry pointers
*************************************************************************/
static bool ItemEntry_greater(const ItemEntry* a, const ItemEntry* b)
{
return (a->getText() > b->getText());
}
/************************************************************************/
/*************************************************************************
Definition of Properties for this class
*************************************************************************/
ItemListBaseProperties::AutoResizeEnabled ItemListBase::d_autoResizeEnabledProperty;
ItemListBaseProperties::SortEnabled ItemListBase::d_sortEnabledProperty;
ItemListBaseProperties::SortMode ItemListBase::d_sortModeProperty;
/*************************************************************************
Constants
*************************************************************************/
// event names
const String ItemListBase::EventListContentsChanged("ListItemsChanged");
const String ItemListBase::EventSortEnabledChanged("SortEnabledChanged");
const String ItemListBase::EventSortModeChanged("SortModeChanged");
/*************************************************************************
Constructor for ItemListBase base class.
*************************************************************************/
// Constructs a list base with auto-resizing and sorting disabled, ascending
// sort mode, no user sort callback, and no pending resort.
ItemListBase::ItemListBase(const String& type, const String& name)
    : Window(type, name),
    d_autoResize(false),
    d_sortEnabled(false),
    d_sortMode(Ascending),
    d_sortCallback(0),
    d_resort(false)
{
    // by default we dont have a content pane, but to make sure things still work
    // we "emulate" it by setting it to this
    d_pane = this;

    // add properties for ItemListBase class
    addItemListBaseProperties();
}
/*************************************************************************
Destructor for ItemListBase base class.
*************************************************************************/
ItemListBase::~ItemListBase(void)
{
    // NOTE(review): the explicit list teardown below is disabled; cleanup is
    // presumably handled by the normal Window destruction sequence — confirm.
    //resetList_impl();
}
/*************************************************************************
Initialise components
*************************************************************************/
void ItemListBase::initialiseComponents(void)
{
    // Subscribe to child removal on the content pane so the item list can be
    // kept in sync (see handle_PaneChildRemoved).
    // this pane may be ourselves, and in fact is by default...
    d_pane->subscribeEvent(Window::EventChildRemoved,
        Event::Subscriber(&ItemListBase::handle_PaneChildRemoved, this));
}
/*************************************************************************
Return the item at index position 'index'.
*************************************************************************/
ItemEntry* ItemListBase::getItemFromIndex(size_t index) const
{
    // Guard clause: reject out-of-range indices up front.
    if (index >= d_listItems.size())
        CEGUI_THROW(InvalidRequestException("ItemListBase::getItemFromIndex - the specified index is out of range for this ItemListBase."));

    return d_listItems[index];
}
/*************************************************************************
Return the index of ItemEntry \a item
*************************************************************************/
size_t ItemListBase::getItemIndex(const ItemEntry* item) const
{
ItemEntryList::const_iterator pos = std::find(d_listItems.begin(), d_listItems.end(), item);
if (pos != d_listItems.end())
{
return std::distance(d_listItems.begin(), pos);
}
else
{
CEGUI_THROW(InvalidRequestException("ItemListBase::getItemIndex - the specified ItemEntry is not attached to this ItemListBase."));
}
}
/*************************************************************************
Search the list for an item with the specified text
*************************************************************************/
ItemEntry* ItemListBase::findItemWithText(const String& text, const ItemEntry* start_item)
{
    // Scan starts at the first entry, or just after 'start_item' when given.
    size_t i = start_item ? getItemIndex(start_item) + 1 : 0;

    for (; i < d_listItems.size(); ++i)
    {
        // Return the first entry whose caption matches exactly.
        if (d_listItems[i]->getText() == text)
            return d_listItems[i];
    }

    // No entry with matching text.
    return 0;
}
/*************************************************************************
Return whether the specified ItemEntry is in the List
*************************************************************************/
bool ItemListBase::isItemInList(const ItemEntry* item) const
{
    // Ownership is tracked on the entry itself, so no list scan is needed.
    //return std::find(d_listItems.begin(), d_listItems.end(), item) != d_listItems.end();
    return (item->d_ownerList == this);
}
/*************************************************************************
Remove all items from the list.
*************************************************************************/
void ItemListBase::resetList(void)
{
    // Only trigger a list update when something was actually removed.
    if (resetList_impl())
    {
        handleUpdatedItemData();
    }
}
/*************************************************************************
Add the given ItemEntry to the list.
*************************************************************************/
// Attach 'item' to this list, keeping sorted order when sorting is enabled.
void ItemListBase::addItem(ItemEntry* item)
{
    // make sure the item is valid and that we dont already have it in our list
    if (item && item->d_ownerList != this)
    {
        // if sorting is enabled, re-sort the list
        if (d_sortEnabled)
        {
            // upper_bound keeps insertion order stable among equal items.
            d_listItems.insert(
                std::upper_bound(d_listItems.begin(), d_listItems.end(), item, getRealSortCallback()),
                item);
        }
        // just stick it on the end.
        else
        {
            d_listItems.push_back(item);
        }

        // make sure it gets added properly
        // Ownership is set before attaching the child so addChild_impl (which
        // checks d_ownerList) does not insert the entry a second time.
        item->d_ownerList = this;
        addChildWindow(item);
        handleUpdatedItemData();
    }
}
/*************************************************************************
Insert an item into the list box after a specified item already in
the list.
*************************************************************************/
void ItemListBase::insertItem(ItemEntry* item, const ItemEntry* position)
{
    // When sorting is active the position is dictated by sort order, so just
    // delegate to addItem.
    if (d_sortEnabled)
    {
        addItem(item);
    }
    else if (item && item->d_ownerList != this)
    {
        // if position is NULL, insert at the beginning; otherwise insert
        // immediately before 'position' (vector::insert inserts before the
        // iterator it is given).
        ItemEntryList::iterator ins_pos;

        if (!position)
        {
            ins_pos = d_listItems.begin();
        }
        else
        {
            ins_pos = std::find(d_listItems.begin(), d_listItems.end(), position);

            // throw if item 'position' is not in the list
            if (ins_pos == d_listItems.end())
            {
                CEGUI_THROW(InvalidRequestException("ItemListBase::insertItem - the specified ItemEntry for parameter 'position' is not attached to this ItemListBase."));
            }
        }

        d_listItems.insert(ins_pos, item);
        // Set ownership before attaching so addChild_impl does not re-insert.
        item->d_ownerList = this;
        addChildWindow(item);

        handleUpdatedItemData();
    }
}
/*************************************************************************
Removes the given item from the list box.
*************************************************************************/
void ItemListBase::removeItem(ItemEntry* item)
{
    // Only items actually owned by this list can be removed.
    if (item && item->d_ownerList == this)
    {
        // Detaching fires EventChildRemoved, which handle_PaneChildRemoved
        // uses to erase the entry from d_listItems.
        d_pane->removeChildWindow(item);
        if (item->isDestroyedByParent())
        {
            WindowManager::getSingleton().destroyWindow(item);
        }
    }
}
/*************************************************************************
Set wheter or not this ItemListBase widget should automatically
resize to fit its content.
*************************************************************************/
void ItemListBase::setAutoResizeEnabled(bool setting)
{
    const bool wasEnabled = d_autoResize;
    d_autoResize = setting;

    // Resize immediately when the feature transitions from off to on, unless
    // the window is still being initialised.
    if (setting && !wasEnabled && !d_initialising)
        sizeToContent();
}
/*************************************************************************
Causes the list box to update it's internal state after changes have
been made to one or more attached ItemEntry objects.
*************************************************************************/
void ItemListBase::handleUpdatedItemData(bool resort)
{
    // Ignore updates once window destruction has begun.
    if (!d_destructionStarted)
    {
        // Remember that a resort was requested; it is performed lazily inside
        // onListContentsChanged.
        d_resort |= resort;
        WindowEventArgs args(this);
        onListContentsChanged(args);
    }
}
/*************************************************************************
Handler called internally when the list contents are changed
*************************************************************************/
void ItemListBase::onListContentsChanged(WindowEventArgs& e)
{
    // if we are not currently initialising we might have things todo
    if (!d_initialising)
    {
        invalidate();

        // if auto resize is enabled - do it
        if (d_autoResize)
            sizeToContent();

        // resort list if requested and enabled
        if (d_resort && d_sortEnabled)
            sortList(false);
        d_resort = false;

        // redo the item layout and fire our event
        layoutItemWidgets();
        fireEvent(EventListContentsChanged, e, EventNamespace);
    }
}
//----------------------------------------------------------------------------//
void ItemListBase::onParentSized(WindowEventArgs& e)
{
    // Let the base class react to the new parent size first, then re-fit the
    // window to its content when auto-resizing is enabled.
    Window::onParentSized(e);

    if (d_autoResize)
        sizeToContent();
}
//----------------------------------------------------------------------------//
/************************************************************************
Handler for when a child is removed
*************************************************************************/
/*void ItemListBase::onChildRemoved(WindowEventArgs& e)
{
// if destruction has already begun, we don't need to do anything.
// everything has to go anyway
// make sure it is removed from the itemlist if we have an ItemEntry
if (!d_destructionStarted && e.window->testClassName("ItemEntry"))
{
ItemEntryList::iterator pos = std::find(d_listItems.begin(), d_listItems.end(), e.window);
// if item is in the list
if (pos != d_listItems.end())
{
// remove item
(*pos)->d_ownerList = 0;
d_listItems.erase(pos);
// trigger list update
handleUpdatedItemData();
}
}
// base class handling
Window::onChildRemoved(e);
}*/
/************************************************************************
Handler for when the window initiates destruction
*************************************************************************/
/*void ItemListBase::onDestructionStarted(WindowEventArgs& e)
{
// base class handling
Window::onDestructionStarted(e);
// remove everything from the list properly
resetList_impl();
}*/
/*************************************************************************
Remove all items from the list.
*************************************************************************/
// Removes (and, where owned, destroys) all items. Returns true when anything
// was removed, false when the list was already empty.
bool ItemListBase::resetList_impl(void)
{
    // just return false if the list is already empty
    if (d_listItems.empty())
    {
        return false;
    }
    // we have items to be removed and possible deleted
    else
    {
        // delete any items we are supposed to
        // removeChildWindow fires EventChildRemoved, handled by
        // handle_PaneChildRemoved, which erases the entry from d_listItems —
        // hence the loop always takes element 0 until the list drains.
        while (!d_listItems.empty())
        {
            ItemEntry* item = d_listItems[0];
            d_pane->removeChildWindow(item);
            if (item->isDestroyedByParent())
            {
                WindowManager::getSingleton().destroyWindow(item);
            }
        }

        // list is cleared by the removeChild calls
        return true;
    }
}
/*************************************************************************
Add ItemListBase specific properties
*************************************************************************/
void ItemListBase::addItemListBaseProperties(void)
{
    // Expose AutoResizeEnabled, SortEnabled and SortMode as named window
    // properties (shared static instances).
    addProperty(&d_autoResizeEnabledProperty);
    addProperty(&d_sortEnabledProperty);
    addProperty(&d_sortModeProperty);
}
/*************************************************************************
Internal version of adding a child window.
*************************************************************************/
// Intercepts child attachment: ItemEntry children are routed to the content
// pane and registered in d_listItems (unless already owned, as happens when
// coming through addItem/insertItem); other windows get default handling.
void ItemListBase::addChild_impl(Window* wnd)
{
    // if this is an ItemEntry we add it like one, but only if it is not already in the list!
    if (wnd->testClassName("ItemEntry"))
    {
        // add to the pane if we have one
        if (d_pane != this)
        {
            d_pane->addChildWindow(wnd);
        }
        // add item directly to us
        else
        {
            Window::addChild_impl(wnd);
        }

        ItemEntry* item = static_cast<ItemEntry*>(wnd);
        // Skip registration when addItem/insertItem already set ownership.
        if (item->d_ownerList != this)
        {
            // perform normal addItem
            // if sorting is enabled, re-sort the list
            if (d_sortEnabled)
            {
                d_listItems.insert(
                    std::upper_bound(d_listItems.begin(), d_listItems.end(), item, getRealSortCallback()),
                    item);
            }
            // just stick it on the end.
            else
            {
                d_listItems.push_back(item);
            }
            item->d_ownerList = this;
            handleUpdatedItemData();
        }
    }
    // otherwise it's base class processing
    else
    {
        Window::addChild_impl(wnd);
    }
}
/************************************************************************
Initialisation done
*************************************************************************/
void ItemListBase::endInitialisation(void)
{
    // Complete base-class initialisation, then perform the layout/sort work
    // that was deferred while d_initialising was true.
    Window::endInitialisation();
    handleUpdatedItemData(true);
}
/************************************************************************
Perform child window layout
************************************************************************/
void ItemListBase::performChildWindowLayout(void)
{
    Window::performChildWindowLayout();

    // if we are not currently initialising
    if (!d_initialising)
    {
        // Redo the item layout.
        // We don't just call handleUpdateItemData, as that could trigger a resize,
        // which is not what is being requested.
        // It would also cause infinite recursion... so lets just avoid that :)
        layoutItemWidgets();
    }
}
/************************************************************************
Resize to fit content
************************************************************************/
void ItemListBase::sizeToContent_impl(void)
{
Rect renderArea(getItemRenderArea());
Rect wndArea(getArea().asAbsolute(getParentPixelSize()));
// get size of content
Size sz(getContentSize());
// calculate the full size with the frame accounted for and resize the window to this
sz.d_width += wndArea.getWidth() - renderArea.getWidth();
sz.d_height += wndArea.getHeight() - renderArea.getHeight();
setSize(UVector2(cegui_absdim(sz.d_width), cegui_absdim(sz.d_height)));
}
/************************************************************************
Get item render area
************************************************************************/
Rect ItemListBase::getItemRenderArea(void) const
{
    // Guard clause: without an attached window renderer the render area
    // cannot be determined at this level.
    if (d_windowRenderer == 0)
        CEGUI_THROW(InvalidRequestException("ItemListBase::getItemRenderArea - This function must be implemented by the window renderer module"));

    ItemListBaseWindowRenderer* wr =
        static_cast<ItemListBaseWindowRenderer*>(d_windowRenderer);
    return wr->getItemRenderArea();
}
/************************************************************************
Handler to manage items being removed from the content pane
************************************************************************/
// Keeps d_listItems in sync when a child leaves the content pane. Returns
// false so other subscribers also see the event.
bool ItemListBase::handle_PaneChildRemoved(const EventArgs& e)
{
    Window* w = static_cast<const WindowEventArgs&>(e).window;

    // make sure it is removed from the itemlist if we have an ItemEntry
    if (w->testClassName("ItemEntry"))
    {
        ItemEntryList::iterator pos = std::find(d_listItems.begin(), d_listItems.end(), w);

        // if item is in the list
        if (pos != d_listItems.end())
        {
            // make sure the item is no longer related to us
            (*pos)->d_ownerList = 0;
            // remove item
            d_listItems.erase(pos);
            // trigger list update
            handleUpdatedItemData();
        }
    }

    return false;
}
/************************************************************************
Set sorting enabled state
************************************************************************/
void ItemListBase::setSortEnabled(bool setting)
{
    // Nothing to do when the state is unchanged.
    if (d_sortEnabled == setting)
        return;

    d_sortEnabled = setting;

    // Sort immediately when sorting is switched on (deferred while the
    // window is still initialising).
    if (d_sortEnabled && !d_initialising)
        sortList();

    WindowEventArgs e(this);
    onSortEnabledChanged(e);
}
/************************************************************************
Set the user sorting callback
************************************************************************/
void ItemListBase::setSortCallback(SortCallback cb)
{
    // Ignore assignment of the callback that is already installed.
    if (d_sortCallback == cb)
        return;

    d_sortCallback = cb;

    // Re-sort immediately when sorting is active (deferred while initialising).
    if (d_sortEnabled && !d_initialising)
        sortList();

    handleUpdatedItemData(true);
}
/************************************************************************
Handle sort enabled changed
************************************************************************/
void ItemListBase::onSortEnabledChanged(WindowEventArgs& e)
{
    // Fixed: fire with the class event namespace so namespaced subscribers
    // are notified, consistent with onListContentsChanged above.
    fireEvent(EventSortEnabledChanged, e, EventNamespace);
}
/************************************************************************
Handle sort mode changed
************************************************************************/
void ItemListBase::onSortModeChanged(WindowEventArgs& e)
{
    // Fixed: fire with the class event namespace so namespaced subscribers
    // are notified, consistent with onListContentsChanged above.
    fireEvent(EventSortModeChanged, e, EventNamespace);
}
/************************************************************************
Sort list
************************************************************************/
void ItemListBase::sortList(bool relayout)
{
    // Order the entries using the currently active comparison callback.
    std::sort(d_listItems.begin(), d_listItems.end(), getRealSortCallback());

    if (relayout)
        layoutItemWidgets();
}
/************************************************************************
Get the real function pointer to use for the sorting operation
************************************************************************/
ItemListBase::SortCallback ItemListBase::getRealSortCallback() const
{
    // Map the current sort mode onto a concrete comparison function,
    // falling back to ascending order in every unhandled case.
    if (d_sortMode == Descending)
        return &ItemEntry_greater;

    if (d_sortMode == UserSort && d_sortCallback != 0)
        return d_sortCallback;

    return &ItemEntry_less;
}
/************************************************************************
Set sort mode
************************************************************************/
void ItemListBase::setSortMode(SortMode mode)
{
    // Nothing to do when the mode is unchanged.
    if (d_sortMode == mode)
        return;

    d_sortMode = mode;

    // Re-sort immediately under the new mode when sorting is active.
    if (d_sortEnabled && !d_initialising)
        sortList();

    WindowEventArgs e(this);
    onSortModeChanged(e);
}
} // End of CEGUI namespace section
| gorkinovich/DefendersOfMankind | dependencies/CEGUI/cegui/src/elements/CEGUIItemListBase.cpp | C++ | gpl-3.0 | 21,769 |
<?php
/**
* PHP script to be used as 404 handler to create and stream out a
* not yet existing image thumbnail.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* http://www.gnu.org/copyleft/gpl.html
*
* @file
* @ingroup Media
*/
// Signal to the included script that it is running as a 404 handler, so it
// extracts its parameters from the requested thumbnail URL rather than from
// an ordinary query string.
define( 'THUMB_HANDLER', true );

# Execute thumb.php, having set THUMB_HANDLER so that
# it knows to extract params from a thumbnail file URL.
require __DIR__ . '/thumb.php';
| BRL-CAD/web | wiki/thumb_handler.php | PHP | bsd-2-clause | 1,085 |
// Creates an AliAnalysisTaskSEMultDQdev, attaches it to the current analysis
// manager and wires up its I/O containers.
//
// bOutputList: when kTRUE, an extra TList container ("listEvts") is connected
//              to output slot 1.
// Returns the new task, or nullptr when no analysis manager is available.
AliAnalysisTaskSEMultDQdev *AddTaskMultDQdev(const Bool_t bOutputList = kTRUE)
{
  AliAnalysisManager *mgr = AliAnalysisManager::GetAnalysisManager();
  if (!mgr) {
    // Fixed: the error tag previously named an unrelated macro
    // ("AddTaskMuonDistributions"), a copy-paste left-over.
    ::Error("AddTaskMultDQdev", "No analysis manager to connect to.");
    return nullptr;
  }
//=============================================================================

  auto task(new AliAnalysisTaskSEMultDQdev("AliAnalysisTaskSEMultDQdev"));
  mgr->AddTask(task);

  mgr->ConnectInput( task, 0, mgr->GetCommonInputContainer());
  mgr->ConnectOutput(task, 0, mgr->GetCommonOutputContainer());

  if (bOutputList) mgr->ConnectOutput(task, 1, mgr->CreateContainer("listEvts", TList::Class(),
                                                                    AliAnalysisManager::kOutputContainer,
                                                                    AliAnalysisManager::GetCommonFileName()));
//=============================================================================

  return task;
}
| AMechler/AliPhysics | PWG/muon/AddTaskMultDQdev.C | C++ | bsd-3-clause | 986 |
/* LzFind.c -- Match finder for LZ algorithms
2008-10-04 : Igor Pavlov : Public domain */
#include "crn_core.h"
#include <string.h>
#include "lzma_LzFind.h"
#include "lzma_LzHash.h"
namespace crnlib {
#define kEmptyHashValue 0
#define kMaxValForNormalize ((UInt32)0xFFFFFFFF)
#define kNormalizeStepMin (1 << 10) /* it must be power of 2 */
#define kNormalizeMask (~(kNormalizeStepMin - 1))
#define kMaxHistorySize ((UInt32)3 << 30)
#define kStartMaxLen 3
/* Releases the window buffer unless the caller supplied it directly. */
static void LzInWindow_Free(CMatchFinder *p, ISzAlloc *alloc)
{
  if (p->directInput)
    return;

  alloc->Free(alloc, p->bufferBase);
  p->bufferBase = 0;
}
/* keepSizeBefore + keepSizeAfter + keepSizeReserv must be < 4G) */
static int LzInWindow_Create(CMatchFinder *p, UInt32 keepSizeReserv, ISzAlloc *alloc)
{
  /* Total window size: history kept behind the cursor, lookahead kept in
     front of it, plus the caller-requested reserve (sum must fit in 32 bits,
     see the precondition above). */
  UInt32 blockSize = p->keepSizeBefore + p->keepSizeAfter + keepSizeReserv;
  if (p->directInput)
  {
    /* Caller supplies the buffer directly; only record the size. */
    p->blockSize = blockSize;
    return 1;
  }
  /* (Re)allocate only when there is no buffer yet or the size changed. */
  if (p->bufferBase == 0 || p->blockSize != blockSize)
  {
    LzInWindow_Free(p, alloc);
    p->blockSize = blockSize;
    p->bufferBase = (Byte *)alloc->Alloc(alloc, (size_t)blockSize);
  }
  /* Nonzero on success; zero when the allocation failed. */
  return (p->bufferBase != 0);
}
/* Returns a pointer to the current read position inside the sliding window. */
Byte *MatchFinder_GetPointerToCurrentPos(CMatchFinder *p) { return p->buffer; }
/* Returns the byte at signed offset 'index' relative to the current position. */
Byte MatchFinder_GetIndexByte(CMatchFinder *p, Int32 index) { return p->buffer[index]; }
/* Returns how many bytes are buffered ahead of the current position. */
UInt32 MatchFinder_GetNumAvailableBytes(CMatchFinder *p) { return p->streamPos - p->pos; }
void MatchFinder_ReduceOffsets(CMatchFinder *p, UInt32 subValue)
{
p->posLimit -= subValue;
p->pos -= subValue;
p->streamPos -= subValue;
}
static void MatchFinder_ReadBlock(CMatchFinder *p)
{
if (p->streamEndWasReached || p->result != SZ_OK)
return;
for (;;)
{
Byte *dest = p->buffer + (p->streamPos - p->pos);
size_t size = (p->bufferBase + p->blockSize - dest);
if (size == 0)
return;
p->result = p->stream->Read(p->stream, dest, &size);
if (p->result != SZ_OK)
return;
if (size == 0)
{
p->streamEndWasReached = 1;
return;
}
p->streamPos += (UInt32)size;
if (p->streamPos - p->pos > p->keepSizeAfter)
return;
}
}
void MatchFinder_MoveBlock(CMatchFinder *p)
{
memmove(p->bufferBase,
p->buffer - p->keepSizeBefore,
(size_t)(p->streamPos - p->pos + p->keepSizeBefore));
p->buffer = p->bufferBase + p->keepSizeBefore;
}
int MatchFinder_NeedMove(CMatchFinder *p)
{
/* if (p->streamEndWasReached) return 0; */
return ((size_t)(p->bufferBase + p->blockSize - p->buffer) <= p->keepSizeAfter);
}
void MatchFinder_ReadIfRequired(CMatchFinder *p)
{
if (p->streamEndWasReached)
return;
if (p->keepSizeAfter >= p->streamPos - p->pos)
MatchFinder_ReadBlock(p);
}
static void MatchFinder_CheckAndMoveAndRead(CMatchFinder *p)
{
if (MatchFinder_NeedMove(p))
MatchFinder_MoveBlock(p);
MatchFinder_ReadBlock(p);
}
static void MatchFinder_SetDefaultSettings(CMatchFinder *p)
{
p->cutValue = 32;
p->btMode = 1;
p->numHashBytes = 4;
/* p->skipModeBits = 0; */
p->directInput = 0;
p->bigHash = 0;
}
/* Standard reflected CRC-32 polynomial; the table built from it is used by
   the HASH*_CALC macros to hash leading bytes. */
#define kCrcPoly 0xEDB88320

/* One-time construction: clear owned pointers, install default settings,
   and build the 256-entry CRC table. */
void MatchFinder_Construct(CMatchFinder *p)
{
  UInt32 i;
  p->bufferBase = 0;
  p->directInput = 0;
  p->hash = 0;
  MatchFinder_SetDefaultSettings(p);

  for (i = 0; i < 256; i++)
  {
    UInt32 r = i;
    int j;
    for (j = 0; j < 8; j++)
      /* Branch-free: XOR with kCrcPoly only when the low bit was set. */
      r = (r >> 1) ^ (kCrcPoly & ~((r & 1) - 1));
    p->crc[i] = r;
  }
}
/* Free the combined hash + son table allocation (p->son points into it). */
static void MatchFinder_FreeThisClassMemory(CMatchFinder *p, ISzAlloc *alloc)
{
  alloc->Free(alloc, p->hash);
  p->hash = 0;
}
/* Release all memory owned by the match finder (tables and window buffer). */
void MatchFinder_Free(CMatchFinder *p, ISzAlloc *alloc)
{
  MatchFinder_FreeThisClassMemory(p, alloc);
  LzInWindow_Free(p, alloc);
}
/* Allocate num CLzRef entries, guarding against size_t overflow in the
   byte-count multiplication.  Returns 0 on overflow or allocation failure. */
static CLzRef* AllocRefs(UInt32 num, ISzAlloc *alloc)
{
  size_t sizeInBytes = (size_t)num * sizeof(CLzRef);
  if (sizeInBytes / sizeof(CLzRef) != num)
    return 0;
  return (CLzRef *)alloc->Alloc(alloc, sizeInBytes);
}
/* Allocate the window buffer and the hash/son tables for the given dictionary
   size and lookahead requirements.  Returns 1 on success, 0 on failure (in
   which case all owned memory has been freed). */
int MatchFinder_Create(CMatchFinder *p, UInt32 historySize,
    UInt32 keepAddBufferBefore, UInt32 matchMaxLen, UInt32 keepAddBufferAfter,
    ISzAlloc *alloc)
{
  UInt32 sizeReserv;
  if (historySize > kMaxHistorySize)
  {
    MatchFinder_Free(p, alloc);
    return 0;
  }
  /* Extra slack so MoveBlock/ReadBlock are not triggered too frequently. */
  sizeReserv = historySize >> 1;
  if (historySize > ((UInt32)2 << 30))
    sizeReserv = historySize >> 2;
  sizeReserv += (keepAddBufferBefore + matchMaxLen + keepAddBufferAfter) / 2 + (1 << 19);

  p->keepSizeBefore = historySize + keepAddBufferBefore + 1;
  p->keepSizeAfter = matchMaxLen + keepAddBufferAfter;
  /* we need one additional byte, since we use MoveBlock after pos++ and before dictionary using */
  if (LzInWindow_Create(p, sizeReserv, alloc))
  {
    UInt32 newCyclicBufferSize = (historySize /* >> p->skipModeBits */) + 1;
    UInt32 hs;
    p->matchMaxLen = matchMaxLen;
    {
      p->fixedHashSize = 0;
      if (p->numHashBytes == 2)
        hs = (1 << 16) - 1;
      else
      {
        /* Round historySize - 1 up to a power-of-two-minus-one bit mask,
           then halve it to bound the main hash-table size. */
        hs = historySize - 1;
        hs |= (hs >> 1);
        hs |= (hs >> 2);
        hs |= (hs >> 4);
        hs |= (hs >> 8);
        hs >>= 1;
        /* hs >>= p->skipModeBits; */
        hs |= 0xFFFF; /* don't change it! It's required for Deflate */
        if (hs > (1 << 24))
        {
          if (p->numHashBytes == 3)
            hs = (1 << 24) - 1;
          else
            hs >>= 1;
        }
      }
      p->hashMask = hs;
      hs++;
      /* Additional small fixed-size tables for the shorter-prefix hashes. */
      if (p->numHashBytes > 2) p->fixedHashSize += kHash2Size;
      if (p->numHashBytes > 3) p->fixedHashSize += kHash3Size;
      if (p->numHashBytes > 4) p->fixedHashSize += kHash4Size;
      hs += p->fixedHashSize;
    }

    {
      UInt32 prevSize = p->hashSizeSum + p->numSons;
      UInt32 newSize;
      p->historySize = historySize;
      p->hashSizeSum = hs;
      p->cyclicBufferSize = newCyclicBufferSize;
      /* Binary-tree mode needs two links (left/right child) per position. */
      p->numSons = (p->btMode ? newCyclicBufferSize * 2 : newCyclicBufferSize);
      newSize = p->hashSizeSum + p->numSons;
      /* Keep the previous allocation when its size already matches. */
      if (p->hash != 0 && prevSize == newSize)
        return 1;
      MatchFinder_FreeThisClassMemory(p, alloc);
      p->hash = AllocRefs(newSize, alloc);
      if (p->hash != 0)
      {
        p->son = p->hash + p->hashSizeSum;
        return 1;
      }
    }
  }
  MatchFinder_Free(p, alloc);
  return 0;
}
/* Recompute posLimit (how far pos may advance before CheckLimits must run)
   and lenLimit (maximum usable match length at the current position).
   posLimit is bounded by normalization, cyclic-buffer wrap, and buffered data. */
static void MatchFinder_SetLimits(CMatchFinder *p)
{
  UInt32 limit = kMaxValForNormalize - p->pos;
  UInt32 limit2 = p->cyclicBufferSize - p->cyclicBufferPos;
  if (limit2 < limit)
    limit = limit2;
  limit2 = p->streamPos - p->pos;
  if (limit2 <= p->keepSizeAfter)
  {
    /* Near the end of buffered data: advance at most one position so that
       CheckLimits can refill the buffer. */
    if (limit2 > 0)
      limit2 = 1;
  }
  else
    limit2 -= p->keepSizeAfter;
  if (limit2 < limit)
    limit = limit2;
  {
    UInt32 lenLimit = p->streamPos - p->pos;
    if (lenLimit > p->matchMaxLen)
      lenLimit = p->matchMaxLen;
    p->lenLimit = lenLimit;
  }
  p->posLimit = p->pos + limit;
}
/* Reset state for a new stream: clear all hash entries, start positions at
   cyclicBufferSize (so position 0 can mean "empty"), and prime the window. */
void MatchFinder_Init(CMatchFinder *p)
{
  UInt32 i;
  for (i = 0; i < p->hashSizeSum; i++)
    p->hash[i] = kEmptyHashValue;
  p->cyclicBufferPos = 0;
  p->buffer = p->bufferBase;
  p->pos = p->streamPos = p->cyclicBufferSize;
  p->result = SZ_OK;
  p->streamEndWasReached = 0;
  MatchFinder_ReadBlock(p);
  MatchFinder_SetLimits(p);
}
/* Amount to subtract from all positions during normalization, rounded down
   to a multiple of kNormalizeStepMin so relative distances stay valid. */
static UInt32 MatchFinder_GetSubValue(CMatchFinder *p)
{
  return (p->pos - p->historySize - 1) & kNormalizeMask;
}
/* Subtract subValue from every table entry; entries that would underflow
   (references older than the kept history) are reset to the empty marker. */
void MatchFinder_Normalize3(UInt32 subValue, CLzRef *items, UInt32 numItems)
{
  UInt32 i;
  for (i = 0; i < numItems; i++)
  {
    UInt32 v = items[i];
    items[i] = (v <= subValue) ? kEmptyHashValue : (v - subValue);
  }
}
/* Renormalize all position values (tables and counters) to avoid UInt32
   overflow of the monotonically growing position counter. */
static void MatchFinder_Normalize(CMatchFinder *p)
{
  UInt32 subValue = MatchFinder_GetSubValue(p);
  MatchFinder_Normalize3(subValue, p->hash, p->hashSizeSum + p->numSons);
  MatchFinder_ReduceOffsets(p, subValue);
}
/* Called when pos reaches posLimit: normalize if the position counter hit its
   ceiling, refill the buffer if lookahead ran low, wrap the cyclic buffer
   cursor, and recompute the limits. */
static void MatchFinder_CheckLimits(CMatchFinder *p)
{
  if (p->pos == kMaxValForNormalize)
    MatchFinder_Normalize(p);
  if (!p->streamEndWasReached && p->keepSizeAfter == p->streamPos - p->pos)
    MatchFinder_CheckAndMoveAndRead(p);
  if (p->cyclicBufferPos == p->cyclicBufferSize)
    p->cyclicBufferPos = 0;
  MatchFinder_SetLimits(p);
}
/* Hash-chain search: walk the single-linked chain of earlier positions with
   the same hash (most recent first) and append (length, distance - 1) pairs
   to `distances` each time a match longer than the current maxLen is found.
   Stops after cutValue links, when a stale link leaves the window, or when a
   match reaches lenLimit.  Returns the advanced distances pointer. */
static UInt32 * Hc_GetMatchesSpec(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
    UInt32 _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 cutValue,
    UInt32 *distances, UInt32 maxLen)
{
  /* Record the current position as the new head of its chain. */
  son[_cyclicBufferPos] = curMatch;
  for (;;)
  {
    UInt32 delta = pos - curMatch;
    if (cutValue-- == 0 || delta >= _cyclicBufferSize)
      return distances;
    {
      const Byte *pb = cur - delta;
      /* Follow the chain, translating the delta into the cyclic son table. */
      curMatch = son[_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)];
      /* Cheap pre-check: candidate can only beat maxLen if byte maxLen and
         byte 0 both match. */
      if (pb[maxLen] == cur[maxLen] && *pb == *cur)
      {
        UInt32 len = 0;
        while (++len != lenLimit)
          if (pb[len] != cur[len])
            break;
        if (maxLen < len)
        {
          *distances++ = maxLen = len;
          *distances++ = delta - 1;
          if (len == lenLimit)
            return distances;
        }
      }
    }
  }
}
/* Binary-tree search: descend the tree of earlier positions sharing this
   hash, recording each new best (length, distance - 1) pair into `distances`,
   while simultaneously re-linking the tree so the current position becomes
   its new root.  len0/len1 track the guaranteed common prefix along the
   smaller/greater subtree paths so compares can start past it. */
UInt32 * GetMatchesSpec1(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
    UInt32 _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 cutValue,
    UInt32 *distances, UInt32 maxLen)
{
  /* ptr0 = slot for the "greater" child, ptr1 = slot for the "smaller" child
     of the node being inserted (the current position). */
  CLzRef *ptr0 = son + (_cyclicBufferPos << 1) + 1;
  CLzRef *ptr1 = son + (_cyclicBufferPos << 1);
  UInt32 len0 = 0, len1 = 0;
  for (;;)
  {
    UInt32 delta = pos - curMatch;
    if (cutValue-- == 0 || delta >= _cyclicBufferSize)
    {
      /* Search exhausted: terminate both dangling subtree links. */
      *ptr0 = *ptr1 = kEmptyHashValue;
      return distances;
    }
    {
      CLzRef *pair = son + ((_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)) << 1);
      const Byte *pb = cur - delta;
      /* Both paths share at least min(len0, len1) bytes with cur. */
      UInt32 len = (len0 < len1 ? len0 : len1);
      if (pb[len] == cur[len])
      {
        if (++len != lenLimit && pb[len] == cur[len])
          while (++len != lenLimit)
            if (pb[len] != cur[len])
              break;
        if (maxLen < len)
        {
          *distances++ = maxLen = len;
          *distances++ = delta - 1;
          if (len == lenLimit)
          {
            /* Full-length match: splice its children in and stop. */
            *ptr1 = pair[0];
            *ptr0 = pair[1];
            return distances;
          }
        }
      }
      if (pb[len] < cur[len])
      {
        /* Candidate sorts below cur: it becomes the smaller child; continue
           down its greater subtree. */
        *ptr1 = curMatch;
        ptr1 = pair + 1;
        curMatch = *ptr1;
        len1 = len;
      }
      else
      {
        *ptr0 = curMatch;
        ptr0 = pair;
        curMatch = *ptr0;
        len0 = len;
      }
    }
  }
}
/* Same tree descent and re-linking as GetMatchesSpec1, but without recording
   matches — used when the encoder has already decided to skip this position
   and only the tree structure must be kept up to date. */
static void SkipMatchesSpec(UInt32 lenLimit, UInt32 curMatch, UInt32 pos, const Byte *cur, CLzRef *son,
    UInt32 _cyclicBufferPos, UInt32 _cyclicBufferSize, UInt32 cutValue)
{
  CLzRef *ptr0 = son + (_cyclicBufferPos << 1) + 1;
  CLzRef *ptr1 = son + (_cyclicBufferPos << 1);
  UInt32 len0 = 0, len1 = 0;
  for (;;)
  {
    UInt32 delta = pos - curMatch;
    if (cutValue-- == 0 || delta >= _cyclicBufferSize)
    {
      *ptr0 = *ptr1 = kEmptyHashValue;
      return;
    }
    {
      CLzRef *pair = son + ((_cyclicBufferPos - delta + ((delta > _cyclicBufferPos) ? _cyclicBufferSize : 0)) << 1);
      const Byte *pb = cur - delta;
      UInt32 len = (len0 < len1 ? len0 : len1);
      if (pb[len] == cur[len])
      {
        while (++len != lenLimit)
          if (pb[len] != cur[len])
            break;
        {
          if (len == lenLimit)
          {
            /* Full-length match: splice its children in and stop. */
            *ptr1 = pair[0];
            *ptr0 = pair[1];
            return;
          }
        }
      }
      if (pb[len] < cur[len])
      {
        *ptr1 = curMatch;
        ptr1 = pair + 1;
        curMatch = *ptr1;
        len1 = len;
      }
      else
      {
        *ptr0 = curMatch;
        ptr0 = pair;
        curMatch = *ptr0;
        len0 = len;
      }
    }
  }
}
/* Advance the window by one byte; runs the limit/refill machinery when
   posLimit is reached. */
#define MOVE_POS \
  ++p->cyclicBufferPos; \
  p->buffer++; \
  if (++p->pos == p->posLimit) MatchFinder_CheckLimits(p);

#define MOVE_POS_RET MOVE_POS return offset;

/* Out-of-line wrapper so the header macros can bail out of short positions. */
static void MatchFinder_MovePos(CMatchFinder *p) { MOVE_POS; }

/* Common prologue for GetMatches/Skip: declares the locals the HASH*_CALC
   macros write into; if fewer than minLen bytes remain, just advance. */
#define GET_MATCHES_HEADER2(minLen, ret_op) \
  UInt32 lenLimit; UInt32 hashValue; const Byte *cur; UInt32 curMatch; \
  lenLimit = p->lenLimit; { if (lenLimit < minLen) { MatchFinder_MovePos(p); ret_op; }} \
  cur = p->buffer;

#define GET_MATCHES_HEADER(minLen) GET_MATCHES_HEADER2(minLen, return 0)
#define SKIP_HEADER(minLen)        GET_MATCHES_HEADER2(minLen, continue)

/* Shared trailing arguments for the *Spec search routines. */
#define MF_PARAMS(p) p->pos, p->buffer, p->son, p->cyclicBufferPos, p->cyclicBufferSize, p->cutValue

/* Run the binary-tree search, convert the result pointer into a pair count,
   then advance one position and return it. */
#define GET_MATCHES_FOOTER(offset, maxLen) \
  offset = (UInt32)(GetMatchesSpec1(lenLimit, curMatch, MF_PARAMS(p), \
  distances + offset, maxLen) - distances); MOVE_POS_RET;

#define SKIP_FOOTER \
  SkipMatchesSpec(lenLimit, curMatch, MF_PARAMS(p)); MOVE_POS;
/* Binary-tree match finder with a 2-byte hash.  Returns the number of
   UInt32 values written to distances (length/distance pairs). */
static UInt32 Bt2_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  UInt32 offset;
  GET_MATCHES_HEADER(2)
  HASH2_CALC;
  curMatch = p->hash[hashValue];
  p->hash[hashValue] = p->pos;
  offset = 0;
  GET_MATCHES_FOOTER(offset, 1)
}
/* Binary-tree match finder using the Zip/Deflate-style 3-byte hash. */
UInt32 Bt3Zip_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  UInt32 offset;
  GET_MATCHES_HEADER(3)
  HASH_ZIP_CALC;
  curMatch = p->hash[hashValue];
  p->hash[hashValue] = p->pos;
  offset = 0;
  GET_MATCHES_FOOTER(offset, 2)
}
/* Binary-tree match finder with 3-byte main hash plus a 2-byte sub-hash that
   can supply a short-distance 2+ byte match before the tree search. */
static UInt32 Bt3_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  UInt32 hash2Value, delta2, maxLen, offset;
  GET_MATCHES_HEADER(3)

  HASH3_CALC;

  delta2 = p->pos - p->hash[hash2Value];
  curMatch = p->hash[kFix3HashSize + hashValue];

  p->hash[hash2Value] =
  p->hash[kFix3HashSize + hashValue] = p->pos;

  maxLen = 2;
  offset = 0;
  if (delta2 < p->cyclicBufferSize && *(cur - delta2) == *cur)
  {
    /* Extend the 2-byte-hash candidate as far as it goes. */
    for (; maxLen != lenLimit; maxLen++)
      if (cur[(ptrdiff_t)maxLen - delta2] != cur[maxLen])
        break;
    distances[0] = maxLen;
    distances[1] = delta2 - 1;
    offset = 2;
    if (maxLen == lenLimit)
    {
      /* Already at the length cap: keep the tree consistent and return. */
      SkipMatchesSpec(lenLimit, curMatch, MF_PARAMS(p));
      MOVE_POS_RET;
    }
  }
  GET_MATCHES_FOOTER(offset, maxLen)
}
/* Binary-tree match finder with 4-byte main hash plus 2- and 3-byte
   sub-hashes that can yield short matches before the tree search. */
static UInt32 Bt4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  UInt32 hash2Value, hash3Value, delta2, delta3, maxLen, offset;
  GET_MATCHES_HEADER(4)

  HASH4_CALC;

  delta2 = p->pos - p->hash[                hash2Value];
  delta3 = p->pos - p->hash[kFix3HashSize + hash3Value];
  curMatch = p->hash[kFix4HashSize + hashValue];

  p->hash[                hash2Value] =
  p->hash[kFix3HashSize + hash3Value] =
  p->hash[kFix4HashSize + hashValue] = p->pos;

  maxLen = 1;
  offset = 0;
  if (delta2 < p->cyclicBufferSize && *(cur - delta2) == *cur)
  {
    distances[0] = maxLen = 2;
    distances[1] = delta2 - 1;
    offset = 2;
  }
  if (delta2 != delta3 && delta3 < p->cyclicBufferSize && *(cur - delta3) == *cur)
  {
    maxLen = 3;
    distances[offset + 1] = delta3 - 1;
    offset += 2;
    delta2 = delta3;
  }
  if (offset != 0)
  {
    /* Extend the best short-hash candidate and patch its recorded length. */
    for (; maxLen != lenLimit; maxLen++)
      if (cur[(ptrdiff_t)maxLen - delta2] != cur[maxLen])
        break;
    distances[offset - 2] = maxLen;
    if (maxLen == lenLimit)
    {
      SkipMatchesSpec(lenLimit, curMatch, MF_PARAMS(p));
      MOVE_POS_RET;
    }
  }
  /* Tree search only needs to beat length 3. */
  if (maxLen < 3)
    maxLen = 3;
  GET_MATCHES_FOOTER(offset, maxLen)
}
/* Hash-chain match finder with 4-byte main hash plus 2- and 3-byte
   sub-hashes (chain variant of Bt4_MatchFinder_GetMatches). */
static UInt32 Hc4_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  UInt32 hash2Value, hash3Value, delta2, delta3, maxLen, offset;
  GET_MATCHES_HEADER(4)

  HASH4_CALC;

  delta2 = p->pos - p->hash[                hash2Value];
  delta3 = p->pos - p->hash[kFix3HashSize + hash3Value];
  curMatch = p->hash[kFix4HashSize + hashValue];

  p->hash[                hash2Value] =
  p->hash[kFix3HashSize + hash3Value] =
  p->hash[kFix4HashSize + hashValue] = p->pos;

  maxLen = 1;
  offset = 0;
  if (delta2 < p->cyclicBufferSize && *(cur - delta2) == *cur)
  {
    distances[0] = maxLen = 2;
    distances[1] = delta2 - 1;
    offset = 2;
  }
  if (delta2 != delta3 && delta3 < p->cyclicBufferSize && *(cur - delta3) == *cur)
  {
    maxLen = 3;
    distances[offset + 1] = delta3 - 1;
    offset += 2;
    delta2 = delta3;
  }
  if (offset != 0)
  {
    for (; maxLen != lenLimit; maxLen++)
      if (cur[(ptrdiff_t)maxLen - delta2] != cur[maxLen])
        break;
    distances[offset - 2] = maxLen;
    if (maxLen == lenLimit)
    {
      /* Chain mode: just record the current position as the chain head. */
      p->son[p->cyclicBufferPos] = curMatch;
      MOVE_POS_RET;
    }
  }
  if (maxLen < 3)
    maxLen = 3;
  offset = (UInt32)(Hc_GetMatchesSpec(lenLimit, curMatch, MF_PARAMS(p),
    distances + offset, maxLen) - (distances));
  MOVE_POS_RET
}
/* Hash-chain match finder using the Zip/Deflate-style 3-byte hash. */
UInt32 Hc3Zip_MatchFinder_GetMatches(CMatchFinder *p, UInt32 *distances)
{
  UInt32 offset;
  GET_MATCHES_HEADER(3)
  HASH_ZIP_CALC;
  curMatch = p->hash[hashValue];
  p->hash[hashValue] = p->pos;
  offset = (UInt32)(Hc_GetMatchesSpec(lenLimit, curMatch, MF_PARAMS(p),
    distances, 2) - (distances));
  MOVE_POS_RET
}
/* Advance num positions without collecting matches, keeping the 2-byte-hash
   binary tree up to date at every skipped position. */
static void Bt2_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  do
  {
    SKIP_HEADER(2)
    HASH2_CALC;
    curMatch = p->hash[hashValue];
    p->hash[hashValue] = p->pos;
    SKIP_FOOTER
  }
  while (--num != 0);
}
/* Skip variant of Bt3Zip_MatchFinder_GetMatches: update tables only. */
void Bt3Zip_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  do
  {
    SKIP_HEADER(3)
    HASH_ZIP_CALC;
    curMatch = p->hash[hashValue];
    p->hash[hashValue] = p->pos;
    SKIP_FOOTER
  }
  while (--num != 0);
}
/* Skip variant of Bt3_MatchFinder_GetMatches: refresh both hash tables and
   the binary tree at each skipped position. */
static void Bt3_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  do
  {
    UInt32 hash2Value;
    SKIP_HEADER(3)
    HASH3_CALC;
    curMatch = p->hash[kFix3HashSize + hashValue];
    p->hash[hash2Value] =
    p->hash[kFix3HashSize + hashValue] = p->pos;
    SKIP_FOOTER
  }
  while (--num != 0);
}
/* Skip variant of Bt4_MatchFinder_GetMatches: refresh all three hash tables
   and the binary tree at each skipped position. */
static void Bt4_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  do
  {
    UInt32 hash2Value, hash3Value;
    SKIP_HEADER(4)
    HASH4_CALC;
    curMatch = p->hash[kFix4HashSize + hashValue];
    /* NOTE(review): written as two statements here, but all three slots get
       p->pos — equivalent to the single chained assignment used by the
       sibling Bt4/Hc4 routines. */
    p->hash[                hash2Value] =
    p->hash[kFix3HashSize + hash3Value] = p->pos;
    p->hash[kFix4HashSize + hashValue] = p->pos;
    SKIP_FOOTER
  }
  while (--num != 0);
}
/* Skip variant of Hc4_MatchFinder_GetMatches: refresh the hash tables and
   push each skipped position onto its hash chain. */
static void Hc4_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  do
  {
    UInt32 hash2Value, hash3Value;
    SKIP_HEADER(4)
    HASH4_CALC;
    curMatch = p->hash[kFix4HashSize + hashValue];
    p->hash[                hash2Value] =
    p->hash[kFix3HashSize + hash3Value] =
    p->hash[kFix4HashSize + hashValue] = p->pos;
    p->son[p->cyclicBufferPos] = curMatch;
    MOVE_POS
  }
  while (--num != 0);
}
/* Skip variant of Hc3Zip_MatchFinder_GetMatches. */
void Hc3Zip_MatchFinder_Skip(CMatchFinder *p, UInt32 num)
{
  do
  {
    SKIP_HEADER(3)
    HASH_ZIP_CALC;
    curMatch = p->hash[hashValue];
    p->hash[hashValue] = p->pos;
    p->son[p->cyclicBufferPos] = curMatch;
    MOVE_POS
  }
  while (--num != 0);
}
/* Fill the IMatchFinder vtable according to the configured mode:
   hash-chain (btMode == 0) always uses the 4-byte-hash variant, binary-tree
   mode picks the variant matching numHashBytes (2, 3, or 4+). */
void MatchFinder_CreateVTable(CMatchFinder *p, IMatchFinder *vTable)
{
  vTable->Init = (Mf_Init_Func)MatchFinder_Init;
  vTable->GetIndexByte = (Mf_GetIndexByte_Func)MatchFinder_GetIndexByte;
  vTable->GetNumAvailableBytes = (Mf_GetNumAvailableBytes_Func)MatchFinder_GetNumAvailableBytes;
  vTable->GetPointerToCurrentPos = (Mf_GetPointerToCurrentPos_Func)MatchFinder_GetPointerToCurrentPos;
  if (!p->btMode)
  {
    vTable->GetMatches = (Mf_GetMatches_Func)Hc4_MatchFinder_GetMatches;
    vTable->Skip = (Mf_Skip_Func)Hc4_MatchFinder_Skip;
  }
  else if (p->numHashBytes == 2)
  {
    vTable->GetMatches = (Mf_GetMatches_Func)Bt2_MatchFinder_GetMatches;
    vTable->Skip = (Mf_Skip_Func)Bt2_MatchFinder_Skip;
  }
  else if (p->numHashBytes == 3)
  {
    vTable->GetMatches = (Mf_GetMatches_Func)Bt3_MatchFinder_GetMatches;
    vTable->Skip = (Mf_Skip_Func)Bt3_MatchFinder_Skip;
  }
  else
  {
    vTable->GetMatches = (Mf_GetMatches_Func)Bt4_MatchFinder_GetMatches;
    vTable->Skip = (Mf_Skip_Func)Bt4_MatchFinder_Skip;
  }
}
}
| no1dead/xenia | third_party/crunch/crnlib/lzma_LzFind.cpp | C++ | bsd-3-clause | 19,547 |
// WPT resource: statically imports a same-origin module (served by the .py
// handler) and reports the referrer that import request carried back to the
// test page.
import * as module from './export-referrer-checker.py';

// Dedicated worker: report directly on the worker's message channel.
if ('DedicatedWorkerGlobalScope' in self &&
    self instanceof DedicatedWorkerGlobalScope) {
  postMessage(module.referrer);
// Shared worker: report on each connecting client's port.
} else if (
    'SharedWorkerGlobalScope' in self &&
    self instanceof SharedWorkerGlobalScope) {
  onconnect = e => {
    e.ports[0].postMessage(module.referrer);
  };
}
| scheib/chromium | third_party/blink/web_tests/external/wpt/workers/modules/resources/static-import-same-origin-referrer-checker-worker.js | JavaScript | bsd-3-clause | 355 |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <algorithm>
#include <iostream>
#include <limits>
#include "base/compiler_specific.h"
#include "base/memory/scoped_ptr.h"
#include "net/spdy/hpack_output_stream.h"
#include "net/spdy/mock_spdy_framer_visitor.h"
#include "net/spdy/spdy_frame_builder.h"
#include "net/spdy/spdy_frame_reader.h"
#include "net/spdy/spdy_framer.h"
#include "net/spdy/spdy_protocol.h"
#include "net/spdy/spdy_test_utils.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/platform_test.h"
using base::StringPiece;
using std::string;
using std::max;
using std::min;
using std::numeric_limits;
using testing::ElementsAre;
using testing::Pair;
using testing::_;
namespace net {
namespace test {
static const size_t kMaxDecompressedSize = 1024;
// GMock double for SpdyFramerDebugVisitorInterface, used to verify the
// compressed-frame debug callbacks (sizes of sent/received frames).
class MockDebugVisitor : public SpdyFramerDebugVisitorInterface {
 public:
  MOCK_METHOD4(OnSendCompressedFrame, void(SpdyStreamId stream_id,
                                           SpdyFrameType type,
                                           size_t payload_len,
                                           size_t frame_len));

  MOCK_METHOD3(OnReceiveCompressedFrame, void(SpdyStreamId stream_id,
                                              SpdyFrameType type,
                                              size_t frame_len));
};
// Test-only helper that decompresses a single control frame by feeding it
// through a SpdyFramer and capturing the decompressed header block via the
// visitor callbacks.
class SpdyFramerTestUtil {
 public:
  // Decompress a single frame using the decompression context held by
  // the SpdyFramer. The implemention is meant for use only in tests
  // and will CHECK fail if the input is anything other than a single,
  // well-formed compressed frame.
  //
  // Returns a new decompressed SpdyFrame.
  template<class SpdyFrameType> static SpdyFrame* DecompressFrame(
      SpdyFramer* framer, const SpdyFrameType& frame) {
    DecompressionVisitor visitor(framer->protocol_version());
    framer->set_visitor(&visitor);
    CHECK_EQ(frame.size(), framer->ProcessInput(frame.data(), frame.size()));
    CHECK_EQ(SpdyFramer::SPDY_RESET, framer->state());
    framer->set_visitor(NULL);

    char* buffer = visitor.ReleaseBuffer();
    CHECK(buffer != NULL);
    SpdyFrame* decompressed_frame = new SpdyFrame(buffer, visitor.size(), true);
    // Rewrite the length field to reflect the decompressed payload size.
    SetFrameLength(decompressed_frame,
                   visitor.size() - framer->GetControlFrameHeaderSize(),
                   framer->protocol_version());
    return decompressed_frame;
  }

  // Visitor that re-serializes the frame header (uncompressed) and appends
  // the decompressed header-block bytes into an internal buffer.  Any
  // callback that should not fire during decompression is LOG(FATAL).
  class DecompressionVisitor : public SpdyFramerVisitorInterface {
   public:
    explicit DecompressionVisitor(SpdyMajorVersion version)
        : version_(version), size_(0), finished_(false) {}

    void ResetBuffer() {
      CHECK(buffer_.get() == NULL);
      CHECK_EQ(0u, size_);
      CHECK(!finished_);
      buffer_.reset(new char[kMaxDecompressedSize]);
    }

    virtual void OnError(SpdyFramer* framer) OVERRIDE { LOG(FATAL); }
    virtual void OnDataFrameHeader(SpdyStreamId stream_id,
                                   size_t length,
                                   bool fin) OVERRIDE {
      LOG(FATAL) << "Unexpected data frame header";
    }
    virtual void OnStreamFrameData(SpdyStreamId stream_id,
                                   const char* data,
                                   size_t len,
                                   bool fin) OVERRIDE {
      LOG(FATAL);
    }

    // Accumulates decompressed header-block bytes; a zero-length chunk marks
    // the end of the header block.
    virtual bool OnControlFrameHeaderData(SpdyStreamId stream_id,
                                          const char* header_data,
                                          size_t len) OVERRIDE {
      CHECK(buffer_.get() != NULL);
      CHECK_GE(kMaxDecompressedSize, size_ + len);
      CHECK(!finished_);
      if (len != 0) {
        memcpy(buffer_.get() + size_, header_data, len);
        size_ += len;
      } else {
        // Done.
        finished_ = true;
      }
      return true;
    }

    virtual void OnSynStream(SpdyStreamId stream_id,
                             SpdyStreamId associated_stream_id,
                             SpdyPriority priority,
                             bool fin,
                             bool unidirectional) OVERRIDE {
      SpdyFramer framer(version_);
      framer.set_enable_compression(false);
      SpdySynStreamIR syn_stream(stream_id);
      syn_stream.set_associated_to_stream_id(associated_stream_id);
      syn_stream.set_priority(priority);
      syn_stream.set_fin(fin);
      syn_stream.set_unidirectional(unidirectional);
      scoped_ptr<SpdyFrame> frame(framer.SerializeSynStream(syn_stream));
      ResetBuffer();
      memcpy(buffer_.get(), frame->data(), framer.GetSynStreamMinimumSize());
      size_ += framer.GetSynStreamMinimumSize();
    }

    virtual void OnSynReply(SpdyStreamId stream_id, bool fin) OVERRIDE {
      SpdyFramer framer(version_);
      framer.set_enable_compression(false);
      SpdyHeadersIR headers(stream_id);
      headers.set_fin(fin);
      scoped_ptr<SpdyFrame> frame(framer.SerializeHeaders(headers));
      ResetBuffer();
      memcpy(buffer_.get(), frame->data(), framer.GetHeadersMinimumSize());
      size_ += framer.GetSynStreamMinimumSize();
    }

    virtual void OnRstStream(SpdyStreamId stream_id,
                             SpdyRstStreamStatus status) OVERRIDE {
      LOG(FATAL);
    }
    virtual void OnSetting(SpdySettingsIds id,
                           uint8 flags,
                           uint32 value) OVERRIDE {
      LOG(FATAL);
    }
    virtual void OnPing(SpdyPingId unique_id, bool is_ack) OVERRIDE {
      LOG(FATAL);
    }
    virtual void OnSettingsEnd() OVERRIDE { LOG(FATAL); }
    virtual void OnGoAway(SpdyStreamId last_accepted_stream_id,
                          SpdyGoAwayStatus status) OVERRIDE {
      LOG(FATAL);
    }

    virtual void OnHeaders(SpdyStreamId stream_id,
                           bool fin,
                           bool end) OVERRIDE {
      SpdyFramer framer(version_);
      framer.set_enable_compression(false);
      SpdyHeadersIR headers(stream_id);
      headers.set_fin(fin);
      scoped_ptr<SpdyFrame> frame(framer.SerializeHeaders(headers));
      ResetBuffer();
      memcpy(buffer_.get(), frame->data(), framer.GetHeadersMinimumSize());
      size_ += framer.GetHeadersMinimumSize();
    }

    virtual void OnWindowUpdate(SpdyStreamId stream_id, int delta_window_size) {
      LOG(FATAL);
    }

    virtual void OnPushPromise(SpdyStreamId stream_id,
                               SpdyStreamId promised_stream_id,
                               bool end) OVERRIDE {
      SpdyFramer framer(version_);
      framer.set_enable_compression(false);
      SpdyPushPromiseIR push_promise(stream_id, promised_stream_id);
      scoped_ptr<SpdyFrame> frame(framer.SerializePushPromise(push_promise));
      ResetBuffer();
      memcpy(buffer_.get(), frame->data(), framer.GetPushPromiseMinimumSize());
      size_ += framer.GetPushPromiseMinimumSize();
    }

    virtual void OnContinuation(SpdyStreamId stream_id, bool end) OVERRIDE {
      LOG(FATAL);
    }

    virtual void OnPriority(SpdyStreamId stream_id,
                            SpdyStreamId parent_stream_id,
                            uint8 weight,
                            bool exclusive) OVERRIDE {
      // Do nothing.
    }

    virtual bool OnUnknownFrame(SpdyStreamId stream_id,
                                int frame_type) OVERRIDE {
      LOG(FATAL);
      return false;
    }

    // Transfers ownership of the accumulated buffer to the caller.
    char* ReleaseBuffer() {
      CHECK(finished_);
      return buffer_.release();
    }

    virtual void OnWindowUpdate(SpdyStreamId stream_id,
                                uint32 delta_window_size) OVERRIDE {
      LOG(FATAL);
    }

    size_t size() const {
      CHECK(finished_);
      return size_;
    }

   private:
    SpdyMajorVersion version_;
    scoped_ptr<char[]> buffer_;
    size_t size_;
    bool finished_;

    DISALLOW_COPY_AND_ASSIGN(DecompressionVisitor);
  };

 private:
  DISALLOW_COPY_AND_ASSIGN(SpdyFramerTestUtil);
};
// Recording visitor for framer tests: counts every callback, accumulates
// header-block bytes into a resizable buffer, and can drive a framer over
// raw input in randomized small chunks (SimulateInFramer).
class TestSpdyVisitor : public SpdyFramerVisitorInterface,
                        public SpdyFramerDebugVisitorInterface  {
 public:
  // This is larger than our max frame size because header blocks that
  // are too long can spill over into CONTINUATION frames.
  static const size_t kDefaultHeaderBufferSize = 16 * 1024 * 1024;

  explicit TestSpdyVisitor(SpdyMajorVersion version)
    : framer_(version),
      use_compression_(false),
      error_count_(0),
      syn_frame_count_(0),
      syn_reply_frame_count_(0),
      headers_frame_count_(0),
      push_promise_frame_count_(0),
      goaway_count_(0),
      setting_count_(0),
      settings_ack_sent_(0),
      settings_ack_received_(0),
      continuation_count_(0),
      altsvc_count_(0),
      priority_count_(0),
      test_altsvc_ir_(0),
      on_unknown_frame_result_(false),
      last_window_update_stream_(0),
      last_window_update_delta_(0),
      last_push_promise_stream_(0),
      last_push_promise_promised_stream_(0),
      data_bytes_(0),
      fin_frame_count_(0),
      fin_opaque_data_(),
      fin_flag_count_(0),
      zero_length_data_frame_count_(0),
      control_frame_header_data_count_(0),
      zero_length_control_frame_header_data_count_(0),
      data_frame_count_(0),
      last_payload_len_(0),
      last_frame_len_(0),
      header_buffer_(new char[kDefaultHeaderBufferSize]),
      header_buffer_length_(0),
      header_buffer_size_(kDefaultHeaderBufferSize),
      header_stream_id_(static_cast<SpdyStreamId>(-1)),
      header_control_type_(DATA),
      header_buffer_valid_(false) {}

  virtual void OnError(SpdyFramer* f) OVERRIDE {
    LOG(INFO) << "SpdyFramer Error: "
              << SpdyFramer::ErrorCodeToString(f->error_code());
    ++error_count_;
  }

  virtual void OnDataFrameHeader(SpdyStreamId stream_id,
                                 size_t length,
                                 bool fin) OVERRIDE {
    ++data_frame_count_;
    header_stream_id_ = stream_id;
  }

  // Logs the data payload (hex) and tallies the byte count.
  virtual void OnStreamFrameData(SpdyStreamId stream_id,
                                 const char* data,
                                 size_t len,
                                 bool fin) OVERRIDE {
    EXPECT_EQ(header_stream_id_, stream_id);
    if (len == 0)
      ++zero_length_data_frame_count_;

    data_bytes_ += len;
    std::cerr << "OnStreamFrameData(" << stream_id << ", \"";
    if (len > 0) {
      for (size_t i = 0 ; i < len; ++i) {
        std::cerr << std::hex << (0xFF & (unsigned int)data[i]) << std::dec;
      }
    }
    std::cerr << "\", " << len << ")\n";
  }

  // Buffers header-block chunks; a zero-length chunk ends the block and
  // triggers parsing of the accumulated buffer into headers_.
  virtual bool OnControlFrameHeaderData(SpdyStreamId stream_id,
                                        const char* header_data,
                                        size_t len) OVERRIDE {
    ++control_frame_header_data_count_;
    CHECK_EQ(header_stream_id_, stream_id);
    if (len == 0) {
      ++zero_length_control_frame_header_data_count_;
      // Indicates end-of-header-block.
      headers_.clear();
      CHECK(header_buffer_valid_);
      size_t parsed_length = framer_.ParseHeaderBlockInBuffer(
          header_buffer_.get(), header_buffer_length_, &headers_);
      LOG_IF(DFATAL, header_buffer_length_ != parsed_length)
          << "Check failed: header_buffer_length_ == parsed_length "
          << "(" << header_buffer_length_ << " vs. " << parsed_length << ")";
      return true;
    }
    const size_t available = header_buffer_size_ - header_buffer_length_;
    if (len > available) {
      header_buffer_valid_ = false;
      return false;
    }
    memcpy(header_buffer_.get() + header_buffer_length_, header_data, len);
    header_buffer_length_ += len;
    return true;
  }

  virtual void OnSynStream(SpdyStreamId stream_id,
                           SpdyStreamId associated_stream_id,
                           SpdyPriority priority,
                           bool fin,
                           bool unidirectional) OVERRIDE {
    ++syn_frame_count_;
    // HTTP/2 has no SYN_STREAM; header bytes arrive as HEADERS.
    if (framer_.protocol_version() > SPDY3) {
      InitHeaderStreaming(HEADERS, stream_id);
    } else {
      InitHeaderStreaming(SYN_STREAM, stream_id);
    }
    if (fin) {
      ++fin_flag_count_;
    }
  }

  virtual void OnSynReply(SpdyStreamId stream_id, bool fin) OVERRIDE {
    ++syn_reply_frame_count_;
    if (framer_.protocol_version() > SPDY3) {
      InitHeaderStreaming(HEADERS, stream_id);
    } else {
      InitHeaderStreaming(SYN_REPLY, stream_id);
    }
    if (fin) {
      ++fin_flag_count_;
    }
  }

  virtual void OnRstStream(SpdyStreamId stream_id,
                           SpdyRstStreamStatus status) OVERRIDE {
    ++fin_frame_count_;
  }

  virtual bool OnRstStreamFrameData(const char* rst_stream_data,
                                    size_t len) OVERRIDE {
    if ((rst_stream_data != NULL) && (len > 0)) {
      fin_opaque_data_ += std::string(rst_stream_data, len);
    }
    return true;
  }

  virtual void OnSetting(SpdySettingsIds id,
                         uint8 flags,
                         uint32 value) OVERRIDE {
    ++setting_count_;
  }

  virtual void OnSettingsAck() OVERRIDE {
    DCHECK_LT(SPDY3, framer_.protocol_version());
    ++settings_ack_received_;
  }

  virtual void OnSettingsEnd() OVERRIDE {
    if (framer_.protocol_version() <= SPDY3) { return; }
    ++settings_ack_sent_;
  }

  virtual void OnPing(SpdyPingId unique_id, bool is_ack) OVERRIDE {
    DLOG(FATAL);
  }

  virtual void OnGoAway(SpdyStreamId last_accepted_stream_id,
                        SpdyGoAwayStatus status) OVERRIDE {
    ++goaway_count_;
  }

  virtual void OnHeaders(SpdyStreamId stream_id, bool fin, bool end) OVERRIDE {
    ++headers_frame_count_;
    InitHeaderStreaming(HEADERS, stream_id);
    if (fin) {
      ++fin_flag_count_;
    }
  }

  virtual void OnWindowUpdate(SpdyStreamId stream_id,
                              uint32 delta_window_size) OVERRIDE {
    last_window_update_stream_ = stream_id;
    last_window_update_delta_ = delta_window_size;
  }

  virtual void OnPushPromise(SpdyStreamId stream_id,
                             SpdyStreamId promised_stream_id,
                             bool end) OVERRIDE {
    ++push_promise_frame_count_;
    InitHeaderStreaming(PUSH_PROMISE, stream_id);
    last_push_promise_stream_ = stream_id;
    last_push_promise_promised_stream_ = promised_stream_id;
  }

  virtual void OnContinuation(SpdyStreamId stream_id, bool end) OVERRIDE {
    ++continuation_count_;
  }

  // Records the received ALTSVC fields into test_altsvc_ir_ for inspection.
  virtual void OnAltSvc(SpdyStreamId stream_id,
                        uint32 max_age,
                        uint16 port,
                        StringPiece protocol_id,
                        StringPiece host,
                        StringPiece origin) OVERRIDE {
    test_altsvc_ir_.set_stream_id(stream_id);
    test_altsvc_ir_.set_max_age(max_age);
    test_altsvc_ir_.set_port(port);
    test_altsvc_ir_.set_protocol_id(protocol_id.as_string());
    test_altsvc_ir_.set_host(host.as_string());
    if (origin.length() > 0) {
      test_altsvc_ir_.set_origin(origin.as_string());
    }
    ++altsvc_count_;
  }

  virtual void OnPriority(SpdyStreamId stream_id,
                          SpdyStreamId parent_stream_id,
                          uint8 weight,
                          bool exclusive) OVERRIDE {
    ++priority_count_;
  }

  virtual bool OnUnknownFrame(SpdyStreamId stream_id, int frame_type) OVERRIDE {
    DLOG(INFO) << "Unknown frame type " << frame_type;
    return on_unknown_frame_result_;
  }

  virtual void OnSendCompressedFrame(SpdyStreamId stream_id,
                                     SpdyFrameType type,
                                     size_t payload_len,
                                     size_t frame_len) OVERRIDE {
    last_payload_len_ = payload_len;
    last_frame_len_ = frame_len;
  }

  virtual void OnReceiveCompressedFrame(SpdyStreamId stream_id,
                                        SpdyFrameType type,
                                        size_t frame_len) OVERRIDE {
    last_frame_len_ = frame_len;
  }

  // Convenience function which runs a framer simulation with particular input.
  void SimulateInFramer(const unsigned char* input, size_t size) {
    framer_.set_enable_compression(use_compression_);
    framer_.set_visitor(this);
    size_t input_remaining = size;
    const char* input_ptr = reinterpret_cast<const char*>(input);
    while (input_remaining > 0 &&
           framer_.error_code() == SpdyFramer::SPDY_NO_ERROR) {
      // To make the tests more interesting, we feed random (amd small) chunks
      // into the framer.  This simulates getting strange-sized reads from
      // the socket.
      const size_t kMaxReadSize = 32;
      size_t bytes_read =
          (rand() % min(input_remaining, kMaxReadSize)) + 1;
      size_t bytes_processed = framer_.ProcessInput(input_ptr, bytes_read);
      input_remaining -= bytes_processed;
      input_ptr += bytes_processed;
    }
  }

  // Resets header-accumulation state for a new header-bearing frame.
  void InitHeaderStreaming(SpdyFrameType header_control_type,
                           SpdyStreamId stream_id) {
    if (!SpdyConstants::IsValidFrameType(framer_.protocol_version(),
        SpdyConstants::SerializeFrameType(framer_.protocol_version(),
                                          header_control_type))) {
      DLOG(FATAL) << "Attempted to init header streaming with "
                  << "invalid control frame type: "
                  << header_control_type;
    }
    memset(header_buffer_.get(), 0, header_buffer_size_);
    header_buffer_length_ = 0;
    header_stream_id_ = stream_id;
    header_control_type_ = header_control_type;
    header_buffer_valid_ = true;
    DCHECK_NE(header_stream_id_, SpdyFramer::kInvalidStream);
  }

  // Override the default buffer size (16K). Call before using the framer!
  void set_header_buffer_size(size_t header_buffer_size) {
    header_buffer_size_ = header_buffer_size;
    header_buffer_.reset(new char[header_buffer_size]);
  }

  static size_t header_data_chunk_max_size() {
    return SpdyFramer::kHeaderDataChunkMaxSize;
  }

  SpdyFramer framer_;
  bool use_compression_;

  // Counters from the visitor callbacks.
  int error_count_;
  int syn_frame_count_;
  int syn_reply_frame_count_;
  int headers_frame_count_;
  int push_promise_frame_count_;
  int goaway_count_;
  int setting_count_;
  int settings_ack_sent_;
  int settings_ack_received_;
  int continuation_count_;
  int altsvc_count_;
  int priority_count_;
  SpdyAltSvcIR test_altsvc_ir_;
  bool on_unknown_frame_result_;
  SpdyStreamId last_window_update_stream_;
  uint32 last_window_update_delta_;
  SpdyStreamId last_push_promise_stream_;
  SpdyStreamId last_push_promise_promised_stream_;
  int data_bytes_;
  int fin_frame_count_;  // The count of RST_STREAM type frames received.
  std::string fin_opaque_data_;
  int fin_flag_count_;  // The count of frames with the FIN flag set.
  int zero_length_data_frame_count_;  // The count of zero-length data frames.
  int control_frame_header_data_count_;  // The count of chunks received.
  // The count of zero-length control frame header data chunks received.
  int zero_length_control_frame_header_data_count_;
  int data_frame_count_;
  size_t last_payload_len_;
  size_t last_frame_len_;

  // Header block streaming state:
  scoped_ptr<char[]> header_buffer_;
  size_t header_buffer_length_;
  size_t header_buffer_size_;
  SpdyStreamId header_stream_id_;
  SpdyFrameType header_control_type_;
  bool header_buffer_valid_;
  SpdyHeaderBlock headers_;
};
// Retrieves serialized headers from a HEADERS or SYN_STREAM frame.
// Determines the frame type from the serialized bytes (for versions after
// SPDY3, a HEADERS frame carrying the PRIORITY flag is treated as the
// SYN_STREAM equivalent), then returns a view of the payload past the
// fixed-size frame header.
base::StringPiece GetSerializedHeaders(const SpdyFrame* frame,
                                       const SpdyFramer& framer) {
  SpdyFrameReader reader(frame->data(), frame->size());
  if (framer.protocol_version() > SPDY3) {
    reader.Seek(3);  // Seek past the frame length.
  } else {
    reader.Seek(2);  // Seek past the frame length.
  }
  SpdyFrameType frame_type;
  if (framer.protocol_version() > SPDY3) {
    // HTTP/2-style framing: 1-byte type, then 1-byte flags.
    uint8 serialized_type;
    reader.ReadUInt8(&serialized_type);
    frame_type = SpdyConstants::ParseFrameType(framer.protocol_version(),
                                               serialized_type);
    DCHECK_EQ(HEADERS, frame_type);
    uint8 flags;
    reader.ReadUInt8(&flags);
    if (flags & HEADERS_FLAG_PRIORITY) {
      frame_type = SYN_STREAM;
    }
  } else {
    // SPDY2/3 framing: 2-byte control frame type.
    uint16 serialized_type;
    reader.ReadUInt16(&serialized_type);
    frame_type = SpdyConstants::ParseFrameType(framer.protocol_version(),
                                               serialized_type);
    DCHECK(frame_type == HEADERS ||
           frame_type == SYN_STREAM) << frame_type;
  }

  if (frame_type == SYN_STREAM) {
    return StringPiece(frame->data() + framer.GetSynStreamMinimumSize(),
                       frame->size() - framer.GetSynStreamMinimumSize());
  } else {
    return StringPiece(frame->data() + framer.GetHeadersMinimumSize(),
                       frame->size() - framer.GetHeadersMinimumSize());
  }
}
} // namespace test
} // namespace net
using net::test::SetFrameLength;
using net::test::SetFrameFlags;
using net::test::CompareCharArraysWithHexError;
using net::test::SpdyFramerTestUtil;
using net::test::TestSpdyVisitor;
using net::test::GetSerializedHeaders;
namespace net {
// Parameterized test fixture; the parameter is the SPDY protocol version
// each test instance runs against.
class SpdyFramerTest : public ::testing::TestWithParam<SpdyMajorVersion> {
 protected:
  virtual void SetUp() {
    // Cache the version parameter and its serialized (on-the-wire) byte form
    // for use in hand-built frame byte arrays.
    spdy_version_ = GetParam();
    spdy_version_ch_ = static_cast<unsigned char>(
        SpdyConstants::SerializeMajorVersion(spdy_version_));
  }

  // Byte-compares a serialized frame against an expected byte array,
  // reporting mismatches as hex dumps.
  void CompareFrame(const string& description,
                    const SpdyFrame& actual_frame,
                    const unsigned char* expected,
                    const int expected_len) {
    const unsigned char* actual =
        reinterpret_cast<const unsigned char*>(actual_frame.data());
    CompareCharArraysWithHexError(
        description, actual, actual_frame.size(), expected, expected_len);
  }

  // Byte-compares two serialized frames.
  void CompareFrames(const string& description,
                     const SpdyFrame& expected_frame,
                     const SpdyFrame& actual_frame) {
    CompareCharArraysWithHexError(
        description,
        reinterpret_cast<const unsigned char*>(expected_frame.data()),
        expected_frame.size(),
        reinterpret_cast<const unsigned char*>(actual_frame.data()),
        actual_frame.size());
  }

  // Returns true if the two header blocks have equivalent content.
  // Logs (via LOG(ERROR)) the first discrepancy found.
  bool CompareHeaderBlocks(const SpdyHeaderBlock* expected,
                           const SpdyHeaderBlock* actual) {
    if (expected->size() != actual->size()) {
      LOG(ERROR) << "Expected " << expected->size() << " headers; actually got "
                 << actual->size() << ".";
      return false;
    }
    for (SpdyHeaderBlock::const_iterator it = expected->begin();
         it != expected->end();
         ++it) {
      SpdyHeaderBlock::const_iterator it2 = actual->find(it->first);
      if (it2 == actual->end()) {
        LOG(ERROR) << "Expected header name '" << it->first << "'.";
        return false;
      }
      if (it->second.compare(it2->second) != 0) {
        LOG(ERROR) << "Expected header named '" << it->first
                   << "' to have a value of '" << it->second
                   << "'. The actual value received was '" << it2->second
                   << "'.";
        return false;
      }
    }
    return true;
  }

  // Convenience predicates for version-specific test branches.
  bool IsSpdy2() { return spdy_version_ == SPDY2; }
  bool IsSpdy3() { return spdy_version_ == SPDY3; }
  bool IsSpdy4() { return spdy_version_ == SPDY4; }
  bool IsSpdy5() { return spdy_version_ == SPDY5; }

  // Version of SPDY protocol to be used.
  SpdyMajorVersion spdy_version_;
  unsigned char spdy_version_ch_;
};
// All tests are run with 3 different SPDY versions: SPDY/2, SPDY/3, SPDY/4.
INSTANTIATE_TEST_CASE_P(SpdyFramerTests,
                        SpdyFramerTest,
                        ::testing::Values(SPDY2, SPDY3, SPDY4));
// Test that we ignore cookie where both name and value are empty.
// The serialized cookie contains several empty and degenerate crumbs; after a
// round trip, only the non-empty crumbs survive, so the parsed block is
// expected NOT to match the original.
TEST_P(SpdyFramerTest, HeaderBlockWithEmptyCookie) {
  if (spdy_version_ > SPDY3) {
    // Not implemented for hpack.
    return;
  }

  SpdyFramer framer(spdy_version_);
  framer.set_enable_compression(true);
  SpdyHeadersIR headers(1);
  headers.set_priority(1);
  headers.SetHeader("cookie",
                    "=; key=value; ; = ; foo; bar=; ; = ; k2=v2 ; =");
  scoped_ptr<SpdyFrame> frame(framer.SerializeHeaders(headers));
  EXPECT_TRUE(frame.get() != NULL);

  TestSpdyVisitor visitor(spdy_version_);
  visitor.use_compression_ = true;
  visitor.SimulateInFramer(
      reinterpret_cast<unsigned char*>(frame->data()),
      frame->size());

  EXPECT_EQ(1, visitor.zero_length_control_frame_header_data_count_);
  // The empty crumbs were dropped, so the blocks differ.
  EXPECT_FALSE(CompareHeaderBlocks(&headers.name_value_block(),
                                   &visitor.headers_));
  EXPECT_EQ(1u, visitor.headers_.size());
  EXPECT_EQ("key=value; foo; bar=; k2=v2 ", visitor.headers_["cookie"]);
}
// Test that we can encode and decode a SpdyHeaderBlock in serialized form.
// Uncompressed round trip: the parsed header block must equal the original.
TEST_P(SpdyFramerTest, HeaderBlockInBuffer) {
  SpdyFramer framer(spdy_version_);
  framer.set_enable_compression(false);

  // Encode the header block into a Headers frame.
  SpdyHeadersIR headers(1);
  headers.set_priority(1);
  headers.SetHeader("alpha", "beta");
  headers.SetHeader("gamma", "charlie");
  headers.SetHeader("cookie", "key1=value1; key2=value2");
  scoped_ptr<SpdyFrame> frame(framer.SerializeHeaders(headers));
  EXPECT_TRUE(frame.get() != NULL);

  TestSpdyVisitor visitor(spdy_version_);
  visitor.use_compression_ = false;
  visitor.SimulateInFramer(
      reinterpret_cast<unsigned char*>(frame->data()),
      frame->size());

  EXPECT_EQ(1, visitor.zero_length_control_frame_header_data_count_);
  EXPECT_TRUE(CompareHeaderBlocks(&headers.name_value_block(),
                                  &visitor.headers_));
}
// Test that if there's not a full frame, we fail to parse it.
// Feeds all but the last 2 bytes of a HEADERS frame; no headers should be
// delivered.
TEST_P(SpdyFramerTest, UndersizedHeaderBlockInBuffer) {
  SpdyFramer framer(spdy_version_);
  framer.set_enable_compression(false);

  // Encode the header block into a Headers frame.
  SpdyHeadersIR headers(1);
  headers.set_priority(1);
  headers.SetHeader("alpha", "beta");
  headers.SetHeader("gamma", "charlie");
  scoped_ptr<SpdyFrame> frame(framer.SerializeHeaders(headers));
  EXPECT_TRUE(frame.get() != NULL);

  TestSpdyVisitor visitor(spdy_version_);
  visitor.use_compression_ = false;
  visitor.SimulateInFramer(
      reinterpret_cast<unsigned char*>(frame->data()),
      frame->size() - 2);  // Deliberately truncated.

  EXPECT_EQ(0, visitor.zero_length_control_frame_header_data_count_);
  EXPECT_EQ(0u, visitor.headers_.size());
}
// Test that if we receive a SYN_REPLY with stream ID zero, we signal an error
// (but don't crash).
TEST_P(SpdyFramerTest, SynReplyWithStreamIdZero) {
  if (spdy_version_ > SPDY3) {
    // SYN_REPLY does not exist in SPDY4+.
    return;
  }
  testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
  SpdyFramer framer(spdy_version_);
  framer.set_visitor(&visitor);

  SpdySynReplyIR syn_reply(0);  // Invalid: stream ID must be nonzero.
  syn_reply.SetHeader("alpha", "beta");
  scoped_ptr<SpdySerializedFrame> frame(framer.SerializeSynReply(syn_reply));
  ASSERT_TRUE(frame.get() != NULL);

  // We shouldn't have to read the whole frame before we signal an error.
  EXPECT_CALL(visitor, OnError(testing::Eq(&framer)));
  EXPECT_GT(frame->size(), framer.ProcessInput(frame->data(), frame->size()));
  EXPECT_TRUE(framer.HasError());
  EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
}
// Test that if we receive a HEADERS with stream ID zero, we signal an error
// (but don't crash).
TEST_P(SpdyFramerTest, HeadersWithStreamIdZero) {
  testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
  SpdyFramer framer(spdy_version_);
  framer.set_visitor(&visitor);

  SpdyHeadersIR headers_ir(0);  // Invalid: stream ID must be nonzero.
  headers_ir.SetHeader("alpha", "beta");
  scoped_ptr<SpdySerializedFrame> frame(framer.SerializeHeaders(headers_ir));
  ASSERT_TRUE(frame.get() != NULL);

  // We shouldn't have to read the whole frame before we signal an error.
  EXPECT_CALL(visitor, OnError(testing::Eq(&framer)));
  EXPECT_GT(frame->size(), framer.ProcessInput(frame->data(), frame->size()));
  EXPECT_TRUE(framer.HasError());
  EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
}
// Test that if we receive a PUSH_PROMISE with stream ID zero, we signal an
// error (but don't crash).
TEST_P(SpdyFramerTest, PushPromiseWithStreamIdZero) {
  if (spdy_version_ <= SPDY3) {
    // PUSH_PROMISE does not exist before SPDY4.
    return;
  }
  testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
  SpdyFramer framer(spdy_version_);
  framer.set_visitor(&visitor);

  SpdyPushPromiseIR push_promise(0, 4);  // Invalid: stream ID zero.
  push_promise.SetHeader("alpha", "beta");
  scoped_ptr<SpdySerializedFrame> frame(
      framer.SerializePushPromise(push_promise));
  ASSERT_TRUE(frame.get() != NULL);

  // We shouldn't have to read the whole frame before we signal an error.
  EXPECT_CALL(visitor, OnError(testing::Eq(&framer)));
  EXPECT_GT(frame->size(), framer.ProcessInput(frame->data(), frame->size()));
  EXPECT_TRUE(framer.HasError());
  EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
}
// Test that if we receive a PUSH_PROMISE with promised stream ID zero, we
// signal an error (but don't crash).
TEST_P(SpdyFramerTest, PushPromiseWithPromisedStreamIdZero) {
  if (spdy_version_ <= SPDY3) {
    // PUSH_PROMISE does not exist before SPDY4.
    return;
  }
  testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
  SpdyFramer framer(spdy_version_);
  framer.set_visitor(&visitor);

  SpdyPushPromiseIR push_promise(3, 0);  // Invalid: promised stream ID zero.
  push_promise.SetHeader("alpha", "beta");
  scoped_ptr<SpdySerializedFrame> frame(
      framer.SerializePushPromise(push_promise));
  ASSERT_TRUE(frame.get() != NULL);

  // We shouldn't have to read the whole frame before we signal an error.
  EXPECT_CALL(visitor, OnError(testing::Eq(&framer)));
  EXPECT_GT(frame->size(), framer.ProcessInput(frame->data(), frame->size()));
  EXPECT_TRUE(framer.HasError());
  EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
}
// Hand-builds a SYN_STREAM/HEADERS frame containing the same header name
// twice, then verifies that parsing the serialized header block fails.
TEST_P(SpdyFramerTest, DuplicateHeader) {
  if (spdy_version_ > SPDY3) {
    // TODO(jgraettinger): Punting on this because we haven't determined
    // whether duplicate HPACK headers other than Cookie are an error.
    // If they are, this will need to be updated to use HpackOutputStream.
    return;
  }
  SpdyFramer framer(spdy_version_);
  // Frame builder with plentiful buffer size.
  SpdyFrameBuilder frame(1024, spdy_version_);
  if (spdy_version_ <= SPDY3) {
    frame.WriteControlFrameHeader(framer, SYN_STREAM, CONTROL_FLAG_NONE);
    frame.WriteUInt32(3);  // stream_id
    frame.WriteUInt32(0);  // associated stream id
    frame.WriteUInt16(0);  // Priority.
  } else {
    frame.BeginNewFrame(framer, HEADERS, HEADERS_FLAG_PRIORITY, 3);
    frame.WriteUInt32(framer.GetHighestPriority());
  }

  // Write "name" twice with different values; SPDY2 uses 16-bit length
  // prefixes, SPDY3 uses 32-bit.
  if (IsSpdy2()) {
    frame.WriteUInt16(2);  // Number of headers.
    frame.WriteString("name");
    frame.WriteString("value1");
    frame.WriteString("name");
    frame.WriteString("value2");
  } else {
    frame.WriteUInt32(2);  // Number of headers.
    frame.WriteStringPiece32("name");
    frame.WriteStringPiece32("value1");
    frame.WriteStringPiece32("name");
    frame.WriteStringPiece32("value2");
  }
  // write the length
  frame.RewriteLength(framer);

  SpdyHeaderBlock new_headers;
  framer.set_enable_compression(false);
  scoped_ptr<SpdyFrame> control_frame(frame.take());
  base::StringPiece serialized_headers =
      GetSerializedHeaders(control_frame.get(), framer);
  // This should fail because duplicate headers are verboten by the spec.
  EXPECT_FALSE(framer.ParseHeaderBlockInBuffer(serialized_headers.data(),
                                               serialized_headers.size(),
                                               &new_headers));
}
// Verifies that a header whose value contains an embedded NUL (the SPDY
// multi-value separator) survives a serialize/parse round trip intact.
TEST_P(SpdyFramerTest, MultiValueHeader) {
  SpdyFramer framer(spdy_version_);
  // Frame builder with plentiful buffer size.
  SpdyFrameBuilder frame(1024, spdy_version_);
  if (spdy_version_ <= SPDY3) {
    frame.WriteControlFrameHeader(framer, SYN_STREAM, CONTROL_FLAG_NONE);
    frame.WriteUInt32(3);  // stream_id
    frame.WriteUInt32(0);  // associated stream id
    frame.WriteUInt16(0);  // Priority.
  } else {
    frame.BeginNewFrame(framer,
                        HEADERS,
                        HEADERS_FLAG_PRIORITY | HEADERS_FLAG_END_HEADERS,
                        3);
    frame.WriteUInt32(0);  // Priority exclusivity and dependent stream.
    frame.WriteUInt8(255);  // Priority weight.
  }

  // Explicit length: the value embeds a NUL separating "value1"/"value2".
  string value("value1\0value2", 13);
  if (IsSpdy2()) {
    frame.WriteUInt16(1);  // Number of headers.
    frame.WriteString("name");
    frame.WriteString(value);
  } else if (spdy_version_ > SPDY3) {
    // TODO(jgraettinger): If this pattern appears again, move to test class.
    std::map<string, string> header_set;
    header_set["name"] = value;
    string buffer;
    HpackEncoder encoder(ObtainHpackHuffmanTable());
    encoder.EncodeHeaderSetWithoutCompression(header_set, &buffer);
    frame.WriteBytes(&buffer[0], buffer.size());
  } else {
    frame.WriteUInt32(1);  // Number of headers.
    frame.WriteStringPiece32("name");
    frame.WriteStringPiece32(value);
  }
  // write the length
  frame.RewriteLength(framer);

  framer.set_enable_compression(false);
  scoped_ptr<SpdyFrame> control_frame(frame.take());

  TestSpdyVisitor visitor(spdy_version_);
  visitor.use_compression_ = false;
  visitor.SimulateInFramer(
      reinterpret_cast<unsigned char*>(control_frame->data()),
      control_frame->size());

  EXPECT_THAT(visitor.headers_, ElementsAre(
      Pair("name", value)));
}
// Exercises zlib header compression: serializes the same SYN_STREAM twice,
// checks expected compressed/uncompressed sizes (which differ between system
// and bundled zlib), and verifies that decompressing both frames yields
// identical output, equal to a frame serialized with compression disabled.
TEST_P(SpdyFramerTest, BasicCompression) {
  if (spdy_version_ > SPDY3) {
    // Deflate compression doesn't apply to HPACK.
    return;
  }

  scoped_ptr<TestSpdyVisitor> visitor(new TestSpdyVisitor(spdy_version_));
  SpdyFramer framer(spdy_version_);
  framer.set_debug_visitor(visitor.get());
  SpdySynStreamIR syn_stream(1);
  syn_stream.set_priority(1);
  syn_stream.SetHeader("server", "SpdyServer 1.0");
  syn_stream.SetHeader("date", "Mon 12 Jan 2009 12:12:12 PST");
  syn_stream.SetHeader("status", "200");
  syn_stream.SetHeader("version", "HTTP/1.1");
  syn_stream.SetHeader("content-type", "text/html");
  syn_stream.SetHeader("content-length", "12");
  scoped_ptr<SpdyFrame> frame1(framer.SerializeSynStream(syn_stream));
  size_t uncompressed_size1 = visitor->last_payload_len_;
  size_t compressed_size1 =
      visitor->last_frame_len_ - framer.GetSynStreamMinimumSize();
  if (IsSpdy2()) {
    EXPECT_EQ(139u, uncompressed_size1);
#if defined(USE_SYSTEM_ZLIB)
    EXPECT_EQ(155u, compressed_size1);
#else  // !defined(USE_SYSTEM_ZLIB)
    EXPECT_EQ(135u, compressed_size1);
#endif  // !defined(USE_SYSTEM_ZLIB)
  } else {
    EXPECT_EQ(165u, uncompressed_size1);
#if defined(USE_SYSTEM_ZLIB)
    EXPECT_EQ(181u, compressed_size1);
#else  // !defined(USE_SYSTEM_ZLIB)
    EXPECT_EQ(117u, compressed_size1);
#endif  // !defined(USE_SYSTEM_ZLIB)
  }
  scoped_ptr<SpdyFrame> frame2(framer.SerializeSynStream(syn_stream));
  size_t uncompressed_size2 = visitor->last_payload_len_;
  size_t compressed_size2 =
      visitor->last_frame_len_ - framer.GetSynStreamMinimumSize();

  // Expect the second frame to be more compact than the first.
  EXPECT_LE(frame2->size(), frame1->size());

  // Decompress the first frame
  scoped_ptr<SpdyFrame> frame3(
      SpdyFramerTestUtil::DecompressFrame(&framer, *frame1));

  // Decompress the second frame
  visitor.reset(new TestSpdyVisitor(spdy_version_));
  framer.set_debug_visitor(visitor.get());
  scoped_ptr<SpdyFrame> frame4(
      SpdyFramerTestUtil::DecompressFrame(&framer, *frame2));
  size_t uncompressed_size4 =
      frame4->size() - framer.GetSynStreamMinimumSize();
  size_t compressed_size4 =
      visitor->last_frame_len_ - framer.GetSynStreamMinimumSize();
  if (IsSpdy2()) {
    EXPECT_EQ(139u, uncompressed_size4);
#if defined(USE_SYSTEM_ZLIB)
    EXPECT_EQ(149u, compressed_size4);
#else  // !defined(USE_SYSTEM_ZLIB)
    EXPECT_EQ(101u, compressed_size4);
#endif  // !defined(USE_SYSTEM_ZLIB)
  } else {
    EXPECT_EQ(165u, uncompressed_size4);
#if defined(USE_SYSTEM_ZLIB)
    EXPECT_EQ(175u, compressed_size4);
#else  // !defined(USE_SYSTEM_ZLIB)
    EXPECT_EQ(102u, compressed_size4);
#endif  // !defined(USE_SYSTEM_ZLIB)
  }

  EXPECT_EQ(uncompressed_size1, uncompressed_size2);
  EXPECT_EQ(uncompressed_size1, uncompressed_size4);
  EXPECT_EQ(compressed_size2, compressed_size4);

  // Expect frames 3 & 4 to be the same.
  CompareFrames("Uncompressed SYN_STREAM", *frame3, *frame4);

  // Expect frame 3 to be the same as an uncompressed frame created
  // from scratch.
  framer.set_enable_compression(false);
  scoped_ptr<SpdyFrame> uncompressed_frame(
      framer.SerializeSynStream(syn_stream));
  CompareFrames("Uncompressed SYN_STREAM", *frame3, *uncompressed_frame);
}
// Regression test: serializing a header block containing an empty-valued
// header with compression enabled must not crash. See crbug.com/172383.
TEST_P(SpdyFramerTest, CompressEmptyHeaders) {
  // See crbug.com/172383
  SpdyHeadersIR headers(1);
  headers.SetHeader("server", "SpdyServer 1.0");
  headers.SetHeader("date", "Mon 12 Jan 2009 12:12:12 PST");
  headers.SetHeader("status", "200");
  headers.SetHeader("version", "HTTP/1.1");
  headers.SetHeader("content-type", "text/html");
  headers.SetHeader("content-length", "12");
  headers.SetHeader("x-empty-header", "");

  SpdyFramer framer(spdy_version_);
  framer.set_enable_compression(true);
  scoped_ptr<SpdyFrame> frame1(framer.SerializeHeaders(headers));
}
// End-to-end parse of a hand-built byte stream containing a mix of stream
// setup, HEADERS, DATA, and RST_STREAM frames, checked against the visitor's
// callback counters. Separate byte arrays cover the SPDY2, SPDY3, and SPDY4
// wire formats.
TEST_P(SpdyFramerTest, Basic) {
  const unsigned char kV2Input[] = {
    0x80, spdy_version_ch_, 0x00, 0x01,  // SYN Stream #1
    0x00, 0x00, 0x00, 0x14,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x02, 'h', 'h',
    0x00, 0x02, 'v', 'v',

    0x80, spdy_version_ch_, 0x00, 0x08,  // HEADERS on Stream #1
    0x00, 0x00, 0x00, 0x18,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, 0x02,
    0x00, 0x02, 'h', '2',
    0x00, 0x02, 'v', '2',
    0x00, 0x02, 'h', '3',
    0x00, 0x02, 'v', '3',

    0x00, 0x00, 0x00, 0x01,  // DATA on Stream #1
    0x00, 0x00, 0x00, 0x0c,
    0xde, 0xad, 0xbe, 0xef,
    0xde, 0xad, 0xbe, 0xef,
    0xde, 0xad, 0xbe, 0xef,

    0x80, spdy_version_ch_, 0x00, 0x01,  // SYN Stream #3
    0x00, 0x00, 0x00, 0x0c,
    0x00, 0x00, 0x00, 0x03,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,

    0x00, 0x00, 0x00, 0x03,  // DATA on Stream #3
    0x00, 0x00, 0x00, 0x08,
    0xde, 0xad, 0xbe, 0xef,
    0xde, 0xad, 0xbe, 0xef,

    0x00, 0x00, 0x00, 0x01,  // DATA on Stream #1
    0x00, 0x00, 0x00, 0x04,
    0xde, 0xad, 0xbe, 0xef,

    0x80, spdy_version_ch_, 0x00, 0x03,  // RST_STREAM on Stream #1
    0x00, 0x00, 0x00, 0x08,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, 0x05,  // RST_STREAM_CANCEL

    0x00, 0x00, 0x00, 0x03,  // DATA on Stream #3
    0x00, 0x00, 0x00, 0x00,

    0x80, spdy_version_ch_, 0x00, 0x03,  // RST_STREAM on Stream #3
    0x00, 0x00, 0x00, 0x08,
    0x00, 0x00, 0x00, 0x03,
    0x00, 0x00, 0x00, 0x05,  // RST_STREAM_CANCEL
  };

  const unsigned char kV3Input[] = {
    0x80, spdy_version_ch_, 0x00, 0x01,  // SYN Stream #1
    0x00, 0x00, 0x00, 0x1a,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x01, 0x00, 0x00,
    0x00, 0x02, 'h', 'h',
    0x00, 0x00, 0x00, 0x02,
    'v', 'v',

    0x80, spdy_version_ch_, 0x00, 0x08,  // HEADERS on Stream #1
    0x00, 0x00, 0x00, 0x20,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, 0x02,
    0x00, 0x00, 0x00, 0x02,
    'h', '2',
    0x00, 0x00, 0x00, 0x02,
    'v', '2', 0x00, 0x00,
    0x00, 0x02, 'h', '3',
    0x00, 0x00, 0x00, 0x02,
    'v', '3',

    0x00, 0x00, 0x00, 0x01,  // DATA on Stream #1
    0x00, 0x00, 0x00, 0x0c,
    0xde, 0xad, 0xbe, 0xef,
    0xde, 0xad, 0xbe, 0xef,
    0xde, 0xad, 0xbe, 0xef,

    0x80, spdy_version_ch_, 0x00, 0x01,  // SYN Stream #3
    0x00, 0x00, 0x00, 0x0e,
    0x00, 0x00, 0x00, 0x03,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00,

    0x00, 0x00, 0x00, 0x03,  // DATA on Stream #3
    0x00, 0x00, 0x00, 0x08,
    0xde, 0xad, 0xbe, 0xef,
    0xde, 0xad, 0xbe, 0xef,

    0x00, 0x00, 0x00, 0x01,  // DATA on Stream #1
    0x00, 0x00, 0x00, 0x04,
    0xde, 0xad, 0xbe, 0xef,

    0x80, spdy_version_ch_, 0x00, 0x03,  // RST_STREAM on Stream #1
    0x00, 0x00, 0x00, 0x08,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, 0x05,  // RST_STREAM_CANCEL

    0x00, 0x00, 0x00, 0x03,  // DATA on Stream #3
    0x00, 0x00, 0x00, 0x00,

    0x80, spdy_version_ch_, 0x00, 0x03,  // RST_STREAM on Stream #3
    0x00, 0x00, 0x00, 0x08,
    0x00, 0x00, 0x00, 0x03,
    0x00, 0x00, 0x00, 0x05,  // RST_STREAM_CANCEL
  };

  // SYN_STREAM doesn't exist in SPDY4, so instead we send
  // HEADERS frames with PRIORITY and END_HEADERS set.
  const unsigned char kV4Input[] = {
    0x00, 0x00, 0x05, 0x01,  // HEADERS: PRIORITY | END_HEADERS
    0x24, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00,  // Stream 1, Priority 0
    0x00, 0x82,              // :method: GET

    0x00, 0x00, 0x01, 0x01,  // HEADERS: END_HEADERS
    0x04, 0x00, 0x00, 0x00,  // Stream 1
    0x01, 0x8c,              // :status: 200

    0x00, 0x00, 0x0c, 0x00,  // DATA on Stream #1
    0x00, 0x00, 0x00, 0x00,
    0x01, 0xde, 0xad, 0xbe,
    0xef, 0xde, 0xad, 0xbe,
    0xef, 0xde, 0xad, 0xbe,
    0xef,

    0x00, 0x00, 0x05, 0x01,  // HEADERS: PRIORITY | END_HEADERS
    0x24, 0x00, 0x00, 0x00,
    0x03, 0x00, 0x00, 0x00,  // Stream 3, Priority 0
    0x00, 0x82,              // :method: GET

    0x00, 0x00, 0x08, 0x00,  // DATA on Stream #3
    0x00, 0x00, 0x00, 0x00,
    0x03, 0xde, 0xad, 0xbe,
    0xef, 0xde, 0xad, 0xbe,
    0xef,

    0x00, 0x00, 0x04, 0x00,  // DATA on Stream #1
    0x00, 0x00, 0x00, 0x00,
    0x01, 0xde, 0xad, 0xbe,
    0xef,

    0x00, 0x00, 0x04, 0x03,  // RST_STREAM on Stream #1
    0x00, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00,
    0x08,                    // RST_STREAM_CANCEL

    0x00, 0x00, 0x00, 0x00,  // DATA on Stream #3
    0x00, 0x00, 0x00, 0x00,
    0x03,

    0x00, 0x00, 0x0f, 0x03,  // RST_STREAM on Stream #3
    0x00, 0x00, 0x00, 0x00,
    0x03, 0x00, 0x00, 0x00,  // RST_STREAM_CANCEL
    0x08, 0x52, 0x45, 0x53,  // opaque data
    0x45, 0x54, 0x53, 0x54,
    0x52, 0x45, 0x41, 0x4d,
  };

  TestSpdyVisitor visitor(spdy_version_);
  if (IsSpdy2()) {
    visitor.SimulateInFramer(kV2Input, sizeof(kV2Input));
  } else if (IsSpdy3()) {
    visitor.SimulateInFramer(kV3Input, sizeof(kV3Input));
  } else {
    visitor.SimulateInFramer(kV4Input, sizeof(kV4Input));
  }

  EXPECT_EQ(2, visitor.syn_frame_count_);
  EXPECT_EQ(0, visitor.syn_reply_frame_count_);
  EXPECT_EQ(1, visitor.headers_frame_count_);
  EXPECT_EQ(24, visitor.data_bytes_);
  EXPECT_EQ(0, visitor.error_count_);
  EXPECT_EQ(2, visitor.fin_frame_count_);

  if (IsSpdy4()) {
    // Only the SPDY4 input carries RST_STREAM opaque data ("RESETSTREAM").
    base::StringPiece reset_stream = "RESETSTREAM";
    EXPECT_EQ(reset_stream, visitor.fin_opaque_data_);
  } else {
    EXPECT_TRUE(visitor.fin_opaque_data_.empty());
  }

  EXPECT_EQ(0, visitor.fin_flag_count_);
  EXPECT_EQ(0, visitor.zero_length_data_frame_count_);
  EXPECT_EQ(4, visitor.data_frame_count_);
  visitor.fin_opaque_data_.clear();
}
// Test that the FIN flag on a data frame signifies EOF.
// The final DATA frame in each input carries the FIN flag; the visitor should
// record it as a zero-length data frame (EOF notification).
TEST_P(SpdyFramerTest, FinOnDataFrame) {
  const unsigned char kV2Input[] = {
    0x80, spdy_version_ch_, 0x00, 0x01,  // SYN Stream #1
    0x00, 0x00, 0x00, 0x14,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x02, 'h', 'h',
    0x00, 0x02, 'v', 'v',

    0x80, spdy_version_ch_, 0x00, 0x02,  // SYN REPLY Stream #1
    0x00, 0x00, 0x00, 0x10,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x02, 'a', 'a',
    0x00, 0x02, 'b', 'b',

    0x00, 0x00, 0x00, 0x01,  // DATA on Stream #1
    0x00, 0x00, 0x00, 0x0c,
    0xde, 0xad, 0xbe, 0xef,
    0xde, 0xad, 0xbe, 0xef,
    0xde, 0xad, 0xbe, 0xef,

    0x00, 0x00, 0x00, 0x01,  // DATA on Stream #1, with EOF
    0x01, 0x00, 0x00, 0x04,
    0xde, 0xad, 0xbe, 0xef,
  };

  const unsigned char kV3Input[] = {
    0x80, spdy_version_ch_, 0x00, 0x01,  // SYN Stream #1
    0x00, 0x00, 0x00, 0x1a,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x01, 0x00, 0x00,
    0x00, 0x02, 'h', 'h',
    0x00, 0x00, 0x00, 0x02,
    'v', 'v',

    0x80, spdy_version_ch_, 0x00, 0x02,  // SYN REPLY Stream #1
    0x00, 0x00, 0x00, 0x14,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, 0x02,
    'a', 'a', 0x00, 0x00,
    0x00, 0x02, 'b', 'b',

    0x00, 0x00, 0x00, 0x01,  // DATA on Stream #1
    0x00, 0x00, 0x00, 0x0c,
    0xde, 0xad, 0xbe, 0xef,
    0xde, 0xad, 0xbe, 0xef,
    0xde, 0xad, 0xbe, 0xef,

    0x00, 0x00, 0x00, 0x01,  // DATA on Stream #1, with EOF
    0x01, 0x00, 0x00, 0x04,
    0xde, 0xad, 0xbe, 0xef,
  };

  // SYN_STREAM and SYN_REPLY don't exist in SPDY4, so instead we send
  // HEADERS frames with PRIORITY(SYN_STREAM only) and END_HEADERS set.
  const unsigned char kV4Input[] = {
    0x00, 0x00, 0x05, 0x01,  // HEADERS: PRIORITY | END_HEADERS
    0x24, 0x00, 0x00, 0x00,  // Stream 1
    0x01, 0x00, 0x00, 0x00,  // Priority 0
    0x00, 0x82,              // :method: GET

    0x00, 0x00, 0x01, 0x01,  // HEADERS: END_HEADERS
    0x04, 0x00, 0x00, 0x00,  // Stream 1
    0x01, 0x8c,              // :status: 200

    0x00, 0x00, 0x0c, 0x00,  // DATA on Stream #1
    0x00, 0x00, 0x00, 0x00,
    0x01, 0xde, 0xad, 0xbe,
    0xef, 0xde, 0xad, 0xbe,
    0xef, 0xde, 0xad, 0xbe,
    0xef,

    0x00, 0x00, 0x04, 0x00,  // DATA on Stream #1, with FIN
    0x01, 0x00, 0x00, 0x00,
    0x01, 0xde, 0xad, 0xbe,
    0xef,
  };

  TestSpdyVisitor visitor(spdy_version_);
  if (IsSpdy2()) {
    visitor.SimulateInFramer(kV2Input, sizeof(kV2Input));
  } else if (IsSpdy3()) {
    visitor.SimulateInFramer(kV3Input, sizeof(kV3Input));
  } else {
    visitor.SimulateInFramer(kV4Input, sizeof(kV4Input));
  }

  EXPECT_EQ(0, visitor.error_count_);
  EXPECT_EQ(1, visitor.syn_frame_count_);
  if (IsSpdy4()) {
    EXPECT_EQ(0, visitor.syn_reply_frame_count_);
    EXPECT_EQ(1, visitor.headers_frame_count_);
  } else {
    EXPECT_EQ(1, visitor.syn_reply_frame_count_);
    EXPECT_EQ(0, visitor.headers_frame_count_);
  }
  EXPECT_EQ(16, visitor.data_bytes_);
  EXPECT_EQ(0, visitor.fin_frame_count_);
  EXPECT_EQ(0, visitor.fin_flag_count_);
  EXPECT_EQ(1, visitor.zero_length_data_frame_count_);
  EXPECT_EQ(2, visitor.data_frame_count_);
}
// Test that the FIN flag on a SYN reply frame signifies EOF.
// The SYN_REPLY (or SPDY4 HEADERS) frame itself carries FIN, so the stream
// ends with zero data bytes.
TEST_P(SpdyFramerTest, FinOnSynReplyFrame) {
  const unsigned char kV2Input[] = {
    0x80, spdy_version_ch_, 0x00,        // SYN Stream #1
    0x01, 0x00, 0x00, 0x00,
    0x14, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x02, 'h',
    'h', 0x00, 0x02, 'v',
    'v',

    0x80, spdy_version_ch_, 0x00,        // SYN REPLY Stream #1
    0x02, 0x01, 0x00, 0x00,
    0x10, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x02, 'a',
    'a', 0x00, 0x02, 'b',
    'b',
  };

  const unsigned char kV3Input[] = {
    0x80, spdy_version_ch_, 0x00,        // SYN Stream #1
    0x01, 0x00, 0x00, 0x00,
    0x1a, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x01, 0x00,
    0x00, 0x00, 0x02, 'h',
    'h', 0x00, 0x00, 0x00,
    0x02, 'v', 'v',

    0x80, spdy_version_ch_, 0x00,        // SYN REPLY Stream #1
    0x02, 0x01, 0x00, 0x00,
    0x14, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00,
    0x02, 'a', 'a', 0x00,
    0x00, 0x00, 0x02, 'b',
    'b',
  };

  // SYN_STREAM and SYN_REPLY don't exist in SPDY4, so instead we send
  // HEADERS frames with PRIORITY(SYN_STREAM only) and END_HEADERS set.
  const unsigned char kV4Input[] = {
    0x00, 0x00, 0x05, 0x01,  // HEADERS: PRIORITY | END_HEADERS
    0x24, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00,  // Stream 1, Priority 0
    0x00, 0x82,              // :method: GET

    0x00, 0x00, 0x01, 0x01,  // HEADERS: FIN | END_HEADERS
    0x05, 0x00, 0x00, 0x00,
    0x01, 0x8c,              // Stream 1, :status: 200
  };

  TestSpdyVisitor visitor(spdy_version_);
  if (IsSpdy2()) {
    visitor.SimulateInFramer(kV2Input, sizeof(kV2Input));
  } else if (IsSpdy3()) {
    visitor.SimulateInFramer(kV3Input, sizeof(kV3Input));
  } else {
    visitor.SimulateInFramer(kV4Input, sizeof(kV4Input));
  }

  EXPECT_EQ(0, visitor.error_count_);
  EXPECT_EQ(1, visitor.syn_frame_count_);
  if (IsSpdy4()) {
    EXPECT_EQ(0, visitor.syn_reply_frame_count_);
    EXPECT_EQ(1, visitor.headers_frame_count_);
  } else {
    EXPECT_EQ(1, visitor.syn_reply_frame_count_);
    EXPECT_EQ(0, visitor.headers_frame_count_);
  }
  EXPECT_EQ(0, visitor.data_bytes_);
  EXPECT_EQ(0, visitor.fin_frame_count_);
  EXPECT_EQ(1, visitor.fin_flag_count_);
  EXPECT_EQ(1, visitor.zero_length_data_frame_count_);
  EXPECT_EQ(0, visitor.data_frame_count_);
}
// Verifies stateful zlib header compression across frames: a second
// SYN_STREAM compressed against the shared context still decompresses to the
// full header set on the receiving framer.
TEST_P(SpdyFramerTest, HeaderCompression) {
  if (spdy_version_ > SPDY3) {
    // Deflate compression doesn't apply to HPACK.
    return;
  }

  SpdyFramer send_framer(spdy_version_);
  SpdyFramer recv_framer(spdy_version_);

  send_framer.set_enable_compression(true);
  recv_framer.set_enable_compression(true);

  const char kHeader1[] = "header1";
  const char kHeader2[] = "header2";
  const char kHeader3[] = "header3";
  const char kValue1[] = "value1";
  const char kValue2[] = "value2";
  const char kValue3[] = "value3";

  // SYN_STREAM #1
  SpdyHeaderBlock block;
  block[kHeader1] = kValue1;
  block[kHeader2] = kValue2;
  SpdySynStreamIR syn_ir_1(1);
  syn_ir_1.set_name_value_block(block);
  scoped_ptr<SpdyFrame> syn_frame_1(send_framer.SerializeFrame(syn_ir_1));
  EXPECT_TRUE(syn_frame_1.get() != NULL);

  // SYN_STREAM #2
  block[kHeader3] = kValue3;
  SpdySynStreamIR syn_stream(3);
  syn_stream.set_name_value_block(block);
  scoped_ptr<SpdyFrame> syn_frame_2(send_framer.SerializeSynStream(syn_stream));
  EXPECT_TRUE(syn_frame_2.get() != NULL);

  // Now start decompressing
  scoped_ptr<SpdyFrame> decompressed;
  scoped_ptr<SpdyFrame> uncompressed;
  base::StringPiece serialized_headers;
  SpdyHeaderBlock decompressed_headers;

  // Decompress SYN_STREAM #1
  decompressed.reset(
      SpdyFramerTestUtil::DecompressFrame(&recv_framer, *syn_frame_1));
  EXPECT_TRUE(decompressed.get() != NULL);
  serialized_headers = GetSerializedHeaders(decompressed.get(), send_framer);
  EXPECT_TRUE(recv_framer.ParseHeaderBlockInBuffer(serialized_headers.data(),
                                                   serialized_headers.size(),
                                                   &decompressed_headers));
  EXPECT_EQ(2u, decompressed_headers.size());
  EXPECT_EQ(kValue1, decompressed_headers[kHeader1]);
  EXPECT_EQ(kValue2, decompressed_headers[kHeader2]);

  // Decompress SYN_STREAM #2
  decompressed.reset(
      SpdyFramerTestUtil::DecompressFrame(&recv_framer, *syn_frame_2));
  EXPECT_TRUE(decompressed.get() != NULL);
  serialized_headers = GetSerializedHeaders(decompressed.get(), send_framer);
  decompressed_headers.clear();
  EXPECT_TRUE(recv_framer.ParseHeaderBlockInBuffer(serialized_headers.data(),
                                                   serialized_headers.size(),
                                                   &decompressed_headers));
  EXPECT_EQ(3u, decompressed_headers.size());
  EXPECT_EQ(kValue1, decompressed_headers[kHeader1]);
  EXPECT_EQ(kValue2, decompressed_headers[kHeader2]);
  EXPECT_EQ(kValue3, decompressed_headers[kHeader3]);
}
// Verify we can decompress the stream even if handed over to the
// framer 1 byte at a time.
TEST_P(SpdyFramerTest, UnclosedStreamDataCompressorsOneByteAtATime) {
  SpdyFramer send_framer(spdy_version_);

  send_framer.set_enable_compression(true);

  const char kHeader1[] = "header1";
  const char kHeader2[] = "header2";
  const char kValue1[] = "value1";
  const char kValue2[] = "value2";

  SpdyHeadersIR headers(1);
  headers.SetHeader(kHeader1, kValue1);
  headers.SetHeader(kHeader2, kValue2);
  scoped_ptr<SpdyFrame> headers_frame(send_framer.SerializeHeaders(headers));
  EXPECT_TRUE(headers_frame.get() != NULL);

  // NOTE(review): arraysize(bytes) includes the trailing NUL, so the payload
  // is 36 bytes; the data_bytes_ expectation below matches that deliberately.
  const char bytes[] = "this is a test test test test test!";
  SpdyDataIR data_ir(1, StringPiece(bytes, arraysize(bytes)));
  data_ir.set_fin(true);
  scoped_ptr<SpdyFrame> send_frame(send_framer.SerializeData(data_ir));
  EXPECT_TRUE(send_frame.get() != NULL);

  // Run the inputs through the framer, one byte at a time.
  TestSpdyVisitor visitor(spdy_version_);
  visitor.use_compression_ = true;
  const unsigned char* data;
  data = reinterpret_cast<const unsigned char*>(headers_frame->data());
  for (size_t idx = 0; idx < headers_frame->size(); ++idx) {
    visitor.SimulateInFramer(data + idx, 1);
    ASSERT_EQ(0, visitor.error_count_);
  }
  data = reinterpret_cast<const unsigned char*>(send_frame->data());
  for (size_t idx = 0; idx < send_frame->size(); ++idx) {
    visitor.SimulateInFramer(data + idx, 1);
    ASSERT_EQ(0, visitor.error_count_);
  }

  EXPECT_EQ(0, visitor.error_count_);
  EXPECT_EQ(0, visitor.syn_frame_count_);
  EXPECT_EQ(0, visitor.syn_reply_frame_count_);
  EXPECT_EQ(1, visitor.headers_frame_count_);
  EXPECT_EQ(arraysize(bytes), static_cast<unsigned>(visitor.data_bytes_));
  EXPECT_EQ(0, visitor.fin_frame_count_);
  EXPECT_EQ(0, visitor.fin_flag_count_);
  EXPECT_EQ(1, visitor.zero_length_data_frame_count_);
  EXPECT_EQ(1, visitor.data_frame_count_);
}
// Verifies the exact serialized bytes of a WINDOW_UPDATE frame for both the
// SPDY2/3 and SPDY4 wire formats.
TEST_P(SpdyFramerTest, WindowUpdateFrame) {
  SpdyFramer framer(spdy_version_);
  scoped_ptr<SpdyFrame> frame(framer.SerializeWindowUpdate(
      SpdyWindowUpdateIR(1, 0x12345678)));

  const char kDescription[] = "WINDOW_UPDATE frame, stream 1, delta 0x12345678";
  const unsigned char kV3FrameData[] = {  // Also applies for V2.
    0x80, spdy_version_ch_, 0x00, 0x09,
    0x00, 0x00, 0x00, 0x08,
    0x00, 0x00, 0x00, 0x01,
    0x12, 0x34, 0x56, 0x78
  };
  const unsigned char kV4FrameData[] = {
    0x00, 0x00, 0x04, 0x08,
    0x00, 0x00, 0x00, 0x00,
    0x01, 0x12, 0x34, 0x56,
    0x78
  };

  if (IsSpdy4()) {
    CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
  } else {
    CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
  }
}
// Exercises SpdyFramer::SerializeData() and
// SerializeDataFrameHeaderWithPaddingLengthField() against golden wire
// bytes for several cases: plain data, heavily padded data, lightly
// padded data, 1-byte padding, a 0xff data byte, FIN, empty payload,
// max stream ID, and (SPDY<4 only) a 4MB frame.
TEST_P(SpdyFramerTest, CreateDataFrame) {
  SpdyFramer framer(spdy_version_);
  {
    const char kDescription[] = "'hello' data frame, no FIN";
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x05,
      'h', 'e', 'l', 'l',
      'o'
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x05, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x01, 'h', 'e', 'l',
      'l', 'o'
    };
    const char bytes[] = "hello";
    SpdyDataIR data_ir(1, StringPiece(bytes, strlen(bytes)));
    scoped_ptr<SpdyFrame> frame(framer.SerializeData(data_ir));
    if (IsSpdy4()) {
      CompareFrame(
          kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    } else {
      CompareFrame(
          kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    }
    // Serialize only the frame header and compare it against the first
    // GetDataFrameMinimumSize() bytes of the golden frame.
    SpdyDataIR data_header_ir(1);
    data_header_ir.SetDataShallow(base::StringPiece(bytes, strlen(bytes)));
    frame.reset(framer.SerializeDataFrameHeaderWithPaddingLengthField(
        data_header_ir));
    CompareCharArraysWithHexError(
        kDescription,
        reinterpret_cast<const unsigned char*>(frame->data()),
        framer.GetDataFrameMinimumSize(),
        IsSpdy4() ? kV4FrameData : kV3FrameData,
        framer.GetDataFrameMinimumSize());
  }
  {
    const char kDescription[] = "'hello' data frame with more padding, no FIN";
    // SPDY 2/3 have no padding concept, so the golden bytes are the
    // same as the unpadded case.
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x05,
      'h', 'e', 'l', 'l',
      'o'
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0xfd, 0x00,  // Length = 253.  PADDED set.
      0x08, 0x00, 0x00, 0x00,
      0x01, 0xf7,              // Pad length field.
      'h', 'e', 'l', 'l',      // Data
      'o',
      // Padding of 247 zeros.
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
      '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0',
    };
    const char bytes[] = "hello";
    SpdyDataIR data_ir(1, StringPiece(bytes, strlen(bytes)));
    // 247 zeros and the pad length field make the overall padding to be 248
    // bytes.
    data_ir.set_padding_len(248);
    scoped_ptr<SpdyFrame> frame(framer.SerializeData(data_ir));
    if (IsSpdy4()) {
      CompareFrame(
          kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    } else {
      CompareFrame(
          kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    }
    // Header-only serialization must also match the golden prefix.
    frame.reset(framer.SerializeDataFrameHeaderWithPaddingLengthField(data_ir));
    CompareCharArraysWithHexError(
        kDescription,
        reinterpret_cast<const unsigned char*>(frame->data()),
        framer.GetDataFrameMinimumSize(),
        IsSpdy4() ? kV4FrameData : kV3FrameData,
        framer.GetDataFrameMinimumSize());
  }
  {
    const char kDescription[] = "'hello' data frame with few padding, no FIN";
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x05,
      'h', 'e', 'l', 'l',
      'o'
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x0d, 0x00,  // Length = 13.  PADDED set.
      0x08, 0x00, 0x00, 0x00,
      0x01, 0x07,              // Pad length field.
      'h', 'e', 'l', 'l',      // Data
      'o',
      '0', '0', '0', '0',      // Padding
      '0', '0', '0'
    };
    const char bytes[] = "hello";
    SpdyDataIR data_ir(1, StringPiece(bytes, strlen(bytes)));
    // 7 zeros and the pad length field make the overall padding to be 8 bytes.
    data_ir.set_padding_len(8);
    scoped_ptr<SpdyFrame> frame(framer.SerializeData(data_ir));
    if (IsSpdy4()) {
      CompareFrame(
          kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    } else {
      CompareFrame(
          kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    }
  }
  {
    const char kDescription[] =
        "'hello' data frame with 1 byte padding, no FIN";
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x05,
      'h', 'e', 'l', 'l',
      'o'
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x06, 0x00,  // Length = 6.  PADDED set.
      0x08, 0x00, 0x00, 0x00,
      0x01, 0x00,              // Pad length field.
      'h', 'e', 'l', 'l',      // Data
      'o',
    };
    const char bytes[] = "hello";
    SpdyDataIR data_ir(1, StringPiece(bytes, strlen(bytes)));
    // The pad length field itself is used for the 1-byte padding and no padding
    // payload is needed.
    data_ir.set_padding_len(1);
    scoped_ptr<SpdyFrame> frame(framer.SerializeData(data_ir));
    if (IsSpdy4()) {
      CompareFrame(
          kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    } else {
      CompareFrame(
          kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    }
    // Header-only serialization must also match the golden prefix.
    frame.reset(framer.SerializeDataFrameHeaderWithPaddingLengthField(data_ir));
    CompareCharArraysWithHexError(
        kDescription,
        reinterpret_cast<const unsigned char*>(frame->data()),
        framer.GetDataFrameMinimumSize(),
        IsSpdy4() ? kV4FrameData : kV3FrameData,
        framer.GetDataFrameMinimumSize());
  }
  {
    // A 0xff payload byte checks that data is not sign-extended or
    // otherwise mangled on the way to the wire.
    const char kDescription[] = "Data frame with negative data byte, no FIN";
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x01,
      0xff
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x01, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x01,
      0xff
    };
    SpdyDataIR data_ir(1, StringPiece("\xff", 1));
    scoped_ptr<SpdyFrame> frame(framer.SerializeData(data_ir));
    if (IsSpdy4()) {
      CompareFrame(
          kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    } else {
      CompareFrame(
          kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    }
  }
  {
    const char kDescription[] = "'hello' data frame, with FIN";
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x00, 0x00, 0x00, 0x01,
      0x01, 0x00, 0x00, 0x05,
      'h', 'e', 'l', 'l',
      'o'
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x05, 0x00,
      0x01, 0x00, 0x00, 0x00,
      0x01, 'h', 'e', 'l',
      'l', 'o'
    };
    SpdyDataIR data_ir(1, StringPiece("hello", 5));
    data_ir.set_fin(true);
    scoped_ptr<SpdyFrame> frame(framer.SerializeData(data_ir));
    if (IsSpdy4()) {
      CompareFrame(
          kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    } else {
      CompareFrame(
          kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    }
  }
  {
    const char kDescription[] = "Empty data frame";
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x00,
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x01,
    };
    SpdyDataIR data_ir(1, StringPiece());
    scoped_ptr<SpdyFrame> frame(framer.SerializeData(data_ir));
    if (IsSpdy4()) {
      CompareFrame(
          kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    } else {
      CompareFrame(
          kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    }
    // Header-only serialization must also match the golden prefix.
    frame.reset(framer.SerializeDataFrameHeaderWithPaddingLengthField(data_ir));
    CompareCharArraysWithHexError(
        kDescription,
        reinterpret_cast<const unsigned char*>(frame->data()),
        framer.GetDataFrameMinimumSize(),
        IsSpdy4() ? kV4FrameData : kV3FrameData,
        framer.GetDataFrameMinimumSize());
  }
  {
    const char kDescription[] = "Data frame with max stream ID";
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x7f, 0xff, 0xff, 0xff,
      0x01, 0x00, 0x00, 0x05,
      'h', 'e', 'l', 'l',
      'o'
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x05, 0x00,
      0x01, 0x7f, 0xff, 0xff,
      0xff, 'h', 'e', 'l',
      'l', 'o'
    };
    SpdyDataIR data_ir(0x7fffffff, "hello");
    data_ir.set_fin(true);
    scoped_ptr<SpdyFrame> frame(framer.SerializeData(data_ir));
    if (IsSpdy4()) {
      CompareFrame(
          kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    } else {
      CompareFrame(
          kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    }
  }
  if (!IsSpdy4()) {
    // This test does not apply to SPDY 4 because the max frame size is smaller
    // than 4MB.
    const char kDescription[] = "Large data frame";
    const int kDataSize = 4 * 1024 * 1024;  // 4 MB
    const string kData(kDataSize, 'A');
    const unsigned char kFrameHeader[] = {
      0x00, 0x00, 0x00, 0x01,
      0x01, 0x40, 0x00, 0x00,
    };
    // Build the expected frame as header + kDataSize 'A' bytes.
    const int kFrameSize = arraysize(kFrameHeader) + kDataSize;
    scoped_ptr<unsigned char[]> expected_frame_data(
        new unsigned char[kFrameSize]);
    memcpy(expected_frame_data.get(), kFrameHeader, arraysize(kFrameHeader));
    memset(expected_frame_data.get() + arraysize(kFrameHeader), 'A', kDataSize);
    SpdyDataIR data_ir(1, StringPiece(kData.data(), kData.size()));
    data_ir.set_fin(true);
    scoped_ptr<SpdyFrame> frame(framer.SerializeData(data_ir));
    CompareFrame(kDescription, *frame, expected_frame_data.get(), kFrameSize);
  }
}
// Exercises SpdyFramer::SerializeSynStream() with compression disabled,
// comparing against golden wire bytes for SPDY 2 and SPDY 3.  Covers the
// lowest priority, a 0-length header name, and a 0-length header value
// (the latter two with FIN and max stream/associated-stream IDs).
TEST_P(SpdyFramerTest, CreateSynStreamUncompressed) {
  if (!IsSpdy2() && !IsSpdy3()) {
    // SYN_STREAM unsupported in SPDY>3
    return;
  }
  SpdyFramer framer(spdy_version_);
  framer.set_enable_compression(false);
  {
    const char kDescription[] = "SYN_STREAM frame, lowest pri, no FIN";
    // The priority field is wider in SPDY 3 (3 bits) than SPDY 2 (2 bits),
    // hence the different encodings of the lowest priority.
    const unsigned char kPri = IsSpdy2() ? 0xC0 : 0xE0;
    const unsigned char kV2FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x20,
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x00,
      kPri, 0x00, 0x00, 0x02,
      0x00, 0x03, 'b', 'a',
      'r', 0x00, 0x03, 'f',
      'o', 'o', 0x00, 0x03,
      'f', 'o', 'o', 0x00,
      0x03, 'b', 'a', 'r'
    };
    const unsigned char kV3FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x2a,
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x00,
      kPri, 0x00, 0x00, 0x00,
      0x00, 0x02, 0x00, 0x00,
      0x00, 0x03, 'b', 'a',
      'r', 0x00, 0x00, 0x00,
      0x03, 'f', 'o', 'o',
      0x00, 0x00, 0x00, 0x03,
      'f', 'o', 'o', 0x00,
      0x00, 0x00, 0x03, 'b',
      'a', 'r'
    };
    SpdySynStreamIR syn_stream(1);
    syn_stream.set_priority(framer.GetLowestPriority());
    syn_stream.SetHeader("bar", "foo");
    syn_stream.SetHeader("foo", "bar");
    scoped_ptr<SpdyFrame> frame(framer.SerializeSynStream(syn_stream));
    if (IsSpdy2()) {
      CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
    } else if (IsSpdy3()) {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    } else {
      LOG(FATAL) << "Unsupported version in test.";
    }
  }
  {
    const char kDescription[] =
        "SYN_STREAM frame with a 0-length header name, highest pri, FIN, "
        "max stream ID";
    const unsigned char kV2FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x01,
      0x01, 0x00, 0x00, 0x1D,
      0x7f, 0xff, 0xff, 0xff,
      0x7f, 0xff, 0xff, 0xff,
      0x00, 0x00, 0x00, 0x02,
      0x00, 0x00, 0x00, 0x03,
      'f', 'o', 'o', 0x00,
      0x03, 'f', 'o', 'o',
      0x00, 0x03, 'b', 'a',
      'r'
    };
    const unsigned char kV3FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x01,
      0x01, 0x00, 0x00, 0x27,
      0x7f, 0xff, 0xff, 0xff,
      0x7f, 0xff, 0xff, 0xff,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x02, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x03, 'f', 'o',
      'o', 0x00, 0x00, 0x00,
      0x03, 'f', 'o', 'o',
      0x00, 0x00, 0x00, 0x03,
      'b', 'a', 'r'
    };
    SpdySynStreamIR syn_stream(0x7fffffff);
    syn_stream.set_associated_to_stream_id(0x7fffffff);
    syn_stream.set_priority(framer.GetHighestPriority());
    syn_stream.set_fin(true);
    syn_stream.SetHeader("", "foo");
    syn_stream.SetHeader("foo", "bar");
    scoped_ptr<SpdyFrame> frame(framer.SerializeSynStream(syn_stream));
    if (IsSpdy2()) {
      CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
    } else if (IsSpdy3()) {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    } else {
      LOG(FATAL) << "Unsupported version in test.";
    }
  }
  {
    const char kDescription[] =
        "SYN_STREAM frame with a 0-length header val, high pri, FIN, "
        "max stream ID";
    // Priority 1 encodes differently in SPDY 2 vs SPDY 3.
    const unsigned char kPri = IsSpdy2() ? 0x40 : 0x20;
    const unsigned char kV2FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x01,
      0x01, 0x00, 0x00, 0x1D,
      0x7f, 0xff, 0xff, 0xff,
      0x7f, 0xff, 0xff, 0xff,
      kPri, 0x00, 0x00, 0x02,
      0x00, 0x03, 'b', 'a',
      'r', 0x00, 0x03, 'f',
      'o', 'o', 0x00, 0x03,
      'f', 'o', 'o', 0x00,
      0x00
    };
    const unsigned char kV3FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x01,
      0x01, 0x00, 0x00, 0x27,
      0x7f, 0xff, 0xff, 0xff,
      0x7f, 0xff, 0xff, 0xff,
      kPri, 0x00, 0x00, 0x00,
      0x00, 0x02, 0x00, 0x00,
      0x00, 0x03, 'b', 'a',
      'r', 0x00, 0x00, 0x00,
      0x03, 'f', 'o', 'o',
      0x00, 0x00, 0x00, 0x03,
      'f', 'o', 'o', 0x00,
      0x00, 0x00, 0x00
    };
    SpdySynStreamIR syn_stream(0x7fffffff);
    syn_stream.set_associated_to_stream_id(0x7fffffff);
    syn_stream.set_priority(1);
    syn_stream.set_fin(true);
    syn_stream.SetHeader("bar", "foo");
    syn_stream.SetHeader("foo", "");
    scoped_ptr<SpdyFrame> frame(framer.SerializeSynStream(syn_stream));
    if (IsSpdy2()) {
      CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
    } else if (IsSpdy3()) {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    } else {
      LOG(FATAL) << "Unsupported version in test.";
    }
  }
}
// TODO(phajdan.jr): Clean up after we no longer need
// to workaround http://crbug.com/139744.
#if !defined(USE_SYSTEM_ZLIB)
// Exercises SpdyFramer::SerializeSynStream() with compression ENABLED.
// The golden bytes embed specific zlib deflate output, which is why the
// enclosing #if excludes system-zlib builds (their output can differ).
TEST_P(SpdyFramerTest, CreateSynStreamCompressed) {
  if (!IsSpdy2() && !IsSpdy3()) {
    // SYN_STREAM not supported for SPDY>3
    return;
  }
  SpdyFramer framer(spdy_version_);
  framer.set_enable_compression(true);
  {
    const char kDescription[] =
        "SYN_STREAM frame, low pri, no FIN";
    const SpdyPriority priority = IsSpdy2() ? 2 : 4;
    // Golden frames: header fields in the clear, header block deflated.
    const unsigned char kV2FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x36,
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x00,
      0x80, 0x00, 0x38, 0xea,
      0xdf, 0xa2, 0x51, 0xb2,
      0x62, 0x60, 0x62, 0x60,
      0x4e, 0x4a, 0x2c, 0x62,
      0x60, 0x06, 0x08, 0xa0,
      0xb4, 0xfc, 0x7c, 0x80,
      0x00, 0x62, 0x60, 0x4e,
      0xcb, 0xcf, 0x67, 0x60,
      0x06, 0x08, 0xa0, 0xa4,
      0xc4, 0x22, 0x80, 0x00,
      0x02, 0x00, 0x00, 0x00,
      0xff, 0xff,
    };
    const unsigned char kV3FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x37,
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x00,
      0x80, 0x00, 0x38, 0xEA,
      0xE3, 0xC6, 0xA7, 0xC2,
      0x02, 0xE5, 0x0E, 0x50,
      0xC2, 0x4B, 0x4A, 0x04,
      0xE5, 0x0B, 0x66, 0x80,
      0x00, 0x4A, 0xCB, 0xCF,
      0x07, 0x08, 0x20, 0x10,
      0x95, 0x96, 0x9F, 0x0F,
      0xA2, 0x00, 0x02, 0x28,
      0x29, 0xB1, 0x08, 0x20,
      0x80, 0x00, 0x00, 0x00,
      0x00, 0xFF, 0xFF,
    };
    SpdySynStreamIR syn_stream(1);
    syn_stream.set_priority(priority);
    syn_stream.SetHeader("bar", "foo");
    syn_stream.SetHeader("foo", "bar");
    scoped_ptr<SpdyFrame> frame(framer.SerializeSynStream(syn_stream));
    if (IsSpdy2()) {
      CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
    } else if (IsSpdy3()) {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    } else {
      LOG(FATAL) << "Unsupported version in test.";
    }
  }
}
#endif // !defined(USE_SYSTEM_ZLIB)
// Exercises SpdyFramer::SerializeSynReply() with compression disabled,
// comparing against golden wire bytes for SPDY 2 and SPDY 3.  Covers the
// plain case, a 0-length header name, and a 0-length header value (the
// latter two with FIN and max stream ID).
TEST_P(SpdyFramerTest, CreateSynReplyUncompressed) {
  if (spdy_version_ > SPDY3) {
    // SYN_REPLY unsupported in SPDY>3
    return;
  }
  SpdyFramer framer(spdy_version_);
  framer.set_enable_compression(false);
  {
    const char kDescription[] = "SYN_REPLY frame, no FIN";
    const unsigned char kV2FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x02,
      0x00, 0x00, 0x00, 0x1C,
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x02,
      0x00, 0x03, 'b', 'a',
      'r', 0x00, 0x03, 'f',
      'o', 'o', 0x00, 0x03,
      'f', 'o', 'o', 0x00,
      0x03, 'b', 'a', 'r'
    };
    const unsigned char kV3FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x02,
      0x00, 0x00, 0x00, 0x24,
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x02,
      0x00, 0x00, 0x00, 0x03,
      'b', 'a', 'r', 0x00,
      0x00, 0x00, 0x03, 'f',
      'o', 'o', 0x00, 0x00,
      0x00, 0x03, 'f', 'o',
      'o', 0x00, 0x00, 0x00,
      0x03, 'b', 'a', 'r'
    };
    SpdySynReplyIR syn_reply(1);
    syn_reply.SetHeader("bar", "foo");
    syn_reply.SetHeader("foo", "bar");
    scoped_ptr<SpdyFrame> frame(framer.SerializeSynReply(syn_reply));
    if (IsSpdy2()) {
      CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
    } else if (IsSpdy3()) {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    } else {
      LOG(FATAL) << "Unsupported version in test.";
    }
  }
  {
    const char kDescription[] =
        "SYN_REPLY frame with a 0-length header name, FIN, max stream ID";
    const unsigned char kV2FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x02,
      0x01, 0x00, 0x00, 0x19,
      0x7f, 0xff, 0xff, 0xff,
      0x00, 0x00, 0x00, 0x02,
      0x00, 0x00, 0x00, 0x03,
      'f', 'o', 'o', 0x00,
      0x03, 'f', 'o', 'o',
      0x00, 0x03, 'b', 'a',
      'r'
    };
    const unsigned char kV3FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x02,
      0x01, 0x00, 0x00, 0x21,
      0x7f, 0xff, 0xff, 0xff,
      0x00, 0x00, 0x00, 0x02,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x03,
      'f', 'o', 'o', 0x00,
      0x00, 0x00, 0x03, 'f',
      'o', 'o', 0x00, 0x00,
      0x00, 0x03, 'b', 'a',
      'r'
    };
    SpdySynReplyIR syn_reply(0x7fffffff);
    syn_reply.set_fin(true);
    syn_reply.SetHeader("", "foo");
    syn_reply.SetHeader("foo", "bar");
    scoped_ptr<SpdyFrame> frame(framer.SerializeSynReply(syn_reply));
    if (IsSpdy2()) {
      CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
    } else if (IsSpdy3()) {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    } else {
      LOG(FATAL) << "Unsupported version in test.";
    }
  }
  {
    const char kDescription[] =
        "SYN_REPLY frame with a 0-length header val, FIN, max stream ID";
    const unsigned char kV2FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x02,
      0x01, 0x00, 0x00, 0x19,
      0x7f, 0xff, 0xff, 0xff,
      0x00, 0x00, 0x00, 0x02,
      0x00, 0x03, 'b', 'a',
      'r', 0x00, 0x03, 'f',
      'o', 'o', 0x00, 0x03,
      'f', 'o', 'o', 0x00,
      0x00
    };
    const unsigned char kV3FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x02,
      0x01, 0x00, 0x00, 0x21,
      0x7f, 0xff, 0xff, 0xff,
      0x00, 0x00, 0x00, 0x02,
      0x00, 0x00, 0x00, 0x03,
      'b', 'a', 'r', 0x00,
      0x00, 0x00, 0x03, 'f',
      'o', 'o', 0x00, 0x00,
      0x00, 0x03, 'f', 'o',
      'o', 0x00, 0x00, 0x00,
      0x00
    };
    SpdySynReplyIR syn_reply(0x7fffffff);
    syn_reply.set_fin(true);
    syn_reply.SetHeader("bar", "foo");
    syn_reply.SetHeader("foo", "");
    scoped_ptr<SpdyFrame> frame(framer.SerializeSynReply(syn_reply));
    if (IsSpdy2()) {
      CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
    } else if (IsSpdy3()) {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    } else {
      LOG(FATAL) << "Unsupported version in test.";
    }
  }
}
// TODO(phajdan.jr): Clean up after we no longer need
// to workaround http://crbug.com/139744.
#if !defined(USE_SYSTEM_ZLIB)
// Exercises SpdyFramer::SerializeSynReply() with compression ENABLED.
// The golden bytes embed specific zlib deflate output, which is why the
// enclosing #if excludes system-zlib builds (their output can differ).
TEST_P(SpdyFramerTest, CreateSynReplyCompressed) {
  if (spdy_version_ > SPDY3) {
    // SYN_REPLY unsupported in SPDY>3
    return;
  }
  SpdyFramer framer(spdy_version_);
  framer.set_enable_compression(true);
  {
    const char kDescription[] = "SYN_REPLY frame, no FIN";
    // Golden frames: header fields in the clear, header block deflated.
    const unsigned char kV2FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x02,
      0x00, 0x00, 0x00, 0x32,
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x38, 0xea,
      0xdf, 0xa2, 0x51, 0xb2,
      0x62, 0x60, 0x62, 0x60,
      0x4e, 0x4a, 0x2c, 0x62,
      0x60, 0x06, 0x08, 0xa0,
      0xb4, 0xfc, 0x7c, 0x80,
      0x00, 0x62, 0x60, 0x4e,
      0xcb, 0xcf, 0x67, 0x60,
      0x06, 0x08, 0xa0, 0xa4,
      0xc4, 0x22, 0x80, 0x00,
      0x02, 0x00, 0x00, 0x00,
      0xff, 0xff,
    };
    const unsigned char kV3FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x02,
      0x00, 0x00, 0x00, 0x31,
      0x00, 0x00, 0x00, 0x01,
      0x38, 0xea, 0xe3, 0xc6,
      0xa7, 0xc2, 0x02, 0xe5,
      0x0e, 0x50, 0xc2, 0x4b,
      0x4a, 0x04, 0xe5, 0x0b,
      0x66, 0x80, 0x00, 0x4a,
      0xcb, 0xcf, 0x07, 0x08,
      0x20, 0x10, 0x95, 0x96,
      0x9f, 0x0f, 0xa2, 0x00,
      0x02, 0x28, 0x29, 0xb1,
      0x08, 0x20, 0x80, 0x00,
      0x00, 0x00, 0x00, 0xff,
      0xff,
    };
    SpdySynReplyIR syn_reply(1);
    syn_reply.SetHeader("bar", "foo");
    syn_reply.SetHeader("foo", "bar");
    scoped_ptr<SpdyFrame> frame(framer.SerializeSynReply(syn_reply));
    if (IsSpdy2()) {
      CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
    } else if (IsSpdy3()) {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    } else {
      LOG(FATAL) << "Unsupported version in test.";
    }
  }
}
#endif // !defined(USE_SYSTEM_ZLIB)
// Exercises SpdyFramer::SerializeRstStream() against golden wire bytes:
// a basic RST_STREAM with an opaque description ("RST" appears on the
// wire only in the SPDY4/HTTP2 encoding), a max stream ID, and a larger
// status code.
TEST_P(SpdyFramerTest, CreateRstStream) {
  SpdyFramer framer(spdy_version_);
  {
    const char kDescription[] = "RST_STREAM frame";
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x80, spdy_version_ch_, 0x00, 0x03,
      0x00, 0x00, 0x00, 0x08,
      0x00, 0x00, 0x00, 0x01,
      0x00, 0x00, 0x00, 0x01,
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x07, 0x03,
      0x00, 0x00, 0x00, 0x00,
      0x01, 0x00, 0x00, 0x00,
      0x01, 0x52, 0x53, 0x54  // Trailing 'R', 'S', 'T' opaque data.
    };
    SpdyRstStreamIR rst_stream(1, RST_STREAM_PROTOCOL_ERROR, "RST");
    scoped_ptr<SpdyFrame> frame(framer.SerializeRstStream(rst_stream));
    if (IsSpdy4()) {
      CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    } else {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    }
  }
  {
    const char kDescription[] = "RST_STREAM frame with max stream ID";
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x80, spdy_version_ch_, 0x00, 0x03,
      0x00, 0x00, 0x00, 0x08,
      0x7f, 0xff, 0xff, 0xff,
      0x00, 0x00, 0x00, 0x01,
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x04, 0x03,
      0x00, 0x7f, 0xff, 0xff,
      0xff, 0x00, 0x00, 0x00,
      0x01,
    };
    SpdyRstStreamIR rst_stream(0x7FFFFFFF,
                               RST_STREAM_PROTOCOL_ERROR,
                               "");
    scoped_ptr<SpdyFrame> frame(framer.SerializeRstStream(rst_stream));
    if (IsSpdy4()) {
      CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    } else {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    }
  }
  {
    const char kDescription[] = "RST_STREAM frame with max status code";
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x80, spdy_version_ch_, 0x00, 0x03,
      0x00, 0x00, 0x00, 0x08,
      0x7f, 0xff, 0xff, 0xff,
      0x00, 0x00, 0x00, 0x06,
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x04, 0x03,
      0x00, 0x7f, 0xff, 0xff,
      0xff, 0x00, 0x00, 0x00,
      0x06,
    };
    SpdyRstStreamIR rst_stream(0x7FFFFFFF,
                               RST_STREAM_INTERNAL_ERROR,
                               "");
    scoped_ptr<SpdyFrame> frame(framer.SerializeRstStream(rst_stream));
    if (IsSpdy4()) {
      CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    } else {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    }
  }
}
// Exercises SpdyFramer::SerializeSettings() against golden wire bytes:
// a single setting with persistence flags, a four-setting frame, and an
// empty SETTINGS frame.
TEST_P(SpdyFramerTest, CreateSettings) {
  SpdyFramer framer(spdy_version_);
  {
    const char kDescription[] = "Network byte order SETTINGS frame";
    // SPDY 2 stored setting IDs in little-endian order on the wire,
    // SPDY 3 fixed that to network byte order — hence the differing
    // golden bytes for the same setting.
    const unsigned char kV2FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x04,
      0x00, 0x00, 0x00, 0x0c,
      0x00, 0x00, 0x00, 0x01,
      0x07, 0x00, 0x00, 0x01,
      0x0a, 0x0b, 0x0c, 0x0d,
    };
    const unsigned char kV3FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x04,
      0x00, 0x00, 0x00, 0x0c,
      0x00, 0x00, 0x00, 0x01,
      0x01, 0x00, 0x00, 0x07,
      0x0a, 0x0b, 0x0c, 0x0d,
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x06, 0x04,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x04, 0x0a,
      0x0b, 0x0c, 0x0d,
    };
    uint32 kValue = 0x0a0b0c0d;
    SpdySettingsIR settings_ir;
    SpdySettingsFlags kFlags = static_cast<SpdySettingsFlags>(0x01);
    SpdySettingsIds kId = SETTINGS_INITIAL_WINDOW_SIZE;
    SettingsMap settings;
    settings[kId] = SettingsFlagsAndValue(kFlags, kValue);
    EXPECT_EQ(kFlags, settings[kId].first);
    EXPECT_EQ(kValue, settings[kId].second);
    settings_ir.AddSetting(kId,
                           kFlags & SETTINGS_FLAG_PLEASE_PERSIST,
                           kFlags & SETTINGS_FLAG_PERSISTED,
                           kValue);
    scoped_ptr<SpdyFrame> frame(framer.SerializeSettings(settings_ir));
    if (IsSpdy2()) {
      CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
    } else if (IsSpdy3()) {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    } else {
      CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    }
  }
  {
    const char kDescription[] = "Basic SETTINGS frame";
    const unsigned char kV2FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x04,
      0x00, 0x00, 0x00, 0x24,
      0x00, 0x00, 0x00, 0x04,
      0x01, 0x00, 0x00, 0x00,  // 1st Setting
      0x00, 0x00, 0x00, 0x05,
      0x02, 0x00, 0x00, 0x00,  // 2nd Setting
      0x00, 0x00, 0x00, 0x06,
      0x03, 0x00, 0x00, 0x00,  // 3rd Setting
      0x00, 0x00, 0x00, 0x07,
      0x04, 0x00, 0x00, 0x00,  // 4th Setting
      0x00, 0x00, 0x00, 0x08,
    };
    const unsigned char kV3FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x04,
      0x00, 0x00, 0x00, 0x24,
      0x00, 0x00, 0x00, 0x04,
      0x00, 0x00, 0x00, 0x01,  // 1st Setting
      0x00, 0x00, 0x00, 0x05,
      0x00, 0x00, 0x00, 0x02,  // 2nd Setting
      0x00, 0x00, 0x00, 0x06,
      0x00, 0x00, 0x00, 0x03,  // 3rd Setting
      0x00, 0x00, 0x00, 0x07,
      0x00, 0x00, 0x00, 0x04,  // 4th Setting
      0x00, 0x00, 0x00, 0x08,
    };
    // These end up seemingly out of order because of the way that our internal
    // ordering for settings_ir works. HTTP2 has no requirement on ordering on
    // the wire.
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x18, 0x04,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x03,        // 3rd Setting
      0x00, 0x00, 0x00, 0x07,
      0x00, 0x04,              // 4th Setting
      0x00, 0x00, 0x00, 0x08,
      0x00, 0x01,              // 1st Setting
      0x00, 0x00, 0x00, 0x05,
      0x00, 0x02,              // 2nd Setting
      0x00, 0x00, 0x00, 0x06,
    };
    SpdySettingsIR settings_ir;
    settings_ir.AddSetting(SpdyConstants::ParseSettingId(spdy_version_, 1),
                           false,  // persist
                           false,  // persisted
                           5);
    settings_ir.AddSetting(SpdyConstants::ParseSettingId(spdy_version_, 2),
                           false,  // persist
                           false,  // persisted
                           6);
    settings_ir.AddSetting(SpdyConstants::ParseSettingId(spdy_version_, 3),
                           false,  // persist
                           false,  // persisted
                           7);
    settings_ir.AddSetting(SpdyConstants::ParseSettingId(spdy_version_, 4),
                           false,  // persist
                           false,  // persisted
                           8);
    scoped_ptr<SpdyFrame> frame(framer.SerializeSettings(settings_ir));
    if (IsSpdy2()) {
      CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
    } else if (IsSpdy3()) {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    } else {
      CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    }
  }
  {
    const char kDescription[] = "Empty SETTINGS frame";
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x80, spdy_version_ch_, 0x00, 0x04,
      0x00, 0x00, 0x00, 0x04,
      0x00, 0x00, 0x00, 0x00,
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x00, 0x04,
      0x00, 0x00, 0x00, 0x00,
      0x00,
    };
    SpdySettingsIR settings_ir;
    scoped_ptr<SpdyFrame> frame(framer.SerializeSettings(settings_ir));
    if (IsSpdy4()) {
      CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    } else {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    }
  }
}
// Verifies SerializePing() wire output.  SPDY 2/3 carry a 4-byte ping ID;
// SPDY4/HTTP2 carries an 8-byte opaque payload and an ACK flag, so that
// path is additionally exercised with and without the ack bit set.
TEST_P(SpdyFramerTest, CreatePingFrame) {
  SpdyFramer framer(spdy_version_);
  {
    const char kDescription[] = "PING frame";
    const unsigned char kV3FrameData[] = {  // Also applies for V2.
      0x80, spdy_version_ch_, 0x00, 0x06,
      0x00, 0x00, 0x00, 0x04,
      0x12, 0x34, 0x56, 0x78,
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x08, 0x06,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x12, 0x34, 0x56,
      0x78, 0x9a, 0xbc, 0xde,
      0xff,
    };
    const unsigned char kV4FrameDataWithAck[] = {
      0x00, 0x00, 0x08, 0x06,
      0x01, 0x00, 0x00, 0x00,
      0x00, 0x12, 0x34, 0x56,
      0x78, 0x9a, 0xbc, 0xde,
      0xff,
    };
    scoped_ptr<SpdyFrame> ping_frame;
    if (!IsSpdy4()) {
      // Pre-SPDY4: a simple 4-byte ping ID, no ack concept.
      ping_frame.reset(framer.SerializePing(SpdyPingIR(0x12345678ull)));
      CompareFrame(kDescription, *ping_frame, kV3FrameData,
                   arraysize(kV3FrameData));
    } else {
      const SpdyPingId kPingId = 0x123456789abcdeffULL;
      SpdyPingIR ping_ir(kPingId);
      // First serialize without the ack bit (the default state).
      ASSERT_FALSE(ping_ir.is_ack());
      ping_frame.reset(framer.SerializePing(ping_ir));
      CompareFrame(kDescription, *ping_frame, kV4FrameData,
                   arraysize(kV4FrameData));
      // Then flip the ack bit and serialize again.
      ping_ir.set_is_ack(true);
      ping_frame.reset(framer.SerializePing(ping_ir));
      CompareFrame(kDescription, *ping_frame, kV4FrameDataWithAck,
                   arraysize(kV4FrameDataWithAck));
    }
  }
}
// Exercises SpdyFramer::SerializeGoAway() against golden wire bytes.
// SPDY 2 carries only a last-good-stream ID; SPDY 3 adds a status code;
// SPDY4/HTTP2 additionally carries the opaque description ("GA" here).
TEST_P(SpdyFramerTest, CreateGoAway) {
  SpdyFramer framer(spdy_version_);
  {
    const char kDescription[] = "GOAWAY frame";
    const unsigned char kV2FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x07,
      0x00, 0x00, 0x00, 0x04,
      0x00, 0x00, 0x00, 0x00,  // Stream Id
    };
    const unsigned char kV3FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x07,
      0x00, 0x00, 0x00, 0x08,
      0x00, 0x00, 0x00, 0x00,  // Stream Id
      0x00, 0x00, 0x00, 0x00,  // Status
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x0a, 0x07,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x00, 0x00, 0x00,  // Stream id
      0x00, 0x00, 0x00, 0x00,  // Status
      0x00, 0x47, 0x41,        // Opaque Description
    };
    SpdyGoAwayIR goaway_ir(0, GOAWAY_OK, "GA");
    scoped_ptr<SpdyFrame> frame(framer.SerializeGoAway(goaway_ir));
    if (IsSpdy2()) {
      CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
    } else if (IsSpdy3()) {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    } else {
      CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    }
  }
  {
    const char kDescription[] = "GOAWAY frame with max stream ID, status";
    const unsigned char kV2FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x07,
      0x00, 0x00, 0x00, 0x04,
      0x7f, 0xff, 0xff, 0xff,  // Stream Id
    };
    const unsigned char kV3FrameData[] = {
      0x80, spdy_version_ch_, 0x00, 0x07,
      0x00, 0x00, 0x00, 0x08,
      0x7f, 0xff, 0xff, 0xff,  // Stream Id
      0x00, 0x00, 0x00, 0x01,  // Status: PROTOCOL_ERROR.
    };
    const unsigned char kV4FrameData[] = {
      0x00, 0x00, 0x0a, 0x07,
      0x00, 0x00, 0x00, 0x00,
      0x00, 0x7f, 0xff, 0xff,  // Stream Id
      0xff, 0x00, 0x00, 0x00,  // Status: INTERNAL_ERROR.
      0x02, 0x47, 0x41,        // Opaque Description
    };
    SpdyGoAwayIR goaway_ir(0x7FFFFFFF, GOAWAY_INTERNAL_ERROR, "GA");
    scoped_ptr<SpdyFrame> frame(framer.SerializeGoAway(goaway_ir));
    if (IsSpdy2()) {
      CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
    } else if (IsSpdy3()) {
      CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
    } else {
      CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
    }
  }
}
TEST_P(SpdyFramerTest, CreateHeadersUncompressed) {
SpdyFramer framer(spdy_version_);
framer.set_enable_compression(false);
{
const char kDescription[] = "HEADERS frame, no FIN";
const unsigned char kV2FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x08,
0x00, 0x00, 0x00, 0x1C,
0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x00, 0x02,
0x00, 0x03, 'b', 'a',
'r', 0x00, 0x03, 'f',
'o', 'o', 0x00, 0x03,
'f', 'o', 'o', 0x00,
0x03, 'b', 'a', 'r'
};
const unsigned char kV3FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x08,
0x00, 0x00, 0x00, 0x24,
0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x00, 0x02,
0x00, 0x00, 0x00, 0x03,
'b', 'a', 'r', 0x00,
0x00, 0x00, 0x03, 'f',
'o', 'o', 0x00, 0x00,
0x00, 0x03, 'f', 'o',
'o', 0x00, 0x00, 0x00,
0x03, 'b', 'a', 'r'
};
const unsigned char kV4FrameData[] = {
0x00, 0x00, 0x12, 0x01, // Headers: END_HEADERS
0x04, 0x00, 0x00, 0x00, // Stream 1
0x01, 0x00, 0x03, 0x62, // @.ba
0x61, 0x72, 0x03, 0x66, // r.fo
0x6f, 0x6f, 0x00, 0x03, // o@.f
0x66, 0x6f, 0x6f, 0x03, // oo.b
0x62, 0x61, 0x72, // ar
};
SpdyHeadersIR headers_ir(1);
headers_ir.SetHeader("bar", "foo");
headers_ir.SetHeader("foo", "bar");
scoped_ptr<SpdyFrame> frame(framer.SerializeHeaders(headers_ir));
if (IsSpdy2()) {
CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
} else if (IsSpdy3()) {
CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
} else {
CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
}
}
{
const char kDescription[] =
"HEADERS frame with a 0-length header name, FIN, max stream ID";
const unsigned char kV2FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x08,
0x01, 0x00, 0x00, 0x19,
0x7f, 0xff, 0xff, 0xff,
0x00, 0x00, 0x00, 0x02,
0x00, 0x00, 0x00, 0x03,
'f', 'o', 'o', 0x00,
0x03, 'f', 'o', 'o',
0x00, 0x03, 'b', 'a',
'r'
};
const unsigned char kV3FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x08,
0x01, 0x00, 0x00, 0x21,
0x7f, 0xff, 0xff, 0xff,
0x00, 0x00, 0x00, 0x02,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x03,
'f', 'o', 'o', 0x00,
0x00, 0x00, 0x03, 'f',
'o', 'o', 0x00, 0x00,
0x00, 0x03, 'b', 'a',
'r'
};
const unsigned char kV4FrameData[] = {
0x00, 0x00, 0x0f, 0x01, // Headers: FIN | END_HEADERS
0x05, 0x7f, 0xff, 0xff, // Stream 0x7fffffff
0xff, 0x00, 0x00, 0x03, // @..
0x66, 0x6f, 0x6f, 0x00, // foo@
0x03, 0x66, 0x6f, 0x6f, // .foo
0x03, 0x62, 0x61, 0x72, // .bar
};
SpdyHeadersIR headers_ir(0x7fffffff);
headers_ir.set_fin(true);
headers_ir.SetHeader("", "foo");
headers_ir.SetHeader("foo", "bar");
scoped_ptr<SpdyFrame> frame(framer.SerializeHeaders(headers_ir));
if (IsSpdy2()) {
CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
} else if (IsSpdy3()) {
CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
} else {
CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
}
}
{
const char kDescription[] =
"HEADERS frame with a 0-length header val, FIN, max stream ID";
const unsigned char kV2FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x08,
0x01, 0x00, 0x00, 0x19,
0x7f, 0xff, 0xff, 0xff,
0x00, 0x00, 0x00, 0x02,
0x00, 0x03, 'b', 'a',
'r', 0x00, 0x03, 'f',
'o', 'o', 0x00, 0x03,
'f', 'o', 'o', 0x00,
0x00
};
const unsigned char kV3FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x08,
0x01, 0x00, 0x00, 0x21,
0x7f, 0xff, 0xff, 0xff,
0x00, 0x00, 0x00, 0x02,
0x00, 0x00, 0x00, 0x03,
'b', 'a', 'r', 0x00,
0x00, 0x00, 0x03, 'f',
'o', 'o', 0x00, 0x00,
0x00, 0x03, 'f', 'o',
'o', 0x00, 0x00, 0x00,
0x00
};
const unsigned char kV4FrameData[] = {
0x00, 0x00, 0x0f, 0x01, // Headers: FIN | END_HEADERS
0x05, 0x7f, 0xff, 0xff, // Stream 0x7fffffff
0xff, 0x00, 0x03, 0x62, // @.b
0x61, 0x72, 0x03, 0x66, // ar.f
0x6f, 0x6f, 0x00, 0x03, // oo@.
0x66, 0x6f, 0x6f, 0x00, // foo.
};
SpdyHeadersIR headers_ir(0x7fffffff);
headers_ir.set_fin(true);
headers_ir.SetHeader("bar", "foo");
headers_ir.SetHeader("foo", "");
scoped_ptr<SpdyFrame> frame(framer.SerializeHeaders(headers_ir));
if (IsSpdy2()) {
CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
} else if (IsSpdy3()) {
CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
} else {
CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
}
}
{
const char kDescription[] =
"HEADERS frame with a 0-length header val, FIN, max stream ID, pri";
const unsigned char kV4FrameData[] = {
0x00, 0x00, 0x14, 0x01, // Headers: FIN | END_HEADERS | PRIORITY
0x25, 0x7f, 0xff, 0xff, // Stream 0x7fffffff
0xff, 0x00, 0x00, 0x00, // parent stream
0x00, 0xdb, // weight
0x00, 0x03, 0x62, 0x61, // @.ba
0x72, 0x03, 0x66, 0x6f, // r.fo
0x6f, 0x00, 0x03, 0x66, // o@.f
0x6f, 0x6f, 0x00, // oo.
};
SpdyHeadersIR headers_ir(0x7fffffff);
headers_ir.set_fin(true);
headers_ir.set_priority(1);
headers_ir.set_has_priority(true);
headers_ir.SetHeader("bar", "foo");
headers_ir.SetHeader("foo", "");
scoped_ptr<SpdyFrame> frame(framer.SerializeHeaders(headers_ir));
if (IsSpdy2() || IsSpdy3()) {
// HEADERS with priority not supported.
} else {
CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
}
}
}
// TODO(phajdan.jr): Clean up after we no longer need
// to workaround http://crbug.com/139744.
#if !defined(USE_SYSTEM_ZLIB)
// Verifies zlib-compressed HEADERS serialization for SPDY 2/3 against
// golden deflate output. SPDY 4+ uses HPACK, so no comparison is done there.
TEST_P(SpdyFramerTest, CreateHeadersCompressed) {
SpdyFramer framer(spdy_version_);
framer.set_enable_compression(true);
{
const char kDescription[] = "HEADERS frame, no FIN";
// Expected deflate stream produced with the SPDY2 compression dictionary.
const unsigned char kV2FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x08,
0x00, 0x00, 0x00, 0x32,
0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x38, 0xea,
0xdf, 0xa2, 0x51, 0xb2,
0x62, 0x60, 0x62, 0x60,
0x4e, 0x4a, 0x2c, 0x62,
0x60, 0x06, 0x08, 0xa0,
0xb4, 0xfc, 0x7c, 0x80,
0x00, 0x62, 0x60, 0x4e,
0xcb, 0xcf, 0x67, 0x60,
0x06, 0x08, 0xa0, 0xa4,
0xc4, 0x22, 0x80, 0x00,
0x02, 0x00, 0x00, 0x00,
0xff, 0xff,
};
// Expected deflate stream produced with the SPDY3 compression dictionary.
const unsigned char kV3FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x08,
0x00, 0x00, 0x00, 0x31,
0x00, 0x00, 0x00, 0x01,
0x38, 0xea, 0xe3, 0xc6,
0xa7, 0xc2, 0x02, 0xe5,
0x0e, 0x50, 0xc2, 0x4b,
0x4a, 0x04, 0xe5, 0x0b,
0x66, 0x80, 0x00, 0x4a,
0xcb, 0xcf, 0x07, 0x08,
0x20, 0x10, 0x95, 0x96,
0x9f, 0x0f, 0xa2, 0x00,
0x02, 0x28, 0x29, 0xb1,
0x08, 0x20, 0x80, 0x00,
0x00, 0x00, 0x00, 0xff,
0xff,
};
SpdyHeadersIR headers_ir(1);
headers_ir.SetHeader("bar", "foo");
headers_ir.SetHeader("foo", "bar");
scoped_ptr<SpdyFrame> frame(framer.SerializeHeaders(headers_ir));
if (IsSpdy2()) {
CompareFrame(kDescription, *frame, kV2FrameData, arraysize(kV2FrameData));
} else if (IsSpdy3()) {
CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
} else {
// Deflate compression doesn't apply to HPACK.
}
}
}
#endif // !defined(USE_SYSTEM_ZLIB)
// Verifies WINDOW_UPDATE serialization against golden bytes for three cases:
// a typical frame, the maximum stream ID, and the maximum window delta.
TEST_P(SpdyFramerTest, CreateWindowUpdate) {
SpdyFramer framer(spdy_version_);
{
const char kDescription[] = "WINDOW_UPDATE frame";
const unsigned char kV3FrameData[] = {  // Also applies for V2.
0x80, spdy_version_ch_, 0x00, 0x09,
0x00, 0x00, 0x00, 0x08,
0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x00, 0x01,
};
const unsigned char kV4FrameData[] = {
0x00, 0x00, 0x04, 0x08,
0x00, 0x00, 0x00, 0x00,
0x01, 0x00, 0x00, 0x00,
0x01,
};
scoped_ptr<SpdyFrame> frame(
framer.SerializeWindowUpdate(SpdyWindowUpdateIR(1, 1)));
if (IsSpdy4()) {
CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
} else {
CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
}
}
{
// 0x7fffffff is the largest valid (31-bit) stream ID.
const char kDescription[] = "WINDOW_UPDATE frame with max stream ID";
const unsigned char kV3FrameData[] = {  // Also applies for V2.
0x80, spdy_version_ch_, 0x00, 0x09,
0x00, 0x00, 0x00, 0x08,
0x7f, 0xff, 0xff, 0xff,
0x00, 0x00, 0x00, 0x01,
};
const unsigned char kV4FrameData[] = {
0x00, 0x00, 0x04, 0x08,
0x00, 0x7f, 0xff, 0xff,
0xff, 0x00, 0x00, 0x00,
0x01,
};
scoped_ptr<SpdyFrame> frame(framer.SerializeWindowUpdate(
SpdyWindowUpdateIR(0x7FFFFFFF, 1)));
if (IsSpdy4()) {
CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
} else {
CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
}
}
{
// 0x7fffffff is the largest valid (31-bit) window delta.
const char kDescription[] = "WINDOW_UPDATE frame with max window delta";
const unsigned char kV3FrameData[] = {  // Also applies for V2.
0x80, spdy_version_ch_, 0x00, 0x09,
0x00, 0x00, 0x00, 0x08,
0x00, 0x00, 0x00, 0x01,
0x7f, 0xff, 0xff, 0xff,
};
const unsigned char kV4FrameData[] = {
0x00, 0x00, 0x04, 0x08,
0x00, 0x00, 0x00, 0x00,
0x01, 0x7f, 0xff, 0xff,
0xff,
};
scoped_ptr<SpdyFrame> frame(framer.SerializeWindowUpdate(
SpdyWindowUpdateIR(1, 0x7FFFFFFF)));
if (IsSpdy4()) {
CompareFrame(kDescription, *frame, kV4FrameData, arraysize(kV4FrameData));
} else {
CompareFrame(kDescription, *frame, kV3FrameData, arraysize(kV3FrameData));
}
}
}
// Verifies BLOCKED frame serialization (SPDY 4+ only) against golden bytes.
// The frame type byte is looked up per-version, since numbering differs.
TEST_P(SpdyFramerTest, SerializeBlocked) {
if (spdy_version_ <= SPDY3) {
return;
}
SpdyFramer framer(spdy_version_);
const char kDescription[] = "BLOCKED frame";
const unsigned char kType = static_cast<unsigned char>(
SpdyConstants::SerializeFrameType(spdy_version_, BLOCKED));
const unsigned char kFrameData[] = {
0x00, 0x00, 0x00, kType, 0x00,
0x00, 0x00, 0x00, 0x00,
};
SpdyBlockedIR blocked_ir(0);
scoped_ptr<SpdySerializedFrame> frame(framer.SerializeFrame(blocked_ir));
CompareFrame(kDescription, *frame, kFrameData, arraysize(kFrameData));
}
// Checks that SerializeBlocked() and the generic SerializeFrame() dispatch
// produce identical bytes for the same BLOCKED IR (SPDY 4+ only).
TEST_P(SpdyFramerTest, CreateBlocked) {
if (spdy_version_ <= SPDY3) {
return;
}
SpdyFramer framer(spdy_version_);
const char kDescription[] = "BLOCKED frame";
const SpdyStreamId kStreamId = 3;
scoped_ptr<SpdySerializedFrame> frame_serialized(
framer.SerializeBlocked(SpdyBlockedIR(kStreamId)));
SpdyBlockedIR blocked_ir(kStreamId);
scoped_ptr<SpdySerializedFrame> frame_created(
framer.SerializeFrame(blocked_ir));
CompareFrames(kDescription, *frame_serialized, *frame_created);
}
// Verifies uncompressed PUSH_PROMISE serialization (SPDY 4+ only) against
// golden bytes, including the promised-stream-ID field.
TEST_P(SpdyFramerTest, CreatePushPromiseUncompressed) {
if (spdy_version_ <= SPDY3) {
return;
}
SpdyFramer framer(spdy_version_);
framer.set_enable_compression(false);
const char kDescription[] = "PUSH_PROMISE frame";
const unsigned char kFrameData[] = {
0x00, 0x00, 0x16, 0x05, 0x04,  // PUSH_PROMISE: END_HEADERS
0x00, 0x00, 0x00, 0x2a,  // Stream 42
0x00, 0x00, 0x00, 0x39,  // Promised stream 57
0x00, 0x03, 0x62, 0x61,  // @.ba
0x72, 0x03, 0x66, 0x6f,  // r.fo
0x6f, 0x00, 0x03, 0x66,  // o@.f
0x6f, 0x6f, 0x03, 0x62,  // oo.b
0x61, 0x72,  // ar
};
SpdyPushPromiseIR push_promise(42, 57);
push_promise.SetHeader("bar", "foo");
push_promise.SetHeader("foo", "bar");
scoped_ptr<SpdySerializedFrame> frame(
framer.SerializePushPromise(push_promise));
CompareFrame(kDescription, *frame, kFrameData, arraysize(kFrameData));
}
// Verifies uncompressed CONTINUATION serialization (SPDY 4+ only) against
// golden bytes.
TEST_P(SpdyFramerTest, CreateContinuationUncompressed) {
if (spdy_version_ <= SPDY3) {
return;
}
SpdyFramer framer(spdy_version_);
framer.set_enable_compression(false);
const char kDescription[] = "CONTINUATION frame";
const unsigned char kFrameData[] = {
0x00, 0x00, 0x12, 0x09, 0x00,  // CONTINUATION
0x00, 0x00, 0x00, 0x2a,  // Stream 42
0x00, 0x03, 0x62, 0x61,  // @.ba
0x72, 0x03, 0x66, 0x6f,  // r.fo
0x6f, 0x00, 0x03, 0x66,  // o@.f
0x6f, 0x6f, 0x03, 0x62,  // oo.b
0x61, 0x72,  // ar
};
SpdyContinuationIR continuation(42);
continuation.SetHeader("bar", "foo");
continuation.SetHeader("foo", "bar");
scoped_ptr<SpdySerializedFrame> frame(
framer.SerializeContinuation(continuation));
CompareFrame(kDescription, *frame, kFrameData, arraysize(kFrameData));
}
// Verifies ALTSVC frame serialization (SPDY 4+ only) against golden bytes:
// max-age, port, protocol-id, host, and trailing origin.
TEST_P(SpdyFramerTest, CreateAltSvc) {
if (spdy_version_ <= SPDY3) {
return;
}
SpdyFramer framer(spdy_version_);
const char kDescription[] = "ALTSVC frame";
const unsigned char kType = static_cast<unsigned char>(
SpdyConstants::SerializeFrameType(spdy_version_, ALTSVC));
const unsigned char kFrameData[] = {
0x00, 0x00, 0x17, kType, 0x00,
0x00, 0x00, 0x00, 0x03,
0x00, 0x00, 0x00, 0x05,
0x01, 0xbb, 0x00, 0x04,  // Port = 443
'p', 'i', 'd', '1',  // Protocol-ID
0x04, 'h', 'o', 's',
't', 'o', 'r', 'i',
'g', 'i', 'n',
};
SpdyAltSvcIR altsvc_ir(3);
altsvc_ir.set_max_age(5);
altsvc_ir.set_port(443);
altsvc_ir.set_protocol_id("pid1");
altsvc_ir.set_host("host");
altsvc_ir.set_origin("origin");
scoped_ptr<SpdySerializedFrame> frame(framer.SerializeFrame(altsvc_ir));
CompareFrame(kDescription, *frame, kFrameData, arraysize(kFrameData));
}
// Verifies PRIORITY frame serialization (SPDY 4+ only) against golden bytes,
// including the exclusive-dependency bit and weight field.
TEST_P(SpdyFramerTest, CreatePriority) {
if (spdy_version_ <= SPDY3) {
return;
}
SpdyFramer framer(spdy_version_);
const char kDescription[] = "PRIORITY frame";
const unsigned char kType = static_cast<unsigned char>(
SpdyConstants::SerializeFrameType(spdy_version_, PRIORITY));
const unsigned char kFrameData[] = {
0x00, 0x00, 0x05, kType, 0x00,
0x00, 0x00, 0x00, 0x02,  // Stream ID = 2
0x80, 0x00, 0x00, 0x01,  // Exclusive dependency, parent stream ID = 1
0x10,  // Weight = 16
};
SpdyPriorityIR priority_ir(2, 1, 16, true);
scoped_ptr<SpdySerializedFrame> frame(framer.SerializeFrame(priority_ir));
CompareFrame(kDescription, *frame, kFrameData, arraysize(kFrameData));
}
// Round-trips a compressed SYN_STREAM (SPDY <= 3 only) through the framer and
// checks the visitor reassembles the original header block.
TEST_P(SpdyFramerTest, ReadCompressedSynStreamHeaderBlock) {
if (spdy_version_ > SPDY3) {
// SYN_STREAM not supported in SPDY>3
return;
}
SpdyFramer framer(spdy_version_);
SpdySynStreamIR syn_stream(1);
syn_stream.set_priority(1);
syn_stream.SetHeader("aa", "vv");
syn_stream.SetHeader("bb", "ww");
SpdyHeaderBlock headers = syn_stream.name_value_block();
scoped_ptr<SpdyFrame> control_frame(framer.SerializeSynStream(syn_stream));
EXPECT_TRUE(control_frame.get() != NULL);
TestSpdyVisitor visitor(spdy_version_);
visitor.use_compression_ = true;
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
control_frame->size());
EXPECT_EQ(1, visitor.syn_frame_count_);
EXPECT_TRUE(CompareHeaderBlocks(&headers, &visitor.headers_));
}
// Round-trips a compressed SYN_REPLY through the framer. In SPDY 4 the frame
// surfaces as HEADERS; either way the header block must survive intact.
TEST_P(SpdyFramerTest, ReadCompressedSynReplyHeaderBlock) {
if (spdy_version_ > SPDY3) {
return;
}
SpdyFramer framer(spdy_version_);
SpdySynReplyIR syn_reply(1);
syn_reply.SetHeader("alpha", "beta");
syn_reply.SetHeader("gamma", "delta");
SpdyHeaderBlock headers = syn_reply.name_value_block();
scoped_ptr<SpdyFrame> control_frame(framer.SerializeSynReply(syn_reply));
EXPECT_TRUE(control_frame.get() != NULL);
TestSpdyVisitor visitor(spdy_version_);
visitor.use_compression_ = true;
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
control_frame->size());
if (IsSpdy4()) {
EXPECT_EQ(0, visitor.syn_reply_frame_count_);
EXPECT_EQ(1, visitor.headers_frame_count_);
} else {
EXPECT_EQ(1, visitor.syn_reply_frame_count_);
EXPECT_EQ(0, visitor.headers_frame_count_);
}
EXPECT_TRUE(CompareHeaderBlocks(&headers, &visitor.headers_));
}
// Round-trips a compressed HEADERS frame and checks the visitor reassembles
// the header block and receives header data in multiple chunks.
TEST_P(SpdyFramerTest, ReadCompressedHeadersHeaderBlock) {
SpdyFramer framer(spdy_version_);
SpdyHeadersIR headers_ir(1);
headers_ir.SetHeader("alpha", "beta");
headers_ir.SetHeader("gamma", "delta");
SpdyHeaderBlock headers = headers_ir.name_value_block();
scoped_ptr<SpdyFrame> control_frame(framer.SerializeHeaders(headers_ir));
EXPECT_TRUE(control_frame.get() != NULL);
TestSpdyVisitor visitor(spdy_version_);
visitor.use_compression_ = true;
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
control_frame->size());
EXPECT_EQ(1, visitor.headers_frame_count_);
// control_frame_header_data_count_ depends on the random sequence
// produced by rand(), so adding, removing or running single tests
// alters this value. The best we can do is assert that it happens
// at least twice.
EXPECT_LE(2, visitor.control_frame_header_data_count_);
EXPECT_EQ(1, visitor.zero_length_control_frame_header_data_count_);
EXPECT_EQ(0, visitor.zero_length_data_frame_count_);
EXPECT_TRUE(CompareHeaderBlocks(&headers, &visitor.headers_));
}
// Same as ReadCompressedHeadersHeaderBlock, but with FIN set: the visitor
// must additionally see a zero-length data frame signaling half-close.
TEST_P(SpdyFramerTest, ReadCompressedHeadersHeaderBlockWithHalfClose) {
SpdyFramer framer(spdy_version_);
SpdyHeadersIR headers_ir(1);
headers_ir.set_fin(true);
headers_ir.SetHeader("alpha", "beta");
headers_ir.SetHeader("gamma", "delta");
SpdyHeaderBlock headers = headers_ir.name_value_block();
scoped_ptr<SpdyFrame> control_frame(framer.SerializeHeaders(headers_ir));
EXPECT_TRUE(control_frame.get() != NULL);
TestSpdyVisitor visitor(spdy_version_);
visitor.use_compression_ = true;
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
control_frame->size());
EXPECT_EQ(1, visitor.headers_frame_count_);
// control_frame_header_data_count_ depends on the random sequence
// produced by rand(), so adding, removing or running single tests
// alters this value. The best we can do is assert that it happens
// at least twice.
EXPECT_LE(2, visitor.control_frame_header_data_count_);
EXPECT_EQ(1, visitor.zero_length_control_frame_header_data_count_);
EXPECT_EQ(1, visitor.zero_length_data_frame_count_);
EXPECT_TRUE(CompareHeaderBlocks(&headers, &visitor.headers_));
}
// Builds a SYN_STREAM exactly at the control-frame buffer limit and checks
// the framer accepts it without error (SPDY <= 3 only).
TEST_P(SpdyFramerTest, ControlFrameAtMaxSizeLimit) {
if (spdy_version_ > SPDY3) {
// TODO(jgraettinger): This test setup doesn't work with HPACK.
return;
}
// First find the size of the header value in order to just reach the control
// frame max size.
SpdyFramer framer(spdy_version_);
framer.set_enable_compression(false);
SpdySynStreamIR syn_stream(1);
syn_stream.set_priority(1);
syn_stream.SetHeader("aa", "");
scoped_ptr<SpdyFrame> control_frame(framer.SerializeSynStream(syn_stream));
const size_t kBigValueSize =
framer.GetControlFrameBufferMaxSize() - control_frame->size();
// Create a frame at exactly that size.
string big_value(kBigValueSize, 'x');
syn_stream.SetHeader("aa", big_value);
control_frame.reset(framer.SerializeSynStream(syn_stream));
EXPECT_TRUE(control_frame.get() != NULL);
EXPECT_EQ(framer.GetControlFrameBufferMaxSize(), control_frame->size());
TestSpdyVisitor visitor(spdy_version_);
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
control_frame->size());
EXPECT_TRUE(visitor.header_buffer_valid_);
EXPECT_EQ(0, visitor.error_count_);
EXPECT_EQ(1, visitor.syn_frame_count_);
EXPECT_EQ(1, visitor.zero_length_control_frame_header_data_count_);
EXPECT_EQ(0, visitor.zero_length_data_frame_count_);
EXPECT_LT(kBigValueSize, visitor.header_buffer_length_);
}
// Builds a SYN_STREAM one byte over the control-frame buffer limit and checks
// the framer reports SPDY_CONTROL_PAYLOAD_TOO_LARGE (SPDY <= 3 only).
TEST_P(SpdyFramerTest, ControlFrameTooLarge) {
if (spdy_version_ > SPDY3) {
// TODO(jgraettinger): This test setup doesn't work with HPACK.
return;
}
// First find the size of the header value in order to just reach the control
// frame max size.
SpdyFramer framer(spdy_version_);
framer.set_enable_compression(false);
SpdySynStreamIR syn_stream(1);
syn_stream.SetHeader("aa", "");
syn_stream.set_priority(1);
scoped_ptr<SpdyFrame> control_frame(framer.SerializeSynStream(syn_stream));
const size_t kBigValueSize =
framer.GetControlFrameBufferMaxSize() - control_frame->size() + 1;
// Create a frame at exactly that size.
string big_value(kBigValueSize, 'x');
syn_stream.SetHeader("aa", big_value);
// Upstream branches here and wraps SPDY4 with EXPECT_DEBUG_DFATAL. We
// neither support that in Chromium, nor do we use the same DFATAL (see
// SpdyFrameBuilder::WriteFramePrefix()).
control_frame.reset(framer.SerializeSynStream(syn_stream));
EXPECT_TRUE(control_frame.get() != NULL);
EXPECT_EQ(framer.GetControlFrameBufferMaxSize() + 1,
control_frame->size());
TestSpdyVisitor visitor(spdy_version_);
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
control_frame->size());
EXPECT_FALSE(visitor.header_buffer_valid_);
EXPECT_EQ(1, visitor.error_count_);
EXPECT_EQ(SpdyFramer::SPDY_CONTROL_PAYLOAD_TOO_LARGE,
visitor.framer_.error_code())
<< SpdyFramer::ErrorCodeToString(framer.error_code());
EXPECT_EQ(0, visitor.syn_frame_count_);
EXPECT_EQ(0u, visitor.header_buffer_length_);
}
// Checks that an oversized HEADERS block (SPDY 4+) is split across
// CONTINUATION frames instead of producing an error.
TEST_P(SpdyFramerTest, TooLargeHeadersFrameUsesContinuation) {
if (spdy_version_ <= SPDY3) {
return;
}
SpdyFramer framer(spdy_version_);
framer.set_enable_compression(false);
SpdyHeadersIR headers(1);
// Exact payload length will change with HPACK, but this should be long
// enough to cause an overflow.
const size_t kBigValueSize = framer.GetControlFrameBufferMaxSize();
string big_value(kBigValueSize, 'x');
headers.SetHeader("aa", big_value);
scoped_ptr<SpdyFrame> control_frame(framer.SerializeHeaders(headers));
EXPECT_TRUE(control_frame.get() != NULL);
EXPECT_GT(control_frame->size(), framer.GetControlFrameBufferMaxSize());
TestSpdyVisitor visitor(spdy_version_);
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
control_frame->size());
EXPECT_TRUE(visitor.header_buffer_valid_);
EXPECT_EQ(0, visitor.error_count_);
EXPECT_EQ(1, visitor.headers_frame_count_);
EXPECT_EQ(16, visitor.continuation_count_);
EXPECT_EQ(1, visitor.zero_length_control_frame_header_data_count_);
}
// Same as TooLargeHeadersFrameUsesContinuation, but for PUSH_PROMISE:
// an oversized header block must be split across CONTINUATION frames.
TEST_P(SpdyFramerTest, TooLargePushPromiseFrameUsesContinuation) {
if (spdy_version_ <= SPDY3) {
return;
}
SpdyFramer framer(spdy_version_);
framer.set_enable_compression(false);
SpdyPushPromiseIR push_promise(1, 2);
// Exact payload length will change with HPACK, but this should be long
// enough to cause an overflow.
const size_t kBigValueSize = framer.GetControlFrameBufferMaxSize();
string big_value(kBigValueSize, 'x');
push_promise.SetHeader("aa", big_value);
scoped_ptr<SpdyFrame> control_frame(
framer.SerializePushPromise(push_promise));
EXPECT_TRUE(control_frame.get() != NULL);
EXPECT_GT(control_frame->size(), framer.GetControlFrameBufferMaxSize());
TestSpdyVisitor visitor(spdy_version_);
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
control_frame->size());
EXPECT_TRUE(visitor.header_buffer_valid_);
EXPECT_EQ(0, visitor.error_count_);
EXPECT_EQ(1, visitor.push_promise_frame_count_);
EXPECT_EQ(16, visitor.continuation_count_);
EXPECT_EQ(1, visitor.zero_length_control_frame_header_data_count_);
}
// Check that the framer stops delivering header data chunks once the visitor
// declares it doesn't want any more. This is important to guard against
// "zip bomb" types of attacks.
// Feeds a header block twice the visitor's buffer size and checks the framer
// stops delivering chunks once the visitor rejects further data.
TEST_P(SpdyFramerTest, ControlFrameMuchTooLarge) {
const size_t kHeaderBufferChunks = 4;
const size_t kHeaderBufferSize =
TestSpdyVisitor::header_data_chunk_max_size() * kHeaderBufferChunks;
const size_t kBigValueSize = kHeaderBufferSize * 2;
string big_value(kBigValueSize, 'x');
SpdyFramer framer(spdy_version_);
SpdyHeadersIR headers(1);
headers.set_priority(1);
headers.set_fin(true);
headers.SetHeader("aa", big_value);
scoped_ptr<SpdyFrame> control_frame(framer.SerializeHeaders(headers));
EXPECT_TRUE(control_frame.get() != NULL);
TestSpdyVisitor visitor(spdy_version_);
visitor.set_header_buffer_size(kHeaderBufferSize);
visitor.use_compression_ = true;
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
control_frame->size());
EXPECT_FALSE(visitor.header_buffer_valid_);
EXPECT_EQ(1, visitor.error_count_);
EXPECT_EQ(SpdyFramer::SPDY_CONTROL_PAYLOAD_TOO_LARGE,
visitor.framer_.error_code())
<< SpdyFramer::ErrorCodeToString(framer.error_code());
// The framer should have stopped delivering chunks after the visitor
// signaled "stop" by returning false from OnControlFrameHeaderData().
//
// control_frame_header_data_count_ depends on the random sequence
// produced by rand(), so adding, removing or running single tests
// alters this value. The best we can do is assert that it happens
// at least kHeaderBufferChunks + 1.
EXPECT_LE(kHeaderBufferChunks + 1,
static_cast<unsigned>(visitor.control_frame_header_data_count_));
EXPECT_EQ(0, visitor.zero_length_control_frame_header_data_count_);
// The framer should not have sent half-close to the visitor.
EXPECT_EQ(0, visitor.zero_length_data_frame_count_);
}
// Feeds an uncompressed header block to a framer that expects compressed
// input and checks it reports SPDY_DECOMPRESS_FAILURE (SPDY <= 3 only).
TEST_P(SpdyFramerTest, DecompressCorruptHeaderBlock) {
if (spdy_version_ > SPDY3) {
// Deflate compression doesn't apply to HPACK.
return;
}
SpdyFramer framer(spdy_version_);
framer.set_enable_compression(false);
// Construct a SYN_STREAM control frame without compressing the header block,
// and have the framer try to decompress it. This will cause the framer to
// deal with a decompression error.
SpdySynStreamIR syn_stream(1);
syn_stream.set_priority(1);
syn_stream.SetHeader("aa", "alpha beta gamma delta");
scoped_ptr<SpdyFrame> control_frame(framer.SerializeSynStream(syn_stream));
TestSpdyVisitor visitor(spdy_version_);
visitor.use_compression_ = true;
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
control_frame->size());
EXPECT_EQ(1, visitor.error_count_);
EXPECT_EQ(SpdyFramer::SPDY_DECOMPRESS_FAILURE, visitor.framer_.error_code())
<< SpdyFramer::ErrorCodeToString(framer.error_code());
EXPECT_EQ(0u, visitor.header_buffer_length_);
}
// Feeds a GOAWAY frame whose declared length disagrees with its legal size
// (too long for SPDY 2/3, below the minimum for SPDY 4+) and checks the
// framer rejects it with SPDY_INVALID_CONTROL_FRAME without parsing it.
TEST_P(SpdyFramerTest, ControlFrameSizesAreValidated) {
  SpdyFramer framer(spdy_version_);
  // Create a GoAway frame that has a few extra bytes at the end.
  // We create enough overhead to overflow the framer's control frame buffer.
  ASSERT_GE(250u, SpdyFramer::kControlFrameBufferSize);
  const unsigned char length = 1 + SpdyFramer::kControlFrameBufferSize;
  const unsigned char kV3FrameData[] = {  // Also applies for V2.
    0x80, spdy_version_ch_, 0x00, 0x07,
    0x00, 0x00, 0x00, length,
    0x00, 0x00, 0x00, 0x00,  // Stream ID
    0x00, 0x00, 0x00, 0x00,  // Status
  };
  // SPDY version 4 and up GOAWAY frames are only bound to a minimal length,
  // since it may carry opaque data. Verify that minimal length is tested.
  const unsigned char less_than_min_length =
      framer.GetGoAwayMinimumSize() - framer.GetControlFrameHeaderSize() - 1;
  const unsigned char kV4FrameData[] = {
    0x00, 0x00, static_cast<uint8>(less_than_min_length), 0x07,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,  // Stream Id
    0x00, 0x00, 0x00, 0x00,  // Status
    0x00,
  };
  // Padding needed to supply the bytes the (bogus) declared length promises
  // beyond what the golden frame data contains.
  const size_t pad_length =
      length + framer.GetControlFrameHeaderSize() -
      (IsSpdy4() ? sizeof(kV4FrameData) : sizeof(kV3FrameData));
  // Note: std::string(count, char) — pad_length copies of 'A'. (Previously
  // the arguments were reversed, which built 65 copies of char(pad_length).)
  string pad(pad_length, 'A');
  TestSpdyVisitor visitor(spdy_version_);
  if (IsSpdy4()) {
    visitor.SimulateInFramer(kV4FrameData, sizeof(kV4FrameData));
  } else {
    visitor.SimulateInFramer(kV3FrameData, sizeof(kV3FrameData));
  }
  visitor.SimulateInFramer(
      reinterpret_cast<const unsigned char*>(pad.c_str()),
      pad.length());
  EXPECT_EQ(1, visitor.error_count_);  // This generated an error.
  EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME,
            visitor.framer_.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
  EXPECT_EQ(0, visitor.goaway_count_);  // Frame not parsed.
}
// Rewrites a SETTINGS frame's length to zero and checks it is rejected for
// SPDY <= 3 but accepted (as an empty SETTINGS) for SPDY 4+.
TEST_P(SpdyFramerTest, ReadZeroLenSettingsFrame) {
SpdyFramer framer(spdy_version_);
SpdySettingsIR settings_ir;
scoped_ptr<SpdyFrame> control_frame(framer.SerializeSettings(settings_ir));
SetFrameLength(control_frame.get(), 0, spdy_version_);
TestSpdyVisitor visitor(spdy_version_);
visitor.use_compression_ = false;
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
framer.GetControlFrameHeaderSize());
if (spdy_version_ <= SPDY3) {
// Should generate an error, since zero-len settings frames are unsupported.
EXPECT_EQ(1, visitor.error_count_);
} else {
// Zero-len settings frames are permitted as of SPDY 4.
EXPECT_EQ(0, visitor.error_count_);
}
}
// Tests handling of SETTINGS frames with invalid length.
// Rewrites a SETTINGS frame's length to a value that cannot hold a whole
// number of settings entries and checks the framer reports an error.
TEST_P(SpdyFramerTest, ReadBogusLenSettingsFrame) {
SpdyFramer framer(spdy_version_);
SpdySettingsIR settings_ir;
// Add a setting to pad the frame so that we don't get a buffer overflow when
// calling SimulateInFramer() below.
settings_ir.AddSetting(SETTINGS_INITIAL_WINDOW_SIZE,
false,
false,
0x00000002);
scoped_ptr<SpdyFrame> control_frame(framer.SerializeSettings(settings_ir));
const size_t kNewLength = 14;
SetFrameLength(control_frame.get(), kNewLength, spdy_version_);
TestSpdyVisitor visitor(spdy_version_);
visitor.use_compression_ = false;
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
framer.GetControlFrameHeaderSize() + kNewLength);
// Should generate an error, since its not possible to have a
// settings frame of length kNewLength.
EXPECT_EQ(1, visitor.error_count_);
}
// Tests handling of SETTINGS frames larger than the frame buffer size.
// Feeds a SETTINGS frame larger than the control-frame buffer, both in one
// shot and in 5-byte chunks, and checks all settings are delivered each time.
TEST_P(SpdyFramerTest, ReadLargeSettingsFrame) {
SpdyFramer framer(spdy_version_);
SpdySettingsIR settings_ir;
settings_ir.AddSetting(SpdyConstants::ParseSettingId(spdy_version_, 1),
false,  // persist
false,  // persisted
5);
settings_ir.AddSetting(SpdyConstants::ParseSettingId(spdy_version_, 2),
false,  // persist
false,  // persisted
6);
settings_ir.AddSetting(SpdyConstants::ParseSettingId(spdy_version_, 3),
false,  // persist
false,  // persisted
7);
scoped_ptr<SpdyFrame> control_frame(framer.SerializeSettings(settings_ir));
EXPECT_LT(SpdyFramer::kControlFrameBufferSize,
control_frame->size());
TestSpdyVisitor visitor(spdy_version_);
visitor.use_compression_ = false;
// Read all at once.
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data()),
control_frame->size());
EXPECT_EQ(0, visitor.error_count_);
EXPECT_EQ(3, visitor.setting_count_);
if (spdy_version_ > SPDY3) {
EXPECT_EQ(1, visitor.settings_ack_sent_);
}
// Read data in small chunks.
size_t framed_data = 0;
size_t unframed_data = control_frame->size();
size_t kReadChunkSize = 5;  // Read five bytes at a time.
while (unframed_data > 0) {
size_t to_read = min(kReadChunkSize, unframed_data);
visitor.SimulateInFramer(
reinterpret_cast<unsigned char*>(control_frame->data() + framed_data),
to_read);
unframed_data -= to_read;
framed_data += to_read;
}
EXPECT_EQ(0, visitor.error_count_);
// Counts are cumulative across both reads, hence 3 * 2 settings total.
EXPECT_EQ(3 * 2, visitor.setting_count_);
if (spdy_version_ > SPDY3) {
EXPECT_EQ(2, visitor.settings_ack_sent_);
}
}
// Tests handling of SETTINGS frame with duplicate entries.
// Feeds a SETTINGS frame containing a duplicated setting ID: SPDY <= 3 must
// error after the first entry; SPDY 4+ accepts duplicates (last one wins).
TEST_P(SpdyFramerTest, ReadDuplicateSettings) {
SpdyFramer framer(spdy_version_);
// Note: SPDY2 setting IDs are serialized little-endian.
const unsigned char kV2FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x04,
0x00, 0x00, 0x00, 0x1C,
0x00, 0x00, 0x00, 0x03,
0x01, 0x00, 0x00, 0x00,  // 1st Setting
0x00, 0x00, 0x00, 0x02,
0x01, 0x00, 0x00, 0x00,  // 2nd (duplicate) Setting
0x00, 0x00, 0x00, 0x03,
0x03, 0x00, 0x00, 0x00,  // 3rd (unprocessed) Setting
0x00, 0x00, 0x00, 0x03,
};
const unsigned char kV3FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x04,
0x00, 0x00, 0x00, 0x1C,
0x00, 0x00, 0x00, 0x03,
0x00, 0x00, 0x00, 0x01,  // 1st Setting
0x00, 0x00, 0x00, 0x02,
0x00, 0x00, 0x00, 0x01,  // 2nd (duplicate) Setting
0x00, 0x00, 0x00, 0x03,
0x00, 0x00, 0x00, 0x03,  // 3rd (unprocessed) Setting
0x00, 0x00, 0x00, 0x03,
};
const unsigned char kV4FrameData[] = {
0x00, 0x00, 0x12, 0x04,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x01,  // 1st Setting
0x00, 0x00, 0x00, 0x02,
0x00, 0x01,  // 2nd (duplicate) Setting
0x00, 0x00, 0x00, 0x03,
0x00, 0x03,  // 3rd (unprocessed) Setting
0x00, 0x00, 0x00, 0x03,
};
TestSpdyVisitor visitor(spdy_version_);
visitor.use_compression_ = false;
if (IsSpdy2()) {
visitor.SimulateInFramer(kV2FrameData, sizeof(kV2FrameData));
} else if (IsSpdy3()) {
visitor.SimulateInFramer(kV3FrameData, sizeof(kV3FrameData));
} else {
visitor.SimulateInFramer(kV4FrameData, sizeof(kV4FrameData));
}
if (!IsSpdy4()) {
EXPECT_EQ(1, visitor.setting_count_);
EXPECT_EQ(1, visitor.error_count_);
} else {
// In SPDY 4+, duplicate settings are allowed;
// each setting replaces the previous value for that setting.
EXPECT_EQ(3, visitor.setting_count_);
EXPECT_EQ(0, visitor.error_count_);
EXPECT_EQ(1, visitor.settings_ack_sent_);
}
}
// Tests handling of SETTINGS frame with a setting we don't recognize.
// Feeds a SETTINGS frame with an unrecognized setting ID (0x10): SPDY <= 3
// must error; SPDY 4+ silently ignores it to allow protocol extensions.
TEST_P(SpdyFramerTest, ReadUnknownSettingsId) {
SpdyFramer framer(spdy_version_);
// Note: SPDY2 setting IDs are serialized little-endian.
const unsigned char kV2FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x04,
0x00, 0x00, 0x00, 0x1C,
0x00, 0x00, 0x00, 0x01,
0x10, 0x00, 0x00, 0x00,  // 1st Setting
0x00, 0x00, 0x00, 0x02,
};
const unsigned char kV3FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x04,
0x00, 0x00, 0x00, 0x1C,
0x00, 0x00, 0x00, 0x01,
0x00, 0x00, 0x00, 0x10,  // 1st Setting
0x00, 0x00, 0x00, 0x02,
};
const unsigned char kV4FrameData[] = {
0x00, 0x00, 0x06, 0x04,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x10,  // 1st Setting
0x00, 0x00, 0x00, 0x02,
};
TestSpdyVisitor visitor(spdy_version_);
visitor.use_compression_ = false;
if (IsSpdy2()) {
visitor.SimulateInFramer(kV2FrameData, sizeof(kV2FrameData));
} else if (IsSpdy3()) {
visitor.SimulateInFramer(kV3FrameData, sizeof(kV3FrameData));
} else {
visitor.SimulateInFramer(kV4FrameData, sizeof(kV4FrameData));
}
if (!IsSpdy4()) {
EXPECT_EQ(0, visitor.setting_count_);
EXPECT_EQ(1, visitor.error_count_);
} else {
// In SPDY 4+, we ignore unknown settings because of extensions.
EXPECT_EQ(0, visitor.setting_count_);
EXPECT_EQ(0, visitor.error_count_);
}
}
// Tests handling of SETTINGS frame with entries out of order.
// Feeds a SETTINGS frame whose entries are not in ascending ID order:
// SPDY <= 3 requires ordering and must error; SPDY 4+ allows any order.
TEST_P(SpdyFramerTest, ReadOutOfOrderSettings) {
SpdyFramer framer(spdy_version_);
// Note: SPDY2 setting IDs are serialized little-endian.
const unsigned char kV2FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x04,
0x00, 0x00, 0x00, 0x1C,
0x00, 0x00, 0x00, 0x03,
0x02, 0x00, 0x00, 0x00,  // 1st Setting
0x00, 0x00, 0x00, 0x02,
0x01, 0x00, 0x00, 0x00,  // 2nd (out of order) Setting
0x00, 0x00, 0x00, 0x03,
0x03, 0x00, 0x00, 0x00,  // 3rd (unprocessed) Setting
0x00, 0x00, 0x00, 0x03,
};
const unsigned char kV3FrameData[] = {
0x80, spdy_version_ch_, 0x00, 0x04,
0x00, 0x00, 0x00, 0x1C,
0x00, 0x00, 0x00, 0x03,
0x00, 0x00, 0x00, 0x02,  // 1st Setting
0x00, 0x00, 0x00, 0x02,
0x00, 0x00, 0x00, 0x01,  // 2nd (out of order) Setting
0x00, 0x00, 0x00, 0x03,
0x00, 0x00, 0x01, 0x03,  // 3rd (unprocessed) Setting
0x00, 0x00, 0x00, 0x03,
};
const unsigned char kV4FrameData[] = {
0x00, 0x00, 0x12, 0x04,
0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x02,  // 1st Setting
0x00, 0x00, 0x00, 0x02,
0x00, 0x01,  // 2nd (out of order) Setting
0x00, 0x00, 0x00, 0x03,
0x00, 0x03,  // 3rd (unprocessed) Setting
0x00, 0x00, 0x00, 0x03,
};
TestSpdyVisitor visitor(spdy_version_);
visitor.use_compression_ = false;
if (IsSpdy2()) {
visitor.SimulateInFramer(kV2FrameData, sizeof(kV2FrameData));
} else if (IsSpdy3()) {
visitor.SimulateInFramer(kV3FrameData, sizeof(kV3FrameData));
} else {
visitor.SimulateInFramer(kV4FrameData, sizeof(kV4FrameData));
}
if (!IsSpdy4()) {
EXPECT_EQ(1, visitor.setting_count_);
EXPECT_EQ(1, visitor.error_count_);
} else {
// In SPDY 4+, settings are allowed in any order.
EXPECT_EQ(3, visitor.setting_count_);
EXPECT_EQ(0, visitor.error_count_);
}
}
// Feeds an empty SETTINGS frame with the ACK flag set (SPDY 4+ only) and
// checks it is surfaced as a settings ACK rather than as setting entries.
TEST_P(SpdyFramerTest, ProcessSettingsAckFrame) {
if (spdy_version_ <= SPDY3) {
return;
}
SpdyFramer framer(spdy_version_);
const unsigned char kFrameData[] = {
0x00, 0x00, 0x00, 0x04, 0x01,
0x00, 0x00, 0x00, 0x00,
};
TestSpdyVisitor visitor(spdy_version_);
visitor.use_compression_ = false;
visitor.SimulateInFramer(kFrameData, sizeof(kFrameData));
EXPECT_EQ(0, visitor.error_count_);
EXPECT_EQ(0, visitor.setting_count_);
EXPECT_EQ(1, visitor.settings_ack_received_);
}
// Drives a padded DATA frame (SPDY 4+ only) through the framer byte-group by
// byte-group, checking the state machine and visitor callbacks at each step:
// header, pad-length byte, payload in two pieces, then padding in two pieces.
TEST_P(SpdyFramerTest, ProcessDataFrameWithPadding) {
if (spdy_version_ <= SPDY3) {
return;
}
const int kPaddingLen = 119;
const char data_payload[] = "hello";
testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
SpdyFramer framer(spdy_version_);
framer.set_visitor(&visitor);
SpdyDataIR data_ir(1, StringPiece(data_payload, strlen(data_payload)));
data_ir.set_padding_len(kPaddingLen);
scoped_ptr<SpdyFrame> frame(framer.SerializeData(data_ir));
ASSERT_TRUE(frame.get() != NULL);
int bytes_consumed = 0;
// Send the frame header.
EXPECT_CALL(visitor, OnDataFrameHeader(1,
kPaddingLen + strlen(data_payload),
false));
CHECK_EQ(framer.GetDataFrameMinimumSize(),
framer.ProcessInput(frame->data(),
framer.GetDataFrameMinimumSize()));
CHECK_EQ(framer.state(), SpdyFramer::SPDY_READ_PADDING_LENGTH);
CHECK_EQ(framer.error_code(), SpdyFramer::SPDY_NO_ERROR);
bytes_consumed += framer.GetDataFrameMinimumSize();
// Send the padding length field.
CHECK_EQ(1u, framer.ProcessInput(frame->data() + bytes_consumed, 1));
CHECK_EQ(framer.state(), SpdyFramer::SPDY_FORWARD_STREAM_FRAME);
CHECK_EQ(framer.error_code(), SpdyFramer::SPDY_NO_ERROR);
bytes_consumed += 1;
// Send the first two bytes of the data payload, i.e., "he".
EXPECT_CALL(visitor, OnStreamFrameData(1, _, 2, false));
CHECK_EQ(2u, framer.ProcessInput(frame->data() + bytes_consumed, 2));
CHECK_EQ(framer.state(), SpdyFramer::SPDY_FORWARD_STREAM_FRAME);
CHECK_EQ(framer.error_code(), SpdyFramer::SPDY_NO_ERROR);
bytes_consumed += 2;
// Send the rest three bytes of the data payload, i.e., "llo".
EXPECT_CALL(visitor, OnStreamFrameData(1, _, 3, false));
CHECK_EQ(3u, framer.ProcessInput(frame->data() + bytes_consumed, 3));
CHECK_EQ(framer.state(), SpdyFramer::SPDY_CONSUME_PADDING);
CHECK_EQ(framer.error_code(), SpdyFramer::SPDY_NO_ERROR);
bytes_consumed += 3;
// Send the first 100 bytes of the padding payload.
EXPECT_CALL(visitor, OnStreamFrameData(1, NULL, 100, false));
CHECK_EQ(100u, framer.ProcessInput(frame->data() + bytes_consumed, 100));
CHECK_EQ(framer.state(), SpdyFramer::SPDY_CONSUME_PADDING);
CHECK_EQ(framer.error_code(), SpdyFramer::SPDY_NO_ERROR);
bytes_consumed += 100;
// Send rest of the padding payload (119 - 1 pad-length byte - 100 = 18).
EXPECT_CALL(visitor, OnStreamFrameData(1, NULL, 18, false));
CHECK_EQ(18u, framer.ProcessInput(frame->data() + bytes_consumed, 18));
CHECK_EQ(framer.state(), SpdyFramer::SPDY_RESET);
CHECK_EQ(framer.error_code(), SpdyFramer::SPDY_NO_ERROR);
}
// Round-trips a WINDOW_UPDATE (stream 1, delta 2) through serialization and
// parsing, and checks that the visitor observed the same values.
TEST_P(SpdyFramerTest, ReadWindowUpdate) {
  SpdyFramer framer(spdy_version_);
  const SpdyWindowUpdateIR window_update(1, 2);
  scoped_ptr<SpdyFrame> frame(framer.SerializeWindowUpdate(window_update));
  TestSpdyVisitor visitor(spdy_version_);
  visitor.SimulateInFramer(reinterpret_cast<unsigned char*>(frame->data()),
                           frame->size());
  EXPECT_EQ(1u, visitor.last_window_update_stream_);
  EXPECT_EQ(2u, visitor.last_window_update_delta_);
}
// Feeds a hand-built CREDENTIAL control frame (type 0x0A) to the framer and
// expects it to be consumed without raising an error. SPDY3 only.
TEST_P(SpdyFramerTest, ReceiveCredentialFrame) {
  if (!IsSpdy3()) {
    return;
  }
  SpdyFramer framer(spdy_version_);
  const unsigned char kV3FrameData[] = {  // Also applies for V2.
    0x80, spdy_version_ch_, 0x00, 0x0A,   // control frame, type CREDENTIAL
    0x00, 0x00, 0x00, 0x33,               // flags 0, length 0x33
    0x00, 0x03, 0x00, 0x00,               // slot 3, then length-prefixed
    0x00, 0x05, 'p',  'r',                // "proof" and certificate blobs
    'o',  'o',  'f',  0x00,
    0x00, 0x00, 0x06, 'a',
    ' ',  'c',  'e',  'r',
    't',  0x00, 0x00, 0x00,
    0x0C, 'a',  'n',  'o',
    't',  'h',  'e',  'r',
    ' ',  'c',  'e',  'r',
    't',  0x00, 0x00, 0x00,
    0x0A, 'f',  'i',  'n',
    'a',  'l',  ' ',  'c',
    'e',  'r',  't',
  };
  TestSpdyVisitor visitor(spdy_version_);
  visitor.use_compression_ = false;
  visitor.SimulateInFramer(kV3FrameData, arraysize(kV3FrameData));
  // The frame is accepted (or ignored) without error.
  EXPECT_EQ(0, visitor.error_count_);
}
// After consuming a CREDENTIAL frame the framer must resynchronize and parse
// the following frame correctly; verified with a trailing WINDOW_UPDATE.
// SPDY3 only.
TEST_P(SpdyFramerTest, ReadCredentialFrameFollowedByAnotherFrame) {
  if (!IsSpdy3()) {
    return;
  }
  SpdyFramer framer(spdy_version_);
  const unsigned char kV3FrameData[] = {  // Also applies for V2.
    0x80, spdy_version_ch_, 0x00, 0x0A,   // control frame, type CREDENTIAL
    0x00, 0x00, 0x00, 0x33,               // flags 0, length 0x33
    0x00, 0x03, 0x00, 0x00,
    0x00, 0x05, 'p',  'r',
    'o',  'o',  'f',  0x00,
    0x00, 0x00, 0x06, 'a',
    ' ',  'c',  'e',  'r',
    't',  0x00, 0x00, 0x00,
    0x0C, 'a',  'n',  'o',
    't',  'h',  'e',  'r',
    ' ',  'c',  'e',  'r',
    't',  0x00, 0x00, 0x00,
    0x0A, 'f',  'i',  'n',
    'a',  'l',  ' ',  'c',
    'e',  'r',  't',
  };
  TestSpdyVisitor visitor(spdy_version_);
  visitor.use_compression_ = false;
  // Concatenate the CREDENTIAL frame with a serialized WINDOW_UPDATE and feed
  // both through the framer in one pass.
  string multiple_frame_data(reinterpret_cast<const char*>(kV3FrameData),
                             arraysize(kV3FrameData));
  scoped_ptr<SpdyFrame> control_frame(
      framer.SerializeWindowUpdate(SpdyWindowUpdateIR(1, 2)));
  multiple_frame_data.append(string(control_frame->data(),
                                    control_frame->size()));
  visitor.SimulateInFramer(
      reinterpret_cast<unsigned const char*>(multiple_frame_data.data()),
      multiple_frame_data.length());
  EXPECT_EQ(0, visitor.error_count_);
  // The trailing WINDOW_UPDATE was parsed after the CREDENTIAL frame.
  EXPECT_EQ(1u, visitor.last_window_update_stream_);
  EXPECT_EQ(2u, visitor.last_window_update_delta_);
}
// Serializes a PUSH_PROMISE with a compressed header block, parses it back,
// and verifies stream ids and the decoded header block match the original.
// SPDY4+ only (PUSH_PROMISE does not exist in SPDY <= 3).
TEST_P(SpdyFramerTest, ReadCompressedPushPromise) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  SpdyFramer framer(spdy_version_);
  SpdyPushPromiseIR push_promise(42, 57);
  push_promise.SetHeader("foo", "bar");
  push_promise.SetHeader("bar", "foofoo");
  SpdyHeaderBlock expected_headers = push_promise.name_value_block();
  scoped_ptr<SpdySerializedFrame> serialized(
      framer.SerializePushPromise(push_promise));
  EXPECT_TRUE(serialized.get() != NULL);
  TestSpdyVisitor visitor(spdy_version_);
  visitor.use_compression_ = true;
  visitor.SimulateInFramer(
      reinterpret_cast<unsigned char*>(serialized->data()),
      serialized->size());
  EXPECT_EQ(42u, visitor.last_push_promise_stream_);
  EXPECT_EQ(57u, visitor.last_push_promise_promised_stream_);
  EXPECT_TRUE(CompareHeaderBlocks(&expected_headers, &visitor.headers_));
}
// A padded HEADERS frame followed by two CONTINUATION frames (the last with
// END_HEADERS) must decode into a single logical header block, with cookie
// fragments reassembled across frame boundaries. SPDY4+ only.
TEST_P(SpdyFramerTest, ReadHeadersWithContinuation) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  const unsigned char kInput[] = {
    0x00, 0x00, 0x14, 0x01, 0x08,  // HEADERS: PADDED
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x03,  // Padding of 3.
    0x00, 0x06, 0x63, 0x6f,  // HPACK fragment: "cookie: foo=bar"
    0x6f, 0x6b, 0x69, 0x65,
    0x07, 0x66, 0x6f, 0x6f,
    0x3d, 0x62, 0x61, 0x72,
    0x00, 0x00, 0x00,  // the 3 padding bytes
    0x00, 0x00, 0x14, 0x09, 0x00,  // CONTINUATION
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x00, 0x06, 0x63, 0x6f,  // "cookie: baz=bing" plus the start of the
    0x6f, 0x6b, 0x69, 0x65,  // next header name, split mid-encoding.
    0x08, 0x62, 0x61, 0x7a,
    0x3d, 0x62, 0x69, 0x6e,
    0x67, 0x00, 0x06, 0x63,
    0x00, 0x00, 0x12, 0x09, 0x04,  // CONTINUATION: END_HEADERS
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x6f, 0x6f, 0x6b, 0x69,  // remainder: "cookie" name, then "name: value"
    0x65, 0x00, 0x00, 0x04,
    0x6e, 0x61, 0x6d, 0x65,
    0x05, 0x76, 0x61, 0x6c,
    0x75, 0x65,
  };
  TestSpdyVisitor visitor(spdy_version_);
  visitor.SimulateInFramer(kInput, sizeof(kInput));
  EXPECT_EQ(0, visitor.error_count_);
  EXPECT_EQ(1, visitor.headers_frame_count_);
  EXPECT_EQ(2, visitor.continuation_count_);
  EXPECT_EQ(1, visitor.zero_length_control_frame_header_data_count_);
  EXPECT_EQ(0, visitor.zero_length_data_frame_count_);
  // The three cookie crumbs are joined with "; " into one cookie header.
  EXPECT_THAT(visitor.headers_, ElementsAre(
      Pair("cookie", "foo=bar; baz=bing; "),
      Pair("name", "value")));
}
// Same as ReadHeadersWithContinuation, but the HEADERS frame also carries
// FIN: the fin must only be surfaced (as a zero-length data frame) after the
// final CONTINUATION completes the header block. SPDY4+ only.
TEST_P(SpdyFramerTest, ReadHeadersWithContinuationAndFin) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  const unsigned char kInput[] = {
    0x00, 0x00, 0x10, 0x01, 0x01,  // HEADERS: FIN
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x00, 0x06, 0x63, 0x6f,  // "cookie: foo=bar"
    0x6f, 0x6b, 0x69, 0x65,
    0x07, 0x66, 0x6f, 0x6f,
    0x3d, 0x62, 0x61, 0x72,
    0x00, 0x00, 0x14, 0x09, 0x00,  // CONTINUATION
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x00, 0x06, 0x63, 0x6f,  // "cookie: baz=bing" + start of next name
    0x6f, 0x6b, 0x69, 0x65,
    0x08, 0x62, 0x61, 0x7a,
    0x3d, 0x62, 0x69, 0x6e,
    0x67, 0x00, 0x06, 0x63,
    0x00, 0x00, 0x12, 0x09, 0x04,  // CONTINUATION: END_HEADERS
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x6f, 0x6f, 0x6b, 0x69,  // remainder + "name: value"
    0x65, 0x00, 0x00, 0x04,
    0x6e, 0x61, 0x6d, 0x65,
    0x05, 0x76, 0x61, 0x6c,
    0x75, 0x65,
  };
  SpdyFramer framer(spdy_version_);
  TestSpdyVisitor visitor(spdy_version_);
  visitor.SimulateInFramer(kInput, sizeof(kInput));
  EXPECT_EQ(0, visitor.error_count_);
  EXPECT_EQ(1, visitor.headers_frame_count_);
  EXPECT_EQ(2, visitor.continuation_count_);
  EXPECT_EQ(1, visitor.fin_flag_count_);
  EXPECT_EQ(1, visitor.zero_length_control_frame_header_data_count_);
  // FIN is delivered as a zero-length data frame once headers complete.
  EXPECT_EQ(1, visitor.zero_length_data_frame_count_);
  EXPECT_THAT(visitor.headers_, ElementsAre(
      Pair("cookie", "foo=bar; baz=bing; "),
      Pair("name", "value")));
}
// A padded PUSH_PROMISE followed by two CONTINUATION frames must decode into
// one header block, and both the stream id and the promised stream id must
// be reported. SPDY4+ only.
TEST_P(SpdyFramerTest, ReadPushPromiseWithContinuation) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  const unsigned char kInput[] = {
    0x00, 0x00, 0x17, 0x05, 0x08,  // PUSH_PROMISE: PADDED
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x00, 0x00, 0x00, 0x2A,  // Promised stream 42
    0x02,  // Padding of 2.
    0x00, 0x06, 0x63, 0x6f,  // "cookie: foo=bar"
    0x6f, 0x6b, 0x69, 0x65,
    0x07, 0x66, 0x6f, 0x6f,
    0x3d, 0x62, 0x61, 0x72,
    0x00, 0x00,  // the 2 padding bytes
    0x00, 0x00, 0x14, 0x09, 0x00,  // CONTINUATION
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x00, 0x06, 0x63, 0x6f,  // "cookie: baz=bing" + start of next name
    0x6f, 0x6b, 0x69, 0x65,
    0x08, 0x62, 0x61, 0x7a,
    0x3d, 0x62, 0x69, 0x6e,
    0x67, 0x00, 0x06, 0x63,
    0x00, 0x00, 0x12, 0x09, 0x04,  // CONTINUATION: END_HEADERS
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x6f, 0x6f, 0x6b, 0x69,  // remainder + "name: value"
    0x65, 0x00, 0x00, 0x04,
    0x6e, 0x61, 0x6d, 0x65,
    0x05, 0x76, 0x61, 0x6c,
    0x75, 0x65,
  };
  SpdyFramer framer(spdy_version_);
  TestSpdyVisitor visitor(spdy_version_);
  visitor.SimulateInFramer(kInput, sizeof(kInput));
  EXPECT_EQ(0, visitor.error_count_);
  EXPECT_EQ(1u, visitor.last_push_promise_stream_);
  EXPECT_EQ(42u, visitor.last_push_promise_promised_stream_);
  EXPECT_EQ(2, visitor.continuation_count_);
  EXPECT_EQ(1, visitor.zero_length_control_frame_header_data_count_);
  EXPECT_EQ(0, visitor.zero_length_data_frame_count_);
  EXPECT_THAT(visitor.headers_, ElementsAre(
      Pair("cookie", "foo=bar; baz=bing; "),
      Pair("name", "value")));
}
// A CONTINUATION whose stream id differs from the preceding HEADERS frame
// must be rejected as an invalid control frame; no continuation callback or
// header data is surfaced. SPDY4+ only.
TEST_P(SpdyFramerTest, ReadContinuationWithWrongStreamId) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  const unsigned char kInput[] = {
    0x00, 0x00, 0x10, 0x01, 0x00,  // HEADERS
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x00, 0x06, 0x63, 0x6f,
    0x6f, 0x6b, 0x69, 0x65,
    0x07, 0x66, 0x6f, 0x6f,
    0x3d, 0x62, 0x61, 0x72,
    0x00, 0x00, 0x14, 0x09, 0x00,  // CONTINUATION
    0x00, 0x00, 0x00, 0x02,  // Stream 2 -- mismatched on purpose.
    0x00, 0x06, 0x63, 0x6f,
    0x6f, 0x6b, 0x69, 0x65,
    0x08, 0x62, 0x61, 0x7a,
    0x3d, 0x62, 0x69, 0x6e,
    0x67, 0x00, 0x06, 0x63,
  };
  SpdyFramer framer(spdy_version_);
  TestSpdyVisitor visitor(spdy_version_);
  framer.set_visitor(&visitor);
  visitor.SimulateInFramer(kInput, sizeof(kInput));
  EXPECT_EQ(1, visitor.error_count_);
  // Report the error code of visitor.framer_, which actually processed the
  // input; the local |framer| never saw it and would always read NO_ERROR.
  EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME,
            visitor.framer_.error_code())
      << SpdyFramer::ErrorCodeToString(visitor.framer_.error_code());
  EXPECT_EQ(1, visitor.headers_frame_count_);
  EXPECT_EQ(0, visitor.continuation_count_);
  EXPECT_EQ(0u, visitor.header_buffer_length_);
}
// A CONTINUATION frame that is not preceded by a HEADERS/PUSH_PROMISE must
// be rejected as an unexpected frame. SPDY4+ only.
TEST_P(SpdyFramerTest, ReadContinuationOutOfOrder) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  const unsigned char kInput[] = {
    0x00, 0x00, 0x18, 0x09, 0x00,  // CONTINUATION with no preceding HEADERS
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x00, 0x06, 0x63, 0x6f,
    0x6f, 0x6b, 0x69, 0x65,
    0x07, 0x66, 0x6f, 0x6f,
    0x3d, 0x62, 0x61, 0x72,
  };
  SpdyFramer framer(spdy_version_);
  TestSpdyVisitor visitor(spdy_version_);
  framer.set_visitor(&visitor);
  visitor.SimulateInFramer(kInput, sizeof(kInput));
  EXPECT_EQ(1, visitor.error_count_);
  // Report the error code of visitor.framer_, which actually processed the
  // input; the local |framer| never saw it and would always read NO_ERROR.
  EXPECT_EQ(SpdyFramer::SPDY_UNEXPECTED_FRAME,
            visitor.framer_.error_code())
      << SpdyFramer::ErrorCodeToString(visitor.framer_.error_code());
  EXPECT_EQ(0, visitor.continuation_count_);
  EXPECT_EQ(0u, visitor.header_buffer_length_);
}
// A HEADERS frame without END_HEADERS must be followed by a CONTINUATION;
// receiving a DATA frame instead is an unexpected-frame error. SPDY4+ only.
TEST_P(SpdyFramerTest, ExpectContinuationReceiveData) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  const unsigned char kInput[] = {
    0x00, 0x00, 0x10, 0x01, 0x00,  // HEADERS (no END_HEADERS flag)
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x00, 0x06, 0x63, 0x6f,
    0x6f, 0x6b, 0x69, 0x65,
    0x07, 0x66, 0x6f, 0x6f,
    0x3d, 0x62, 0x61, 0x72,
    0x00, 0x00, 0x00, 0x00, 0x01,  // DATA on Stream #1
    0x00, 0x00, 0x00, 0x04,
    0xde, 0xad, 0xbe, 0xef,
  };
  SpdyFramer framer(spdy_version_);
  TestSpdyVisitor visitor(spdy_version_);
  framer.set_visitor(&visitor);
  visitor.SimulateInFramer(kInput, sizeof(kInput));
  EXPECT_EQ(1, visitor.error_count_);
  // Report the error code of visitor.framer_, which actually processed the
  // input; the local |framer| never saw it and would always read NO_ERROR.
  EXPECT_EQ(SpdyFramer::SPDY_UNEXPECTED_FRAME,
            visitor.framer_.error_code())
      << SpdyFramer::ErrorCodeToString(visitor.framer_.error_code());
  EXPECT_EQ(1, visitor.headers_frame_count_);
  EXPECT_EQ(0, visitor.continuation_count_);
  EXPECT_EQ(0u, visitor.header_buffer_length_);
  EXPECT_EQ(0, visitor.data_frame_count_);
}
// A HEADERS frame without END_HEADERS must be followed by a CONTINUATION;
// receiving any other control frame instead is an unexpected-frame error.
// SPDY4+ only.
TEST_P(SpdyFramerTest, ExpectContinuationReceiveControlFrame) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  const unsigned char kInput[] = {
    0x00, 0x00, 0x18, 0x01, 0x00,  // HEADERS (no END_HEADERS flag)
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x00, 0x06, 0x63, 0x6f,
    0x6f, 0x6b, 0x69, 0x65,
    0x07, 0x66, 0x6f, 0x6f,
    0x3d, 0x62, 0x61, 0x72,
    0x00, 0x00, 0x1c, 0x08, 0x00,  // HEADERS
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x00, 0x06, 0x63, 0x6f,  // (Note this is a valid continued encoding).
    0x6f, 0x6b, 0x69, 0x65,
    0x08, 0x62, 0x61, 0x7a,
    0x3d, 0x62, 0x69, 0x6e,
    0x67, 0x00, 0x06, 0x63,
  };
  SpdyFramer framer(spdy_version_);
  TestSpdyVisitor visitor(spdy_version_);
  framer.set_visitor(&visitor);
  visitor.SimulateInFramer(kInput, sizeof(kInput));
  EXPECT_EQ(1, visitor.error_count_);
  // Report the error code of visitor.framer_, which actually processed the
  // input; the local |framer| never saw it and would always read NO_ERROR.
  EXPECT_EQ(SpdyFramer::SPDY_UNEXPECTED_FRAME,
            visitor.framer_.error_code())
      << SpdyFramer::ErrorCodeToString(visitor.framer_.error_code());
  EXPECT_EQ(1, visitor.headers_frame_count_);
  EXPECT_EQ(0, visitor.continuation_count_);
  EXPECT_EQ(0u, visitor.header_buffer_length_);
  EXPECT_EQ(0, visitor.data_frame_count_);
}
// A DATA frame carrying END_SEGMENT is currently parsed like a plain DATA
// frame: payload delivered, no error, no fin. SPDY4+ only.
TEST_P(SpdyFramerTest, EndSegmentOnDataFrame) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  const unsigned char kInput[] = {
    0x00, 0x00, 0x0c, 0x00, 0x02,  // DATA: END_SEGMENT
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0xde, 0xad, 0xbe, 0xef,  // 12 bytes of payload
    0xde, 0xad, 0xbe, 0xef,
    0xde, 0xad, 0xbe, 0xef,
  };
  TestSpdyVisitor visitor(spdy_version_);
  visitor.SimulateInFramer(kInput, sizeof(kInput));
  // TODO(jgraettinger): Verify END_SEGMENT when support is added.
  EXPECT_EQ(0, visitor.error_count_);
  EXPECT_EQ(12, visitor.data_bytes_);
  EXPECT_EQ(0, visitor.fin_frame_count_);
  EXPECT_EQ(0, visitor.fin_flag_count_);
}
// A HEADERS frame carrying END_SEGMENT (alongside END_HEADERS) is currently
// parsed like a plain HEADERS frame: headers decoded, no error. SPDY4+ only.
TEST_P(SpdyFramerTest, EndSegmentOnHeadersFrame) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  const unsigned char kInput[] = {
    0x00, 0x00, 0x10, 0x01, 0x06,  // HEADERS: END_SEGMENT | END_HEADERS
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0x00, 0x06, 0x63, 0x6f,  // HPACK: "cookie: foo=bar"
    0x6f, 0x6b, 0x69, 0x65,
    0x07, 0x66, 0x6f, 0x6f,
    0x3d, 0x62, 0x61, 0x72,
  };
  TestSpdyVisitor visitor(spdy_version_);
  visitor.SimulateInFramer(kInput, sizeof(kInput));
  // TODO(jgraettinger): Verify END_SEGMENT when support is added.
  EXPECT_EQ(0, visitor.error_count_);
  EXPECT_EQ(1, visitor.headers_frame_count_);
  EXPECT_EQ(1, visitor.zero_length_control_frame_header_data_count_);
  EXPECT_THAT(visitor.headers_, ElementsAre(
      Pair("cookie", "foo=bar")));
}
// 256 bytes of 0xff cannot be parsed as any legal frame; exactly one error
// is expected (the framer stops at the first failure).
TEST_P(SpdyFramerTest, ReadGarbage) {
  SpdyFramer framer(spdy_version_);
  unsigned char junk[256];
  memset(junk, 0xff, sizeof(junk));
  TestSpdyVisitor visitor(spdy_version_);
  visitor.use_compression_ = false;
  visitor.SimulateInFramer(junk, sizeof(junk));
  EXPECT_EQ(1, visitor.error_count_);
}
// An unknown frame type with a valid length must be skippable: when the
// visitor accepts it (on_unknown_frame_result_ = true) the framer consumes
// it without error and parses the next frame normally. SPDY4+ only.
TEST_P(SpdyFramerTest, ReadUnknownExtensionFrame) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  SpdyFramer framer(spdy_version_);
  // The unrecognized frame type should still have a valid length.
  const unsigned char unknown_frame[] = {
    0x00, 0x00, 0x08, 0xff, 0xff,  // length 8, unknown type 0xff
    0xff, 0xff, 0xff, 0xff,
    0xff, 0xff, 0xff, 0xff,
    0xff, 0xff, 0xff, 0xff,
  };
  TestSpdyVisitor visitor(spdy_version_);
  // Simulate the case where the stream id validation checks out.
  visitor.on_unknown_frame_result_ = true;
  visitor.use_compression_ = false;
  visitor.SimulateInFramer(unknown_frame, arraysize(unknown_frame));
  EXPECT_EQ(0, visitor.error_count_);
  // Follow it up with a valid control frame to make sure we handle
  // subsequent frames correctly.
  SpdySettingsIR settings_ir;
  settings_ir.AddSetting(SpdyConstants::ParseSettingId(spdy_version_, 1),
                         false,  // persist
                         false,  // persisted
                         10);
  scoped_ptr<SpdyFrame> control_frame(framer.SerializeSettings(settings_ir));
  visitor.SimulateInFramer(
      reinterpret_cast<unsigned char*>(control_frame->data()),
      control_frame->size());
  EXPECT_EQ(0, visitor.error_count_);
  EXPECT_EQ(1u, static_cast<unsigned>(visitor.setting_count_));
  EXPECT_EQ(1u, static_cast<unsigned>(visitor.settings_ack_sent_));
}
// A frame with a plausible length but an unknown type and garbage payload
// must produce exactly one error when the visitor does not accept unknown
// frames (default on_unknown_frame_result_). SPDY4 only.
TEST_P(SpdyFramerTest, ReadGarbageWithValidLength) {
  if (!IsSpdy4()) {
    return;
  }
  SpdyFramer framer(spdy_version_);
  const unsigned char kFrameData[] = {
    0x00, 0x00, 0x08, 0xff, 0xff,  // length 8, unknown type 0xff
    0xff, 0xff, 0xff, 0xff,
    0xff, 0xff, 0xff, 0xff,
    0xff, 0xff, 0xff, 0xff,
  };
  TestSpdyVisitor visitor(spdy_version_);
  visitor.use_compression_ = false;
  visitor.SimulateInFramer(kFrameData, arraysize(kFrameData));
  EXPECT_EQ(1, visitor.error_count_);
}
// A control frame with a valid SPDY version byte but garbage type/flags/
// length must produce exactly one error. SPDY2/3 only (SPDY4 has no version
// field in the frame header).
TEST_P(SpdyFramerTest, ReadGarbageWithValidVersion) {
  if (IsSpdy4()) {
    // Not valid for SPDY 4 since there is no version field.
    return;
  }
  SpdyFramer framer(spdy_version_);
  const unsigned char kFrameData[] = {
    0x80, spdy_version_ch_, 0xff, 0xff,  // control bit + version, junk type
    0xff, 0xff, 0xff, 0xff,
  };
  TestSpdyVisitor visitor(spdy_version_);
  visitor.use_compression_ = false;
  visitor.SimulateInFramer(kFrameData, arraysize(kFrameData));
  EXPECT_EQ(1, visitor.error_count_);
}
// A well-framed HEADERS frame whose payload is not valid HPACK must produce
// exactly one error. SPDY4+ only.
TEST_P(SpdyFramerTest, ReadGarbageHPACKEncoding) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  const unsigned char kInput[] = {
    // The original header here was only 4 bytes (0x00, 0x12, 0x01, 0x04),
    // missing the leading length byte of the 3-byte length prefix used by
    // every other SPDY4 frame in this file (GetControlFrameHeaderSize() is
    // 9). With the correct header the framer reaches the HPACK payload and
    // fails there, which is what this test is meant to exercise.
    0x00, 0x00, 0x12, 0x01, 0x04,  // HEADERS: END_HEADERS, length 0x12
    0x00, 0x00, 0x00, 0x01,  // Stream 1
    0xef, 0xef, 0xff, 0xff,  // 18 bytes of invalid HPACK
    0xff, 0xff, 0xff, 0xff,
    0xff, 0xff, 0xff, 0xff,
    0xff, 0xff, 0xff, 0xff,
    0xff, 0xff,
  };
  TestSpdyVisitor visitor(spdy_version_);
  visitor.SimulateInFramer(kInput, arraysize(kInput));
  EXPECT_EQ(1, visitor.error_count_);
}
// Pins the expected minimum/maximum sizes of every frame type for each
// protocol version; acts as a regression test against accidental changes to
// the wire format constants.
TEST_P(SpdyFramerTest, SizesTest) {
  SpdyFramer framer(spdy_version_);
  if (IsSpdy4() || IsSpdy5()) {
    // SPDY4+/HTTP2: 9-byte frame header throughout.
    EXPECT_EQ(9u, framer.GetDataFrameMinimumSize());
    EXPECT_EQ(9u, framer.GetControlFrameHeaderSize());
    EXPECT_EQ(14u, framer.GetSynStreamMinimumSize());
    EXPECT_EQ(9u, framer.GetSynReplyMinimumSize());
    EXPECT_EQ(13u, framer.GetRstStreamMinimumSize());
    EXPECT_EQ(9u, framer.GetSettingsMinimumSize());
    EXPECT_EQ(17u, framer.GetPingSize());
    EXPECT_EQ(17u, framer.GetGoAwayMinimumSize());
    EXPECT_EQ(9u, framer.GetHeadersMinimumSize());
    EXPECT_EQ(13u, framer.GetWindowUpdateSize());
    EXPECT_EQ(9u, framer.GetBlockedSize());
    EXPECT_EQ(13u, framer.GetPushPromiseMinimumSize());
    EXPECT_EQ(18u, framer.GetAltSvcMinimumSize());
    EXPECT_EQ(9u, framer.GetFrameMinimumSize());
    EXPECT_EQ(16393u, framer.GetFrameMaximumSize());
    EXPECT_EQ(16384u, framer.GetDataFrameMaximumPayload());
  } else {
    // SPDY2/3: 8-byte frame header; some frames differ between V2 and V3.
    EXPECT_EQ(8u, framer.GetDataFrameMinimumSize());
    EXPECT_EQ(8u, framer.GetControlFrameHeaderSize());
    EXPECT_EQ(18u, framer.GetSynStreamMinimumSize());
    EXPECT_EQ(IsSpdy2() ? 14u : 12u, framer.GetSynReplyMinimumSize());
    EXPECT_EQ(16u, framer.GetRstStreamMinimumSize());
    EXPECT_EQ(12u, framer.GetSettingsMinimumSize());
    EXPECT_EQ(12u, framer.GetPingSize());
    EXPECT_EQ(IsSpdy2() ? 12u : 16u, framer.GetGoAwayMinimumSize());
    EXPECT_EQ(IsSpdy2() ? 14u : 12u, framer.GetHeadersMinimumSize());
    EXPECT_EQ(16u, framer.GetWindowUpdateSize());
    EXPECT_EQ(8u, framer.GetFrameMinimumSize());
    EXPECT_EQ(16777223u, framer.GetFrameMaximumSize());
    EXPECT_EQ(16777215u, framer.GetDataFrameMaximumPayload());
  }
}
// Checks the human-readable name of every parser state, plus the
// out-of-range fallback "UNKNOWN_STATE".
TEST_P(SpdyFramerTest, StateToStringTest) {
  EXPECT_STREQ("ERROR", SpdyFramer::StateToString(SpdyFramer::SPDY_ERROR));
  EXPECT_STREQ("AUTO_RESET",
               SpdyFramer::StateToString(SpdyFramer::SPDY_AUTO_RESET));
  EXPECT_STREQ("RESET", SpdyFramer::StateToString(SpdyFramer::SPDY_RESET));
  EXPECT_STREQ(
      "READING_COMMON_HEADER",
      SpdyFramer::StateToString(SpdyFramer::SPDY_READING_COMMON_HEADER));
  EXPECT_STREQ(
      "CONTROL_FRAME_PAYLOAD",
      SpdyFramer::StateToString(SpdyFramer::SPDY_CONTROL_FRAME_PAYLOAD));
  EXPECT_STREQ(
      "IGNORE_REMAINING_PAYLOAD",
      SpdyFramer::StateToString(SpdyFramer::SPDY_IGNORE_REMAINING_PAYLOAD));
  EXPECT_STREQ(
      "FORWARD_STREAM_FRAME",
      SpdyFramer::StateToString(SpdyFramer::SPDY_FORWARD_STREAM_FRAME));
  EXPECT_STREQ("SPDY_CONTROL_FRAME_BEFORE_HEADER_BLOCK",
               SpdyFramer::StateToString(
                   SpdyFramer::SPDY_CONTROL_FRAME_BEFORE_HEADER_BLOCK));
  EXPECT_STREQ(
      "SPDY_CONTROL_FRAME_HEADER_BLOCK",
      SpdyFramer::StateToString(SpdyFramer::SPDY_CONTROL_FRAME_HEADER_BLOCK));
  EXPECT_STREQ(
      "SPDY_SETTINGS_FRAME_PAYLOAD",
      SpdyFramer::StateToString(SpdyFramer::SPDY_SETTINGS_FRAME_PAYLOAD));
  EXPECT_STREQ(
      "SPDY_ALTSVC_FRAME_PAYLOAD",
      SpdyFramer::StateToString(SpdyFramer::SPDY_ALTSVC_FRAME_PAYLOAD));
  // One past the last defined state maps to the fallback string.
  EXPECT_STREQ(
      "UNKNOWN_STATE",
      SpdyFramer::StateToString(SpdyFramer::SPDY_ALTSVC_FRAME_PAYLOAD + 1));
}
// Checks the human-readable name of every framer error code, plus the
// fallback "UNKNOWN_ERROR" for LAST_ERROR.
TEST_P(SpdyFramerTest, ErrorCodeToStringTest) {
  EXPECT_STREQ("NO_ERROR",
               SpdyFramer::ErrorCodeToString(SpdyFramer::SPDY_NO_ERROR));
  EXPECT_STREQ(
      "INVALID_CONTROL_FRAME",
      SpdyFramer::ErrorCodeToString(SpdyFramer::SPDY_INVALID_CONTROL_FRAME));
  EXPECT_STREQ("CONTROL_PAYLOAD_TOO_LARGE",
               SpdyFramer::ErrorCodeToString(
                   SpdyFramer::SPDY_CONTROL_PAYLOAD_TOO_LARGE));
  EXPECT_STREQ(
      "ZLIB_INIT_FAILURE",
      SpdyFramer::ErrorCodeToString(SpdyFramer::SPDY_ZLIB_INIT_FAILURE));
  EXPECT_STREQ(
      "UNSUPPORTED_VERSION",
      SpdyFramer::ErrorCodeToString(SpdyFramer::SPDY_UNSUPPORTED_VERSION));
  EXPECT_STREQ(
      "DECOMPRESS_FAILURE",
      SpdyFramer::ErrorCodeToString(SpdyFramer::SPDY_DECOMPRESS_FAILURE));
  EXPECT_STREQ(
      "COMPRESS_FAILURE",
      SpdyFramer::ErrorCodeToString(SpdyFramer::SPDY_COMPRESS_FAILURE));
  EXPECT_STREQ("SPDY_INVALID_DATA_FRAME_FLAGS",
               SpdyFramer::ErrorCodeToString(
                   SpdyFramer::SPDY_INVALID_DATA_FRAME_FLAGS));
  EXPECT_STREQ("SPDY_INVALID_CONTROL_FRAME_FLAGS",
               SpdyFramer::ErrorCodeToString(
                   SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS));
  EXPECT_STREQ("UNKNOWN_ERROR",
               SpdyFramer::ErrorCodeToString(SpdyFramer::LAST_ERROR));
}
// Checks the human-readable name of every RST_STREAM status code, plus the
// fallback "UNKNOWN_STATUS" for an out-of-range value.
TEST_P(SpdyFramerTest, StatusCodeToStringTest) {
  EXPECT_STREQ("INVALID", SpdyFramer::StatusCodeToString(RST_STREAM_INVALID));
  EXPECT_STREQ("PROTOCOL_ERROR",
               SpdyFramer::StatusCodeToString(RST_STREAM_PROTOCOL_ERROR));
  EXPECT_STREQ("INVALID_STREAM",
               SpdyFramer::StatusCodeToString(RST_STREAM_INVALID_STREAM));
  EXPECT_STREQ("REFUSED_STREAM",
               SpdyFramer::StatusCodeToString(RST_STREAM_REFUSED_STREAM));
  EXPECT_STREQ(
      "UNSUPPORTED_VERSION",
      SpdyFramer::StatusCodeToString(RST_STREAM_UNSUPPORTED_VERSION));
  EXPECT_STREQ("CANCEL", SpdyFramer::StatusCodeToString(RST_STREAM_CANCEL));
  EXPECT_STREQ("INTERNAL_ERROR",
               SpdyFramer::StatusCodeToString(RST_STREAM_INTERNAL_ERROR));
  EXPECT_STREQ(
      "FLOW_CONTROL_ERROR",
      SpdyFramer::StatusCodeToString(RST_STREAM_FLOW_CONTROL_ERROR));
  EXPECT_STREQ("UNKNOWN_STATUS", SpdyFramer::StatusCodeToString(-1));
}
// Checks the human-readable name of every frame type.
TEST_P(SpdyFramerTest, FrameTypeToStringTest) {
  EXPECT_STREQ("DATA", SpdyFramer::FrameTypeToString(DATA));
  EXPECT_STREQ("SYN_STREAM", SpdyFramer::FrameTypeToString(SYN_STREAM));
  EXPECT_STREQ("SYN_REPLY", SpdyFramer::FrameTypeToString(SYN_REPLY));
  EXPECT_STREQ("RST_STREAM", SpdyFramer::FrameTypeToString(RST_STREAM));
  EXPECT_STREQ("SETTINGS", SpdyFramer::FrameTypeToString(SETTINGS));
  EXPECT_STREQ("PING", SpdyFramer::FrameTypeToString(PING));
  EXPECT_STREQ("GOAWAY", SpdyFramer::FrameTypeToString(GOAWAY));
  EXPECT_STREQ("HEADERS", SpdyFramer::FrameTypeToString(HEADERS));
  EXPECT_STREQ("WINDOW_UPDATE", SpdyFramer::FrameTypeToString(WINDOW_UPDATE));
  EXPECT_STREQ("PUSH_PROMISE", SpdyFramer::FrameTypeToString(PUSH_PROMISE));
  EXPECT_STREQ("CREDENTIAL", SpdyFramer::FrameTypeToString(CREDENTIAL));
  EXPECT_STREQ("CONTINUATION", SpdyFramer::FrameTypeToString(CONTINUATION));
}
// When the peer speaks plain HTTP instead of SPDY, the framer should detect
// the "HTTP/1.x" preface, error out, and set probable_http_response() so
// callers can report a useful diagnostic. SPDY2/3 only.
TEST_P(SpdyFramerTest, CatchProbableHttpResponse) {
  if (IsSpdy4()) {
    // TODO(hkhalil): catch probable HTTP response in SPDY 4?
    return;
  }
  {
    // "HTTP/1.1" preface.
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    EXPECT_CALL(visitor, OnError(_));
    framer.ProcessInput("HTTP/1.1", 8);
    EXPECT_TRUE(framer.probable_http_response());
    EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
    EXPECT_EQ(SpdyFramer::SPDY_INVALID_DATA_FRAME_FLAGS, framer.error_code())
        << SpdyFramer::ErrorCodeToString(framer.error_code());
  }
  {
    // "HTTP/1.0" preface.
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    EXPECT_CALL(visitor, OnError(_));
    framer.ProcessInput("HTTP/1.0", 8);
    EXPECT_TRUE(framer.probable_http_response());
    EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
    EXPECT_EQ(SpdyFramer::SPDY_INVALID_DATA_FRAME_FLAGS, framer.error_code())
        << SpdyFramer::ErrorCodeToString(framer.error_code());
  }
}
// Exhaustively tries all 256 flag combinations on a DATA frame for SPDY2/3:
// only DATA_FLAG_FIN is legal; any other bit must put the framer into an
// error state with SPDY_INVALID_DATA_FRAME_FLAGS.
TEST_P(SpdyFramerTest, DataFrameFlagsV2V3) {
  if (spdy_version_ > SPDY3) {
    return;
  }
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    SpdyDataIR data_ir(1, StringPiece("hello", 5));
    scoped_ptr<SpdyFrame> frame(framer.SerializeData(data_ir));
    // Overwrite the serialized flags byte with the combination under test.
    SetFrameFlags(frame.get(), flags, spdy_version_);
    if (flags & ~DATA_FLAG_FIN) {
      EXPECT_CALL(visitor, OnError(_));
    } else {
      EXPECT_CALL(visitor, OnDataFrameHeader(1, 5, flags & DATA_FLAG_FIN));
      EXPECT_CALL(visitor, OnStreamFrameData(_, _, 5, false));
      if (flags & DATA_FLAG_FIN) {
        // FIN is delivered as a trailing zero-length data callback.
        EXPECT_CALL(visitor, OnStreamFrameData(_, _, 0, true));
      }
    }
    framer.ProcessInput(frame->data(), frame->size());
    if (flags & ~DATA_FLAG_FIN) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_DATA_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// Exhaustively tries all 256 flag combinations on a DATA frame for SPDY4+:
// FIN, END_SEGMENT, and PADDED are the legal bits. PADDED is legal as a flag
// but errors here because the serialized payload carries no padding fields.
TEST_P(SpdyFramerTest, DataFrameFlagsV4) {
  if (spdy_version_ <= SPDY3) {
    return;
  }
  uint8 valid_data_flags = DATA_FLAG_FIN | DATA_FLAG_END_SEGMENT |
      DATA_FLAG_PADDED;
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    SpdyDataIR data_ir(1, StringPiece("hello", 5));
    scoped_ptr<SpdyFrame> frame(framer.SerializeData(data_ir));
    // Overwrite the serialized flags byte with the combination under test.
    SetFrameFlags(frame.get(), flags, spdy_version_);
    if (flags & ~valid_data_flags) {
      EXPECT_CALL(visitor, OnError(_));
    } else {
      EXPECT_CALL(visitor, OnDataFrameHeader(1, 5, flags & DATA_FLAG_FIN));
      if (flags & DATA_FLAG_PADDED) {
        // Expect Error since we don't set padded in payload.
        EXPECT_CALL(visitor, OnError(_));
      } else {
        EXPECT_CALL(visitor, OnStreamFrameData(_, _, 5, false));
        if (flags & DATA_FLAG_FIN) {
          EXPECT_CALL(visitor, OnStreamFrameData(_, _, 0, true));
        }
      }
    }
    framer.ProcessInput(frame->data(), frame->size());
    if ((flags & ~valid_data_flags) || (flags & DATA_FLAG_PADDED)) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_DATA_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// Exhaustively tries all 256 flag combinations on a SYN_STREAM frame:
// only FIN and UNIDIRECTIONAL are legal; anything else must produce
// SPDY_INVALID_CONTROL_FRAME_FLAGS. SPDY2/3 only.
TEST_P(SpdyFramerTest, SynStreamFrameFlags) {
  if (!IsSpdy2() && !IsSpdy3()) {
    // SYN_STREAM not supported in SPDY>3
    return;
  }
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    testing::StrictMock<test::MockDebugVisitor> debug_visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    framer.set_debug_visitor(&debug_visitor);
    EXPECT_CALL(debug_visitor, OnSendCompressedFrame(8, SYN_STREAM, _, _));
    SpdySynStreamIR syn_stream(8);
    syn_stream.set_associated_to_stream_id(3);
    syn_stream.set_priority(1);
    syn_stream.SetHeader("foo", "bar");
    scoped_ptr<SpdyFrame> frame(framer.SerializeSynStream(syn_stream));
    int set_flags = flags;
    SetFrameFlags(frame.get(), set_flags, spdy_version_);
    if (flags & ~(CONTROL_FLAG_FIN | CONTROL_FLAG_UNIDIRECTIONAL)) {
      EXPECT_CALL(visitor, OnError(_));
    } else {
      EXPECT_CALL(debug_visitor, OnReceiveCompressedFrame(8, SYN_STREAM, _));
      EXPECT_CALL(visitor, OnSynStream(8, 3, 1, flags & CONTROL_FLAG_FIN,
                                       flags & CONTROL_FLAG_UNIDIRECTIONAL));
      EXPECT_CALL(visitor, OnControlFrameHeaderData(8, _, _))
          .WillRepeatedly(testing::Return(true));
      // This is a control frame, so test against CONTROL_FLAG_FIN (the
      // original used DATA_FLAG_FIN, which happens to share the 0x01 value).
      if (flags & CONTROL_FLAG_FIN) {
        // FIN is delivered as a trailing zero-length data callback.
        EXPECT_CALL(visitor, OnStreamFrameData(_, _, 0, true));
      } else {
        // Without FIN the stream must not be closed.
        EXPECT_CALL(visitor, OnStreamFrameData(_, _, 0, true)).Times(0);
      }
    }
    framer.ProcessInput(frame->data(), frame->size());
    if (flags & ~(CONTROL_FLAG_FIN | CONTROL_FLAG_UNIDIRECTIONAL)) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// Exhaustively tries all 256 flag combinations on a SYN_REPLY frame:
// only FIN is legal; anything else must produce
// SPDY_INVALID_CONTROL_FRAME_FLAGS. SPDY2/3 only.
TEST_P(SpdyFramerTest, SynReplyFrameFlags) {
  if (!IsSpdy2() && !IsSpdy3()) {
    // SYN_REPLY not supported in SPDY>3
    return;
  }
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    SpdySynReplyIR syn_reply(37);
    syn_reply.SetHeader("foo", "bar");
    scoped_ptr<SpdyFrame> frame(framer.SerializeSynReply(syn_reply));
    SetFrameFlags(frame.get(), flags, spdy_version_);
    if (flags & ~CONTROL_FLAG_FIN) {
      EXPECT_CALL(visitor, OnError(_));
    } else {
      EXPECT_CALL(visitor, OnSynReply(37, flags & CONTROL_FLAG_FIN));
      EXPECT_CALL(visitor, OnControlFrameHeaderData(37, _, _))
          .WillRepeatedly(testing::Return(true));
      // This is a control frame, so test against CONTROL_FLAG_FIN (the
      // original used DATA_FLAG_FIN, which happens to share the 0x01 value).
      if (flags & CONTROL_FLAG_FIN) {
        EXPECT_CALL(visitor, OnStreamFrameData(_, _, 0, true));
      }
    }
    framer.ProcessInput(frame->data(), frame->size());
    if (flags & ~CONTROL_FLAG_FIN) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// Exhaustively tries all 256 flag combinations on a RST_STREAM frame:
// no flags are legal; any non-zero flags must produce
// SPDY_INVALID_CONTROL_FRAME_FLAGS.
TEST_P(SpdyFramerTest, RstStreamFrameFlags) {
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    SpdyRstStreamIR rst_stream(13, RST_STREAM_CANCEL, "");
    scoped_ptr<SpdyFrame> frame(framer.SerializeRstStream(rst_stream));
    SetFrameFlags(frame.get(), flags, spdy_version_);
    if (flags != 0) {
      EXPECT_CALL(visitor, OnError(_));
    } else {
      EXPECT_CALL(visitor, OnRstStream(13, RST_STREAM_CANCEL));
    }
    framer.ProcessInput(frame->data(), frame->size());
    if (flags != 0) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// Exhaustively tries all 256 flag combinations on a SPDY2/3 SETTINGS frame:
// only CLEAR_PREVIOUSLY_PERSISTED_SETTINGS is legal.
TEST_P(SpdyFramerTest, SettingsFrameFlagsOldFormat) {
  if (spdy_version_ > SPDY3) { return; }
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    SpdySettingsIR settings_ir;
    settings_ir.AddSetting(SETTINGS_UPLOAD_BANDWIDTH,
                           false,   // persist
                           false,   // persisted
                           54321);
    scoped_ptr<SpdyFrame> frame(framer.SerializeSettings(settings_ir));
    SetFrameFlags(frame.get(), flags, spdy_version_);
    if (flags & ~SETTINGS_FLAG_CLEAR_PREVIOUSLY_PERSISTED_SETTINGS) {
      EXPECT_CALL(visitor, OnError(_));
    } else {
      EXPECT_CALL(visitor, OnSettings(
          flags & SETTINGS_FLAG_CLEAR_PREVIOUSLY_PERSISTED_SETTINGS));
      EXPECT_CALL(visitor, OnSetting(SETTINGS_UPLOAD_BANDWIDTH,
                                     SETTINGS_FLAG_NONE, 54321));
      EXPECT_CALL(visitor, OnSettingsEnd());
    }
    framer.ProcessInput(frame->data(), frame->size());
    if (flags & ~SETTINGS_FLAG_CLEAR_PREVIOUSLY_PERSISTED_SETTINGS) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// Exhaustively tries all 256 flag combinations on a SPDY4+ SETTINGS frame
// that carries a payload. Any flag is an error: unknown flags are invalid
// control-frame flags, and ACK with a payload is an invalid control frame.
TEST_P(SpdyFramerTest, SettingsFrameFlags) {
  if (spdy_version_ <= SPDY3) { return; }
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    SpdySettingsIR settings_ir;
    settings_ir.AddSetting(SETTINGS_INITIAL_WINDOW_SIZE, 0, 0, 16);
    scoped_ptr<SpdyFrame> frame(framer.SerializeSettings(settings_ir));
    SetFrameFlags(frame.get(), flags, spdy_version_);
    if (flags != 0) {
      EXPECT_CALL(visitor, OnError(_));
    } else {
      // Here flags == 0, so the OnSettings argument is always false.
      EXPECT_CALL(visitor, OnSettings(flags & SETTINGS_FLAG_ACK));
      EXPECT_CALL(visitor, OnSetting(SETTINGS_INITIAL_WINDOW_SIZE, 0, 16));
      EXPECT_CALL(visitor, OnSettingsEnd());
    }
    framer.ProcessInput(frame->data(), frame->size());
    if (flags & ~SETTINGS_FLAG_ACK) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else if (flags & SETTINGS_FLAG_ACK) {
      // The frame is invalid because ACK frames should have no payload.
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// Verifies that a GOAWAY frame is accepted only with a zero flags byte;
// any set bit must produce SPDY_INVALID_CONTROL_FRAME_FLAGS.
TEST_P(SpdyFramerTest, GoawayFrameFlags) {
  // Exhaustively try every value of the 8-bit flags field.
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    // StrictMock: any visitor call without a matching EXPECT_CALL fails.
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    SpdyGoAwayIR goaway_ir(97, GOAWAY_OK, "test");
    scoped_ptr<SpdyFrame> frame(framer.SerializeGoAway(goaway_ir));
    // Overwrite the serialized flags byte with the value under test.
    SetFrameFlags(frame.get(), flags, spdy_version_);
    if (flags != 0) {
      EXPECT_CALL(visitor, OnError(_));
    } else {
      EXPECT_CALL(visitor, OnGoAway(97, GOAWAY_OK));
    }
    framer.ProcessInput(frame->data(), frame->size());
    if (flags != 0) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// Exhaustively verifies HEADERS frame flag handling across protocol
// versions. For SPDY <= 3 only CONTROL_FLAG_FIN is legal; for SPDY/4+
// the END_HEADERS / END_SEGMENT / PADDED / PRIORITY bits are also
// accepted, and a HEADERS frame carrying HEADERS_FLAG_PRIORITY is
// surfaced to the visitor via OnSynStream() rather than OnHeaders().
TEST_P(SpdyFramerTest, HeadersFrameFlags) {
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    SpdyHeadersIR headers_ir(57);
    if (IsSpdy4() && (flags & HEADERS_FLAG_PRIORITY)) {
      headers_ir.set_priority(3);
      headers_ir.set_has_priority(true);
    }
    headers_ir.SetHeader("foo", "bar");
    scoped_ptr<SpdyFrame> frame(framer.SerializeHeaders(headers_ir));
    int set_flags = flags;
    if (IsSpdy4()) {
      // TODO(jgraettinger): Add padding to SpdyHeadersIR,
      // and implement framing.
      set_flags &= ~HEADERS_FLAG_PADDED;
    }
    // Overwrite the serialized flags byte with the value under test.
    SetFrameFlags(frame.get(), set_flags, spdy_version_);
    if (!IsSpdy4() && flags & ~CONTROL_FLAG_FIN) {
      EXPECT_CALL(visitor, OnError(_));
    } else if (IsSpdy4() && flags & ~(CONTROL_FLAG_FIN |
                                      HEADERS_FLAG_END_HEADERS |
                                      HEADERS_FLAG_END_SEGMENT |
                                      HEADERS_FLAG_PADDED |
                                      HEADERS_FLAG_PRIORITY)) {
      EXPECT_CALL(visitor, OnError(_));
    } else {
      if (spdy_version_ > SPDY3 && flags & HEADERS_FLAG_PRIORITY) {
        EXPECT_CALL(visitor, OnSynStream(57,  // stream id
                                         0,   // associated stream id
                                         3,   // priority
                                         flags & CONTROL_FLAG_FIN,
                                         false));  // unidirectional
      } else {
        EXPECT_CALL(visitor, OnHeaders(57,
                                       flags & CONTROL_FLAG_FIN,
                                       (flags & HEADERS_FLAG_END_HEADERS) ||
                                        !IsSpdy4()));
      }
      EXPECT_CALL(visitor, OnControlFrameHeaderData(57, _, _))
          .WillRepeatedly(testing::Return(true));
      // NOTE(review): DATA_FLAG_FIN is used to test a HEADERS flag here;
      // presumably it shares the bit value of CONTROL_FLAG_FIN -- confirm.
      if (flags & DATA_FLAG_FIN && (!IsSpdy4() ||
                                    flags & HEADERS_FLAG_END_HEADERS)) {
        EXPECT_CALL(visitor, OnStreamFrameData(_, _, 0, true));
      } else {
        // Do not close the stream if we are expecting a CONTINUATION frame.
        EXPECT_CALL(visitor, OnStreamFrameData(_, _, 0, true)).Times(0);
      }
    }
    framer.ProcessInput(frame->data(), frame->size());
    if (!IsSpdy4() && flags & ~CONTROL_FLAG_FIN) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else if (IsSpdy4() && flags & ~(CONTROL_FLAG_FIN |
                                      HEADERS_FLAG_END_HEADERS |
                                      HEADERS_FLAG_END_SEGMENT |
                                      HEADERS_FLAG_PADDED |
                                      HEADERS_FLAG_PRIORITY)) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      // Any legal flag combination leaves the framer cleanly reset,
      // whether or not END_HEADERS was set (a CONTINUATION may follow).
      // BUGFIX: the original code had an extra
      //   "else if (IsSpdy4() && ~(flags & HEADERS_FLAG_END_HEADERS))"
      // branch; bitwise '~' of a flag test is (almost) always non-zero,
      // so that condition was effectively always true and the trailing
      // else was dead. '!' was intended. Since both branches asserted
      // exactly the same expectations, they are merged here.
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// Verifies PING frame flag handling: flags == 0 is always legal;
// PING_FLAG_ACK alone is additionally legal for SPDY/4+ (reported via
// the is_ack argument of OnPing); everything else is a flags error.
TEST_P(SpdyFramerTest, PingFrameFlags) {
  // Exhaustively try every value of the 8-bit flags field.
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    scoped_ptr<SpdyFrame> frame(framer.SerializePing(SpdyPingIR(42)));
    // Overwrite the serialized flags byte with the value under test.
    SetFrameFlags(frame.get(), flags, spdy_version_);
    if (spdy_version_ > SPDY3 &&
        flags == PING_FLAG_ACK) {
      EXPECT_CALL(visitor, OnPing(42, true));
    } else if (flags == 0) {
      EXPECT_CALL(visitor, OnPing(42, false));
    } else {
      EXPECT_CALL(visitor, OnError(_));
    }
    framer.ProcessInput(frame->data(), frame->size());
    if ((spdy_version_ > SPDY3 && flags == PING_FLAG_ACK) ||
        flags == 0) {
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// Verifies that a WINDOW_UPDATE frame is accepted only with a zero
// flags byte; any set bit produces SPDY_INVALID_CONTROL_FRAME_FLAGS.
TEST_P(SpdyFramerTest, WindowUpdateFrameFlags) {
  // Exhaustively try every value of the 8-bit flags field.
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    // Stream 4, window delta 1024.
    scoped_ptr<SpdyFrame> frame(framer.SerializeWindowUpdate(
        SpdyWindowUpdateIR(4, 1024)));
    // Overwrite the serialized flags byte with the value under test.
    SetFrameFlags(frame.get(), flags, spdy_version_);
    if (flags != 0) {
      EXPECT_CALL(visitor, OnError(_));
    } else {
      EXPECT_CALL(visitor, OnWindowUpdate(4, 1024));
    }
    framer.ProcessInput(frame->data(), frame->size());
    if (flags != 0) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// Verifies PUSH_PROMISE frame flag handling (SPDY/4+ only): only
// END_PUSH_PROMISE is legal (PADDED is stripped before processing
// because padding is not yet implemented -- see TODO below).
TEST_P(SpdyFramerTest, PushPromiseFrameFlags) {
  if (spdy_version_ <= SPDY3) {
    // PUSH_PROMISE does not exist before SPDY/4.
    return;
  }
  // Exhaustively try every value of the 8-bit flags field.
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    testing::StrictMock<net::test::MockSpdyFramerVisitor> visitor;
    testing::StrictMock<net::test::MockDebugVisitor> debug_visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    framer.set_debug_visitor(&debug_visitor);
    EXPECT_CALL(debug_visitor, OnSendCompressedFrame(42, PUSH_PROMISE, _, _));
    SpdyPushPromiseIR push_promise(42, 57);  // stream 42 promises stream 57
    push_promise.SetHeader("foo", "bar");
    scoped_ptr<SpdySerializedFrame> frame(
        framer.SerializePushPromise(push_promise));
    // TODO(jgraettinger): Add padding to SpdyPushPromiseIR,
    // and implement framing.
    int set_flags = flags & ~HEADERS_FLAG_PADDED;
    // Overwrite the serialized flags byte with the value under test.
    SetFrameFlags(frame.get(), set_flags, spdy_version_);
    if (flags & ~(PUSH_PROMISE_FLAG_END_PUSH_PROMISE | HEADERS_FLAG_PADDED)) {
      EXPECT_CALL(visitor, OnError(_));
    } else {
      EXPECT_CALL(debug_visitor, OnReceiveCompressedFrame(42, PUSH_PROMISE, _));
      EXPECT_CALL(visitor, OnPushPromise(42, 57,
          flags & PUSH_PROMISE_FLAG_END_PUSH_PROMISE));
      EXPECT_CALL(visitor, OnControlFrameHeaderData(42, _, _))
          .WillRepeatedly(testing::Return(true));
    }
    framer.ProcessInput(frame->data(), frame->size());
    if (flags & ~(PUSH_PROMISE_FLAG_END_PUSH_PROMISE | HEADERS_FLAG_PADDED)) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// Verifies CONTINUATION frame flag handling (SPDY/4+ only): only
// END_HEADERS is legal. A HEADERS frame with its flags forced to 0
// (i.e. no END_HEADERS) is fed first so that the framer is in the
// state where a CONTINUATION is expected.
TEST_P(SpdyFramerTest, ContinuationFrameFlags) {
  if (spdy_version_ <= SPDY3) {
    // CONTINUATION does not exist before SPDY/4.
    return;
  }
  // Exhaustively try every value of the 8-bit flags field.
  for (int flags = 0; flags < 256; ++flags) {
    SCOPED_TRACE(testing::Message() << "Flags " << flags);
    testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
    testing::StrictMock<net::test::MockDebugVisitor> debug_visitor;
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    framer.set_debug_visitor(&debug_visitor);
    // Expectations for the leading HEADERS frame on stream 42.
    EXPECT_CALL(debug_visitor, OnSendCompressedFrame(42, HEADERS, _, _));
    EXPECT_CALL(debug_visitor, OnReceiveCompressedFrame(42, HEADERS, _));
    EXPECT_CALL(visitor, OnHeaders(42, 0, false));
    EXPECT_CALL(visitor, OnControlFrameHeaderData(42, _, _))
        .WillRepeatedly(testing::Return(true));
    SpdyHeadersIR headers_ir(42);
    headers_ir.SetHeader("foo", "bar");
    scoped_ptr<SpdyFrame> frame0(framer.SerializeHeaders(headers_ir));
    // Clear END_HEADERS so a CONTINUATION is expected to follow.
    SetFrameFlags(frame0.get(), 0, spdy_version_);
    SpdyContinuationIR continuation(42);
    continuation.SetHeader("foo", "bar");
    scoped_ptr<SpdySerializedFrame> frame(
        framer.SerializeContinuation(continuation));
    // Overwrite the serialized flags byte with the value under test.
    SetFrameFlags(frame.get(), flags, spdy_version_);
    if (flags & ~(HEADERS_FLAG_END_HEADERS)) {
      EXPECT_CALL(visitor, OnError(_));
    } else {
      EXPECT_CALL(debug_visitor, OnReceiveCompressedFrame(42, CONTINUATION, _));
      EXPECT_CALL(visitor, OnContinuation(42,
                                          flags & HEADERS_FLAG_END_HEADERS));
      EXPECT_CALL(visitor, OnControlFrameHeaderData(42, _, _))
          .WillRepeatedly(testing::Return(true));
    }
    framer.ProcessInput(frame0->data(), frame0->size());
    framer.ProcessInput(frame->data(), frame->size());
    if (flags & ~(HEADERS_FLAG_END_HEADERS)) {
      EXPECT_EQ(SpdyFramer::SPDY_ERROR, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME_FLAGS,
                framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    } else {
      EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
      EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
          << SpdyFramer::ErrorCodeToString(framer.error_code());
    }
  }
}
// TODO(mlavan): Add TEST_P(SpdyFramerTest, AltSvcFrameFlags)
// TODO(hkhalil): Add TEST_P(SpdyFramerTest, BlockedFrameFlags)
// Verifies that a SYN_STREAM whose name/value block is empty is still
// processed cleanly: OnSynStream fires, and OnControlFrameHeaderData is
// invoked once with (NULL, 0) to signal end-of-header-data.
TEST_P(SpdyFramerTest, EmptySynStream) {
  if (!IsSpdy2() && !IsSpdy3()) {
    // SYN_STREAM not supported in SPDY>3.
    return;
  }
  testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
  testing::StrictMock<test::MockDebugVisitor> debug_visitor;
  SpdyFramer framer(spdy_version_);
  framer.set_visitor(&visitor);
  framer.set_debug_visitor(&debug_visitor);
  EXPECT_CALL(debug_visitor, OnSendCompressedFrame(1, SYN_STREAM, _, _));
  SpdySynStreamIR syn_stream(1);
  syn_stream.set_priority(1);
  scoped_ptr<SpdyFrame> frame(framer.SerializeSynStream(syn_stream));
  // Adjust size to remove the name/value block.
  SetFrameLength(
      frame.get(),
      framer.GetSynStreamMinimumSize() - framer.GetControlFrameHeaderSize(),
      spdy_version_);
  EXPECT_CALL(debug_visitor, OnReceiveCompressedFrame(1, SYN_STREAM, _));
  EXPECT_CALL(visitor, OnSynStream(1, 0, 1, false, false));
  // (NULL, 0) marks the (empty) end of header data for stream 1.
  EXPECT_CALL(visitor, OnControlFrameHeaderData(1, NULL, 0));
  // Feed only the truncated minimum-size frame.
  framer.ProcessInput(frame->data(), framer.GetSynStreamMinimumSize());
  EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
  EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
}
// Round-trips a SETTINGS id/flags pair through its wire representation.
// SPDY/2 lays the field out in the opposite byte order from SPDY/3+.
TEST_P(SpdyFramerTest, SettingsFlagsAndId) {
  const uint32 kExpectedId = 0x020304;
  const uint32 kExpectedFlags = 0x01;
  const uint32 kWireFormat = htonl(IsSpdy2() ? 0x04030201 : 0x01020304);
  SettingsFlagsAndId parsed =
      SettingsFlagsAndId::FromWireFormat(spdy_version_, kWireFormat);
  // Parsing must recover both components...
  EXPECT_EQ(kExpectedId, parsed.id());
  EXPECT_EQ(kExpectedFlags, parsed.flags());
  // ...and re-serializing must reproduce the original wire bytes.
  EXPECT_EQ(kWireFormat, parsed.GetWireFormat(spdy_version_));
}
// Test handling of a RST_STREAM with out-of-bounds status codes.
// A too-low (0x00) or too-high (0xff) status is clamped by the framer:
// SPDY <= 3 reports RST_STREAM_INVALID; SPDY/4 reports
// RST_STREAM_INTERNAL_ERROR. Processing still succeeds in both cases.
TEST_P(SpdyFramerTest, RstStreamStatusBounds) {
  const unsigned char kRstStreamStatusTooLow = 0x00;
  const unsigned char kRstStreamStatusTooHigh = 0xff;
  // Hand-built RST_STREAM frames for stream 1 with each bad status,
  // in both the SPDY/3 and SPDY/4 wire layouts.
  const unsigned char kV3RstStreamInvalid[] = {
    0x80, spdy_version_ch_, 0x00, 0x03,
    0x00, 0x00, 0x00, 0x08,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, kRstStreamStatusTooLow
  };
  const unsigned char kV4RstStreamInvalid[] = {
    0x00, 0x00, 0x04, 0x03,
    0x00, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00,
    kRstStreamStatusTooLow
  };
  const unsigned char kV3RstStreamNumStatusCodes[] = {
    0x80, spdy_version_ch_, 0x00, 0x03,
    0x00, 0x00, 0x00, 0x08,
    0x00, 0x00, 0x00, 0x01,
    0x00, 0x00, 0x00, kRstStreamStatusTooHigh
  };
  const unsigned char kV4RstStreamNumStatusCodes[] = {
    0x00, 0x00, 0x04, 0x03,
    0x00, 0x00, 0x00, 0x00,
    0x01, 0x00, 0x00, 0x00,
    kRstStreamStatusTooHigh
  };
  testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
  SpdyFramer framer(spdy_version_);
  framer.set_visitor(&visitor);
  // Too-low status.
  if (IsSpdy4()) {
    EXPECT_CALL(visitor, OnRstStream(1, RST_STREAM_INTERNAL_ERROR));
    framer.ProcessInput(reinterpret_cast<const char*>(kV4RstStreamInvalid),
                        arraysize(kV4RstStreamInvalid));
  } else {
    EXPECT_CALL(visitor, OnRstStream(1, RST_STREAM_INVALID));
    framer.ProcessInput(reinterpret_cast<const char*>(kV3RstStreamInvalid),
                        arraysize(kV3RstStreamInvalid));
  }
  EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
  EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
  framer.Reset();
  // Too-high status.
  if (IsSpdy4()) {
    EXPECT_CALL(visitor, OnRstStream(1, RST_STREAM_INTERNAL_ERROR));
    framer.ProcessInput(
        reinterpret_cast<const char*>(kV4RstStreamNumStatusCodes),
        arraysize(kV4RstStreamNumStatusCodes));
  } else {
    EXPECT_CALL(visitor, OnRstStream(1, RST_STREAM_INVALID));
    framer.ProcessInput(
        reinterpret_cast<const char*>(kV3RstStreamNumStatusCodes),
        arraysize(kV3RstStreamNumStatusCodes));
  }
  EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
  EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
}
// Test handling of GOAWAY frames with out-of-bounds status code.
// SPDY/3 maps the bogus status to GOAWAY_OK; SPDY/4 maps it to
// GOAWAY_INTERNAL_ERROR. Either way, processing succeeds.
TEST_P(SpdyFramerTest, GoAwayStatusBounds) {
  if (spdy_version_ <= SPDY2) {
    // SPDY/2 GOAWAY has no status field.
    return;
  }
  SpdyFramer framer(spdy_version_);
  // Hand-built GOAWAY frames with an all-ones (invalid) status.
  const unsigned char kV3FrameData[] = {
    0x80, spdy_version_ch_, 0x00, 0x07,
    0x00, 0x00, 0x00, 0x08,
    0x00, 0x00, 0x00, 0x01,  // Stream Id
    0xff, 0xff, 0xff, 0xff,  // Status
  };
  const unsigned char kV4FrameData[] = {
    0x00, 0x00, 0x0a, 0x07,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x00, 0x00,  // Stream id
    0x01, 0xff, 0xff, 0xff,  // Status
    0xff, 0x47, 0x41,        // Opaque Description
  };
  testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
  framer.set_visitor(&visitor);
  if (IsSpdy3()) {
    EXPECT_CALL(visitor, OnGoAway(1, GOAWAY_OK));
    framer.ProcessInput(reinterpret_cast<const char*>(kV3FrameData),
                        arraysize(kV3FrameData));
  } else {
    EXPECT_CALL(visitor, OnGoAway(1, GOAWAY_INTERNAL_ERROR));
    framer.ProcessInput(reinterpret_cast<const char*>(kV4FrameData),
                        arraysize(kV4FrameData));
  }
  EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
  EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
}
// Tests handling of a GOAWAY frame with out-of-bounds stream ID.
// The wire frames carry an all-ones last-good-stream-id; the framer is
// expected to mask off the reserved high bit and report 0x7fffffff.
TEST_P(SpdyFramerTest, GoAwayStreamIdBounds) {
  // Per-version hand-built GOAWAY frames with stream id 0xffffffff.
  const unsigned char kV2FrameData[] = {
    0x80, spdy_version_ch_, 0x00, 0x07,
    0x00, 0x00, 0x00, 0x04,
    0xff, 0xff, 0xff, 0xff,
  };
  const unsigned char kV3FrameData[] = {
    0x80, spdy_version_ch_, 0x00, 0x07,
    0x00, 0x00, 0x00, 0x08,
    0xff, 0xff, 0xff, 0xff,
    0x00, 0x00, 0x00, 0x00,
  };
  const unsigned char kV4FrameData[] = {
    0x00, 0x00, 0x08, 0x07,
    0x00, 0x00, 0x00, 0x00,
    0x00, 0xff, 0xff, 0xff,
    0xff, 0x00, 0x00, 0x00,
    0x00,
  };
  testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
  SpdyFramer framer(spdy_version_);
  framer.set_visitor(&visitor);
  // High bit is reserved, so the reported id is clamped to 0x7fffffff.
  EXPECT_CALL(visitor, OnGoAway(0x7fffffff, GOAWAY_OK));
  if (IsSpdy2()) {
    framer.ProcessInput(reinterpret_cast<const char*>(kV2FrameData),
                        arraysize(kV2FrameData));
  } else if (IsSpdy3()) {
    framer.ProcessInput(reinterpret_cast<const char*>(kV3FrameData),
                        arraysize(kV3FrameData));
  } else {
    framer.ProcessInput(reinterpret_cast<const char*>(kV4FrameData),
                        arraysize(kV4FrameData));
  }
  EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
  EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
}
// Verifies that a serialized BLOCKED frame round-trips through the
// framer and reaches the visitor as OnBlocked() for the session as a
// whole (stream id 0).
TEST_P(SpdyFramerTest, OnBlocked) {
  if (spdy_version_ <= SPDY3) {
    // BLOCKED does not exist before SPDY/4.
    return;
  }
  const SpdyStreamId kStreamId = 0;
  testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
  SpdyFramer framer(spdy_version_);
  framer.set_visitor(&visitor);
  EXPECT_CALL(visitor, OnBlocked(kStreamId));
  // Consistency fix: construct the IR from the named constant instead of
  // the magic literal 0, so the IR and the expectation cannot drift apart.
  SpdyBlockedIR blocked_ir(kStreamId);
  scoped_ptr<SpdySerializedFrame> frame(framer.SerializeFrame(blocked_ir));
  framer.ProcessInput(frame->data(), framer.GetBlockedSize());
  EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
  EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
}
// Verifies that a fully-populated ALTSVC frame (protocol id, host, and
// origin all set) round-trips through the framer to OnAltSvc().
TEST_P(SpdyFramerTest, OnAltSvc) {
  if (spdy_version_ <= SPDY3) {
    // ALTSVC does not exist before SPDY/4.
    return;
  }
  const SpdyStreamId kStreamId = 1;
  testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
  SpdyFramer framer(spdy_version_);
  framer.set_visitor(&visitor);
  EXPECT_CALL(visitor, OnAltSvc(kStreamId,
                                10,
                                443,
                                StringPiece("pid"),
                                StringPiece("h1"),
                                StringPiece("o1")));
  // Consistency fix: construct the IR from the named constant instead of
  // the magic literal 1, so the IR and the expectation cannot drift apart.
  SpdyAltSvcIR altsvc_ir(kStreamId);
  altsvc_ir.set_max_age(10);
  altsvc_ir.set_port(443);
  altsvc_ir.set_protocol_id("pid");
  altsvc_ir.set_host("h1");
  altsvc_ir.set_origin("o1");
  scoped_ptr<SpdySerializedFrame> frame(framer.SerializeFrame(altsvc_ir));
  // Feed exactly the serialized length: fixed header plus the three
  // variable-length string fields.
  framer.ProcessInput(frame->data(), framer.GetAltSvcMinimumSize() +
                                     altsvc_ir.protocol_id().length() +
                                     altsvc_ir.host().length() +
                                     altsvc_ir.origin().length());
  EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
  EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
}
// Verifies that an ALTSVC frame whose origin field is left unset is
// delivered to OnAltSvc() with an empty origin StringPiece.
TEST_P(SpdyFramerTest, OnAltSvcNoOrigin) {
  if (spdy_version_ <= SPDY3) {
    // ALTSVC does not exist before SPDY/4.
    return;
  }
  const SpdyStreamId kStreamId = 1;
  testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
  SpdyFramer framer(spdy_version_);
  framer.set_visitor(&visitor);
  EXPECT_CALL(visitor, OnAltSvc(kStreamId,
                                10,
                                443,
                                StringPiece("pid"),
                                StringPiece("h1"),
                                StringPiece("")));
  SpdyAltSvcIR altsvc_ir(1);
  altsvc_ir.set_max_age(10);
  altsvc_ir.set_port(443);
  altsvc_ir.set_protocol_id("pid");
  altsvc_ir.set_host("h1");
  // Note: set_origin() is deliberately not called.
  scoped_ptr<SpdySerializedFrame> frame(framer.SerializeFrame(altsvc_ir));
  // Feed exactly the serialized length (no origin bytes this time).
  framer.ProcessInput(frame->data(), framer.GetAltSvcMinimumSize() +
                                     altsvc_ir.protocol_id().length() +
                                     altsvc_ir.host().length());
  EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
  EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
}
// Verifies that ALTSVC frames with inconsistent internal length fields
// are rejected as SPDY_INVALID_CONTROL_FRAME. Four malformed variants
// are tried: PID length too large, PID length exceeding the frame, host
// length too large, and PID length too small.
TEST_P(SpdyFramerTest, OnAltSvcBadLengths) {
  if (spdy_version_ <= SPDY3) {
    // ALTSVC does not exist before SPDY/4.
    return;
  }
  // Wire value of the ALTSVC frame type for this protocol version.
  const unsigned char kType = static_cast<unsigned char>(
      SpdyConstants::SerializeFrameType(spdy_version_, ALTSVC));
  {
    // PID length (0x05) larger than the actual "pid1" field.
    TestSpdyVisitor visitor(spdy_version_);
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    const unsigned char kFrameDataLargePIDLen[] = {
      0x00, 0x00, 0x17, kType, 0x00,
      0x00, 0x00, 0x00, 0x03,
      0x00, 0x00, 0x00, 0x05,
      0x01, 0xbb, 0x00, 0x05,  // Port = 443
      'p',  'i',  'd',  '1',   // Protocol-ID
      0x04, 'h',  'o',  's',
      't',  'o',  'r',  'i',
      'g',  'i',  'n',
    };
    visitor.SimulateInFramer(kFrameDataLargePIDLen,
                             sizeof(kFrameDataLargePIDLen));
    EXPECT_EQ(1, visitor.error_count_);
    EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME,
              visitor.framer_.error_code());
  }
  {
    // PID length (0x99) larger than the whole frame.
    TestSpdyVisitor visitor(spdy_version_);
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    const unsigned char kFrameDataPIDLenLargerThanFrame[] = {
      0x00, 0x00, 0x17, kType, 0x00,
      0x00, 0x00, 0x00, 0x03,
      0x00, 0x00, 0x00, 0x05,
      0x01, 0xbb, 0x00, 0x99,  // Port = 443
      'p',  'i',  'd',  '1',   // Protocol-ID
      0x04, 'h',  'o',  's',
      't',  'o',  'r',  'i',
      'g',  'i',  'n',
    };
    visitor.SimulateInFramer(kFrameDataPIDLenLargerThanFrame,
                             sizeof(kFrameDataPIDLenLargerThanFrame));
    EXPECT_EQ(1, visitor.error_count_);
    EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME,
              visitor.framer_.error_code());
  }
  {
    // Host length (0x0f) larger than the remaining bytes.
    TestSpdyVisitor visitor(spdy_version_);
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    const unsigned char kFrameDataLargeHostLen[] = {
      0x00, 0x00, 0x17, kType, 0x00,
      0x00, 0x00, 0x00, 0x03,
      0x00, 0x00, 0x00, 0x05,
      0x01, 0xbb, 0x00, 0x04,  // Port = 443
      'p',  'i',  'd',  '1',   // Protocol-ID
      0x0f, 'h',  'o',  's',
      't',  'o',  'r',  'i',
      'g',  'i',  'n',
    };
    visitor.SimulateInFramer(kFrameDataLargeHostLen,
                             sizeof(kFrameDataLargeHostLen));
    EXPECT_EQ(1, visitor.error_count_);
    EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME,
              visitor.framer_.error_code());
  }
  {
    // PID length (0x01) smaller than the actual "pid1" field.
    TestSpdyVisitor visitor(spdy_version_);
    SpdyFramer framer(spdy_version_);
    framer.set_visitor(&visitor);
    const unsigned char kFrameDataSmallPIDLen[] = {
      0x00, 0x00, 0x17, kType, 0x00,
      0x00, 0x00, 0x00, 0x03,
      0x00, 0x00, 0x00, 0x05,
      0x01, 0xbb, 0x00, 0x01,  // Port = 443
      'p',  'i',  'd',  '1',   // Protocol-ID
      0x04, 'h',  'o',  's',
      't',  'o',  'r',  'i',
      'g',  'i',  'n',
    };
    visitor.SimulateInFramer(kFrameDataSmallPIDLen,
                             sizeof(kFrameDataSmallPIDLen));
    EXPECT_EQ(1, visitor.error_count_);
    EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME,
              visitor.framer_.error_code());
  }
}
// Tests handling of ALTSVC frames delivered in small chunks.
// Feeding the frame five bytes at a time must yield exactly one
// OnAltSvc callback with the fully reassembled field values.
TEST_P(SpdyFramerTest, ReadChunkedAltSvcFrame) {
  if (spdy_version_ <= SPDY3) {
    // ALTSVC does not exist before SPDY/4.
    return;
  }
  SpdyFramer framer(spdy_version_);
  SpdyAltSvcIR altsvc_ir(1);
  altsvc_ir.set_max_age(20);
  altsvc_ir.set_port(443);
  altsvc_ir.set_protocol_id("protocolid");
  altsvc_ir.set_host("hostname");
  scoped_ptr<SpdyFrame> control_frame(framer.SerializeAltSvc(altsvc_ir));
  TestSpdyVisitor visitor(spdy_version_);
  visitor.use_compression_ = false;
  // Read data in small chunks.
  size_t framed_data = 0;                       // bytes consumed so far
  size_t unframed_data = control_frame->size(); // bytes still to feed
  size_t kReadChunkSize = 5;  // Read five bytes at a time.
  while (unframed_data > 0) {
    size_t to_read = min(kReadChunkSize, unframed_data);
    visitor.SimulateInFramer(
        reinterpret_cast<unsigned char*>(control_frame->data() + framed_data),
        to_read);
    unframed_data -= to_read;
    framed_data += to_read;
  }
  // Exactly one complete ALTSVC must have been reported, with all
  // fields reassembled intact.
  EXPECT_EQ(0, visitor.error_count_);
  EXPECT_EQ(1, visitor.altsvc_count_);
  EXPECT_EQ(20u, visitor.test_altsvc_ir_.max_age());
  EXPECT_EQ(443u, visitor.test_altsvc_ir_.port());
  EXPECT_EQ("protocolid", visitor.test_altsvc_ir_.protocol_id());
  EXPECT_EQ("hostname", visitor.test_altsvc_ir_.host());
}
// Tests handling of PRIORITY frames: a serialized PRIORITY frame must
// round-trip through the framer to a single OnPriority() callback.
TEST_P(SpdyFramerTest, ReadPriority) {
  if (spdy_version_ <= SPDY3) {
    // PRIORITY does not exist before SPDY/4.
    return;
  }
  SpdyFramer framer(spdy_version_);
  testing::StrictMock<test::MockSpdyFramerVisitor> visitor;
  framer.set_visitor(&visitor);
  // Stream 3 depends non-exclusively on stream 1 with weight 255.
  SpdyPriorityIR priority_ir(3, 1, 255, false);
  scoped_ptr<SpdySerializedFrame> serialized(
      framer.SerializePriority(priority_ir));
  EXPECT_CALL(visitor, OnPriority(3, 1, 255, false));
  framer.ProcessInput(serialized->data(), serialized->size());
  EXPECT_EQ(SpdyFramer::SPDY_RESET, framer.state());
  EXPECT_EQ(SpdyFramer::SPDY_NO_ERROR, framer.error_code())
      << SpdyFramer::ErrorCodeToString(framer.error_code());
  // TODO(mlavan): once we actually maintain a priority tree,
  // check that state is adjusted correctly.
}
// Checks the bidirectional mapping between SPDY priorities
// (0 = most important .. 7 = least) and HTTP/2-style weights
// (255 .. 0), probing both edges of every weight bucket.
TEST_P(SpdyFramerTest, PriorityWeightMapping) {
  if (spdy_version_ <= SPDY3) {
    // Weights only exist in the SPDY/4+ priority scheme.
    return;
  }
  SpdyFramer framer(spdy_version_);
  // Priority -> weight is a fixed 8-entry table.
  const uint32 kExpectedWeights[] = {255, 219, 182, 146, 109, 73, 36, 0};
  for (uint32 priority = 0; priority < arraysize(kExpectedWeights);
       ++priority) {
    EXPECT_EQ(kExpectedWeights[priority],
              framer.MapPriorityToWeight(priority));
  }
  // Weight -> priority: each pair is {weight, expected priority},
  // covering the top and bottom of every bucket.
  const uint32 kWeightToPriority[][2] = {
    {255, 0}, {220, 0},
    {219, 1}, {183, 1},
    {182, 2}, {147, 2},
    {146, 3}, {110, 3},
    {109, 4}, {74, 4},
    {73, 5},  {37, 5},
    {36, 6},  {1, 6},
    {0, 7},
  };
  for (size_t i = 0; i < arraysize(kWeightToPriority); ++i) {
    EXPECT_EQ(kWeightToPriority[i][1],
              framer.MapWeightToPriority(kWeightToPriority[i][0]));
  }
}
// Tests handling of PRIORITY frame with incorrect size.
// A PRIORITY frame declaring a 4-byte payload (the correct size is 5)
// must be rejected as SPDY_INVALID_CONTROL_FRAME.
TEST_P(SpdyFramerTest, ReadIncorrectlySizedPriority) {
  if (spdy_version_ <= SPDY3) {
    // PRIORITY does not exist before SPDY/4.
    return;
  }
  // PRIORITY frame of size 4, which isn't correct.
  const unsigned char kFrameData[] = {
    0x00, 0x00, 0x04, 0x02, 0x00,
    0x00, 0x00, 0x00, 0x03,
    0x00, 0x00, 0x00, 0x01,
  };
  TestSpdyVisitor visitor(spdy_version_);
  visitor.SimulateInFramer(kFrameData, sizeof(kFrameData));
  // The framer must end up in the error state with the expected code.
  EXPECT_EQ(SpdyFramer::SPDY_ERROR, visitor.framer_.state());
  EXPECT_EQ(SpdyFramer::SPDY_INVALID_CONTROL_FRAME,
            visitor.framer_.error_code())
      << SpdyFramer::ErrorCodeToString(visitor.framer_.error_code());
}
} // namespace net
| laperry1/android_external_chromium_org | net/spdy/spdy_framer_test.cc | C++ | bsd-3-clause | 194,503 |
<?php
namespace Drupal\Tests\Core\EventSubscriber;
use Drupal\Component\Serialization\Json;
use Drupal\Core\EventSubscriber\ActiveLinkResponseFilter;
use Drupal\Core\Template\Attribute;
use Drupal\Tests\UnitTestCase;
/**
* @coversDefaultClass \Drupal\Core\EventSubscriber\ActiveLinkResponseFilter
* @group EventSubscriber
*/
class ActiveLinkResponseFilterTest extends UnitTestCase {
/**
* Provides test data for testSetLinkActiveClass().
*
* @see \Drupal\Core\EventSubscriber\ActiveLinkResponseFilter::setLinkActiveClass()
*/
public function providerTestSetLinkActiveClass() {
// Define all the variations that *don't* affect whether or not an
// "is-active" class is set, but that should remain unchanged:
// - surrounding HTML
// - tags for which to test the setting of the "is-active" class
// - content of said tags
$edge_case_html5 = '<audio src="foo.ogg">
<track kind="captions" src="foo.en.vtt" srclang="en" label="English">
<track kind="captions" src="foo.sv.vtt" srclang="sv" label="Svenska">
</audio>';
$html = array(
// Simple HTML.
0 => array('prefix' => '<div><p>', 'suffix' => '</p></div>'),
// Tricky HTML5 example that's unsupported by PHP <=5.4's DOMDocument:
// https://www.drupal.org/comment/7938201#comment-7938201.
1 => array('prefix' => '<div><p>', 'suffix' => '</p>' . $edge_case_html5 . '</div>'),
// Multi-byte content *before* the HTML that needs the "is-active" class.
2 => array('prefix' => '<div><p>αβγδεζηθικλμνξοσὠ</p><p>', 'suffix' => '</p></div>'),
);
$tags = array(
// Of course, it must work on anchors.
'a',
// Unfortunately, it must also work on list items.
'li',
      // … and therefore, on *any* tag, really.
'foo',
);
$contents = array(
// Regular content.
'test',
// Mix of UTF-8 and HTML entities, both must be retained.
'☆ 3 × 4 = €12 and 4 × 3 = €12 ☆',
// Multi-byte content.
'ΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΣὨ',
// Text that closely approximates an important attribute, but should be
// ignored.
'data-drupal-link-system-path="<front>"',
);
// Define all variations that *do* affect whether or not an "is-active"
// class is set: all possible situations that can be encountered.
$situations = array();
// Situations with context: front page, English, no query.
$context = array(
'path' => 'myfrontpage',
'front' => TRUE,
'language' => 'en',
'query' => array(),
);
// Nothing to do.
$markup = '<foo>bar</foo>';
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => array());
// Matching path, plus all matching variations.
$attributes = array(
'data-drupal-link-system-path' => 'myfrontpage',
);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes + array('hreflang' => 'en'));
// Matching path, plus all non-matching variations.
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => '{"foo":"bar"}'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => TRUE));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en', 'data-drupal-link-query' => '{"foo":"bar"}'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en', 'data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en', 'data-drupal-link-query' => TRUE));
// Special matching path, plus all variations.
$attributes = array(
'data-drupal-link-system-path' => '<front>',
);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes + array('hreflang' => 'en'));
// Special matching path, plus all non-matching variations.
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => '{"foo":"bar"}'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => TRUE));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en', 'data-drupal-link-query' => '{"foo":"bar"}'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en', 'data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en', 'data-drupal-link-query' => TRUE));
// Situations with context: non-front page, Dutch, no query.
$context = array(
'path' => 'llama',
'front' => FALSE,
'language' => 'nl',
'query' => array(),
);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => array());
// Matching path, plus all matching variations.
$attributes = array(
'data-drupal-link-system-path' => 'llama',
);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes + array('hreflang' => 'nl'));
// Matching path, plus all non-matching variations.
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => '{"foo":"bar"}'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => TRUE));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => '{"foo":"bar"}'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => TRUE));
// Special non-matching path, plus all variations.
$attributes = array(
'data-drupal-link-system-path' => '<front>',
);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => '{"foo":"bar"}'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => TRUE));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => '{"foo":"bar"}'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => TRUE));
// Situations with context: non-front page, Dutch, with query.
$context = array(
'path' => 'llama',
'front' => FALSE,
'language' => 'nl',
'query' => array('foo' => 'bar'),
);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => array());
// Matching path, plus all matching variations.
$attributes = array(
'data-drupal-link-system-path' => 'llama',
'data-drupal-link-query' => Json::encode(array('foo' => 'bar')),
);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes + array('hreflang' => 'nl'));
// Matching path, plus all non-matching variations.
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en'));
unset($attributes['data-drupal-link-query']);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => TRUE));
// Special non-matching path, plus all variations.
$attributes = array(
'data-drupal-link-system-path' => '<front>',
);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en'));
unset($attributes['data-drupal-link-query']);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => TRUE));
// Situations with context: non-front page, Dutch, with query.
$context = array(
'path' => 'llama',
'front' => FALSE,
'language' => 'nl',
'query' => array('foo' => 'bar'),
);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => array());
// Matching path, plus all matching variations.
$attributes = array(
'data-drupal-link-system-path' => 'llama',
'data-drupal-link-query' => Json::encode(array('foo' => 'bar')),
);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes + array('hreflang' => 'nl'));
// Matching path, plus all non-matching variations.
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en'));
unset($attributes['data-drupal-link-query']);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => TRUE));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => TRUE));
// Special non-matching path, plus all variations.
$attributes = array(
'data-drupal-link-system-path' => '<front>',
);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl'));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en'));
unset($attributes['data-drupal-link-query']);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => TRUE));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl', 'data-drupal-link-query' => TRUE));
// Situations with context: front page, English, query.
$context = array(
'path' => 'myfrontpage',
'front' => TRUE,
'language' => 'en',
'query' => array('foo' => 'bar'),
);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => array());
// Matching path, plus all matching variations.
$attributes = array(
'data-drupal-link-system-path' => 'myfrontpage',
'data-drupal-link-query' => Json::encode(array('foo' => 'bar')),
);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes + array('hreflang' => 'en'));
// Matching path, plus all non-matching variations.
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl'));
unset($attributes['data-drupal-link-query']);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => TRUE));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en', 'data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en', 'data-drupal-link-query' => TRUE));
// Special matching path, plus all variations.
$attributes = array(
'data-drupal-link-system-path' => '<front>',
'data-drupal-link-query' => Json::encode(array('foo' => 'bar')),
);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes);
$situations[] = array('context' => $context, 'is active' => TRUE, 'attributes' => $attributes + array('hreflang' => 'en'));
// Special matching path, plus all non-matching variations.
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'nl'));
unset($attributes['data-drupal-link-query']);
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('data-drupal-link-query' => TRUE));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en', 'data-drupal-link-query' => ""));
$situations[] = array('context' => $context, 'is active' => FALSE, 'attributes' => $attributes + array('hreflang' => 'en', 'data-drupal-link-query' => TRUE));
// Loop over the surrounding HTML variations.
$data = array();
for ($h = 0; $h < count($html); $h++) {
$html_prefix = $html[$h]['prefix'];
$html_suffix = $html[$h]['suffix'];
// Loop over the tag variations.
for ($t = 0; $t < count($tags); $t++) {
$tag = $tags[$t];
// Loop over the tag contents variations.
for ($c = 0; $c < count($contents); $c++) {
$tag_content = $contents[$c];
$create_markup = function (Attribute $attributes) use ($html_prefix, $html_suffix, $tag, $tag_content) {
return $html_prefix . '<' . $tag . $attributes . '>' . $tag_content . '</' . $tag . '>' . $html_suffix;
};
// Loop over the situations.
for ($s = 0; $s < count($situations); $s++) {
$situation = $situations[$s];
// Build the source markup.
$source_markup = $create_markup(new Attribute($situation['attributes']));
// Build the target markup. If no "is-active" class should be set,
// the resulting HTML should be identical. Otherwise, it should get
// an "is-active" class, either by extending an existing "class"
// attribute or by adding a "class" attribute.
$target_markup = NULL;
if (!$situation['is active']) {
$target_markup = $source_markup;
}
else {
$active_attributes = $situation['attributes'];
if (!isset($active_attributes['class'])) {
$active_attributes['class'] = array();
}
$active_attributes['class'][] = 'is-active';
$target_markup = $create_markup(new Attribute($active_attributes));
}
$data[] = array($source_markup, $situation['context']['path'], $situation['context']['front'], $situation['context']['language'], $situation['context']['query'], $target_markup);
}
}
}
}
// Test case to verify that the 'is-active' class is not added multiple
// times.
$data[] = [
0 => '<a data-drupal-link-system-path="<front>">Once</a> <a data-drupal-link-system-path="<front>">Twice</a>',
1 => '',
2 => TRUE,
3 => 'en',
4 => [],
5 => '<a data-drupal-link-system-path="<front>" class="is-active">Once</a> <a data-drupal-link-system-path="<front>" class="is-active">Twice</a>',
];
// Test cases to verify that the 'is-active' class is added when on the
// front page, and there are two different kinds of matching links on the
// page:
// - the matching path (the resolved front page path)
// - the special matching path ('<front>')
$front_special_link = '<a data-drupal-link-system-path="<front>">Front</a>';
$front_special_link_active = '<a data-drupal-link-system-path="<front>" class="is-active">Front</a>';
$front_path_link = '<a data-drupal-link-system-path="myfrontpage">Front Path</a>';
$front_path_link_active = '<a data-drupal-link-system-path="myfrontpage" class="is-active">Front Path</a>';
$data[] = [
0 => $front_path_link . ' ' . $front_special_link,
1 => 'myfrontpage',
2 => TRUE,
3 => 'en',
4 => [],
5 => $front_path_link_active . ' ' . $front_special_link_active,
];
$data[] = [
0 => $front_special_link . ' ' . $front_path_link,
1 => 'myfrontpage',
2 => TRUE,
3 => 'en',
4 => [],
5 => $front_special_link_active . ' ' . $front_path_link_active,
];
// Test cases to verify that links to the front page do not get the
// 'is-active' class when not on the front page.
$other_link = '<a data-drupal-link-system-path="otherpage">Other page</a>';
$other_link_active = '<a data-drupal-link-system-path="otherpage" class="is-active">Other page</a>';
$data['<front>-and-other-link-on-other-path'] = [
0 => $front_special_link . ' ' . $other_link,
1 => 'otherpage',
2 => FALSE,
3 => 'en',
4 => [],
5 => $front_special_link . ' ' . $other_link_active,
];
$data['front-and-other-link-on-other-path'] = [
0 => $front_path_link . ' ' . $other_link,
1 => 'otherpage',
2 => FALSE,
3 => 'en',
4 => [],
5 => $front_path_link . ' ' . $other_link_active,
];
$data['other-and-<front>-link-on-other-path'] = [
0 => $other_link . ' ' . $front_special_link,
1 => 'otherpage',
2 => FALSE,
3 => 'en',
4 => [],
5 => $other_link_active . ' ' . $front_special_link,
];
$data['other-and-front-link-on-other-path'] = [
0 => $other_link . ' ' . $front_path_link,
1 => 'otherpage',
2 => FALSE,
3 => 'en',
4 => [],
5 => $other_link_active . ' ' . $front_path_link,
];
return $data;
}
/**
 * Tests setLinkActiveClass().
 *
 * @param string $html_markup
 *   The original HTML markup.
 * @param string $current_path
 *   The system path of the currently active page.
 * @param bool $is_front
 *   Whether the current page is the front page (which implies the current
 *   path might also be <front>).
 * @param string $url_language
 *   The language code of the current URL.
 * @param array $query
 *   The query string for the current URL.
 * @param string $expected_html_markup
 *   The expected updated HTML markup.
 *
 * @dataProvider providerTestSetLinkActiveClass
 * @covers ::setLinkActiveClass
 */
public function testSetLinkActiveClass($html_markup, $current_path, $is_front, $url_language, array $query, $expected_html_markup) {
  // Run the response filter and compare the rewritten markup against the
  // expectation supplied by the data provider.
  $actual_html_markup = ActiveLinkResponseFilter::setLinkActiveClass($html_markup, $current_path, $is_front, $url_language, $query);
  $this->assertSame($expected_html_markup, $actual_html_markup);
}
}
| windtrader/drupalvm-d8 | web/core/tests/Drupal/Tests/Core/EventSubscriber/ActiveLinkResponseFilterTest.php | PHP | gpl-2.0 | 22,455 |
/*! UIkit 3.0.0-beta.6 | http://www.getuikit.com | (c) 2014 - 2016 YOOtheme | MIT License */
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('jquery')) :
typeof define === 'function' && define.amd ? define(['jquery'], factory) :
(global.UIkit = factory(global.jQuery));
}(this, (function ($) { 'use strict';
// Module-scope shortcuts: resolve the jQuery export (ES-module interop),
// cache wrapped window/document handles, and detect text direction once.
var $__default = 'default' in $ ? $['default'] : $;
var win = $__default(window);
var doc = $__default(document);
var doc$1 = $__default(document.documentElement);
// 'right' for RTL documents, 'left' otherwise.
var langDirection = $__default('html').attr('dir') == 'rtl' ? 'right' : 'left';
// Whether the document has loaded far enough to run DOM-dependent code.
// (`doScroll` is the legacy IE readiness probe.)
function isReady() {
    var state = document.readyState;
    if (state === 'complete') {
        return true;
    }
    return state !== 'loading' && !document.documentElement.doScroll;
}
// Invoke `fn` once the DOM is usable: immediately when the document is
// already (nearly) loaded, otherwise on the first of DOMContentLoaded/load.
function ready(fn) {
    // One-shot handler: detach itself from both events before running.
    var handle = function () {
        off(document, 'DOMContentLoaded', handle);
        off(window, 'load', handle);
        fn();
    };
    if (isReady()) {
        fn();
    } else {
        on(document, 'DOMContentLoaded', handle);
        on(window, 'load', handle);
    }
}
// Attach a native event listener to the first element matched by `el`.
function on(el, type, listener, useCapture) {
    var node = $__default(el)[0];
    node.addEventListener(type, listener, useCapture);
}
// Detach a native event listener from the first element matched by `el`.
function off(el, type, listener, useCapture) {
    var node = $__default(el)[0];
    node.removeEventListener(type, listener, useCapture);
}
// Animate `element` to the CSS values in `props` over `duration` ms using a
// CSS transition. Resolves the returned promise on natural completion and
// rejects it when cancelled via Transition.cancel(). NOTE: the `transition`
// parameter (the timing function) shadows the function's own name.
function transition(element, props, duration, transition) {
    if ( duration === void 0 ) duration = 400;
    if ( transition === void 0 ) transition = 'linear';
    var d = $__default.Deferred();
    element = $__default(element);
    // Pin the current computed values as inline styles so the browser has a
    // concrete starting point for every transitioned property.
    for (var name in props) {
        element.css(name, element.css(name));
    }
    // Safety net: trigger the end event manually in case the browser never
    // fires transitionend (e.g. the property value did not actually change).
    var timer = setTimeout(function () { return element.trigger(transitionend || 'transitionend'); }, duration);
    element
        .one(transitionend || 'transitionend', function (e, cancel) {
            clearTimeout(timer);
            element.removeClass('uk-transition').css('transition', '');
            // The extra `cancel` argument is passed by Transition.cancel().
            if (!cancel) {
                d.resolve();
            } else {
                d.reject();
            }
        })
        .addClass('uk-transition')
        .css('transition', ("all " + duration + "ms " + transition))
        .css(props);
    return d.promise();
}
// Public facade over transition(): start/stop/cancel CSS transitions and
// query whether one is running (marked by the `uk-transition` class).
var Transition = {
    start: transition,
    // Finish immediately; the pending promise resolves.
    stop: function stop(element) {
        $__default(element).trigger(transitionend || 'transitionend');
        return this;
    },
    // Abort; the pending promise rejects (via the extra truthy argument).
    cancel: function cancel(element) {
        $__default(element).trigger(transitionend || 'transitionend', [true]);
        return this;
    },
    inProgress: function inProgress(element) {
        return $__default(element).hasClass('uk-transition');
    }
};
// Run a CSS keyframe animation. `animation` is a class name; for built-in
// `uk-animation-*` names, `origin` appends a directional modifier and `out`
// reverses the animation for leave transitions. Resolves on animationend.
function animate(element, animation, duration, origin, out) {
    if ( duration === void 0 ) duration = 200;
    var d = $__default.Deferred(), cls = out ? 'uk-animation-leave' : 'uk-animation-enter';
    element = $__default(element);
    if (animation.lastIndexOf('uk-animation-', 0) === 0) {
        if (origin) {
            animation += " uk-animation-" + origin;
        }
        if (out) {
            animation += ' uk-animation-reverse';
        }
    }
    reset();
    element
        .one(animationend || 'animationend', function () { return d.resolve().then(reset); })
        .css('animation-duration', (duration + "ms"))
        .addClass(animation)
        .addClass(cls);
    // Without native animationend support, settle on the next frame so the
    // returned promise does not hang forever.
    if (!animationend) {
        requestAnimationFrame(function () { return Animation.cancel(element); });
    }
    return d.promise();
    // Undo the classes/styles applied above (also called before starting).
    function reset() {
        element.css('animation-duration', '').removeClass((cls + " " + animation));
    }
}
// Public facade over animate(): enter/leave animations and state queries.
var Animation = {
    in: function in$1(element, animation, duration, origin) {
        return animate(element, animation, duration, origin, false);
    },
    out: function out(element, animation, duration, origin) {
        return animate(element, animation, duration, origin, true);
    },
    inProgress: function inProgress(element) {
        return $__default(element).hasClass('uk-animation-enter') || $__default(element).hasClass('uk-animation-leave');
    },
    cancel: function cancel(element) {
        $__default(element).trigger(animationend || 'animationend');
        return $__default.Deferred().resolve();
    }
};
// Whether `element` matches `selector` or is a descendant of it. `selector`
// may be a selector string, a DOM node, or a jQuery object.
function isWithin(element, selector) {
    element = $__default(element);
    return element.is(selector) || !!(isString(selector) ? element.parents(selector).length : $__default.contains(selector instanceof $__default ? selector[0] : selector, element[0]));
}
// Rewrite attribute `attr` by applying a regex replacement to its current
// value; elements without the attribute are left untouched.
function attrFilter(element, attr, pattern, replacement) {
    element = $__default(element);
    return element.attr(attr, function (i, value) { return value ? value.replace(pattern, replacement) : value; });
}
// Remove whole-word occurrences of class `cls` from the class attribute
// (the (?!\S) lookahead prevents matching prefixes of longer class names).
function removeClass(element, cls) {
    return attrFilter(element, 'class', new RegExp(("(^|\\s)" + cls + "(?!\\S)"), 'g'), '');
}
// Normalize `e` into a DOM Event: a string becomes a freshly created event
// of that type; an optional `data` object is merged onto the event.
function createEvent(e, bubbles, cancelable, data) {
    if ( bubbles === void 0 ) bubbles = true;
    if ( cancelable === void 0 ) cancelable = false;
    if ( data === void 0 ) data = false;
    if (isString(e)) {
        var event = document.createEvent('Event');
        event.initEvent(e, bubbles, cancelable);
        e = event;
    }
    if (data) {
        $__default.extend(e, data);
    }
    return e;
}
// Whether any part of `element` lies inside the viewport, expanded by the
// given offsets. Hidden elements are never considered in view.
function isInView(element, offsetTop, offsetLeft) {
    if ( offsetTop === void 0 ) offsetTop = 0;
    if ( offsetLeft === void 0 ) offsetLeft = 0;
    element = $__default(element);
    if (!element.is(':visible')) {
        return false;
    }
    var scrollLeft = win.scrollLeft(), scrollTop = win.scrollTop();
    var ref = element.offset();
    var top = ref.top;
    var left = ref.left;
    return top + element.height() >= scrollTop
        && top - offsetTop <= scrollTop + win.height()
        && left + element.width() >= scrollLeft
        && left - offsetLeft <= scrollLeft + win.width();
}
// Resolve `index` ('next'/'previous', a number, a numeric string, or an
// element) to a valid position within `elements`, wrapping around at both
// ends relative to `current`.
function getIndex(index, elements, current) {
    if ( current === void 0 ) current = 0;
    elements = $__default(elements);
    var length = $__default(elements).length;
    index = (isNumber(index)
        ? index
        : index === 'next'
            ? current + 1
            : index === 'previous'
                ? current - 1
                : isString(index)
                    ? parseInt(index, 10)
                    : elements.index(index)
    ) % length;
    return index < 0 ? index + length : index;
}
// HTML void elements: no closing tag, cannot contain content.
var voidElements = {
    area: true,
    base: true,
    br: true,
    col: true,
    embed: true,
    hr: true,
    img: true,
    input: true,
    keygen: true,
    link: true,
    menuitem: true,
    meta: true,
    param: true,
    source: true,
    track: true,
    wbr: true
};
// Whether `element` is one of the HTML void elements above.
function isVoidElement(element) {
    element = $__default(element);
    return voidElements[element[0].tagName.toLowerCase()];
}
// Helpers for proportionally scaling a {width, height} box.
var Dimensions = {
    // Scale so that `prop` becomes `value`, keeping the aspect ratio.
    ratio: function ratio(dimensions, prop, value) {
        var aProp = prop === 'width' ? 'height' : 'width';
        return ( obj = {}, obj[aProp] = Math.round(value * dimensions[aProp] / dimensions[prop]), obj[prop] = value, obj );
        var obj; // hoisted declaration for the comma expression above
    },
    // Shrink proportionally until the box fits inside `maxDimensions`.
    fit: function fit(dimensions, maxDimensions) {
        var this$1 = this;
        dimensions = $.extend({}, dimensions);
        $.each(dimensions, function (prop) { return dimensions = dimensions[prop] > maxDimensions[prop] ? this$1.ratio(dimensions, prop, maxDimensions[prop]) : dimensions; });
        return dimensions;
    },
    // Grow/shrink proportionally until the box covers `maxDimensions`.
    cover: function cover(dimensions, maxDimensions) {
        var this$1 = this;
        dimensions = this.fit(dimensions, maxDimensions);
        $.each(dimensions, function (prop) { return dimensions = dimensions[prop] < maxDimensions[prop] ? this$1.ratio(dimensions, prop, maxDimensions[prop]) : dimensions; });
        return dimensions;
    }
};
// Resolve `selector` to a jQuery object. Context-prefixed selectors (see
// getContextSelectors) are resolved step by step starting from `context`.
function query(selector, context) {
    var selectors = getContextSelectors(selector);
    return selectors ? selectors.reduce(function (context, selector) { return toJQuery(selector, context); }, context) : toJQuery(selector);
}
// Feature detection: observers, rAF fallbacks, touch/pointer event names.
var Observer = window.MutationObserver || window.WebKitMutationObserver;
// setTimeout fallback approximates 60fps when rAF is unavailable.
var requestAnimationFrame = window.requestAnimationFrame || function (fn) { return setTimeout(fn, 1000 / 60); };
var cancelAnimationFrame = window.cancelAnimationFrame || window.clearTimeout;
var hasTouch = 'ontouchstart' in window
    || window.DocumentTouch && document instanceof DocumentTouch
    || navigator.msPointerEnabled && navigator.msMaxTouchPoints > 0 // IE 10
    || navigator.pointerEnabled && navigator.maxTouchPoints > 0; // IE >=11
// Unified pointer event names: mouse events on non-touch devices, otherwise
// Pointer Events when supported, else Touch Events.
var pointerDown = !hasTouch ? 'mousedown' : window.PointerEvent ? 'pointerdown' : 'touchstart';
var pointerMove = !hasTouch ? 'mousemove' : window.PointerEvent ? 'pointermove' : 'touchmove';
var pointerUp = !hasTouch ? 'mouseup' : window.PointerEvent ? 'pointerup' : 'touchend';
// Vendor-specific transitionend event name (undefined when unsupported).
var transitionend = (function () {
    var element = document.body || document.documentElement,
        names = {
            WebkitTransition: 'webkitTransitionEnd',
            MozTransition: 'transitionend',
            OTransition: 'oTransitionEnd otransitionend',
            transition: 'transitionend'
        }, name;
    for (name in names) {
        if (element.style[name] !== undefined) {
            return names[name];
        }
    }
})();
// Vendor-specific animationend event name (undefined when unsupported).
var animationend = (function () {
    var element = document.body || document.documentElement,
        names = {
            WebkitAnimation: 'webkitAnimationEnd',
            MozAnimation: 'animationend',
            OAnimation: 'oAnimationEnd oanimationend',
            animation: 'animationend'
        }, name;
    for (name in names) {
        if (element.style[name] !== undefined) {
            return names[name];
        }
    }
})();
// Read computed style `property` of a raw DOM `element`.
function getStyle(element, property, pseudoElt) {
    return (window.getComputedStyle(element, pseudoElt) || {})[property];
}
// Read a "CSS variable" emulated via a `.var-name:before { content:"..." }`
// rule: insert a probe element, read its :before content, JSON-parse it.
// Returns undefined when the rule is missing or the content is not JSON.
function getCssVar(name) {
    /* usage in css: .var-name:before { content:"xyz" } */
    var val, doc = document.documentElement,
        element = doc.appendChild(document.createElement('div'));
    element.classList.add(("var-" + name));
    try {
        val = getStyle(element, 'content', ':before').replace(/^["'](.*)["']$/, '$1');
        val = JSON.parse(val);
    } catch (e) {}
    doc.removeChild(element);
    return val || undefined;
}
// Copyright (c) 2016 Wilson Page wilsonpage@me.com
// https://github.com/wilsonpage/fastdom
/**
* Initialize a `FastDom`.
*
* @constructor
*/
// Batches DOM reads (`measure`) and writes (`mutate`) into separate queues
// flushed once per animation frame, to avoid layout thrashing.
function FastDom() {
    var self = this;
    self.reads = [];
    self.writes = [];
    self.raf = requestAnimationFrame.bind(window); // test hook
}
FastDom.prototype = {
    constructor: FastDom,
    /**
     * Adds a job to the read batch and
     * schedules a new frame if need be.
     *
     * @param {Function} fn
     * @param {Object} [ctx] optional `this` binding for `fn`
     * @return {Function} the task handle (pass to `clear` to cancel)
     * @public
     */
    measure: function(fn, ctx) {
        var task = !ctx ? fn : fn.bind(ctx);
        this.reads.push(task);
        scheduleFlush(this);
        return task;
    },
    /**
     * Adds a job to the
     * write batch and schedules
     * a new frame if need be.
     *
     * @param {Function} fn
     * @param {Object} [ctx] optional `this` binding for `fn`
     * @return {Function} the task handle (pass to `clear` to cancel)
     * @public
     */
    mutate: function(fn, ctx) {
        var task = !ctx ? fn : fn.bind(ctx);
        this.writes.push(task);
        scheduleFlush(this);
        return task;
    },
    /**
     * Clears a scheduled 'read' or 'write' task.
     *
     * @param {Object} task
     * @return {Boolean} success
     * @public
     */
    clear: function(task) {
        return remove(this.reads, task) || remove(this.writes, task);
    },
    /**
     * Extend this FastDom with some
     * custom functionality.
     *
     * Because fastdom must *always* be a
     * singleton, we're actually extending
     * the fastdom instance. This means tasks
     * scheduled by an extension still enter
     * fastdom's global task queue.
     *
     * The 'super' instance can be accessed
     * from `this.fastdom`.
     *
     * @example
     *
     * var myFastdom = fastdom.extend({
     *   initialize: function() {
     *     // runs on creation
     *   },
     *
     *   // override a method
     *   measure: function(fn) {
     *     // do extra stuff ...
     *
     *     // then call the original
     *     return this.fastdom.measure(fn);
     *   },
     *
     *   ...
     * });
     *
     * @param {Object} props properties to mixin
     * @return {FastDom}
     */
    extend: function(props) {
        if (typeof props != 'object') { throw new Error('expected object'); }
        var child = Object.create(this);
        mixin(child, props);
        child.fastdom = this;
        // run optional creation hook
        if (child.initialize) { child.initialize(); }
        return child;
    },
    // override this with a function
    // to prevent Errors in console
    // when tasks throw
    catch: null
};
/**
 * Schedules a new read/write
 * batch if one isn't pending.
 *
 * @private
 */
function scheduleFlush(fastdom) {
    if (!fastdom.scheduled) {
        fastdom.scheduled = true;
        fastdom.raf(flush.bind(null, fastdom));
    }
}
/**
* Runs queued `read` and `write` tasks.
*
* Errors are caught and thrown by default.
* If a `.catch` function has been defined
* it is called instead.
*
* @private
*/
// Drain both queues (reads first, then writes). An error aborts the current
// batch; tasks queued during the batch are deferred to a new frame, and the
// error is forwarded to `fastdom.catch` or rethrown.
function flush(fastdom) {
    // splice() empties the live queues so tasks added while flushing land in
    // the next batch instead of this one.
    var reads = fastdom.reads.splice(0, fastdom.reads.length),
        writes = fastdom.writes.splice(0, fastdom.writes.length),
        error;
    try {
        runTasks(reads);
        runTasks(writes);
    } catch (e) { error = e; }
    fastdom.scheduled = false;
    // If the batch errored we may still have tasks queued
    if (fastdom.reads.length || fastdom.writes.length) { scheduleFlush(fastdom); }
    if (error) {
        if (fastdom.catch) { fastdom.catch(error); }
        else { throw error; }
    }
}
/**
* We run this inside a try catch
* so that if any jobs error, we
* are able to recover and continue
* to flush the batch until it's empty.
*
* @private
*/
// Shift tasks off the queue and invoke them in order, stopping at the first
// falsy entry (matches the original truthiness-controlled loop).
function runTasks(tasks) {
    for (var task = tasks.shift(); task; task = tasks.shift()) {
        task();
    }
}
/**
* Remove an item from an Array.
*
* @param {Array} array
* @param {*} item
* @return {Boolean}
*/
// Remove the first occurrence of `item` from `array` in place.
// Returns true when something was removed.
function remove(array, item) {
    var index = array.indexOf(item);
    if (index === -1) {
        return false;
    }
    array.splice(index, 1);
    return true;
}
/**
* Mixin own properties of source
* object into the target.
*
* @param {Object} target
* @param {Object} source
*/
// Copy `source`'s own enumerable properties onto `target` (shallow).
function mixin(target, source) {
    Object.keys(source).forEach(function (key) {
        target[key] = source[key];
    });
}
// Shared scheduler instance; FastDom is documented above as a singleton.
var fastdom = new FastDom();
// Bind `fn` to `context`, dispatching on arity: `call` for 0/1 arguments,
// `apply` otherwise (a micro-optimization over Function.prototype.bind).
function bind(fn, context) {
    return function (a) {
        var argCount = arguments.length;
        if (!argCount) {
            return fn.call(context);
        }
        return argCount > 1 ? fn.apply(context, arguments) : fn.call(context, a);
    };
}
var hasOwnProperty = Object.prototype.hasOwnProperty;
// True when `key` is an own (non-inherited) property of `obj`; safe even if
// `obj` shadows its own hasOwnProperty.
function hasOwn(obj, key) {
    var isOwn = hasOwnProperty.call(obj, key);
    return isOwn;
}
// Convert a dash/underscore/slash-separated name to UpperCamelCase,
// e.g. 'uk-button' -> 'UkButton'.
function classify(str) {
    return str.replace(/(?:^|[-_\/])(\w)/g, function (_, chr) {
        return chr ? chr.toUpperCase() : '';
    });
}
// Convert camelCase to lower-case dash-separated form,
// e.g. 'someName' -> 'some-name'.
function hyphenate(str) {
    var dashed = str.replace(/([a-z\d])([A-Z])/g, '$1-$2');
    return dashed.toLowerCase();
}
// Matches a dash followed by a word character; compiled once and reused.
var camelizeRE = /-(\w)/g;
// Convert dash-separated form to camelCase, e.g. 'foo-bar' -> 'fooBar'.
function camelize(str) {
    return str.replace(camelizeRE, toUpper);
}
// Replace callback: drop the dash, uppercase the captured character.
function toUpper(_, c) {
    if (!c) {
        return '';
    }
    return c.toUpperCase();
}
// Primitive type guards.
function isString(value) {
    return typeof value === 'string';
}
function isNumber(value) {
    return typeof value === 'number';
}
function isUndefined(value) {
    return value === undefined;
}
// A "context selector" starts with !, >, + or -, addressing an element
// relative to a context element. Returns false for non-strings, otherwise
// the match result (array or null), preserving the original's return shape.
function isContextSelector(selector) {
    if (!isString(selector)) {
        return false;
    }
    return selector.match(/^(!|>|\+|-)/);
}
// Split a context selector into its individual trimmed steps
// (split points are spaces followed by one of the prefix markers).
// Falsy results from isContextSelector (false/null) are passed through.
function getContextSelectors(selector) {
    var marker = isContextSelector(selector);
    if (!marker) {
        return marker;
    }
    return selector.split(/(?=\s(?:!|>|\+|-))/g).map(function (value) { return value.trim(); });
}
// Maps a context-selector prefix to the jQuery traversal it stands for
// ('>' is handled inline in toJQuery as a child lookup).
var contextSelectors = {'!': 'closest', '+': 'nextAll', '-': 'prevAll'};
// Resolve `element` (selector string, node, or jQuery object) to a
// non-empty jQuery object, or null when nothing matches, the selector is
// invalid, or `element` is the literal `true`.
function toJQuery(element, context) {
    if (element === true) {
        return null;
    }
    try {
        if (context && isContextSelector(element) && element[0] !== '>') {
            element = $__default(context)[contextSelectors[element[0]]](element.substr(1));
        } else {
            element = $__default(element, context);
        }
    } catch (e) {
        // Invalid selectors throw in jQuery; treat them as "no match".
        return null;
    }
    return element.length ? element : null;
}
// Coerce an attribute-style value to a boolean where possible; other values
// pass through unchanged. Note the deliberate loose comparison for '1'/'0'
// so numeric 1 and 0 coerce as well.
function toBoolean(value) {
    if (typeof value === 'boolean') {
        return value;
    }
    if (value === 'true' || value == '1' || value === '') {
        return true;
    }
    if (value === 'false' || value == '0') {
        return false;
    }
    return value;
}
// Coerce to a number; non-numeric input yields false.
function toNumber(value) {
    var parsed = Number(value);
    if (isNaN(parsed)) {
        return false;
    }
    return parsed;
}
// Cache for breakpoint values read via getCssVar().
var vars = {};
// Turn a value into a min-width media query string. '@name' reads the
// numeric breakpoint from the CSS variable 'media-name' (cached); plain
// numeric values are used directly; anything non-numeric yields false.
function toMedia(value) {
    if (isString(value) && value[0] == '@') {
        var name = "media-" + (value.substr(1));
        value = vars[name] || (vars[name] = parseFloat(getCssVar(name)));
    }
    return value && !isNaN(value) ? ("(min-width: " + value + "px)") : false;
}
// Coerce a raw (usually attribute-sourced) value to the declared prop type.
// `type` may be a constructor (Boolean/Number), the string tags 'jQuery' or
// 'media', any callable converter, or falsy for pass-through.
function coerce(type, value, context) {
    if (type === Boolean) {
        return toBoolean(value);
    } else if (type === Number) {
        return toNumber(value);
    } else if (type === 'jQuery') {
        return query(value, context);
    } else if (type === 'media') {
        return toMedia(value);
    }
    return type ? type(value) : value;
}
// Option-merge strategies: how a child component's option value is combined
// with its parent's when options/mixins are merged (see mergeOptions).
var strats = {};
// concat strategy
// Lifecycle hooks and args accumulate: parent hooks run first, then child's.
strats.args =
strats.created =
strats.init =
strats.ready =
strats.connected =
strats.disconnected =
strats.destroy = function (parentVal, childVal) {
    return childVal
        ? parentVal
            ? parentVal.concat(childVal)
            : $.isArray(childVal)
                ? childVal
                : [childVal]
        : parentVal;
};
// A bare function as `update` is shorthand for a write-phase handler.
strats.update = function (parentVal, childVal) {
    return strats.args(parentVal, $.isFunction(childVal) ? {write: childVal} : childVal);
};
// events strategy
// Handlers for the same event key are collected into arrays, parent first.
strats.events = function (parentVal, childVal) {
    if (!childVal) {
        return parentVal;
    }
    if (!parentVal) {
        return childVal;
    }
    var ret = $.extend({}, parentVal);
    for (var key in childVal) {
        var parent = ret[key], child = childVal[key];
        if (parent && !$.isArray(parent)) {
            parent = [parent]
        }
        ret[key] = parent
            ? parent.concat(child)
            : [child]
    }
    return ret;
};
// property strategy
// An array of prop names is normalized to a {name: String} type map first.
strats.props = function (parentVal, childVal) {
    if ($.isArray(childVal)) {
        var ret = {};
        childVal.forEach(function (val) {
            ret[val] = String;
        });
        childVal = ret;
    }
    return strats.methods(parentVal, childVal);
};
// extend strategy
// Deep-merge objects; child keys override parent keys.
strats.defaults =
strats.methods = function (parentVal, childVal) {
    return childVal
        ? parentVal
            ? $.extend(true, {}, parentVal, childVal)
            : childVal
        : parentVal;
};
// default strategy
// Child wins unless it is undefined.
var defaultStrat = function (parentVal, childVal) {
    return isUndefined(childVal) ? parentVal : childVal;
};
// Merge two component option objects using the per-key strategies in
// `strats` (falling back to defaultStrat). Mixins declared on the child are
// folded into the parent first, in declaration order.
function mergeOptions (parent, child, thisArg) {
    var options = {}, key;
    if (child.mixins) {
        for (var i = 0, l = child.mixins.length; i < l; i++) {
            parent = mergeOptions(parent, child.mixins[i], thisArg);
        }
    }
    for (key in parent) {
        mergeKey(key);
    }
    // Keys only present on the child still need merging.
    for (key in child) {
        if (!hasOwn(parent, key)) {
            mergeKey(key);
        }
    }
    // Apply the strategy registered for `key`, or the default strategy.
    function mergeKey (key) {
        options[key] = (strats[key] || defaultStrat)(parent[key], child[key], thisArg, key);
    }
    return options;
}
// Per-axis property triples: [size prop, near edge, far edge].
var dirs = {
    x: ['width', 'left', 'right'],
    y: ['height', 'top', 'bottom']
};
// Position `element` relative to `target` by aligning the `attach` point of
// the element with the `targetAttach` point of the target, applying the
// given offsets, and optionally flipping per axis when the result would
// leave `boundary` (default: the window). Returns the attachment points
// actually used after any flips.
function position(element, target, attach, targetAttach, offset, targetOffset, flip, boundary) {
    element = $__default(element);
    target = $__default(target);
    boundary = boundary && $__default(boundary);
    attach = getPos(attach);
    targetAttach = getPos(targetAttach);
    var dim = getDimensions(element),
        targetDim = getDimensions(target),
        position = targetDim;
    // Shift by the element's own attachment point, then by the target's.
    moveTo(position, attach, dim, -1);
    moveTo(position, targetAttach, targetDim, 1);
    offset = getOffsets(offset, dim.width, dim.height);
    targetOffset = getOffsets(targetOffset, targetDim.width, targetDim.height);
    offset['x'] += targetOffset['x'];
    offset['y'] += targetOffset['y'];
    position.left += offset['x'];
    position.top += offset['y'];
    boundary = getDimensions(boundary || window);
    var flipped = {element: attach, target: targetAttach};
    if (flip) {
        $__default.each(dirs, function (dir, ref) {
            var prop = ref[0];
            var align = ref[1];
            var alignFlip = ref[2];
            // `flip` may be true (both axes) or a string naming the axes.
            if (!(flip === true || ~flip.indexOf(dir))) {
                return;
            }
            var elemOffset = attach[dir] === align ? -dim[prop] : attach[dir] === alignFlip ? dim[prop] : 0,
                targetOffset = targetAttach[dir] === align ? targetDim[prop] : targetAttach[dir] === alignFlip ? -targetDim[prop] : 0;
            // Only flip when out of bounds AND the flipped position fits.
            if (position[align] < boundary[align] || position[align] + dim[prop] > boundary[alignFlip]) {
                var newVal = position[align] + elemOffset + targetOffset - offset[dir] * 2;
                if (newVal >= boundary[align] && newVal + dim[prop] <= boundary[alignFlip]) {
                    position[align] = newVal;
                    // Record the mirrored attachment points for callers.
                    ['element', 'target'].forEach(function (el) {
                        flipped[el][dir] = !elemOffset
                            ? flipped[el][dir]
                            : flipped[el][dir] === dirs[dir][1]
                                ? dirs[dir][2]
                                : dirs[dir][1];
                    });
                }
            }
        });
    }
    element.offset({left: position.left, top: position.top});
    return flipped;
}
// Measure an element: rounded outer size plus document-relative edges.
// Objects without getClientRects (e.g. window) fall back to scroll offsets.
function getDimensions(elem) {
    elem = $__default(elem);
    var width = Math.round(elem.outerWidth()),
        height = Math.round(elem.outerHeight()),
        offset = elem[0].getClientRects ? elem.offset() : null,
        left = offset ? Math.round(offset.left) : elem.scrollLeft(),
        top = offset ? Math.round(offset.top) : elem.scrollTop();
    return {width: width, height: height, left: left, top: top, right: left + width, bottom: top + height};
}
// Shift `position` so the `attach` alignment point of a box of size `dim`
// lands on it; `factor` is -1 for the element itself, +1 for the target.
function moveTo(position, attach, dim, factor) {
    $__default.each(dirs, function (dir, ref) {
        var prop = ref[0];
        var align = ref[1];
        var alignFlip = ref[2];
        if (attach[dir] === alignFlip) {
            position[align] += dim[prop] * factor;
        } else if (attach[dir] === 'center') {
            position[align] += dim[prop] * factor / 2;
        }
    });
}
function getPos(pos) {
var x = /left|center|right/, y = /top|center|bottom/;
pos = (pos || '').split(' ');
if (pos.length === 1) {
pos = x.test(pos[0])
? pos.concat(['center'])
: y.test(pos[0])
? ['center'].concat(pos)
: ['center', 'center'];
}
return {
x: x.test(pos[0]) ? pos[0] : 'center',
y: y.test(pos[1]) ? pos[1] : 'center'
};
}
// Parses an offset string like "10 20%" into pixel amounts {x, y}.
// A trailing '%' makes the value relative to the given width (for x)
// or height (for y); a missing component yields 0.
function getOffsets(offsets, width, height) {
    var parts = (offsets || '').split(' ');
    // Converts one token to pixels against a reference length.
    function toPixels(value, reference) {
        if (!value) {
            return 0;
        }
        var isPercent = value[value.length - 1] === '%';
        return parseFloat(value) * (isPercent ? reference / 100 : 1);
    }
    return {x: toPixels(parts[0], width), y: toPixels(parts[1], height)};
}
// Returns the opposite side for a position keyword ('left' <-> 'right',
// 'top' <-> 'bottom'); any other value is returned unchanged.
function flipPosition(pos) {
    if (pos === 'left') {
        return 'right';
    }
    if (pos === 'right') {
        return 'left';
    }
    if (pos === 'top') {
        return 'bottom';
    }
    if (pos === 'bottom') {
        return 'top';
    }
    return pos;
}
// Copyright (c) 2010-2016 Thomas Fuchs
// http://zeptojs.com/
// Shared state for the touch-gesture emulation below (adapted from Zepto).
// Tracks the current touch interaction: element, coordinates, timestamps.
var touch = {};
// Pending timer for the delayed 'singleTap' event.
var touchTimeout;
// Pending timer for the universal 'tap' event.
var tapTimeout;
// Pending timer for the deferred 'swipe' events.
var swipeTimeout;
// Pending timer for the 'longTap' event.
var longTapTimeout;
// How long (ms) a press must be held before it counts as a long tap.
var longTapDelay = 750;
// MSGesture instance used for IE/Edge gesture recognition, when available.
var gesture;
// Whether a native 'click' was observed during the current interaction.
var clicked;
// Classifies a drag from (x1, y1) to (x2, y2) as 'Left', 'Right', 'Up' or
// 'Down', based on whichever axis moved the furthest (ties go horizontal).
function swipeDirection(x1, x2, y1, y2) {
    if (Math.abs(x1 - x2) >= Math.abs(y1 - y2)) {
        return x1 - x2 > 0 ? 'Left' : 'Right';
    }
    return y1 - y2 > 0 ? 'Up' : 'Down';
}
// Long-tap timer callback: fires 'longTap' on the touched element (if any)
// and resets the shared touch state. A cleared `touch.last` means the
// interaction already ended, so nothing is triggered.
function longTap() {
    longTapTimeout = null;
    if (!touch.last) {
        return;
    }
    if (touch.el !== undefined) {
        touch.el.trigger('longTap');
    }
    touch = {};
}
// Aborts a pending long-tap timer, if one is scheduled, and clears the handle.
function cancelLongTap() {
    if (longTapTimeout) {
        clearTimeout(longTapTimeout);
    }
    longTapTimeout = null;
}
// Cancels every pending gesture timer (tap, single-tap, swipe, long-tap)
// and resets the touch-tracking state. Exposed to listeners via the
// 'tap' event's cancelTouch hook.
function cancelAll() {
    [touchTimeout, tapTimeout, swipeTimeout, longTapTimeout].forEach(function (timer) {
        if (timer) {
            clearTimeout(timer);
        }
    });
    touchTimeout = tapTimeout = swipeTimeout = longTapTimeout = null;
    touch = {};
}
// Wires up the touch-gesture emulation once the DOM is ready. Synthesizes
// 'tap', 'singleTap', 'doubleTap', 'longTap' and 'swipe[Left|Right|Up|Down]'
// events from raw pointer/touch events (logic adapted from Zepto.js).
ready(function () {
    var now, delta, deltaX = 0, deltaY = 0, firstTouch;
    // IE/Edge expose MSGesture for native gesture recognition.
    if ('MSGesture' in window) {
        gesture = new MSGesture();
        gesture.target = document.body;
    }
    // Capture-phase listener: records that a native click fired so the
    // synthetic 'click' below is not duplicated.
    document.addEventListener('click', function () { return clicked = true; }, true);
    doc
        .on('MSGestureEnd gestureend', function (e) {
            // Map IE gesture velocity to a swipe direction; null if too slow.
            var swipeDirectionFromVelocity = e.originalEvent.velocityX > 1 ? 'Right' : e.originalEvent.velocityX < -1 ? 'Left' : e.originalEvent.velocityY > 1 ? 'Down' : e.originalEvent.velocityY < -1 ? 'Up' : null;
            if (swipeDirectionFromVelocity && touch.el !== undefined) {
                touch.el.trigger('swipe');
                touch.el.trigger('swipe' + swipeDirectionFromVelocity);
            }
        })
        .on(pointerDown, function (e) {
            firstTouch = e.originalEvent.touches ? e.originalEvent.touches[0] : e;
            now = Date.now();
            // Time since the previous touch start — used for double-tap detection.
            delta = now - (touch.last || now);
            // Text nodes have no tagName; fall back to the parent element.
            touch.el = $__default('tagName' in firstTouch.target ? firstTouch.target : firstTouch.target.parentNode);
            if (touchTimeout) { clearTimeout(touchTimeout); }
            touch.x1 = firstTouch.pageX;
            touch.y1 = firstTouch.pageY;
            // Two touch-starts within 250ms count as a double tap.
            if (delta > 0 && delta <= 250) { touch.isDoubleTap = true; }
            touch.last = now;
            longTapTimeout = setTimeout(longTap, longTapDelay);
            // adds the current touch contact for IE gesture recognition
            if (gesture && ( e.type == 'pointerdown' || e.type == 'touchstart' )) {
                gesture.addPointer(e.originalEvent.pointerId);
            }
            clicked = false;
        })
        .on(pointerMove, function (e) {
            firstTouch = e.originalEvent.touches ? e.originalEvent.touches[0] : e;
            // Any movement invalidates a long tap.
            cancelLongTap();
            touch.x2 = firstTouch.pageX;
            touch.y2 = firstTouch.pageY;
            // Accumulate absolute travel to tell taps apart from drags.
            deltaX += Math.abs(touch.x1 - touch.x2);
            deltaY += Math.abs(touch.y1 - touch.y2);
        })
        .on(pointerUp, function () {
            cancelLongTap();
            // swipe
            if ((touch.x2 && Math.abs(touch.x1 - touch.x2) > 30) || (touch.y2 && Math.abs(touch.y1 - touch.y2) > 30)) {
                swipeTimeout = setTimeout(function () {
                    if (touch.el !== undefined) {
                        touch.el.trigger('swipe');
                        touch.el.trigger('swipe' + (swipeDirection(touch.x1, touch.x2, touch.y1, touch.y2)));
                    }
                    touch = {};
                }, 0);
            // normal tap
            } else if ('last' in touch) {
                // don't fire tap when delta position changed by more than 30 pixels,
                // for instance when moving to a point and back to origin
                if (isNaN(deltaX) || (deltaX < 30 && deltaY < 30)) {
                    // delay by one tick so we can cancel the 'tap' event if 'scroll' fires
                    // ('tap' fires before 'scroll')
                    tapTimeout = setTimeout(function () {
                        // trigger universal 'tap' with the option to cancelTouch()
                        // (cancelTouch cancels processing of single vs double taps for faster 'tap' response)
                        var event = $__default.Event('tap');
                        event.cancelTouch = cancelAll;
                        if (touch.el !== undefined) {
                            touch.el.trigger(event);
                        }
                        // trigger double tap immediately
                        if (touch.isDoubleTap) {
                            if (touch.el !== undefined) { touch.el.trigger('doubleTap'); }
                            touch = {};
                        }
                        // trigger single tap after 300ms of inactivity
                        else {
                            touchTimeout = setTimeout(function () {
                                touchTimeout = null;
                                if (touch.el !== undefined) {
                                    touch.el.trigger('singleTap');
                                    // Emit a synthetic click only if no native one occurred.
                                    if (!clicked) {
                                        touch.el.trigger('click');
                                    }
                                }
                                touch = {};
                            }, 300);
                        }
                    });
                } else {
                    touch = {};
                }
                deltaX = deltaY = 0;
            }
        })
        // when the browser window loses focus,
        // for example when a modal dialog is shown,
        // cancel all ongoing events
        .on('touchcancel pointercancel', cancelAll);
    // scrolling the window indicates intention of the user
    // to scroll, not tap or swipe, so cancel all ongoing events
    win.on('scroll', cancelAll);
});
// Frozen namespace of every internal utility, exported as `UIkit.util` so
// components and plugins can reuse the framework's helpers. The jQuery-
// prefixed entries ($.ajax, $.each, ...) simply re-export jQuery functions.
var util = Object.freeze({
    win: win,
    doc: doc,
    docElement: doc$1,
    langDirection: langDirection,
    isReady: isReady,
    ready: ready,
    on: on,
    off: off,
    transition: transition,
    Transition: Transition,
    animate: animate,
    Animation: Animation,
    isWithin: isWithin,
    attrFilter: attrFilter,
    removeClass: removeClass,
    createEvent: createEvent,
    isInView: isInView,
    getIndex: getIndex,
    isVoidElement: isVoidElement,
    Dimensions: Dimensions,
    query: query,
    Observer: Observer,
    requestAnimationFrame: requestAnimationFrame,
    cancelAnimationFrame: cancelAnimationFrame,
    hasTouch: hasTouch,
    pointerDown: pointerDown,
    pointerMove: pointerMove,
    pointerUp: pointerUp,
    transitionend: transitionend,
    animationend: animationend,
    getStyle: getStyle,
    getCssVar: getCssVar,
    fastdom: fastdom,
    $: $__default,
    bind: bind,
    hasOwn: hasOwn,
    classify: classify,
    hyphenate: hyphenate,
    camelize: camelize,
    isString: isString,
    isNumber: isNumber,
    isUndefined: isUndefined,
    isContextSelector: isContextSelector,
    getContextSelectors: getContextSelectors,
    toJQuery: toJQuery,
    toBoolean: toBoolean,
    toNumber: toNumber,
    toMedia: toMedia,
    coerce: coerce,
    ajax: $.ajax,
    each: $.each,
    extend: $.extend,
    map: $.map,
    merge: $.merge,
    isArray: $.isArray,
    isNumeric: $.isNumeric,
    isFunction: $.isFunction,
    isPlainObject: $.isPlainObject,
    mergeOptions: mergeOptions,
    position: position,
    getDimensions: getDimensions,
    flipPosition: flipPosition
});
// Installs the global (static) API onto the UIkit constructor:
// plugin installation, mixin merging, subclassing and global updates.
function globalAPI (UIkit) {
    var DATA = UIkit.data;
    // Installs a plugin once; `installed` guards against double installation.
    UIkit.use = function (plugin) {
        if (plugin.installed) {
            return;
        }
        plugin.call(null, this);
        plugin.installed = true;
        return this;
    };
    // Merges mixin options into a component (by name or reference),
    // defaulting to UIkit itself.
    UIkit.mixin = function (mixin, component) {
        component = (isString(component) ? UIkit.components[component] : component) || this;
        component.options = mergeOptions(component.options, mixin);
    };
    // Vue-style subclassing: creates a named constructor whose prototype
    // chains to this one and whose options are merged with the parent's.
    UIkit.extend = function (options) {
        options = options || {};
        var Super = this, name = options.name || Super.options.name;
        var Sub = createClass(name || 'UIkitComponent');
        Sub.prototype = Object.create(Super.prototype);
        Sub.prototype.constructor = Sub;
        Sub.options = mergeOptions(Super.options, options);
        Sub['super'] = Super;
        Sub.extend = Super.extend;
        return Sub;
    };
    // Dispatches an update event: to all instances when no element is given,
    // up the ancestor chain when `parents` is true, otherwise down the subtree.
    UIkit.update = function (e, element, parents) {
        if ( parents === void 0 ) parents = false;
        e = createEvent(e || 'update');
        if (!element) {
            update(UIkit.instances, e);
            return;
        }
        element = $__default(element)[0];
        if (parents) {
            do {
                update(element[DATA], e);
                element = element.parentNode;
            } while (element)
        } else {
            apply(element, function (element) { return update(element[DATA], e); });
        }
    };
    // Configurable mount container for dynamically created elements;
    // falls back to document.body when unset.
    var container;
    Object.defineProperty(UIkit, 'container', {
        get: function get() {
            return container || document.body;
        },
        set: function set(element) {
            container = element;
        }
    });
}
// Builds a constructor with a real (classified) function name via the
// Function constructor, so instances show a meaningful name in dev tools.
// The generated constructor just delegates to the instance's _init.
function createClass(name) {
    return new Function(("return function " + (classify(name)) + " (options) { this._init(options); }"))();
}
// Depth-first pre-order traversal: invokes `fn` on `node` and every element
// in its subtree. Non-element nodes (text, comments) prune the branch.
function apply(node, fn) {
    if (node.nodeType !== Node.ELEMENT_NODE) {
        return;
    }
    fn(node);
    for (var child = node.firstChild; child; child = child.nextSibling) {
        apply(child, fn);
    }
}
// Dispatches the update event `e` to every component in `data` (a map of
// component name -> instance) that has finished its ready phase.
// A missing/empty map is a no-op.
function update(data, e) {
    if (!data) {
        return;
    }
    for (var name in data) {
        var component = data[name];
        if (component._isReady) {
            component._callUpdate(e);
        }
    }
}
// Installs the private lifecycle machinery on UIkit.prototype: option
// merging, data/prop coercion from attributes, method binding, event
// registration, hook invocation and batched (fastdom) update scheduling.
function internalAPI (UIkit) {
    // Monotonic instance id, also used as the key in UIkit.instances.
    var uid = 0;
    UIkit.prototype.props = {};
    // Core constructor body: merges options, registers the instance and
    // runs the early lifecycle; mounts immediately when options.el is given.
    UIkit.prototype._init = function (options) {
        options = options || {};
        options = this.$options = mergeOptions(this.constructor.options, options, this);
        UIkit.instances[uid] = this;
        this.$el = null;
        this.$name = UIkit.prefix + hyphenate(this.$options.name);
        this._uid = uid++;
        this._initData();
        this._initMethods();
        this._callHook('created');
        // Per-update-pass bookkeeping of scheduled fastdom read/write jobs.
        this._frames = {reads: {}, writes: {}};
        if (options.el) {
            this.$mount(options.el);
        }
    };
    // Copies defaults onto the instance, overridden by any provided data.
    // Array data is mapped positionally onto the declared `args` names.
    UIkit.prototype._initData = function () {
        var this$1 = this;
        var defaults = $.extend(true, {}, this.$options.defaults),
            data = this.$options.data || {},
            args = this.$options.args || [],
            props = this.$options.props || {};
        if (!defaults) {
            return;
        }
        if (args.length && $.isArray(data)) {
            data = data.slice(0, args.length).reduce(function (data, value, index) {
                data[args[index]] = value;
                return data;
            }, {});
        }
        for (var key in defaults) {
            this$1[key] = hasOwn(data, key) ? coerce(props[key], data[key], this$1.$options.el) : defaults[key];
        }
    };
    // Reads declared props from the element: first from individual
    // attributes (hyphenated prop names), then from the component
    // attribute itself (uk-name / data-uk-name), which may hold JSON,
    // a single primary-arg value, or a "key: value; ..." list.
    UIkit.prototype._initProps = function () {
        var this$1 = this;
        var el = this.$el[0],
            args = this.$options.args || [],
            props = this.$options.props || {},
            options = el.getAttribute(this.$name) || el.getAttribute(("data-" + (this.$name))),
            key, prop;
        if (!props) {
            return;
        }
        for (key in props) {
            prop = hyphenate(key);
            if (el.hasAttribute(prop)) {
                var value = coerce(props[key], el.getAttribute(prop), el);
                // A bare `target` attribute like "_blank" belongs to the
                // anchor, not the component — skip it.
                if (prop === 'target' && (!value || value.lastIndexOf('_', 0) === 0)) {
                    continue;
                }
                this$1[key] = value;
            }
        }
        if (!options) {
            return;
        }
        if (options[0] === '{') {
            try {
                options = JSON.parse(options);
            } catch (e) {
                console.warn("Invalid JSON.");
                options = {};
            }
        } else if (args.length && !~options.indexOf(':')) {
            // No colon present: treat the whole value as the primary argument.
            options = (( obj = {}, obj[args[0]] = options, obj ));
            var obj;
        } else {
            // "key: value; key: value" form; split on the first colon only.
            var tmp = {};
            options.split(';').forEach(function (option) {
                var ref = option.split(/:(.+)/);
                var key = ref[0];
                var value = ref[1];
                if (key && value) {
                    tmp[key.trim()] = value.trim();
                }
            });
            options = tmp;
        }
        for (key in options || {}) {
            prop = camelize(key);
            if (props[prop] !== undefined) {
                this$1[prop] = coerce(props[prop], options[key], el);
            }
        }
    };
    // Binds every declared method to the instance so they can be used
    // directly as event handlers.
    UIkit.prototype._initMethods = function () {
        var this$1 = this;
        var methods = this.$options.methods;
        if (methods) {
            for (var key in methods) {
                this$1[key] = bind(methods[key], this$1);
            }
        }
    };
    // Attaches declared events to $el; string handlers resolve to bound
    // instance methods, and arrays register multiple handlers per event.
    UIkit.prototype._initEvents = function () {
        var this$1 = this;
        var events = this.$options.events,
            register = function (name, fn) { return this$1.$el.on(name, isString(fn) ? this$1[fn] : bind(fn, this$1)); };
        if (events) {
            for (var key in events) {
                if ($.isArray(events[key])) {
                    events[key].forEach(function (event) { return register(key, event); });
                } else {
                    register(key, events[key]);
                }
            }
        }
    };
    // Marks the instance ready, fires the 'ready' hook and runs a first update.
    UIkit.prototype._callReady = function () {
        this._isReady = true;
        this._callHook('ready');
        this._callUpdate();
    };
    // Invokes every handler registered for a lifecycle hook.
    UIkit.prototype._callHook = function (hook) {
        var this$1 = this;
        var handlers = this.$options[hook];
        if (handlers) {
            handlers.forEach(function (handler) { return handler.call(this$1); });
        }
    };
    // Runs the component's update entries for event `e`. Synchronous events
    // run read/write immediately; otherwise work is batched through fastdom,
    // deduplicated per entry via the _frames bookkeeping.
    UIkit.prototype._callUpdate = function (e) {
        var this$1 = this;
        e = createEvent(e || 'update');
        var updates = this.$options.update;
        if (!updates) {
            return;
        }
        updates.forEach(function (update, i) {
            // Filter by event type unless it's the generic 'update'.
            if (e.type !== 'update' && (!update.events || !~update.events.indexOf(e.type))) {
                return;
            }
            if (e.sync) {
                if (update.read) {
                    update.read.call(this$1, e);
                }
                if (update.write) {
                    update.write.call(this$1, e);
                }
                return;
            }
            // Schedule at most one pending read and one pending write per entry.
            if (update.read && !~fastdom.reads.indexOf(this$1._frames.reads[i])) {
                this$1._frames.reads[i] = fastdom.measure(function () { return update.read.call(this$1, e); });
            }
            if (update.write && !~fastdom.writes.indexOf(this$1._frames.writes[i])) {
                this$1._frames.writes[i] = fastdom.mutate(function () { return update.write.call(this$1, e); });
            }
        });
    };
}
// Installs the public per-instance API: mounting onto an element,
// triggering updates, and teardown.
function instanceAPI (UIkit) {
    var DATA = UIkit.data;
    // Mounts the component onto `el`, registering it in the element's
    // component map; refuses to double-mount the same component name.
    UIkit.prototype.$mount = function (el) {
        var this$1 = this;
        var name = this.$options.name;
        if (!el[DATA]) {
            el[DATA] = {};
            UIkit.elements.push(el);
        }
        if (el[DATA][name]) {
            console.warn(("Component \"" + name + "\" is already mounted on element: "), el);
            return;
        }
        el[DATA][name] = this;
        this.$el = $__default(el);
        this._initProps();
        this._callHook('init');
        this._initEvents();
        // Only fire 'connected' when the element is actually in the document.
        if (document.documentElement.contains(this.$el[0])) {
            this._callHook('connected');
        }
        ready(function () { return this$1._callReady(); });
    };
    // Triggers this instance's update entries asynchronously (fastdom-batched).
    UIkit.prototype.$emit = function (e) {
        this._callUpdate(e);
    };
    // Same as $emit but read/write run immediately in this call.
    UIkit.prototype.$emitSync = function (e) {
        this._callUpdate(createEvent(e || 'update', true, false, {sync: true}));
    };
    // Updates all components on this element (or its ancestors when `parents`).
    UIkit.prototype.$update = function (e, parents) {
        UIkit.update(e, this.$el, parents);
    };
    // Synchronous variant of $update.
    UIkit.prototype.$updateSync = function (e, parents) {
        UIkit.update(createEvent(e || 'update', true, false, {sync: true}), this.$el, parents);
    };
    // Destroys the instance: fires the hook, deregisters it, and optionally
    // removes the element from the DOM.
    UIkit.prototype.$destroy = function (remove) {
        if ( remove === void 0 ) remove = false;
        this._callHook('destroy');
        delete UIkit.instances[this._uid];
        var el = this.$options.el;
        if (!el || !el[DATA]) {
            return;
        }
        delete el[DATA][this.$options.name];
        // Drop the per-element map (and tracking entry) once it's empty.
        if (!Object.keys(el[DATA]).length) {
            delete el[DATA];
            var index = UIkit.elements.indexOf(el);
            if (~index) {
                UIkit.elements.splice(index, 1);
            }
        }
        if (remove) {
            this.$el.remove();
        }
    };
}
// Installs the component registry API: UIkit.component() registration,
// lookup helpers, and the DOM connect/disconnect lifecycle used by the
// mutation observer.
function componentAPI (UIkit) {
    var DATA = UIkit.data;
    UIkit.components = {};
    // Registers a component under a camelized name and exposes a factory
    // as UIkit[name]. Plain-object options are turned into a subclass;
    // otherwise `options` is assumed to already be a component class.
    UIkit.component = function (id, options) {
        var name = camelize(id);
        if ($.isPlainObject(options)) {
            options.name = name;
            options = UIkit.extend(options);
        } else {
            options.options.name = name;
        }
        UIkit.components[name] = options;
        // Factory: UIkit[name](element, data) instantiates (or returns the
        // already-mounted instance) for each matched element. Functional
        // components instead receive all arguments as positional data.
        UIkit[name] = function (element, data) {
            var i = arguments.length, argsArray = Array(i);
            while ( i-- ) argsArray[i] = arguments[i];
            if ($.isPlainObject(element)) {
                return new UIkit.components[name]({data: element});
            }
            if (UIkit.components[name].options.functional) {
                return new UIkit.components[name]({data: [].concat( argsArray )});
            }
            var result = [];
            data = data || {};
            $__default(element).each(function (i, el) { return result.push(el[DATA] && el[DATA][name] || new UIkit.components[name]({el: el, data: data})); });
            return result;
        };
        // Auto-mount on existing [uk-*]/[data-uk-*] elements when the DOM
        // is already available and the component is element-bound.
        if (document.body && !options.options.functional) {
            UIkit[name](("[uk-" + id + "],[data-uk-" + id + "]"));
        }
        return UIkit.components[name];
    };
    // Returns the element's name -> component map (empty object if none).
    UIkit.getComponents = function (element) { return element && element[DATA] || {}; };
    // Returns a single mounted component by name, or undefined.
    UIkit.getComponent = function (element, name) { return UIkit.getComponents(element)[name]; };
    // Called when a node (re-)enters the document: re-registers its mounted
    // components and fires their 'connected' hooks, then auto-mounts any
    // components referenced by uk-*/data-uk-* attributes on the node.
    UIkit.connect = function (node) {
        var name;
        if (node[DATA]) {
            if (!~UIkit.elements.indexOf(node)) {
                UIkit.elements.push(node);
            }
            for (name in node[DATA]) {
                var component = node[DATA][name];
                if (!(component._uid in UIkit.instances)) {
                    UIkit.instances[component._uid] = component;
                }
                component._callHook('connected');
            }
        }
        for (var i = 0; i < node.attributes.length; i++) {
            name = node.attributes[i].name;
            if (name.lastIndexOf('uk-', 0) === 0 || name.lastIndexOf('data-uk-', 0) === 0) {
                name = camelize(name.replace('data-uk-', '').replace('uk-', ''));
                if (UIkit[name]) {
                    UIkit[name](node);
                }
            }
        }
    };
    // Called when a node leaves the document: deregisters its components
    // and fires their 'disconnected' hooks.
    UIkit.disconnect = function (node) {
        var index = UIkit.elements.indexOf(node);
        if (~index) {
            UIkit.elements.splice(index, 1);
        }
        for (var name in node[DATA]) {
            var component = node[DATA][name];
            if (component._uid in UIkit.instances) {
                delete UIkit.instances[component._uid];
                component._callHook('disconnected');
            }
        }
    }
}
// The UIkit constructor itself: a thin shell whose behavior is assembled
// by the API installers below.
var UIkit$1 = function (options) {
    this._init(options);
};
// Static configuration and registries shared by all components.
UIkit$1.util = util;
UIkit$1.data = '__uikit__';
UIkit$1.prefix = 'uk-';
UIkit$1.options = {};
UIkit$1.instances = {};
UIkit$1.elements = [];
// Install the static, internal, per-instance and registry APIs in order.
globalAPI(UIkit$1);
internalAPI(UIkit$1);
instanceAPI(UIkit$1);
componentAPI(UIkit$1);
// Mixin: adds the component's own class name (e.g. 'uk-alert') to its
// root element during the init phase.
var Class = {
    init: function init() {
        this.$el.addClass(this.$name);
    }
}
// Mixin: shared show/hide machinery. Toggles elements via a class or the
// `hidden` attribute, optionally animated by a height transition
// (animation === true) or named CSS animations, with optional queueing of
// multi-element toggles. Fires before{show,hide}, show/hide and
// shown/hidden events around each toggle.
var Toggable = {
    props: {
        cls: Boolean,
        animation: Boolean,
        duration: Number,
        origin: String,
        transition: String,
        queued: Boolean
    },
    defaults: {
        cls: false,
        animation: false,
        duration: 200,
        origin: false,
        transition: 'linear',
        queued: false,
        // Styles restored after a height transition completes.
        initProps: {
            overflow: '',
            height: '',
            paddingTop: '',
            paddingBottom: '',
            marginTop: '',
            marginBottom: ''
        },
        // Styles applied to collapse an element during a height transition.
        hideProps: {
            overflow: 'hidden',
            height: 0,
            paddingTop: 0,
            paddingBottom: 0,
            marginTop: 0,
            marginBottom: 0
        }
    },
    ready: function ready() {
        // Normalize a comma-separated animation string into an [in, out]
        // pair, duplicating a single name for both directions.
        if (isString(this.animation)) {
            this.animation = this.animation.split(',');
            if (this.animation.length === 1) {
                this.animation[1] = this.animation[0];
            }
            this.animation = this.animation.map(function (animation) { return animation.trim(); });
        }
        // Queueing only makes sense when toggles are animated.
        this.queued = this.queued && !!this.animation;
    },
    methods: {
        // Toggles one or more targets. When queueing, currently-toggled
        // elements are hidden first and the remainder shown afterwards;
        // the scroll position is restored between the two phases.
        toggleElement: function toggleElement(targets, show, animate) {
            var this$1 = this;
            var toggles, body = document.body, scroll = body.scrollTop,
                all = function (targets) { return $__default.when.apply($__default, targets.toArray().map(function (el) { return this$1._toggleElement(el, show, animate); })); },
                delay = function (targets) {
                    var def = all(targets);
                    this$1.queued = true;
                    body.scrollTop = scroll;
                    return def;
                };
            targets = $__default(targets);
            if (!this.queued || targets.length < 2) {
                return all(targets);
            }
            // `queued` temporarily holds the set still waiting to be toggled.
            if (this.queued !== true) {
                return delay(targets.not(this.queued));
            }
            this.queued = targets.not(toggles = targets.filter(function (_, el) { return this$1.isToggled(el); }));
            return all(toggles).then(function () { return this$1.queued !== true && delay(this$1.queued); });
        },
        // Toggles targets immediately, without animation.
        toggleNow: function toggleNow(targets, show) {
            var this$1 = this;
            $__default(targets).each(function (_, el) { return this$1._toggleElement(el, show, false); });
        },
        // An element counts as toggled when it carries the (first) toggle
        // class, or — with no class configured — lacks the hidden attribute.
        isToggled: function isToggled(el) {
            el = $__default(el);
            return this.cls ? el.hasClass(this.cls.split(' ')[0]) : !el.attr('hidden');
        },
        // Keeps aria-hidden in sync when visibility is attribute-driven.
        updateAria: function updateAria(el) {
            if (this.cls === false) {
                el.attr('aria-hidden', !this.isToggled(el));
            }
        },
        // Core toggle for a single element: cancels an in-flight animation,
        // fires beforeshow/beforehide (cancellable via `return false`),
        // delegates to the height/animation/immediate strategy and fires the
        // follow-up events.
        _toggleElement: function _toggleElement(el, show, animate) {
            var this$1 = this;
            el = $__default(el);
            var deferred;
            if (Animation.inProgress(el)) {
                return Animation.cancel(el).then(function () { return this$1._toggleElement(el, show, animate); });
            }
            show = typeof show === 'boolean' ? show : !this.isToggled(el);
            var event = $__default.Event(("before" + (show ? 'show' : 'hide')));
            el.trigger(event, [this]);
            if (event.result === false) {
                return $__default.Deferred().reject();
            }
            deferred = (this.animation === true && animate !== false
                ? this._toggleHeight
                : this.animation && animate !== false
                    ? this._toggleAnimation
                    : this._toggleImmediate
            )(el, show);
            el.trigger(show ? 'show' : 'hide', [this]);
            return deferred.then(function () { return el.trigger(show ? 'shown' : 'hidden', [this$1]); });
        },
        // Applies the class/attribute flip, refocuses autofocus children and
        // pushes an update to components on the element.
        _toggle: function _toggle(el, toggled) {
            el = $__default(el);
            if (this.cls) {
                el.toggleClass(this.cls, ~this.cls.indexOf(' ') ? undefined : toggled);
            } else {
                el.attr('hidden', !toggled);
            }
            el.find('[autofocus]:visible').focus();
            this.updateAria(el);
            // NOTE(review): references `UIkit`, not `UIkit$1` — presumably
            // resolved from an enclosing scope outside this chunk; verify.
            UIkit.update(null, el);
        },
        // Strategy: no animation — toggle and resolve immediately.
        _toggleImmediate: function _toggleImmediate(el, show) {
            this._toggle(el, show);
            return $__default.Deferred().resolve();
        },
        // Strategy: slide via a height transition. Measures the current and
        // target heights (including the first/last child margins when no
        // transition is already running) and transitions between them.
        _toggleHeight: function _toggleHeight(el, show) {
            var this$1 = this;
            var inProgress = Transition.inProgress(el),
                inner = parseFloat(el.children().first().css('margin-top')) + parseFloat(el.children().last().css('margin-bottom')),
                height = el[0].offsetHeight ? el.height() + (inProgress ? 0 : inner) : 0,
                endHeight;
            Transition.cancel(el);
            if (!this.isToggled(el)) {
                this._toggle(el, true);
            }
            el.css('height', '');
            endHeight = el.height() + (inProgress ? 0 : inner);
            el.height(height);
            // Duration is scaled by how much of the transition remains.
            return show
                ? Transition.start(el, $.extend(this.initProps, {overflow: 'hidden', height: endHeight}), Math.round(this.duration * (1 - height / endHeight)), this.transition)
                : Transition.start(el, this.hideProps, Math.round(this.duration * (height / endHeight)), this.transition).then(function () {
                    this$1._toggle(el, false);
                    el.css(this$1.initProps);
                });
        },
        // Strategy: named CSS animations — animation[0] in, animation[1] out.
        _toggleAnimation: function _toggleAnimation(el, show) {
            var this$1 = this;
            if (show) {
                this._toggle(el, true);
                return Animation.in(el, this.animation[0], this.duration, this.origin);
            }
            return Animation.out(el, this.animation[1], this.duration, this.origin).then(function () { return this$1._toggle(el, false); });
        }
    }
};
// The currently-open modal (topmost when stacked), shared by all Modal
// instances via closure.
var active;
// Global handlers: close the active modal on background click (when
// bgClose) or on the Escape key (when escClose).
doc.on({
    click: function click(e) {
        if (active && active.bgClose && !e.isDefaultPrevented() && !isWithin(e.target, active.panel)) {
            active.hide();
        }
    },
    keydown: function keydown(e) {
        // 27 is the Escape key code.
        if (e.keyCode === 27 && active && active.escClose) {
            e.preventDefault();
            active.hide();
        }
    }
});
// Mixin: common modal/dialog behavior — open/close with the 'uk-open'
// class, background/Escape closing, stacking of nested modals, and
// scrollbar compensation on the page body.
var Modal = {
    mixins: [Class, Toggable],
    props: {
        clsPanel: String,
        selClose: String,
        escClose: Boolean,
        bgClose: Boolean,
        stack: Boolean
    },
    defaults: {
        cls: 'uk-open',
        escClose: true,
        bgClose: true,
        overlay: true,
        stack: false
    },
    ready: function ready() {
        var this$1 = this;
        this.page = $__default(document.documentElement);
        this.body = $__default(document.body);
        // The inner dialog panel; clicks outside it count as background clicks.
        this.panel = toJQuery(("." + (this.clsPanel)), this.$el);
        // Delegate close-button clicks.
        this.$el.on('click', this.selClose, function (e) {
            e.preventDefault();
            this$1.hide();
        });
    },
    events: {
        toggle: function toggle(e) {
            e.preventDefault();
            this.toggleNow(this.$el);
        },
        // Guards the Toggable 'beforeshow': tracks the active modal, hides
        // or stacks a previously open one, and defers the real 'show'
        // event until the panel's transition has finished.
        beforeshow: function beforeshow(e) {
            var this$1 = this;
            if (!this.$el.is(e.target)) {
                return;
            }
            if (this.isActive()) {
                return false;
            }
            var prev = active && active !== this && active;
            if (!active) {
                // Reserve scrollbar space so the page doesn't shift.
                this.body.css('overflow-y', this.getScrollbarWidth() && this.overlay ? 'scroll' : '');
            }
            active = this;
            if (prev) {
                if (this.stack) {
                    this.prev = prev;
                } else {
                    prev.hide();
                }
            }
            this.panel.one(transitionend, function () {
                var event = $__default.Event('show');
                event.isShown = true;
                this$1.$el.trigger(event, [this$1]);
            });
        },
        // Swallows the Toggable-fired 'show' so only the transition-end
        // variant (marked isShown) propagates.
        show: function show(e) {
            if (!this.$el.is(e.target)) {
                return;
            }
            if (!e.isShown) {
                e.stopImmediatePropagation();
            }
        },
        // Restores the previous modal in the stack (if any) and defers the
        // real 'hide' event until the panel transition completes.
        beforehide: function beforehide(e) {
            var this$1 = this;
            if (!this.$el.is(e.target)) {
                return;
            }
            active = active && active !== this && active || this.prev;
            var hide = function () {
                var event = $__default.Event('hide');
                event.isHidden = true;
                this$1.$el.trigger(event, [this$1]);
            };
            if (parseFloat(this.panel.css('transition-duration'))) {
                this.panel.one(transitionend, hide);
            } else {
                hide();
            }
        },
        // Swallows the premature 'hide'; resets body scrolling once the last
        // modal is gone.
        hide: function hide(e) {
            if (!this.$el.is(e.target)) {
                return;
            }
            if (!e.isHidden) {
                e.stopImmediatePropagation();
                return;
            }
            if (!active) {
                this.body.css('overflow-y', '');
            }
        }
    },
    methods: {
        isActive: function isActive() {
            return this.$el.hasClass(this.cls);
        },
        toggle: function toggle() {
            return this.isActive() ? this.hide() : this.show();
        },
        // Promise resolving once the 'show' event (post-transition) fires.
        show: function show() {
            var deferred = $__default.Deferred();
            this.$el.one('show', function () { return deferred.resolve(); });
            this.toggleNow(this.$el, true);
            return deferred.promise();
        },
        // Promise resolving once the 'hide' event (post-transition) fires.
        hide: function hide() {
            var deferred = $__default.Deferred();
            this.$el.one('hide', function () { return deferred.resolve(); });
            this.toggleNow(this.$el, false);
            return deferred.promise();
        },
        getActive: function getActive() {
            return active;
        },
        // Measures the page scrollbar width by comparing the viewport width
        // against the root element's outer width with width styling removed.
        getScrollbarWidth: function getScrollbarWidth() {
            var width = this.page[0].style.width;
            this.page.css('width', '');
            var scrollbarWidth = window.innerWidth - this.page.outerWidth(true);
            if (width) {
                this.page.width(width);
            }
            return scrollbarWidth;
        }
    }
}
// Mixin: mouse-aim tracking used by hover drops/dropdowns. Records recent
// pointer positions and predicts whether the pointer is moving toward a
// target element, so hiding can be delayed while the user aims at it.
var Mouse = {
    defaults: {
        positions: [],
        position: null
    },
    methods: {
        // Starts recording pointer positions (a rolling window of 5).
        initMouseTracker: function initMouseTracker() {
            var this$1 = this;
            this.positions = [];
            this.position = null;
            this.mouseHandler = function (e) {
                this$1.positions.push({x: e.pageX, y: e.pageY});
                if (this$1.positions.length > 5) {
                    this$1.positions.shift();
                }
            };
            doc.on('mousemove', this.mouseHandler);
        },
        // Stops recording; safe to call when tracking never started.
        cancelMouseTracker: function cancelMouseTracker() {
            if (this.mouseHandler) {
                doc.off('mousemove', this.mouseHandler);
            }
        },
        // Returns truthy when the pointer's recent trajectory heads into the
        // target's bounding box: the path from the oldest to the newest
        // recorded position must fall inside the angle spanned by a box
        // diagonal, tested via slope comparisons.
        movesTo: function movesTo(target) {
            var p = getDimensions(target),
                points = [
                    [{x: p.left, y: p.top}, {x: p.right, y: p.bottom}],
                    [{x: p.right, y: p.top}, {x: p.left, y: p.bottom}]
                ],
                position = this.positions[this.positions.length - 1],
                prevPos = this.positions[0] || position;
            if (!position) {
                return false;
            }
            // Orient each diagonal's endpoints relative to the pointer's side
            // of the box so the slope comparison below works from any side.
            if (p.right <= position.x) {
            } else if (p.left >= position.x) {
                points[0].reverse();
                points[1].reverse();
            } else if (p.bottom <= position.y) {
                points[0].reverse();
            } else if (p.top >= position.y) {
                points[1].reverse();
            }
            // Delay only if the pointer actually moved and its heading
            // narrows toward the box between the oldest and newest samples.
            var delay = position
                && !(this.position && position.x === this.position.x && position.y === this.position.y)
                && points.reduce(function (result, point) {
                    return result + (slope(prevPos, point[0]) < slope(position, point[0]) && slope(prevPos, point[1]) > slope(position, point[1]));
                }, 0);
            this.position = delay ? position : null;
            return delay;
        }
    }
}
// Slope of the line through points a and b (each {x, y}). Plain IEEE
// division: vertical lines yield +/-Infinity, coincident points NaN.
function slope(a, b) {
    var rise = b.y - a.y;
    var run = b.x - a.x;
    return rise / run;
}
// Mixin: positions an element relative to a target using a
// "direction-alignment" keyword pair (e.g. 'bottom-left'), with optional
// flipping inside a boundary and a numeric offset.
var Position = {
    props: {
        pos: String,
        offset: null,
        flip: Boolean,
        clsPos: String
    },
    defaults: {
        pos: 'bottom-left',
        flip: true,
        offset: false,
        clsPos: ''
    },
    init: function init() {
        // Normalize a bare direction ('top') to 'top-center', then split
        // into [direction, alignment].
        this.pos = (this.pos + (!~this.pos.indexOf('-') ? '-center' : '')).split('-');
        this.dir = this.pos[0];
        this.align = this.pos[1];
    },
    methods: {
        // Positions `element` against `target`, flipping within `boundary`
        // when configured; updates dir/align to the possibly-flipped result
        // and applies the matching position class.
        positionAt: function positionAt(element, target, boundary) {
            // Clear any previous position class and inline coordinates.
            removeClass(element, this.clsPos + '-(top|bottom|left|right)(-[a-z]+)?').css({top: '', left: ''});
            this.dir = this.pos[0];
            this.align = this.pos[1];
            var offset = toNumber(this.offset) || 0,
                axis = this.getAxis(),
                flipped = position(
                    element,
                    target,
                    // Element/target attachment strings and the signed offset
                    // are axis-dependent ("x y" keyword order).
                    axis === 'x' ? ((flipPosition(this.dir)) + " " + (this.align)) : ((this.align) + " " + (flipPosition(this.dir))),
                    axis === 'x' ? ((this.dir) + " " + (this.align)) : ((this.align) + " " + (this.dir)),
                    axis === 'x' ? ("" + (this.dir === 'left' ? -1 * offset : offset)) : (" " + (this.dir === 'top' ? -1 * offset : offset)),
                    null,
                    this.flip,
                    boundary
                );
            this.dir = axis === 'x' ? flipped.target.x : flipped.target.y;
            this.align = axis === 'x' ? flipped.target.y : flipped.target.x;
            // Only apply the directional class when no manual offset is set.
            element.css('display', '').toggleClass(((this.clsPos) + "-" + (this.dir) + "-" + (this.align)), this.offset === false);
        },
        // 'y' when the primary direction is vertical, otherwise 'x'.
        getAxis: function getAxis() {
            return this.pos[0] === 'top' || this.pos[0] === 'bottom' ? 'y' : 'x';
        }
    }
}
// Registers the built-in mixins on UIkit.mixin so components and plugins
// can reference them by name.
function mixin$1 (UIkit) {
    UIkit.mixin.class = Class;
    UIkit.mixin.modal = Modal;
    UIkit.mixin.mouse = Mouse;
    UIkit.mixin.position = Position;
    UIkit.mixin.toggable = Toggable;
}
// Registers the 'accordion' component: a list of items whose content
// panels expand/collapse, with optional multi-open and collapsible modes.
function Accordion (UIkit) {
    UIkit.component('accordion', {
        mixins: [Class, Toggable],
        props: {
            targets: String,
            active: null,
            collapsible: Boolean,
            multiple: Boolean,
            toggle: String,
            content: String,
            transition: String
        },
        defaults: {
            targets: '> *',
            active: false,
            animation: true,
            collapsible: true,
            multiple: false,
            clsOpen: 'uk-open',
            toggle: '.uk-accordion-title',
            content: '.uk-accordion-content',
            transition: 'ease'
        },
        ready: function ready() {
            var this$1 = this;
            // Delegate clicks on item titles to show() by item index.
            this.$el.on('click', ((this.targets) + " " + (this.toggle)), function (e) {
                e.preventDefault();
                this$1.show(this$1.items.find(this$1.toggle).index(e.currentTarget));
            });
        },
        update: function update() {
            var this$1 = this;
            // Re-scan items; bail out unless the item set actually changed.
            var items = $__default(this.targets, this.$el),
                changed = !this.items || items.length !== this.items.length || items.toArray().some(function (el, i) { return el !== this$1.items.get(i); });
            this.items = items;
            if (!changed) {
                return;
            }
            // Sync each content panel with its item's open state.
            this.items.each(function (i, el) {
                el = $__default(el);
                this$1.toggleNow(el.find(this$1.content), el.hasClass(this$1.clsOpen));
            });
            // Open the configured active item, or the first one when the
            // accordion must always keep one item open.
            var active = this.active !== false && toJQuery(this.items.eq(Number(this.active))) || !this.collapsible && toJQuery(this.items.eq(0));
            if (active && !active.hasClass(this.clsOpen)) {
                this.show(active, false);
            }
        },
        methods: {
            // Opens (or toggles) an item by index or element; closes other
            // open items unless `multiple` is enabled.
            show: function show(item, animate) {
                var this$1 = this;
                if (!this.items) {
                    this.$emitSync();
                }
                var index = getIndex(item, this.items),
                    active = this.items.filter(("." + (this.clsOpen)));
                item = this.items.eq(index);
                item.add(!this.multiple && active).each(function (i, el) {
                    el = $__default(el);
                    var content = el.find(this$1.content), isItem = el.is(item), state = isItem && !el.hasClass(this$1.clsOpen);
                    // Don't collapse the last open item when not collapsible.
                    if (!state && isItem && !this$1.collapsible && active.length < 2) {
                        return;
                    }
                    el.toggleClass(this$1.clsOpen, state);
                    // Wrap content so the height transition runs on the wrapper.
                    if (!Transition.inProgress(content.parent())) {
                        content.wrap('<div>').parent().attr('hidden', state);
                    }
                    this$1.toggleNow(content, true);
                    this$1.toggleElement(content.parent(), state, animate).then(function () {
                        if (el.hasClass(this$1.clsOpen) === state) {
                            if (!state) {
                                this$1.toggleNow(content, false);
                            }
                            content.unwrap();
                        }
                    });
                })
            }
        }
    });
}
// Registers the 'alert' component: a dismissible message box that fades
// out and destroys itself (removing its element) when closed.
function Alert (UIkit) {
    UIkit.component('alert', {
        mixins: [Class, Toggable],
        args: 'animation',
        props: {
            animation: Boolean,
            close: String
        },
        defaults: {
            animation: true,
            close: '.uk-alert-close',
            duration: 150,
            // Fade out by animating opacity rather than collapsing height.
            hideProps: {opacity: 0}
        },
        ready: function ready() {
            var this$1 = this;
            // Delegate clicks on the close control.
            this.$el.on('click', this.close, function (e) {
                e.preventDefault();
                this$1.closeAlert();
            });
        },
        methods: {
            // Hides the alert, then destroys the component and removes $el.
            closeAlert: function closeAlert() {
                var this$1 = this;
                this.toggleElement(this.$el).then(function () { return this$1.$destroy(true); });
            }
        }
    });
}
// Registers the 'cover' component: scales media (video/iframe/image) to
// cover its parent while preserving aspect ratio; iframes are made
// click-through and (optionally) muted via the YouTube/Vimeo JS API.
function Cover (UIkit) {
    UIkit.component('cover', {
        props: {
            automute: Boolean,
            width: Number,
            height: Number
        },
        defaults: {automute: true},
        ready: function ready() {
            if (!this.$el.is('iframe')) {
                return;
            }
            this.$el.css('pointerEvents', 'none');
            if (this.automute) {
                // Enable the player's JS API, then post a mute command once loaded.
                var src = this.$el.attr('src');
                this.$el
                    .attr('src', ("" + src + (~src.indexOf('?') ? '&' : '?') + "enablejsapi=1&api=1"))
                    .on('load', function (ref) {
                        var target = ref.target;
                        return target.contentWindow.postMessage('{"event": "command", "func": "mute", "method":"setVolume", "value":0}', '*');
                    });
            }
        },
        update: {
            write: function write() {
                // Skip hidden elements — they measure as zero.
                if (this.$el[0].offsetHeight === 0) {
                    return;
                }
                // Recompute cover dimensions from intrinsic (or configured)
                // size against the parent's box.
                this.$el
                    .css({width: '', height: ''})
                    .css(Dimensions.cover(
                        {width: this.width || this.$el.width(), height: this.height || this.$el.height()},
                        {width: this.$el.parent().width(), height: this.$el.parent().height()}
                    ));
            },
            events: ['load', 'resize', 'orientationchange']
        },
        events: {
            // Media metadata carries the intrinsic size — re-run the update.
            loadedmetadata: function loadedmetadata() {
                this.$emit();
            }
        }
    });
}
// Registers the 'drop' component: a toggleable positioned panel
// (dropdown base) with hover/click modes, show/hide delays, mouse-aim
// hide deferral, boundary-aware positioning and a single shared
// 'active' drop closed by outside clicks.
function Drop (UIkit) {
    // The currently open drop instance (shared across all drops).
    var active;
    // Close the active drop on any click outside it and its toggle.
    doc.on('click', function (e) {
        if (active && !isWithin(e.target, active.$el) && (!active.toggle || !isWithin(e.target, active.toggle.$el))) {
            active.hide(false);
        }
    });
    UIkit.component('drop', {
        mixins: [Mouse, Position, Toggable],
        args: 'pos',
        props: {
            mode: String,
            toggle: Boolean,
            boundary: 'jQuery',
            boundaryAlign: Boolean,
            delayShow: Number,
            delayHide: Number,
            clsDrop: String
        },
        defaults: {
            mode: 'hover',
            toggle: '- :first',
            boundary: window,
            boundaryAlign: false,
            delayShow: 0,
            delayHide: 800,
            clsDrop: false,
            hoverIdle: 200,
            animation: 'uk-animation-fade',
            cls: 'uk-open'
        },
        init: function init() {
            // Derive the CSS namespace from the component name
            // ('uk-drop' / 'uk-dropdown' for the extended component).
            this.clsDrop = this.clsDrop || ("uk-" + (this.$options.name));
            this.clsPos = this.clsDrop;
            this.$el.addClass(this.clsDrop);
        },
        ready: function ready() {
            var this$1 = this;
            this.updateAria(this.$el);
            // Delegate clicks on in-drop close buttons.
            this.$el.on('click', ("." + (this.clsDrop) + "-close"), function (e) {
                e.preventDefault();
                this$1.hide(false);
            });
            // Resolve the toggle selector and wrap it in a toggle component.
            if (this.toggle) {
                this.toggle = query(this.toggle, this.$el);
                if (this.toggle) {
                    this.toggle = UIkit.toggle(this.toggle, {target: this.$el, mode: this.mode})[0];
                }
            }
        },
        update: {
            // Re-positions the open drop: handles 'justify' alignment,
            // stacking when too wide for the boundary, then positions
            // against the toggle (or the boundary when boundaryAlign).
            write: function write() {
                if (!this.$el.hasClass(this.cls)) {
                    return;
                }
                removeClass(this.$el, ((this.clsDrop) + "-(stack|boundary)")).css({top: '', left: ''});
                this.$el.toggleClass(((this.clsDrop) + "-boundary"), this.boundaryAlign);
                this.dir = this.pos[0];
                this.align = this.pos[1];
                var boundary = getDimensions(this.boundary), alignTo = this.boundaryAlign ? boundary : getDimensions(this.toggle.$el);
                if (this.align === 'justify') {
                    var prop = this.getAxis() === 'y' ? 'width' : 'height';
                    this.$el.css(prop, alignTo[prop]);
                } else if (this.$el.outerWidth() > Math.max(boundary.right - alignTo.left, alignTo.right - boundary.left)) {
                    this.$el.addClass(this.clsDrop + '-stack');
                    this.$el.trigger('stack', [this]);
                }
                this.positionAt(this.$el, this.boundaryAlign ? this.boundary : this.toggle.$el, this.boundary);
            },
            events: ['resize', 'orientationchange']
        },
        events: {
            toggle: function toggle(e, toggle$1) {
                e.preventDefault();
                if (this.isToggled(this.$el)) {
                    this.hide(false);
                } else {
                    this.show(toggle$1, false);
                }
            },
            'toggleShow mouseenter': function toggleShowmouseenter(e, toggle) {
                e.preventDefault();
                this.show(toggle || this.toggle);
            },
            'toggleHide mouseleave': function toggleHidemouseleave(e) {
                e.preventDefault();
                // Only auto-hide in hover mode.
                if (this.toggle && this.toggle.mode === 'hover') {
                    this.hide();
                }
            }
        },
        methods: {
            // Opens the drop for `toggle`. With `delay`, waits for delayShow
            // and defers while another delaying drop is closing; otherwise
            // any other active drop is hidden immediately.
            show: function show(toggle, delay) {
                var this$1 = this;
                if ( delay === void 0 ) delay = true;
                if (toggle && this.toggle && !this.toggle.$el.is(toggle.$el)) {
                    this.hide(false);
                }
                this.toggle = toggle || this.toggle;
                this.clearTimers();
                if (this.isActive()) {
                    return;
                } else if (delay && active && active !== this && active.isDelaying) {
                    // Another drop is mid mouse-aim delay — retry shortly.
                    this.showTimer = setTimeout(this.show, 75);
                    return;
                } else if (active) {
                    active.hide(false);
                }
                var show = function () {
                    if (this$1.toggleElement(this$1.$el, true).state() !== 'rejected') {
                        this$1.initMouseTracker();
                        this$1.toggle.$el.addClass(this$1.cls).attr('aria-expanded', 'true');
                        this$1.clearTimers();
                    }
                };
                if (delay && this.delayShow) {
                    this.showTimer = setTimeout(show, this.delayShow);
                } else {
                    show();
                }
                active = this;
            },
            // Hides the drop. With `delay`, defers while the pointer is
            // aiming at the drop (Mouse mixin) and then by delayHide.
            hide: function hide(delay) {
                var this$1 = this;
                if ( delay === void 0 ) delay = true;
                this.clearTimers();
                var hide = function () {
                    if (this$1.toggleElement(this$1.$el, false, false).state() !== 'rejected') {
                        active = this$1.isActive() ? null : active;
                        this$1.toggle.$el.removeClass(this$1.cls).attr('aria-expanded', 'false').blur().find('a, button').blur();
                        this$1.cancelMouseTracker();
                        this$1.clearTimers();
                    }
                };
                this.isDelaying = this.movesTo(this.$el);
                if (delay && this.isDelaying) {
                    this.hideTimer = setTimeout(this.hide, this.hoverIdle);
                } else if (delay && this.delayHide) {
                    this.hideTimer = setTimeout(hide, this.delayHide);
                } else {
                    hide();
                }
            },
            clearTimers: function clearTimers() {
                clearTimeout(this.showTimer);
                clearTimeout(this.hideTimer);
                this.showTimer = null;
                this.hideTimer = null;
            },
            isActive: function isActive() {
                return active === this;
            }
        }
    });
    // Expose the shared active drop for other components (e.g. navbar).
    UIkit.drop.getActive = function () { return active; };
}
// Registers `dropdown` as a thin alias of the `drop` component, differing
// only in its component name (and therefore its `uk-dropdown` class prefix).
function Dropdown (UIkit) {
    var definition = UIkit.components.drop.extend({name: 'dropdown'});
    UIkit.component('dropdown', definition);
}
// Custom form control decorator: mirrors focus/hover state of the hidden
// native input onto its styled stand-in (the input's next sibling), and
// optionally reflects the input's value into a target element.
function FormCustom (UIkit) {
UIkit.component('form-custom', {
mixins: [Class],
args: 'target',
props: {
target: Boolean
},
defaults: {
target: false
},
ready: function ready() {
// First native form control inside the component element.
this.input = this.$el.find(':input:first');
// `target: true` means "the element right after the input"; any other
// truthy value is treated as a selector scoped to this element.
this.target = this.target && query(this.target === true ? '> :input:first + :first' : this.target, this.$el);
// The styled stand-in element that visually replaces the native control.
var state = this.input.next();
this.input.on({
focus: function (e) { return state.addClass('uk-focus'); },
blur: function (e) { return state.removeClass('uk-focus'); },
mouseenter: function (e) { return state.addClass('uk-hover'); },
mouseleave: function (e) { return state.removeClass('uk-hover'); }
});
// Fire once so the target shows the input's initial value.
this.input.trigger('change');
},
events: {
change: function change() {
// Write into the target via val() for form controls, text() otherwise.
// Value precedence: selected file name > selected option text > raw value.
this.target && this.target[this.target.is(':input') ? 'val' : 'text'](
this.input[0].files && this.input[0].files[0]
? this.input[0].files[0].name
: this.input.is('select')
? this.input.find('option:selected').text()
: this.input.val()
);
}
}
});
}
// Restarts an animated GIF each time its element scrolls into view.
function Gif (UIkit) {
    UIkit.component('gif', {
        update: {
            read: function read() {
                var nowVisible = isInView(this.$el);
                // Re-assigning src to itself restarts the GIF animation; only
                // do it on the hidden -> visible transition.
                if (nowVisible && !this.isInView) {
                    this.$el[0].src = this.$el[0].src;
                }
                this.isInView = nowVisible;
            },
            events: ['scroll', 'load', 'resize', 'orientationchange']
        }
    });
}
// Grid component: a margin component with grid class names that flags
// the container with `uk-grid-stack` when all children wrap onto one column.
function Grid (UIkit) {
    var options = {
        mixins: [Class],
        name: 'grid',
        defaults: {
            margin: 'uk-grid-margin',
            clsStack: 'uk-grid-stack'
        },
        update: {
            write: function write() {
                // `this.stacks` is computed by the inherited margin component's read phase.
                this.$el.toggleClass(this.clsStack, this.stacks);
            },
            events: ['load', 'resize', 'orientationchange']
        }
    };
    UIkit.component('grid', UIkit.components.margin.extend(options));
}
// Height-match component: equalises the min-height of target children,
// optionally per visual row (elements sharing the same offset top).
function HeightMatch (UIkit) {
UIkit.component('height-match', {
args: 'target',
props: {
target: String,
row: Boolean
},
defaults: {
target: '> *',
row: true
},
update: {
write: function write() {
var this$1 = this;
// Clear previous min-heights first so natural heights are measured.
var elements = toJQuery(this.target, this.$el).css('min-height', '');
if (!this.row) {
this.match(elements);
return this;
}
// Group elements into rows by offset top, matching each row separately.
var lastOffset = false, group = [];
elements.each(function (i, el) {
el = $__default(el);
var offset = el.offset().top;
if (offset != lastOffset && group.length) {
this$1.match($__default(group));
group = [];
// Re-read: matching the previous group may have moved this element.
offset = el.offset().top;
}
group.push(el);
lastOffset = offset;
});
// Flush the final row.
if (group.length) {
this.match($__default(group));
}
},
events: ['resize', 'orientationchange']
},
methods: {
// Sets min-height on each element so every one reaches the tallest
// element's outer height. No-op for fewer than two elements.
match: function match(elements) {
if (elements.length < 2) {
return;
}
var max = 0;
elements
.each(function (i, el) {
el = $__default(el);
var height;
if (el.css('display') === 'none') {
// Temporarily force display so hidden elements can be measured.
var style = el.attr('style');
el.attr('style', (style + ";display:block !important;"));
height = el.outerHeight();
el.attr('style', style || '');
} else {
height = el.outerHeight();
}
max = Math.max(max, height);
})
.each(function (i, el) {
el = $__default(el);
// Subtract the padding/border share (outerHeight - height) so the
// resulting min-height targets the content box.
el.css('min-height', ((max - (el.outerHeight() - parseFloat(el.css('height')))) + "px"));
});
}
}
});
}
// Height-viewport component: sizes the element to (at least) the viewport
// height; `expand` grows it to fill leftover document height instead, and
// offsetTop/offsetBottom subtract the element's own top offset and/or the
// following sibling's height from 100vh.
function HeightViewport (UIkit) {
UIkit.component('height-viewport', {
props: {
expand: Boolean,
offsetTop: Boolean,
offsetBottom: Boolean
},
defaults: {
expand: false,
offsetTop: false,
offsetBottom: false
},
init: function init() {
this.$emit();
},
update: {
write: function write() {
var viewport = window.innerHeight, height, offset = 0;
if (this.expand) {
// Grow by however much the document falls short of the viewport.
this.$el.css({height: '', minHeight: ''});
var diff = viewport - document.documentElement.offsetHeight;
if (diff > 0) {
this.$el.css('min-height', height = this.$el.outerHeight() + diff)
}
} else {
var top = this.$el[0].offsetTop;
// Offsets only apply when the element starts inside the viewport.
if (top < viewport) {
if (this.offsetTop) {
offset += top;
}
if (this.offsetBottom) {
offset += this.$el.next().outerHeight() || 0;
}
}
// calc() keeps the value correct across viewport resizes in CSS.
this.$el.css('min-height', height = offset ? ("calc(100vh - " + offset + "px)") : '100vh');
}
// IE 10-11 fix (min-height on a flex container won't apply to its flex items)
this.$el.css('height', '');
if (height && viewport - offset >= this.$el.outerHeight()) {
this.$el.css('height', height);
}
},
events: ['load', 'resize', 'orientationchange']
}
});
}
// Touch devices have no real hover: emulate it by adding `uk-hover` on tap
// and clearing it from all elements outside the tapped target.
function Hover (UIkit) {
ready(function () {
// No-op on non-touch devices.
if (!hasTouch) {
return;
}
var cls = 'uk-hover';
// Any tap clears the hover class from elements not containing the target.
doc$1.on('tap', function (ref) {
var target = ref.target;
return $__default(("." + cls)).filter(function (_, el) { return !isWithin(target, el); }).removeClass(cls);
});
// Write-only property: assigning a selector registers an extra tap handler
// that adds the hover class to matching elements.
Object.defineProperty(UIkit, 'hoverSelector', {
set: function set(selector) {
doc$1.on('tap', selector, function () {
this.classList.add(cls);
});
}
});
UIkit.hoverSelector = '.uk-animation-toggle, .uk-transition-toggle, [uk-hover]';
});
}
// Registers the base `icon` component (an svg component that always carries
// the `uk-icon` class) plus a set of derived components that differ only by name.
function Icon (UIkit) {
    UIkit.component('icon', UIkit.components.svg.extend({
        mixins: [Class],
        name: 'icon',
        args: 'icon',
        props: ['icon'],
        defaults: {exclude: ['id', 'style', 'class', 'src']},
        init: function init() {
            this.$el.addClass('uk-icon');
        }
    }));
    var derived = [
        'close',
        'navbar-toggle-icon',
        'overlay-icon',
        'pagination-previous',
        'pagination-next',
        'slidenav',
        'search-icon',
        'totop'
    ];
    derived.forEach(function (name) {
        UIkit.component(name, UIkit.components.icon.extend({name: name}));
    });
}
// Margin component: groups the element's visible children into visual rows
// (by vertical overlap) and applies a top margin to every row after the
// first, plus a first-column class to each row's left-most element.
function Margin (UIkit) {
UIkit.component('margin', {
props: {
margin: String,
firstColumn: Boolean
},
defaults: {
margin: 'uk-margin-small-top',
firstColumn: 'uk-first-column'
},
connected: function connected() {
this.$emit();
},
update: {
read: function read() {
var this$1 = this;
// Hidden container: nothing to measure, remember the state for write().
if (this.$el[0].offsetHeight === 0) {
this.hidden = true;
return;
}
this.hidden = false;
this.stacks = true;
// Only visible children participate in row detection.
var columns = this.$el.children().filter(function (_, el) { return el.offsetHeight > 0; });
this.rows = [[columns.get(0)]];
columns.slice(1).each(function (_, el) {
var top = Math.ceil(el.offsetTop), bottom = top + el.offsetHeight;
// Walk existing rows bottom-up to find where this element belongs.
for (var index = this$1.rows.length - 1; index >= 0; index--) {
var row = this$1.rows[index], rowTop = Math.ceil(row[0].offsetTop);
// Entirely below this row: start a new row after it.
if (top >= rowTop + row[0].offsetHeight) {
this$1.rows.push([el]);
break;
}
// Vertically overlaps this row: same row, so layout is not stacked.
if (bottom > rowTop) {
this$1.stacks = false;
// Keep index 0 as the left-most element of the row.
if (el.offsetLeft < row[0].offsetLeft) {
row.unshift(el);
break;
}
row.push(el);
break;
}
// Above every existing row: insert a new row at the top.
if (index === 0) {
this$1.rows.splice(index, 0, [el]);
break;
}
}
});
},
write: function write() {
var this$1 = this;
if (this.hidden) {
return;
}
// Margin on every row but the first; first-column class on each
// row's first (left-most) element.
this.rows.forEach(function (row, i) { return row.forEach(function (el, j) { return $__default(el)
.toggleClass(this$1.margin, i !== 0)
.toggleClass(this$1.firstColumn, j === 0); }
); }
)
},
events: ['load', 'resize', 'orientationchange']
}
});
}
// Modal component, the overflow-auto helper component, and the imperative
// dialog helpers (UIkit.modal.dialog / alert / confirm / prompt).
function Modal$1 (UIkit) {
UIkit.component('modal', {
mixins: [Modal],
props: {
center: Boolean,
container: Boolean
},
defaults: {
center: false,
clsPage: 'uk-modal-page',
clsPanel: 'uk-modal-dialog',
selClose: '.uk-modal-close, .uk-modal-close-default, .uk-modal-close-outside, .uk-modal-close-full',
container: true
},
ready: function ready() {
// `container: true` resolves to the global UIkit container; any other
// truthy value is treated as a selector.
this.container = this.container === true && UIkit.container || this.container && toJQuery(this.container);
// Reparent the modal so it escapes overflow/stacking contexts.
if (this.container && !this.$el.parent().is(this.container)) {
this.$el.appendTo(this.container);
}
},
update: {
write: function write() {
// Vertically center via flex only while the dialog fits the viewport;
// the display toggling forces consistent measurement.
if (this.$el.css('display') === 'block' && this.center) {
this.$el
.removeClass('uk-flex uk-flex-center uk-flex-middle')
.css('display', 'block')
.toggleClass('uk-flex uk-flex-center uk-flex-middle', window.innerHeight > this.panel.outerHeight(true))
.css('display', this.$el.hasClass('uk-flex') ? '' : 'block');
}
},
events: ['resize', 'orientationchange']
},
events: {
beforeshow: function beforeshow(e) {
if (!this.$el.is(e.target)) {
return;
}
this.page.addClass(this.clsPage);
this.$el.css('display', 'block');
// Reading height() forces a reflow so the opening transition runs.
this.$el.height();
},
hide: function hide(e) {
if (!this.$el.is(e.target)) {
return;
}
// Only release the page class once no other modal remains open.
if (!this.getActive()) {
this.page.removeClass(this.clsPage);
}
this.$el.css('display', '').removeClass('uk-flex uk-flex-center uk-flex-middle');
}
}
});
// Caps a scrollable modal body's max-height so the dialog fits the window.
UIkit.component('overflow-auto', {
mixins: [Class],
ready: function ready() {
this.panel = query('!.uk-modal-dialog', this.$el);
this.$el.css('min-height', 150);
},
update: {
write: function write() {
var current = this.$el.css('max-height');
// 150px floor; otherwise shrink by however much the dialog overflows.
this.$el.css('max-height', 150).css('max-height', Math.max(150, 150 - (this.panel.outerHeight(true) - window.innerHeight)));
// Notify listeners only when the computed max-height actually changed.
if (current !== this.$el.css('max-height')) {
this.$el.trigger('resize');
}
},
events: ['load', 'resize', 'orientationchange']
}
});
// Builds, shows, and auto-destroys (on hide) a modal around raw HTML content.
UIkit.modal.dialog = function (content, options) {
var dialog = UIkit.modal($__default(
("<div class=\"uk-modal\">\n <div class=\"uk-modal-dialog\">" + content + "</div>\n </div>")
), options)[0];
dialog.show();
dialog.$el.on('hide', function () { return dialog.$destroy(true); });
return dialog;
};
// Alert dialog; the returned promise resolves when the dialog is closed.
UIkit.modal.alert = function (message, options) {
options = $.extend({bgClose: false, escClose: false, labels: UIkit.modal.labels}, options);
var deferred = $__default.Deferred();
UIkit.modal.dialog(("\n <div class=\"uk-modal-body\">" + (isString(message) ? message : $__default(message).html()) + "</div>\n <div class=\"uk-modal-footer uk-text-right\">\n <button class=\"uk-button uk-button-primary uk-modal-close\" autofocus>" + (options.labels.ok) + "</button>\n </div>\n "), options).$el.on('hide', function () { return deferred.resolve(); });
return deferred.promise();
};
// Confirm dialog; resolves on OK, rejects on Cancel (button index 0).
UIkit.modal.confirm = function (message, options) {
options = $.extend({bgClose: false, escClose: false, labels: UIkit.modal.labels}, options);
var deferred = $__default.Deferred();
UIkit.modal.dialog(("\n <div class=\"uk-modal-body\">" + (isString(message) ? message : $__default(message).html()) + "</div>\n <div class=\"uk-modal-footer uk-text-right\">\n <button class=\"uk-button uk-button-default uk-modal-close\">" + (options.labels.cancel) + "</button>\n <button class=\"uk-button uk-button-primary uk-modal-close\" autofocus>" + (options.labels.ok) + "</button>\n </div>\n "), options).$el.on('click', '.uk-modal-footer button', function (e) { return deferred[$__default(e.target).index() === 0 ? 'reject' : 'resolve'](); });
return deferred.promise();
};
// Prompt dialog; resolves with the entered value on submit, or null when
// the dialog is dismissed without submitting.
UIkit.modal.prompt = function (message, value, options) {
options = $.extend({bgClose: false, escClose: false, labels: UIkit.modal.labels}, options);
var deferred = $__default.Deferred(),
prompt = UIkit.modal.dialog(("\n <form class=\"uk-form-stacked\">\n <div class=\"uk-modal-body\">\n <label>" + (isString(message) ? message : $__default(message).html()) + "</label>\n <input class=\"uk-input\" type=\"text\" autofocus>\n </div>\n <div class=\"uk-modal-footer uk-text-right\">\n <button class=\"uk-button uk-button-default uk-modal-close\" type=\"button\">" + (options.labels.cancel) + "</button>\n <button class=\"uk-button uk-button-primary\" type=\"submit\">" + (options.labels.ok) + "</button>\n </div>\n </form>\n "), options),
input = prompt.$el.find('input').val(value);
prompt.$el
.on('submit', 'form', function (e) {
e.preventDefault();
deferred.resolve(input.val());
prompt.hide()
})
.on('hide', function () {
if (deferred.state() === 'pending') {
deferred.resolve(null);
}
});
return deferred.promise();
};
// Default button labels; override globally to localize the dialogs.
UIkit.modal.labels = {
ok: 'Ok',
cancel: 'Cancel'
}
}
// `nav` is an accordion with nav-specific default selectors for its
// toggles, targets and collapsible content.
function Nav (UIkit) {
    var navDefaults = {
        targets: '> .uk-parent',
        toggle: '> a',
        content: 'ul:first'
    };
    UIkit.component('nav', UIkit.components.accordion.extend({
        name: 'nav',
        defaults: navDefaults
    }));
}
// Navbar component: instantiates drop components for each dropdown item and
// optionally manages a shared "dropbar" panel that dropdowns open into.
function Navbar (UIkit) {
UIkit.component('navbar', {
mixins: [Class],
props: {
dropdown: String,
mode: String,
align: String,
offset: Number,
boundary: Boolean,
boundaryAlign: Boolean,
clsDrop: String,
delayShow: Number,
delayHide: Number,
dropbar: Boolean,
dropbarMode: String,
dropbarAnchor: 'jQuery',
duration: Number
},
defaults: {
dropdown: '.uk-navbar-nav > li',
mode: 'hover',
align: 'left',
offset: false,
boundary: true,
boundaryAlign: false,
clsDrop: 'uk-navbar-dropdown',
delayShow: 0,
delayHide: 800,
flip: 'x',
dropbar: false,
dropbarMode: 'slide',
dropbarAnchor: false,
duration: 200,
},
init: function init() {
// `boundary: true` (or boundary alignment) constrains drops to the navbar itself.
this.boundary = (this.boundary === true || this.boundaryAlign) ? this.$el : this.boundary;
this.pos = "bottom-" + (this.align);
},
ready: function ready() {
var this$1 = this;
// Hovering another item closes the currently open drop immediately,
// unless it is mid hover-out delay.
this.$el.on('mouseenter', this.dropdown, function (ref) {
var target = ref.target;
var active = this$1.getActive();
if (active && !isWithin(target, active.toggle.$el) && !active.isDelaying) {
active.hide(false);
}
});
if (!this.dropbar) {
return;
}
// Reuse an existing dropbar element or create one after the anchor/navbar.
this.dropbar = query(this.dropbar, this.$el) || $__default('<div class="uk-navbar-dropbar"></div>').insertAfter(this.dropbarAnchor || this.$el);
this.dropbar.on({
mouseleave: function () {
var active = this$1.getActive();
if (active && !this$1.dropbar.is(':hover')) {
active.hide();
}
},
beforeshow: function (e, ref) {
var $el = ref.$el;
$el.addClass(((this$1.clsDrop) + "-dropbar"));
// Grow the dropbar to the opening drop's height.
this$1.transitionTo($el.outerHeight(true));
},
beforehide: function (e, ref) {
var $el = ref.$el;
var active = this$1.getActive();
// Keep the drop open while the pointer is still over the dropbar.
if (this$1.dropbar.is(':hover') && active && active.$el.is($el)) {
return false;
}
},
hide: function (e, ref) {
var $el = ref.$el;
var active = this$1.getActive();
// Collapse the dropbar once no drop (or only this one) remains.
if (!active || active && active.$el.is($el)) {
this$1.transitionTo(0);
}
}
});
if (this.dropbarMode === 'slide') {
this.dropbar.addClass('uk-navbar-dropbar-slide');
}
},
update: function update() {
var this$1 = this;
// Lazily attach a drop component to each dropdown item that lacks one,
// passing this navbar's settings through.
$__default(this.dropdown, this.$el).each(function (i, el) {
var drop = toJQuery(("." + (this$1.clsDrop)), el);
if (drop && !UIkit.getComponent(drop, 'drop') && !UIkit.getComponent(drop, 'dropdown')) {
UIkit.drop(drop, $.extend({}, this$1));
}
});
},
events: {
beforeshow: function beforeshow(e, ref) {
var $el = ref.$el;
var dir = ref.dir;
// Move downward-opening drops into the dropbar and forward the event.
if (this.dropbar && dir === 'bottom' && !isWithin($el, this.dropbar)) {
$el.appendTo(this.dropbar);
this.dropbar.trigger('beforeshow', [{$el: $el}]);
}
}
},
methods: {
// Returns the open non-click drop that belongs to this navbar, if any.
getActive: function getActive() {
var active = UIkit.drop.getActive();
return active && active.mode !== 'click' && isWithin(active.toggle.$el, this.$el) && active;
},
// Animates the dropbar to the given height, starting from its current one.
transitionTo: function transitionTo(height) {
this.dropbar.height(this.dropbar[0].offsetHeight ? this.dropbar.height() : 0);
return Transition.cancel(this.dropbar).start(this.dropbar, {height: height}, this.duration);
}
}
});
}
// Offcanvas component: slide/push/reveal/none side panel built on the Modal
// mixin. init() resolves which animation/overlay classes apply for the
// configured mode; the event handlers choreograph adding/removing them.
function Offcanvas (UIkit) {
UIkit.component('offcanvas', {
mixins: [Modal],
args: 'mode',
props: {
mode: String,
flip: Boolean,
overlay: Boolean
},
defaults: {
mode: 'slide',
flip: false,
overlay: false,
clsPage: 'uk-offcanvas-page',
clsPanel: 'uk-offcanvas-bar',
clsFlip: 'uk-offcanvas-flip',
clsPageAnimation: 'uk-offcanvas-page-animation',
clsSidebarAnimation: 'uk-offcanvas-bar-animation',
clsMode: 'uk-offcanvas',
clsOverlay: 'uk-offcanvas-overlay',
clsPageOverlay: 'uk-offcanvas-page-overlay',
selClose: '.uk-offcanvas-close'
},
init: function init() {
// Blank out classes that don't apply to the current configuration.
this.clsFlip = this.flip ? this.clsFlip : '';
this.clsOverlay = this.overlay ? this.clsOverlay : '';
this.clsPageOverlay = this.overlay ? this.clsPageOverlay : '';
this.clsMode = (this.clsMode) + "-" + (this.mode);
// 'none'/'reveal' modes don't animate the bar itself.
if (this.mode === 'none' || this.mode === 'reveal') {
this.clsSidebarAnimation = '';
}
// Only 'push'/'reveal' animate the page.
if (this.mode !== 'push' && this.mode !== 'reveal') {
this.clsPageAnimation = '';
}
},
update: {
write: function write() {
// Compensate for the hidden scrollbar while the panel is open.
if (this.isActive()) {
this.page.width(window.innerWidth - this.getScrollbarWidth());
}
},
events: ['resize', 'orientationchange']
},
events: {
beforeshow: function beforeshow(e) {
if (!this.$el.is(e.target)) {
return;
}
this.page.addClass(((this.clsPage) + " " + (this.clsFlip) + " " + (this.clsPageAnimation) + " " + (this.clsPageOverlay)));
this.panel.addClass(((this.clsSidebarAnimation) + " " + (this.clsMode)));
// Reading height() forces a reflow so the opening transition runs.
this.$el.addClass(this.clsOverlay).css('display', 'block').height();
},
beforehide: function beforehide(e) {
if (!this.$el.is(e.target)) {
return;
}
this.page.removeClass(this.clsPageAnimation).css('margin-left', '');
// No transition will fire in these cases, so emit transitionend manually.
if (this.mode === 'none' || this.getActive() && this.getActive() !== this) {
this.panel.trigger(transitionend);
}
},
hide: function hide(e) {
if (!this.$el.is(e.target)) {
return;
}
// Undo everything beforeshow applied.
this.page.removeClass(((this.clsPage) + " " + (this.clsFlip) + " " + (this.clsPageOverlay))).width('');
this.panel.removeClass(((this.clsSidebarAnimation) + " " + (this.clsMode)));
this.$el.removeClass(this.clsOverlay).css('display', '');
}
}
});
}
// Responsive component: keeps the element's height proportional to its
// intrinsic width/height ratio, fitted into the parent's width.
function Responsive (UIkit) {
    UIkit.component('responsive', {
        props: ['width', 'height'],
        update: {
            write: function write() {
                // Only act on visible elements with a known intrinsic size.
                if (!this.$el.is(':visible') || !this.width || !this.height) {
                    return;
                }
                var intrinsic = {height: this.height, width: this.width};
                var bounds = {width: this.$el.parent().width(), height: this.height || this.$el.height()};
                this.$el.height(Dimensions.fit(intrinsic, bounds)['height']);
            },
            events: ['load', 'resize', 'orientationchange']
        }
    });
}
// Smooth-scroll component: animates the page to the element referenced by
// the link's hash, clamped so the page never over-scrolls; also registers
// the default `easeOutExpo` easing if jQuery lacks it.
function Scroll (UIkit) {
UIkit.component('scroll', {
props: {
duration: Number,
transition: String,
offset: Number
},
defaults: {
duration: 1000,
transition: 'easeOutExpo',
offset: 0
},
methods: {
// Animates the viewport to `el` and triggers 'scrolled' when done.
scrollToElement: function scrollToElement(el) {
var this$1 = this;
el = $__default(el);
// get / set parameters
var target = el.offset().top - this.offset,
docHeight = doc.height(),
winHeight = window.innerHeight;
// Clamp so we never scroll past the bottom of the document.
if (target + winHeight > docHeight) {
target = docHeight - winHeight;
}
// animate to target, fire callback when done
$__default('html,body')
.stop()
.animate({scrollTop: parseInt(target, 10) || 1}, this.duration, this.transition)
.promise()
.then(function () { return this$1.$el.trigger('scrolled', [this$1]); });
}
},
events: {
click: function click(e) {
if (e.isDefaultPrevented()) {
return;
}
e.preventDefault();
// Fall back to 'body' when the hash matches no element.
this.scrollToElement($__default(this.$el[0].hash).length ? this.$el[0].hash : 'body');
}
}
});
// Provide the default easing only if it isn't already defined.
if (!$__default.easing.easeOutExpo) {
$__default.easing.easeOutExpo = function (x, t, b, c, d) {
return (t == d) ? b + c : c * (-Math.pow(2, -10 * t / d) + 1) + b;
};
}
}
// Scrollspy component: toggles classes (and fires inview/outview events) on
// elements as they enter/leave the viewport, with optional per-element delay
// and repeat behaviour. Per-element state is stashed on `el._scrollspy`.
function Scrollspy (UIkit) {
UIkit.component('scrollspy', {
args: 'cls',
props: {
cls: String,
target: String,
hidden: Boolean,
offsetTop: Number,
offsetLeft: Number,
repeat: Boolean,
delay: Number
},
defaults: {
cls: 'uk-scrollspy-inview',
target: false,
hidden: true,
offsetTop: 0,
offsetLeft: 0,
repeat: false,
delay: 0,
inViewClass: 'uk-scrollspy-inview'
},
init: function init() {
this.$emit();
},
update: [
{
read: function read() {
// Spy on `target` children when given, otherwise the element itself.
this.elements = this.target && toJQuery(this.target, this.$el) || this.$el;
},
write: function write() {
// Hide not-yet-seen elements so they can animate in on first view.
if (this.hidden) {
this.elements.filter((":not(." + (this.inViewClass) + ")")).css('visibility', 'hidden');
}
}
},
{
read: function read() {
var this$1 = this;
this.elements.each(function (i, el) {
// Lazily initialise per-element state; the per-element
// `uk-scrollspy-class` attribute overrides the component cls.
if (!el._scrollspy) {
el._scrollspy = {toggles: ($__default(el).attr('uk-scrollspy-class') || this$1.cls).split(',')};
}
el._scrollspy.show = isInView(el, this$1.offsetTop, this$1.offsetLeft);
});
},
write: function write() {
var this$1 = this;
// With a single element, delay multiplies by 1 instead of 0.
var index = this.elements.length === 1 ? 1 : 0;
this.elements.each(function (i, el) {
var $el = $__default(el);
var data = el._scrollspy;
if (data.show) {
// Entering view: schedule the (possibly staggered) reveal once.
if (!data.inview && !data.timer) {
data.timer = setTimeout(function () {
$el.css('visibility', '')
.addClass(this$1.inViewClass)
.toggleClass(data.toggles[0])
.trigger('inview');
data.inview = true;
delete data.timer;
}, this$1.delay * index++);
}
} else {
// Leaving view: only undo when `repeat` is enabled.
if (data.inview && this$1.repeat) {
if (data.timer) {
clearTimeout(data.timer);
delete data.timer;
}
$el.removeClass(this$1.inViewClass)
.toggleClass(data.toggles[0])
.css('visibility', this$1.hidden ? 'hidden' : '')
.trigger('outview');
data.inview = false;
}
}
// Alternate between the comma-separated toggle classes.
data.toggles.reverse();
});
},
events: ['scroll', 'load', 'resize', 'orientationchange']
}
]
});
}
// Scrollspy-nav component: highlights the nav link whose hash target is
// currently scrolled into view, optionally attaching smooth-scroll behaviour
// to the links.
function ScrollspyNav (UIkit) {
UIkit.component('scrollspy-nav', {
props: {
cls: String,
closest: String,
scroll: Boolean,
overflow: Boolean,
offset: Number
},
defaults: {
cls: 'uk-active',
closest: false,
scroll: false,
overflow: true,
offset: 0
},
update: [
{
read: function read() {
// Only hash links with a non-empty hash participate.
this.links = this.$el.find('a[href^="#"]').filter(function (i, el) { return el.hash; });
// The elements that receive the active class (link or an ancestor).
this.elements = (this.closest ? this.links.closest(this.closest) : this.links);
this.targets = $__default($__default.map(this.links, function (el) { return el.hash; }).join(','));
if (this.scroll) {
UIkit.scroll(this.links, {offset: this.offset || 0});
}
}
},
{
read: function read() {
var this$1 = this;
var scroll = win.scrollTop() + this.offset, max = document.documentElement.scrollHeight - window.innerHeight + this.offset;
this.active = false;
// .each returning false stops iteration; returning undefined continues.
this.targets.each(function (i, el) {
el = $__default(el);
var offset = el.offset(), last = i + 1 === this$1.targets.length;
// Without `overflow`, bail when scrolled before the first or past the last target.
if (!this$1.overflow && (i === 0 && offset.top > scroll || last && offset.top + el.outerHeight() < scroll)) {
return false;
}
// Skip targets that the next one has already superseded.
if (!last && this$1.targets.eq(i + 1).offset().top <= scroll) {
return;
}
// At the very bottom, prefer the last target still in view.
if (scroll >= max) {
for (var j = this$1.targets.length; j > i; j--) {
if (isInView(this$1.targets.eq(j))) {
el = this$1.targets.eq(j);
break;
}
}
}
// Store the matching link and stop (false) once one is found.
return !(this$1.active = toJQuery(this$1.links.filter(("[href=\"#" + (el.attr('id')) + "\"]"))));
});
},
write: function write() {
this.links.blur();
this.elements.removeClass(this.cls);
if (this.active) {
this.$el.trigger('active', [
this.active,
(this.closest ? this.active.closest(this.closest) : this.active).addClass(this.cls)
]);
}
},
events: ['scroll', 'load', 'resize', 'orientationchange']
}
]
});
}
// Spinner icon: once the SVG resolves, sizes its viewBox and circle to the
// element's rendered width.
function Spinner (UIkit) {
    UIkit.component('spinner', UIkit.components.icon.extend({
        name: 'spinner',
        init: function init() {
            // Square dimensions derived from the current element width.
            this.height = this.width = this.$el.width();
        },
        ready: function ready() {
            var self = this;
            this.svg.then(function (svg) {
                var size = self.width;
                var radius = Math.floor(size / 2);
                var circle = svg.find('circle');
                svg[0].setAttribute('viewBox', "0 0 " + size + " " + size);
                // Shrink the radius so the stroke stays fully inside the viewBox.
                circle.attr({cx: radius, cy: radius, r: radius - parseInt(circle.css('stroke-width'), 10)});
            });
        }
    }));
}
// Sticky component: fixes the element within a scroll range, keeping a
// placeholder in the flow to avoid layout jumps. `top`/`bottom` accept
// numbers, vh values, selectors, or true (parent) — resolved each update.
function Sticky (UIkit) {
UIkit.component('sticky', {
props: {
top: null,
bottom: Boolean,
offset: Number,
animation: String,
clsActive: String,
clsInactive: String,
widthElement: 'jQuery',
showOnUp: Boolean,
media: 'media',
target: Number
},
defaults: {
top: 0,
bottom: false,
offset: 0,
animation: '',
clsActive: 'uk-active',
clsInactive: '',
widthElement: false,
showOnUp: false,
media: false,
target: false
},
connected: function connected() {
// Placeholder preserves the element's space while it is position:fixed.
this.placeholder = $__default('<div class="uk-sticky-placeholder"></div>').insertAfter(this.$el).attr('hidden', true);
this._widthElement = this.widthElement || this.placeholder;
},
ready: function ready() {
var this$1 = this;
// Keep the raw prop values; `top`/`bottom` get resolved per update.
this.topProp = this.top;
this.bottomProp = this.bottom;
// On deep links, scroll so the sticky element doesn't cover the target.
if (this.target && location.hash && win.scrollTop() > 0) {
var target = query(location.hash);
if (target) {
requestAnimationFrame(function () {
var top = target.offset().top,
elTop = this$1.$el.offset().top,
elHeight = this$1.$el.outerHeight(),
elBottom = elTop + elHeight;
if (elBottom >= top && elTop <= top + target.outerHeight()) {
window.scrollTo(0, top - elHeight - this$1.target - this$1.offset);
}
});
}
}
},
update: [
{
// Measurement pass: recompute offsets and resolve top/bottom bounds.
write: function write() {
var this$1 = this;
var outerHeight = this.$el.outerHeight(), isActive = this.isActive(), el;
// Mirror the element's size and margins onto the placeholder.
this.placeholder
.css('height', this.$el.css('position') !== 'absolute' ? outerHeight : '')
.css(this.$el.css(['marginTop', 'marginBottom', 'marginLeft', 'marginRight']));
// While active, the placeholder (not the fixed element) holds the flow position.
this.topOffset = (isActive ? this.placeholder.offset() : this.$el.offset()).top;
this.bottomOffset = this.topOffset + outerHeight;
// Resolve `top`/`bottom` from number, '<n>vh', selector, or true (parent).
['top', 'bottom'].forEach(function (prop) {
this$1[prop] = this$1[(prop + "Prop")];
if (!this$1[prop]) {
return;
}
if ($.isNumeric(this$1[prop])) {
this$1[prop] = this$1[(prop + "Offset")] + parseFloat(this$1[prop]);
} else {
if (isString(this$1[prop]) && this$1[prop].match(/^-?\d+vh$/)) {
this$1[prop] = window.innerHeight * parseFloat(this$1[prop]) / 100;
} else {
el = this$1[prop] === true ? this$1.$el.parent() : query(this$1[prop], this$1.$el);
if (el) {
this$1[prop] = el.offset().top + el.outerHeight();
}
}
}
});
this.top = Math.max(parseFloat(this.top), this.topOffset) - this.offset;
this.bottom = this.bottom && this.bottom - outerHeight;
// A media query mismatch disables stickiness entirely.
this.inactive = this.media && !window.matchMedia(this.media).matches;
if (isActive) {
this.update();
}
},
events: ['load', 'resize', 'orientationchange']
},
{
// Scroll pass: decide whether to show, hide, or reposition.
write: function write(ref) {
var this$1 = this;
if ( ref === void 0 ) ref = {};
var dir = ref.dir;
var isActive = this.isActive(), scroll = win.scrollTop();
// Ignore rubber-band scrolling, hidden elements, or a disabled component.
if (scroll < 0 || !this.$el.is(':visible') || this.disabled) {
return;
}
if (this.inactive
|| scroll < this.top
|| this.showOnUp && (dir !== 'up' || dir === 'up' && !isActive && scroll <= this.bottomOffset)
) {
if (!isActive) {
return;
}
isActive = false;
// Animate out only while the element is still below its rest position.
if (this.animation && this.bottomOffset < this.$el.offset().top) {
Animation.cancel(this.$el).then(function () { return Animation.out(this$1.$el, this$1.animation).then(function () { return this$1.hide(); }); });
} else {
this.hide();
}
} else if (isActive) {
this.update();
} else if (this.animation) {
Animation.cancel(this.$el).then(function () {
this$1.show();
Animation.in(this$1.$el, this$1.animation);
});
} else {
this.show();
}
},
events: ['scroll']
} ],
methods: {
// Activates sticky positioning and swaps the active/inactive classes.
show: function show() {
this.update();
this.$el
.addClass(this.clsActive)
.removeClass(this.clsInactive)
.trigger('active');
},
// Restores normal flow positioning and hides the placeholder.
hide: function hide() {
this.$el
.addClass(this.clsInactive)
.removeClass(this.clsActive)
.css({position: '', top: '', width: ''})
.trigger('inactive');
this.placeholder.attr('hidden', true);
},
// Applies fixed positioning, clamped against the bottom bound, and
// matches the width of the reference element.
update: function update() {
var top = Math.max(0, this.offset), scroll = win.scrollTop();
this.placeholder.attr('hidden', false);
if (this.bottom && scroll > this.bottom - this.offset) {
top = this.bottom - scroll;
}
this.$el.css({
position: 'fixed',
top: top + 'px',
width: this._widthElement[0].getBoundingClientRect().width
});
},
// Active means the class is set and no leave animation is running.
isActive: function isActive() {
return this.$el.hasClass(this.clsActive) && !(this.animation && this.$el.hasClass('uk-animation-leave'));
}
},
disconnected: function disconnected() {
this.placeholder.remove();
this.placeholder = null;
this._widthElement = null;
}
});
}
// Persistent cache for fetched SVG markup; falls back to a plain object when
// sessionStorage is unavailable (e.g. disabled by the browser).
var storage = window.sessionStorage || {};
// In-flight/resolved deferreds for SVG requests, keyed by src URL.
var svgs = {};
// SVG component: fetches (and caches) an SVG file, optionally extracts a
// single symbol by id, sizes it, and injects the result next to or inside
// the host element. `this.svg` resolves with the injected element.
function Svg (UIkit) {
UIkit.component('svg', {
props: {
id: String,
icon: String,
src: String,
class: String,
style: String,
width: Number,
height: Number,
ratio: Number
},
defaults: {
ratio: 1,
id: false,
class: '',
exclude: ['src']
},
connected: function connected() {
this.svg = $__default.Deferred();
},
update: {
read: function read() {
var this$1 = this;
// src may come from a CSS background-image when not set explicitly.
if (!this.src) {
this.src = getSrc(this.$el);
}
// Only inject once per component instance.
if (!this.src || this.isSet) {
return;
}
this.isSet = true;
// 'file.svg#symbol' splits into the file src and the symbol id.
if (!this.icon && ~this.src.indexOf('#')) {
var parts = this.src.split('#');
if (parts.length > 1) {
this.src = parts[0];
this.icon = parts[1];
}
}
this.get(this.src).then(function (svg) { return fastdom.mutate(function () {
var el;
// Either clone the whole SVG, or extract the requested symbol
// (rewritten to an <svg> element), falling back to the whole
// SVG only when it contains no symbols at all.
el = !this$1.icon
? svg.clone()
: (el = toJQuery(("#" + (this$1.icon)), svg))
&& toJQuery((el[0].outerHTML || $__default('<div>').append(el.clone()).html()).replace(/symbol/g, 'svg')) // IE workaround, el[0].outerHTML
|| !toJQuery('symbol', svg) && svg.clone(); // fallback if SVG has no symbols
if (!el || !el.length) {
return $__default.Deferred().reject('SVG not found.');
}
var dimensions = el[0].getAttribute('viewBox'); // jQuery workaround, el.attr('viewBox')
// Default width/height from the viewBox, then apply the ratio.
if (dimensions) {
dimensions = dimensions.split(' ');
this$1.width = this$1.width || dimensions[2];
this$1.height = this$1.height || dimensions[3];
}
this$1.width *= this$1.ratio;
this$1.height *= this$1.ratio;
// Copy all non-excluded props onto the injected SVG as attributes.
for (var prop in this$1.$options.props) {
if (this$1[prop] && !~this$1.exclude.indexOf(prop)) {
el.attr(prop, this$1[prop]);
}
}
if (!this$1.id) {
el.removeAttr('id');
}
// With only one dimension given, drop the other so the aspect
// ratio is preserved by the browser.
if (this$1.width && !this$1.height) {
el.removeAttr('height');
}
if (this$1.height && !this$1.width) {
el.removeAttr('width');
}
// Void elements (e.g. <img>) and canvas can't contain children:
// hide them and place the SVG after; otherwise append inside.
if (isVoidElement(this$1.$el) || this$1.$el[0].tagName === 'CANVAS') {
this$1.$el.attr({hidden: true, id: null});
el.insertAfter(this$1.$el);
} else {
el.appendTo(this$1.$el);
}
this$1.svg.resolve(el);
}); }
);
},
events: ['load']
},
methods: {
// Returns a deferred resolving with the parsed SVG for `src`, using the
// in-memory map first, then sessionStorage, then a network request.
get: function get(src) {
if (svgs[src]) {
return svgs[src];
}
svgs[src] = $__default.Deferred();
// data: URIs are decoded inline, never fetched or persisted.
if (src.lastIndexOf('data:', 0) === 0) {
svgs[src].resolve(getSvg(decodeURIComponent(src.split(',')[1])));
} else {
// Cache key includes the UIkit version so upgrades invalidate it.
var key = "uikit_" + (UIkit.version) + "_" + src;
if (storage[key]) {
svgs[src].resolve(getSvg(storage[key]));
} else {
$__default.get(src).then(function (doc, status, res) {
storage[key] = res.responseText;
svgs[src].resolve(getSvg(storage[key]));
});
}
}
return svgs[src];
function getSvg (doc) {
return $__default(doc).filter('svg');
}
}
},
destroy: function destroy() {
// Undo the hidden/id changes made when injecting next to a void element.
if (isVoidElement(this.$el)) {
this.$el.attr({hidden: null, id: this.id || null});
}
if (this.svg) {
this.svg.then(function (svg) { return svg.remove(); });
}
}
});
// Extracts an SVG URL from the element's (or a clone's) background-image.
function getSrc(el) {
var image = getBackgroundImage(el);
if (!image) {
// Hidden elements report no background image: measure a visible clone.
el = el.clone().empty()
.attr({'uk-no-boot': '', style: ((el.attr('style')) + ";display:block !important;")})
.appendTo(document.body);
image = getBackgroundImage(el);
// safari workaround
if (!image && el[0].tagName === 'CANVAS') {
var span = $__default(el[0].outerHTML.replace(/canvas/g, 'span')).insertAfter(el);
image = getBackgroundImage(span);
span.remove();
}
el.remove();
}
// Strip the surrounding `url("...")` wrapper.
return image && image.slice(4, -1).replace(/"/g, '');
}
function getBackgroundImage(el) {
var image = getStyle(el[0], 'backgroundImage', '::before');
return image !== 'none' && image;
}
}
// Switcher component: toggles between connected content panes when their
// corresponding toggle is activated (click, item attribute, or swipe).
function Switcher (UIkit) {
UIkit.component('switcher', {
mixins: [Toggable],
args: 'connect',
props: {
connect: 'jQuery',
toggle: String,
active: Number,
swiping: Boolean
},
defaults: {
connect: false,
toggle: ' > *',
active: 0,
swiping: true,
cls: 'uk-active',
clsContainer: 'uk-switcher',
attrItem: 'uk-switcher-item',
queued: true
},
ready: function ready() {
var this$1 = this;
this.$el.on('click', ((this.toggle) + ":not(.uk-disabled)"), function (e) {
e.preventDefault();
this$1.show(e.currentTarget);
});
},
update: function update() {
var this$1 = this;
this.toggles = $__default(this.toggle, this.$el);
// Default connected container: the next sibling with the container class.
this.connects = this.connect || $__default(this.$el.next(("." + (this.clsContainer))));
// Elements inside the panes carrying the item attribute also switch
// (value may be an index, 'next' or 'previous').
this.connects.off('click', ("[" + (this.attrItem) + "]")).on('click', ("[" + (this.attrItem) + "]"), function (e) {
e.preventDefault();
this$1.show($__default(e.currentTarget).attr(this$1.attrItem));
});
if (this.swiping) {
this.connects.off('swipeRight swipeLeft').on('swipeRight swipeLeft', function (e) {
e.preventDefault();
// Don't hijack the gesture while the user is selecting text.
if (!window.getSelection().toString()) {
this$1.show(e.type == 'swipeLeft' ? 'next' : 'previous');
}
});
}
this.updateAria(this.connects.children());
// Initial pane: existing active toggle, the `active` index, or the first.
this.show(toJQuery(this.toggles.filter(("." + (this.cls) + ":first"))) || toJQuery(this.toggles.eq(this.active)) || this.toggles.first());
},
methods: {
// Switches to `item` (element, index, 'next' or 'previous'), skipping
// disabled toggles by scanning onward in the requested direction.
show: function show(item) {
var this$1 = this;
if (!this.toggles) {
this.$emitSync();
}
var length = this.toggles.length,
prev = this.connects.children(("." + (this.cls))).index(),
hasPrev = prev >= 0,
index = getIndex(item, this.toggles, prev),
dir = item === 'previous' ? -1 : 1,
toggle;
// Find the first enabled toggle, wrapping around in direction `dir`.
for (var i = 0; i < length; i++, index = (index + dir + length) % length) {
if (!this$1.toggles.eq(index).is('.uk-disabled, [disabled]')) {
toggle = this$1.toggles.eq(index);
break;
}
}
// No enabled toggle, or the target is already active: nothing to do.
if (!toggle || prev >= 0 && toggle.hasClass(this.cls) || prev === index) {
return;
}
this.toggles.removeClass(this.cls).attr('aria-expanded', false);
toggle.addClass(this.cls).attr('aria-expanded', true);
// First activation shows immediately; later ones transition both panes.
if (!hasPrev) {
this.toggleNow(this.connects.children((":nth-child(" + (index + 1) + ")")));
} else {
this.toggleElement(this.connects.children((":nth-child(" + (prev + 1) + "),:nth-child(" + (index + 1) + ")")));
}
}
}
});
}
// Tab component: a Switcher variant with tab styling. Left/right tab
// layouts collapse to the plain layout below the configured breakpoint.
function Tab (UIkit) {
    UIkit.component('tab', UIkit.components.switcher.extend({
        mixins: [Class],
        name: 'tab',
        defaults: {
            media: 960,
            attrItem: 'uk-tab-item'
        },
        init: function init() {
            var cls;
            if (this.$el.hasClass('uk-tab-left')) {
                cls = 'uk-tab-left';
            } else if (this.$el.hasClass('uk-tab-right')) {
                cls = 'uk-tab-right';
            }
            if (cls) {
                UIkit.toggle(this.$el, {cls: cls, mode: 'media', media: this.media});
            }
        }
    }));
}
// Toggle component: toggles the visibility/state of a target element on
// click, hover, or a matching media query.
function Toggle (UIkit) {
    UIkit.component('toggle', {
        mixins: [UIkit.mixin.toggable],
        args: 'target',
        props: {
            href: 'jQuery',
            target: 'jQuery',
            mode: String,
            media: 'media'
        },
        defaults: {
            href: false,
            target: false,
            mode: 'click',
            queued: true,
            media: false
        },
        ready: function ready() {
            var this$1 = this;
            // Fall back to `href`, then to the element itself, as the target.
            this.target = this.target || this.href || this.$el;
            // Hover is meaningless on touch devices; degrade to click.
            this.mode = hasTouch && this.mode == 'hover' ? 'click' : this.mode;
            if (this.mode === 'media') {
                return;
            }
            if (this.mode === 'hover') {
                this.$el.on({
                    mouseenter: function () { return this$1.toggle('toggleShow'); },
                    mouseleave: function () { return this$1.toggle('toggleHide'); }
                });
            }
            this.$el.on('click', function (e) {
                // TODO better isToggled handling
                // BUGFIX: jQuery's closest() always returns a (possibly empty)
                // jQuery object, which is truthy, so the original second clause
                // matched every click; test .length instead.
                if ($__default(e.target).closest('a[href="#"], button').length || $__default(e.target).closest('a[href]').length && (this$1.cls || !this$1.target.is(':visible'))) {
                    e.preventDefault();
                }
                this$1.toggle();
            });
        },
        update: {
            write: function write() {
                // Media mode: keep the toggled state in sync with the query.
                if (this.mode !== 'media' || !this.media) {
                    return;
                }
                var toggled = this.isToggled(this.target);
                if (window.matchMedia(this.media).matches ? !toggled : toggled) {
                    this.toggle();
                }
            },
            events: ['load', 'resize', 'orientationchange']
        },
        methods: {
            toggle: function toggle(type) {
                // Give listeners on the target a chance to veto the toggle.
                var event = $__default.Event(type || 'toggle');
                this.target.triggerHandler(event, [this]);
                if (!event.isDefaultPrevented()) {
                    this.toggleElement(this.target);
                }
            }
        }
    });
}
// Global wiring: rAF-throttled load/resize/scroll dispatch into
// UIkit.update, animation-overflow bookkeeping, and registration of all
// core components.
function core (UIkit) {
    var scroll = null, dir, ticking, resizing;
    win
        .on('load', UIkit.update)
        .on('resize orientationchange', function (e) {
            // Coalesce bursts of resize events into one update per frame.
            if (!resizing) {
                requestAnimationFrame(function () {
                    UIkit.update(e);
                    resizing = false;
                });
                resizing = true;
            }
        })
        .on('scroll', function (e) {
            if (scroll === null) {
                scroll = 0;
            }
            // Track scroll direction so handlers get e.dir = 'down' | 'up'.
            dir = scroll < window.pageYOffset;
            scroll = window.pageYOffset;
            if (!ticking) {
                requestAnimationFrame(function () {
                    e.dir = dir ? 'down' : 'up';
                    UIkit.update(e);
                    ticking = false;
                });
                ticking = true;
            }
        });
    // Hide horizontal overflow while any uk-* keyframe animation is running
    // (`started` counts concurrently running animations).
    var started = 0;
    on(document, 'animationstart', function (ref) {
        var target = ref.target;
        fastdom.measure(function () {
            if (hasAnimation(target)) {
                fastdom.mutate(function () {
                    document.body.style.overflowX = 'hidden';
                    started++;
                });
            }
        });
    }, true);
    on(document, 'animationend', function (ref) {
        var target = ref.target;
        fastdom.measure(function () {
            if (hasAnimation(target) && !--started) {
                fastdom.mutate(function () { return document.body.style.overflowX = ''; })
            }
        });
    }, true);
    // Webkit font-smoothing glitch workaround after animations.
    on(document.documentElement, 'webkitAnimationEnd', function (ref) {
        var target = ref.target;
        fastdom.measure(function () {
            if (getStyle(target, 'webkitFontSmoothing') === 'antialiased') {
                fastdom.mutate(function () {
                    target.style.webkitFontSmoothing = 'subpixel-antialiased';
                    setTimeout(function () { return target.style.webkitFontSmoothing = ''; });
                })
            }
        });
    }, true);
    // core components
    UIkit.use(Accordion);
    UIkit.use(Alert);
    UIkit.use(Cover);
    UIkit.use(Drop);
    UIkit.use(Dropdown);
    UIkit.use(FormCustom);
    UIkit.use(HeightMatch);
    UIkit.use(HeightViewport);
    UIkit.use(Hover);
    UIkit.use(Margin);
    UIkit.use(Gif);
    UIkit.use(Grid);
    UIkit.use(Modal$1);
    UIkit.use(Nav);
    UIkit.use(Navbar);
    UIkit.use(Offcanvas);
    UIkit.use(Responsive);
    UIkit.use(Scroll);
    UIkit.use(Scrollspy);
    UIkit.use(ScrollspyNav);
    UIkit.use(Sticky);
    UIkit.use(Svg);
    UIkit.use(Icon);
    UIkit.use(Spinner);
    UIkit.use(Switcher);
    UIkit.use(Tab);
    UIkit.use(Toggle);
    // True when the element's running animation is a uk-* keyframe.
    function hasAnimation(target) {
        return (getStyle(target, 'animationName') || '').lastIndexOf('uk-', 0) === 0;
    }
}
// Boot: connect components on every element already in the DOM and keep
// doing so as nodes are added/removed — via MutationObserver when
// available, falling back to the deprecated DOMNode* mutation events.
function boot (UIkit) {
    if (Observer) {
        if (document.body) {
            init();
        } else {
            // Wait until <body> exists before initialising.
            (new Observer(function () {
                if (document.body) {
                    this.disconnect();
                    init();
                }
            })).observe(document.documentElement, {childList: true, subtree: true});
        }
    } else {
        ready(function () {
            apply(document.body, UIkit.connect);
            on(document.documentElement, 'DOMNodeInserted', function (e) { return apply(e.target, UIkit.connect); });
            on(document.documentElement, 'DOMNodeRemoved', function (e) { return apply(e.target, UIkit.disconnect); });
        });
    }
    function init() {
        apply(document.body, UIkit.connect);
        (new Observer(function (mutations) { return mutations.forEach(function (mutation) {
            for (var i = 0; i < mutation.addedNodes.length; i++) {
                apply(mutation.addedNodes[i], UIkit.connect)
            }
            for (i = 0; i < mutation.removedNodes.length; i++) {
                apply(mutation.removedNodes[i], UIkit.disconnect)
            }
            UIkit.update('update', mutation.target, true);
        }); }
        )).observe(document.documentElement, {childList: true, subtree: true, characterData: true, attributes: true, attributeFilter: ['href']});
    }
    // Depth-first walk applying fn to every element; `uk-no-boot` opts a
    // subtree out (used e.g. for measurement clones and drag proxies).
    function apply(node, fn) {
        if (node.nodeType !== Node.ELEMENT_NODE || node.hasAttribute('uk-no-boot')) {
            return;
        }
        fn(node);
        node = node.firstChild;
        while (node) {
            var next = node.nextSibling;
            apply(node, fn);
            node = next;
        }
    }
}
// Finalise the UIkit global: version stamp, built-in mixins, core
// components, and DOM boot (auto initialisation).
UIkit$1.version = '3.0.0';
mixin$1(UIkit$1);
core(UIkit$1);
boot(UIkit$1);
// CommonJS export when running under a module loader.
if (typeof module !== 'undefined') {
    module.exports = UIkit$1;
}
return UIkit$1;
})));/*! UIkit 3.0.0-beta.6 | http://www.getuikit.com | (c) 2014 - 2016 YOOtheme | MIT License */
/*
 * Lightbox plugin bootstrap: UMD wrapper plus global keyboard navigation
 * (left/right arrow keys drive the currently active lightbox instance).
 */
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('uikit')) :
    typeof define === 'function' && define.amd ? define(['uikit'], factory) :
    (factory(global.UIkit));
}(this, (function (uikit) { 'use strict';
    var $ = uikit.util.$;
    var doc = uikit.util.doc;
    var extend = uikit.util.extend;
    var Dimensions = uikit.util.Dimensions;
    var getIndex = uikit.util.getIndex;
    var Transition = uikit.util.Transition;
    // The lightbox instance currently shown, if any (module-level singleton).
    var active;
    doc.on({
        keydown: function (e) {
            if (active) {
                switch (e.keyCode) {
                    case 37: // left arrow
                        active.show('previous');
                        break;
                    case 39: // right arrow
                        active.show('next');
                        break;
                }
            }
        }
    });
UIkit.component('lightbox', {
name: 'lightbox',
props: {
toggle: String,
duration: Number,
inverse: Boolean
},
defaults: {
toggle: 'a',
duration: 400,
dark: false,
attrItem: 'uk-lightbox-item',
items: [],
index: 0
},
ready: function ready() {
var this$1 = this;
this.toggles = $(this.toggle, this.$el);
this.toggles.each(function (i, el) {
el = $(el);
this$1.items.push({
source: el.attr('href'),
title: el.attr('title'),
type: el.attr('type')
})
});
this.$el.on('click', ((this.toggle) + ":not(.uk-disabled)"), function (e) {
e.preventDefault();
this$1.show(this$1.toggles.index(e.currentTarget));
});
},
update: {
write: function write() {
var this$1 = this;
var item = this.getItem();
if (!this.modal || !item.content) {
return;
}
var panel = this.modal.panel,
dim = {width: panel.width(), height: panel.height()},
max = {
width: window.innerWidth - (panel.outerWidth(true) - dim.width),
height: window.innerHeight - (panel.outerHeight(true) - dim.height)
},
newDim = Dimensions.fit({width: item.width, height: item.height}, max);
Transition
.stop(panel)
.stop(this.modal.content);
if (this.modal.content) {
this.modal.content.remove();
}
this.modal.content = $(item.content).css('opacity', 0).appendTo(panel);
panel.css(dim);
Transition.start(panel, newDim, this.duration).then(function () {
Transition.start(this$1.modal.content, {opacity: 1}, 400).then(function () {
panel.find('[uk-transition-hide]').show();
panel.find('[uk-transition-show]').hide();
});
});
},
events: ['resize', 'orientationchange']
},
events: {
showitem: function showitem(e) {
var item = this.getItem();
if (item.content) {
this.$update();
e.stopImmediatePropagation();
}
}
},
methods: {
show: function show(index) {
var this$1 = this;
this.index = getIndex(index, this.items, this.index);
if (!this.modal) {
this.modal = UIkit.modal.dialog("\n <button class=\"uk-modal-close-outside\" uk-transition-hide type=\"button\" uk-close></button>\n <span class=\"uk-position-center\" uk-transition-show uk-icon=\"icon: trash\"></span>\n ", {center: true});
this.modal.$el.css('overflow', 'hidden').addClass('uk-modal-lightbox');
this.modal.panel.css({width: 200, height: 200});
this.modal.caption = $('<div class="uk-modal-caption" uk-transition-hide></div>').appendTo(this.modal.panel);
if (this.items.length > 1) {
$(("<div class=\"" + (this.dark ? 'uk-dark' : 'uk-light') + "\" uk-transition-hide>\n <a href=\"#\" class=\"uk-position-center-left\" uk-slidenav=\"previous\" uk-lightbox-item=\"previous\"></a>\n <a href=\"#\" class=\"uk-position-center-right\" uk-slidenav=\"next\" uk-lightbox-item=\"next\"></a>\n </div>\n ")).appendTo(this.modal.panel.addClass('uk-slidenav-position'));
}
this.modal.$el
.on('hide', this.hide)
.on('click', ("[" + (this.attrItem) + "]"), function (e) {
e.preventDefault();
this$1.show($(e.currentTarget).attr(this$1.attrItem));
}).on('swipeRight swipeLeft', function (e) {
e.preventDefault();
if (!window.getSelection().toString()) {
this$1.show(e.type == 'swipeLeft' ? 'next' : 'previous');
}
});
}
active = this;
this.modal.panel.find('[uk-transition-hide]').hide();
this.modal.panel.find('[uk-transition-show]').show();
this.modal.content && this.modal.content.remove();
this.modal.caption.text(this.getItem().title);
var event = $.Event('showitem');
this.$el.trigger(event);
if (!event.isImmediatePropagationStopped()) {
this.setError(this.getItem());
}
},
hide: function hide() {
var this$1 = this;
active = active && active !== this && active;
this.modal.hide().then(function () {
this$1.modal.$destroy(true);
this$1.modal = null;
});
},
getItem: function getItem() {
return this.items[this.index] || {source: '', title: '', type: ''};
},
setItem: function setItem(item, content, width, height) {
if ( width === void 0 ) width = 200;
if ( height === void 0 ) height = 200;
extend(item, {content: content, width: width, height: height});
this.$update();
},
setError: function setError(item) {
this.setItem(item, '<div class="uk-position-cover uk-flex uk-flex-middle uk-flex-center"><strong>Loading resource failed!</strong></div>', 400, 300);
}
}
});
UIkit.mixin({
events: {
showitem: function showitem(e) {
var this$1 = this;
var item = this.getItem();
if (item.type !== 'image' && item.source && !item.source.match(/\.(jp(e)?g|png|gif|svg)$/i)) {
return;
}
var img = new Image();
img.onerror = function () { return this$1.setError(item); };
img.onload = function () { return this$1.setItem(item, ("<img class=\"uk-responsive-width\" width=\"" + (img.width) + "\" height=\"" + (img.height) + "\" src =\"" + (item.source) + "\">"), img.width, img.height); };
img.src = item.source;
e.stopImmediatePropagation();
}
}
}, 'lightbox');
UIkit.mixin({
events: {
showitem: function showitem(e) {
var this$1 = this;
var item = this.getItem();
if (item.type !== 'video' && item.source && !item.source.match(/\.(mp4|webm|ogv)$/i)) {
return;
}
var vid = $('<video class="uk-responsive-width" controls></video>')
.on('loadedmetadata', function () { return this$1.setItem(item, vid.attr({width: vid[0].videoWidth, height: vid[0].videoHeight}), vid[0].videoWidth, vid[0].videoHeight); })
.attr('src', item.source);
e.stopImmediatePropagation();
}
}
}, 'lightbox');
UIkit.mixin({
events: {
showitem: function showitem(e) {
var this$1 = this;
var item = this.getItem(), matches;
if (!(matches = item.source.match(/\/\/.*?youtube\.[a-z]+\/watch\?v=([^&]+)&?(.*)/)) && !(item.source.match(/youtu\.be\/(.*)/))) {
return;
}
var id = matches[1],
img = new Image(),
lowres = false,
setIframe = function (width, height) { return this$1.setItem(item, ("<iframe src=\"//www.youtube.com/embed/" + id + "\" width=\"" + width + "\" height=\"" + height + "\" style=\"max-width:100%;box-sizing:border-box;\"></iframe>"), width, height); };
img.onerror = function () { return setIframe(640, 320); };
img.onload = function () {
//youtube default 404 thumb, fall back to lowres
if (img.width === 120 && img.height === 90) {
if (!lowres) {
lowres = true;
img.src = "//img.youtube.com/vi/" + id + "/0.jpg";
} else {
setIframe(640, 320);
}
} else {
setIframe(img.width, img.height);
}
};
img.src = "//img.youtube.com/vi/" + id + "/maxresdefault.jpg";
e.stopImmediatePropagation();
}
}
}, 'lightbox');
UIkit.mixin({
events: {
showitem: function showitem(e) {
var this$1 = this;
var item = this.getItem(), matches;
if (!(matches = item.source.match(/(\/\/.*?)vimeo\.[a-z]+\/([0-9]+).*?/))) {
return;
}
var id = matches[2],
setIframe = function (width, height) { return this$1.setItem(item, ("<iframe src=\"//player.vimeo.com/video/" + id + "\" width=\"" + width + "\" height=\"" + height + "\" style=\"max-width:100%;box-sizing:border-box;\"></iframe>"), width, height); };
$.ajax({type: 'GET', url: ("http://vimeo.com/api/oembed.json?url=" + (encodeURI(item.source))), jsonp: 'callback', dataType: 'jsonp'}).then(function (res) { return setIframe(res.width, res.height); });
e.stopImmediatePropagation();
}
}
}, 'lightbox');
})));/*! UIkit 3.0.0-beta.6 | http://www.getuikit.com | (c) 2014 - 2016 YOOtheme | MIT License */
/*
 * Notification plugin: UMD wrapper plus the `uk-notification` functional
 * component (transient messages stacked per screen position).
 */
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('uikit')) :
    typeof define === 'function' && define.amd ? define(['uikit'], factory) :
    (factory(global.UIkit));
}(this, (function (uikit) { 'use strict';
    var $ = uikit.util.$;
    var Transition = uikit.util.Transition;
    // One container element per position ('top-center', ...), shared by all
    // notifications shown at that position.
    var containers = {};
    UIkit.component('notification', {
        functional: true,
        args: ['message', 'status'],
        defaults: {
            message: '',
            status: '',
            timeout: 5000,
            group: null,
            pos: 'top-center',
            onClose: null
        },
        created: function created() {
            if (!containers[this.pos]) {
                containers[this.pos] = $(("<div class=\"uk-notification uk-notification-" + (this.pos) + "\"></div>")).appendTo(uikit.container);
            }
            this.$mount($(
                ("<div class=\"uk-notification-message" + (this.status ? (" uk-notification-message-" + (this.status)) : '') + "\">\n <a href=\"#\" class=\"uk-notification-close\" data-uk-close></a>\n <div>" + (this.message) + "</div>\n </div>")
            ).appendTo(containers[this.pos].show()));
        },
        ready: function ready() {
            var this$1 = this;
            // Slide/fade in, then arm the auto-close timer (paused on hover).
            var marginBottom = parseInt(this.$el.css('margin-bottom'), 10);
            Transition.start(
                this.$el.css({opacity: 0, marginTop: -1 * this.$el.outerHeight(), marginBottom: 0}),
                {opacity: 1, marginTop: 0, marginBottom: marginBottom}
            ).then(function () {
                if (this$1.timeout) {
                    this$1.timer = setTimeout(this$1.close, this$1.timeout);
                    this$1.$el
                        .on('mouseenter', function () { return clearTimeout(this$1.timer); })
                        .on('mouseleave', function () { return this$1.timer = setTimeout(this$1.close, this$1.timeout); });
                }
            });
        },
        events: {
            click: function click(e) {
                e.preventDefault();
                this.close();
            }
        },
        methods: {
            close: function close(immediate) {
                var this$1 = this;
                var remove = function () {
                    this$1.onClose && this$1.onClose();
                    this$1.$el.trigger('close', [this$1]).remove();
                    // Hide the container once its last message is gone.
                    if (!containers[this$1.pos].children().length) {
                        containers[this$1.pos].hide();
                    }
                };
                if (this.timer) {
                    clearTimeout(this.timer);
                }
                if (immediate) {
                    remove();
                } else {
                    Transition.start(this.$el, {opacity: 0, marginTop: -1 * this.$el.outerHeight(), marginBottom: 0}).then(remove)
                }
            }
        }
    });
UIkit.notification.closeAll = function (group, immediate) {
var notification;
UIkit.elements.forEach(function (el) {
if ((notification = UIkit.getComponent(el, 'notification')) && (!group || group === notification.group)) {
notification.close(immediate);
}
});
};
})));/*! UIkit 3.0.0-beta.6 | http://www.getuikit.com | (c) 2014 - 2016 YOOtheme | MIT License */
/*
 * Sortable plugin: UMD wrapper plus the `uk-sortable` component
 * (pointer-driven drag-and-drop reordering, optionally across groups).
 */
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('uikit')) :
    typeof define === 'function' && define.amd ? define(['uikit'], factory) :
    (factory(global.UIkit));
}(this, (function (uikit) { 'use strict';
    var $ = uikit.util.$;
    var doc = uikit.util.docElement;
    var extend = uikit.util.extend;
    var isWithin = uikit.util.isWithin;
    var Observer = uikit.util.Observer;
    var on = uikit.util.on;
    var off = uikit.util.off;
    var pointerDown = uikit.util.pointerDown;
    var pointerMove = uikit.util.pointerMove;
    var pointerUp = uikit.util.pointerUp;
    var win = uikit.util.win;
    UIkit.component('sortable', {
        mixins: [uikit.mixin.class],
        props: {
            group: String,
            animation: Number,
            threshold: Number,
            clsItem: String,
            clsPlaceholder: String,
            clsDrag: String,
            clsDragState: String,
            clsBase: String,
            clsNoDrag: String,
            clsEmpty: String,
            clsCustom: String,
            handle: String
        },
        defaults: {
            group: false,
            animation: 150,
            threshold: 5,
            clsItem: 'uk-sortable-item',
            clsPlaceholder: 'uk-sortable-placeholder',
            clsDrag: 'uk-sortable-drag',
            clsDragState: 'uk-drag',
            clsBase: 'uk-sortable',
            clsNoDrag: 'uk-sortable-nodrag',
            clsEmpty: 'uk-sortable-empty',
            clsCustom: '',
            handle: false
        },
        init: function init() {
            var this$1 = this;
            // Wrap the pointer handlers so each invocation first records the
            // current scroll offset and pointer position (mouse or touch).
            ['init', 'start', 'move', 'end'].forEach(function (key) {
                var fn = this$1[key];
                this$1[key] = function (e) {
                    e = e.originalEvent || e;
                    this$1.scrollY = window.scrollY;
                    var ref = e.touches && e.touches[0] || e;
                    var pageX = ref.pageX;
                    var pageY = ref.pageY;
                    this$1.pos = {x: pageX, y: pageY};
                    fn(e);
                }
            });
        },
        connected: function connected() {
            var this$1 = this;
            on(this.$el, pointerDown, this.init);
            if (this.clsEmpty) {
                // Keep the "empty list" class in sync with the child count.
                var empty = function () { return this$1.$el.toggleClass(this$1.clsEmpty, !this$1.$el.children().length); };
                (this._observer = new Observer(empty)).observe(this.$el[0], {childList: true});
                empty();
            }
        },
        update: {
            write: function write() {
                var this$1 = this;
                if (!this.drag) {
                    return;
                }
                // Follow the pointer with the drag proxy and auto-scroll when
                // the proxy nears the top/bottom of the viewport.
                this.drag.offset({top: this.pos.y + this.origin.top, left: this.pos.x + this.origin.left});
                var top = this.drag.offset().top, bottom = top + this.drag[0].offsetHeight;
                if (top > 0 && top < this.scrollY) {
                    setTimeout(function () { return win.scrollTop(this$1.scrollY - 5); }, 5);
                } else if (bottom < doc[0].offsetHeight && bottom > window.innerHeight + this.scrollY) {
                    setTimeout(function () { return win.scrollTop(this$1.scrollY + 5); }, 5);
                }
            }
        },
        methods: {
            init: function init(e) {
                // Begin a potential drag on pointer-down over a child item.
                var target = $(e.target), placeholder = this.$el.children().filter(function (i, el) { return isWithin(e.target, el); });
                if (!placeholder.length
                    || target.is(':input')
                    || this.handle && !isWithin(target, this.handle)
                    || e.button && e.button !== 0
                    || isWithin(target, ("." + (this.clsNoDrag)))
                ) {
                    return;
                }
                e.preventDefault();
                e.stopPropagation();
                this.touched = [this];
                this.placeholder = placeholder;
                this.origin = extend({target: target, index: this.placeholder.index()}, this.pos);
                doc.on(pointerMove, this.move);
                doc.on(pointerUp, this.end);
                win.on('scroll', this.scroll);
                if (!this.threshold) {
                    this.start(e);
                }
            },
            start: function start(e) {
                // Build the floating drag proxy; <li> becomes <div> so the
                // proxy can live outside a list. `uk-no-boot` keeps the boot
                // walker from initialising components on the proxy.
                this.drag = $(this.placeholder[0].outerHTML.replace(/^<li/i, '<div').replace(/li>$/i, 'div>'))
                    .attr('uk-no-boot', '')
                    .addClass(((this.clsDrag) + " " + (this.clsCustom)))
                    .css({
                        boxSizing: 'border-box',
                        width: this.placeholder.outerWidth(),
                        height: this.placeholder.outerHeight()
                    })
                    .css(this.placeholder.css(['paddingLeft', 'paddingRight', 'paddingTop', 'paddingBottom']))
                    .appendTo(uikit.container);
                this.drag.children().first().height(this.placeholder.children().height());
                var ref = this.placeholder.offset();
                var left = ref.left;
                var top = ref.top;
                // origin.left/top: offset of the item relative to the pointer.
                extend(this.origin, {left: left - this.pos.x, top: top - this.pos.y});
                this.placeholder.addClass(this.clsPlaceholder);
                this.$el.children().addClass(this.clsItem);
                doc.addClass(this.clsDragState);
                this.$el.trigger('start', [this, this.placeholder, this.drag]);
                this.move(e);
            },
            move: function move(e) {
                if (!this.drag) {
                    // Not started yet: wait until the threshold is exceeded.
                    if (Math.abs(this.pos.x - this.origin.x) > this.threshold || Math.abs(this.pos.y - this.origin.y) > this.threshold) {
                        this.start(e);
                    }
                    return;
                }
                this.$emit();
                // Find the sortable under the pointer and move the placeholder
                // there, provided both lists share a group when they differ.
                var target = e.type === 'mousemove' ? e.target : document.elementFromPoint(this.pos.x - document.body.scrollLeft, this.pos.y - document.body.scrollTop),
                    sortable = getSortable(target),
                    previous = getSortable(this.placeholder[0]),
                    move = sortable !== previous;
                if (!sortable || isWithin(target, this.placeholder) || move && (!sortable.group || sortable.group !== previous.group)) {
                    return;
                }
                target = sortable.$el.is(target.parentNode) && $(target) || sortable.$el.children().has(target);
                if (move) {
                    previous.remove(this.placeholder);
                } else if (!target.length) {
                    return;
                }
                sortable.insert(this.placeholder, target);
                if (!~this.touched.indexOf(sortable)) {
                    this.touched.push(sortable);
                }
            },
            scroll: function scroll() {
                // Keep the recorded pointer position consistent when the page
                // scrolls underneath the drag.
                var scroll = window.scrollY;
                if (scroll !== this.scrollY) {
                    this.pos.y += scroll - this.scrollY;
                    this.scrollY = scroll;
                    this.$emit();
                }
            },
            end: function end(e) {
                doc.off(pointerMove, this.move);
                doc.off(pointerUp, this.end);
                win.off('scroll', this.scroll);
                if (!this.drag) {
                    // Plain tap (no drag): follow a link if one was hit.
                    if (e.type !== 'mouseup' && isWithin(e.target, 'a[href]')) {
                        location.href = $(e.target).closest('a[href]').attr('href');
                    }
                    return;
                }
                // Swallow the click event generated by releasing the drag.
                preventClick();
                var sortable = getSortable(this.placeholder[0]);
                if (this === sortable) {
                    if (this.origin.index !== this.placeholder.index()) {
                        this.$el.trigger('change', [this, this.placeholder, 'moved']);
                    }
                } else {
                    sortable.$el.trigger('change', [sortable, this.placeholder, 'added']);
                    this.$el.trigger('change', [this, this.placeholder, 'removed']);
                }
                this.$el.trigger('stop', [this]);
                this.drag.remove();
                this.drag = null;
                this.touched.forEach(function (sortable) { return sortable.$el.children().removeClass(((sortable.clsPlaceholder) + " " + (sortable.clsItem))); });
                doc.removeClass(this.clsDragState);
            },
            insert: function insert(element, target) {
                var this$1 = this;
                this.$el.children().addClass(this.clsItem);
                var insert = function () {
                    if (target.length) {
                        // Insert before when coming from another list or from
                        // a later position, otherwise after.
                        if (!this$1.$el.has(element).length || element.prevAll().filter(target).length) {
                            element.insertBefore(target);
                        } else {
                            element.insertAfter(target);
                        }
                    } else {
                        this$1.$el.append(element);
                    }
                };
                if (this.animation) {
                    this.animate(insert);
                } else {
                    insert();
                }
            },
            remove: function remove(element) {
                if (!this.$el.has(element).length) {
                    return;
                }
                if (this.animation) {
                    this.animate(function () { return element.detach(); });
                } else {
                    element.detach();
                }
            },
            animate: function animate(action) {
                var this$1 = this;
                // FLIP-style animation: record the children's positions, run
                // the mutation, then animate each child from its old to its
                // new position and reset the inline styles afterwards.
                var props = [],
                    children = this.$el.children().toArray().map(function (el) {
                        el = $(el);
                        props.push(extend({
                            position: 'absolute',
                            pointerEvents: 'none',
                            width: el.outerWidth(),
                            height: el.outerHeight()
                        }, el.position()));
                        return el;
                    }),
                    reset = {position: '', width: '', height: '', pointerEvents: '', top: '', left: ''};
                action();
                children.forEach(function (el) { return el.stop(); });
                this.$el.children().css(reset);
                this.$updateSync('update', true);
                this.$el.css('min-height', this.$el.height());
                var positions = children.map(function (el) { return el.position(); });
                $.when.apply($, children.map(function (el, i) { return el.css(props[i]).animate(positions[i], this$1.animation).promise(); }))
                    .then(function () {
                        this$1.$el.css('min-height', '').children().css(reset);
                        this$1.$updateSync('update', true);
                    });
            }
        },
        disconnected: function disconnected() {
            off(this.$el, pointerDown, this.init);
            if (this._observer) {
                this._observer.disconnect()
            }
        }
    });
function getSortable(element) {
return UIkit.getComponent(element, 'sortable') || element.parentNode && getSortable(element.parentNode);
}
function preventClick() {
var timer = setTimeout(function () { return doc.trigger('click'); }, 0),
listener = function (e) {
e.preventDefault();
e.stopPropagation();
clearTimeout(timer);
off(doc, 'click', listener, true);
};
on(doc, 'click', listener, true);
}
})));/*! UIkit 3.0.0-beta.6 | http://www.getuikit.com | (c) 2014 - 2016 YOOtheme | MIT License */
/*
 * Tooltip plugin: UMD wrapper plus the `uk-tooltip` component (shows the
 * element's title attribute in a positioned, animated tooltip).
 */
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('uikit')) :
    typeof define === 'function' && define.amd ? define(['uikit'], factory) :
    (factory(global.UIkit));
}(this, (function (uikit) { 'use strict';
    var $ = uikit.util.$;
    var flipPosition = uikit.util.flipPosition;
    UIkit.component('tooltip', {
        mixins: [uikit.mixin.toggable, uikit.mixin.position],
        props: {
            delay: Number
        },
        defaults: {
            pos: 'top',
            delay: 0,
            animation: 'uk-animation-scale-up',
            duration: 100,
            cls: 'uk-active',
            clsPos: 'uk-tooltip'
        },
        ready: function ready() {
            // Move the title attribute aside so the browser's native tooltip
            // never competes with ours.
            this.content = this.$el.attr('title');
            this.$el
                .removeAttr('title')
                .attr('aria-expanded', false);
        },
        methods: {
            show: function show() {
                var this$1 = this;
                clearTimeout(this.showTimer);
                if (this.$el.attr('aria-expanded') === 'true') {
                    return;
                }
                this.tooltip = $(("<div class=\"" + (this.clsPos) + "\" aria-hidden=\"true\"><div class=\"" + (this.clsPos) + "-inner\">" + (this.content) + "</div></div>")).appendTo(uikit.container);
                this.$el.attr('aria-expanded', true);
                this.positionAt(this.tooltip, this.$el);
                this.origin = this.getAxis() === 'y' ? ((flipPosition(this.dir)) + "-" + (this.align)) : ((this.align) + "-" + (flipPosition(this.dir)));
                this.showTimer = setTimeout(function () {
                    this$1.toggleElement(this$1.tooltip, true);
                    // Poll so the tooltip disappears when its anchor does.
                    this$1.hideTimer = setInterval(function () {
                        if (!this$1.$el.is(':visible')) {
                            this$1.hide();
                        }
                    }, 150);
                }, this.delay);
            },
            hide: function hide() {
                // Keep the tooltip while a focused input still has focus.
                if (this.$el.is('input') && this.$el[0] === document.activeElement) {
                    return;
                }
                clearTimeout(this.showTimer);
                clearInterval(this.hideTimer);
                this.$el.attr('aria-expanded', false);
                this.toggleElement(this.tooltip, false);
                this.tooltip && this.tooltip.remove();
                this.tooltip = false;
            }
        },
        events: {
            'focus mouseenter': 'show',
            'blur mouseleave': 'hide'
        }
    });
})));/*! UIkit 3.0.0-beta.6 | http://www.getuikit.com | (c) 2014 - 2016 YOOtheme | MIT License */
/*
 * Upload plugin: UMD wrapper plus the `uk-upload` component (uploads files
 * from an <input type="file"> or via drag-and-drop, in concurrent chunks).
 */
(function (global, factory) {
    typeof exports === 'object' && typeof module !== 'undefined' ? factory(require('uikit')) :
    typeof define === 'function' && define.amd ? define(['uikit'], factory) :
    (factory(global.UIkit));
}(this, (function (uikit) { 'use strict';
    var $ = uikit.util.$;
    var ajax = uikit.util.ajax;
    var on = uikit.util.on;
    UIkit.component('upload', {
        props: {
            allow: String,
            clsDragover: String,
            concurrent: Number,
            dataType: String,
            mime: String,
            msgInvalidMime: String,
            msgInvalidName: String,
            multiple: Boolean,
            name: String,
            params: Object,
            type: String,
            url: String
        },
        defaults: {
            allow: false,
            clsDragover: 'uk-dragover',
            concurrent: 1,
            dataType: undefined,
            mime: false,
            msgInvalidMime: 'Invalid File Type: %s',
            msgInvalidName: 'Invalid File Name: %s',
            multiple: false,
            name: 'files[]',
            params: {},
            type: 'POST',
            url: '',
            abort: null,
            beforeAll: null,
            beforeSend: null,
            complete: null,
            completeAll: null,
            error: null,
            fail: function fail(msg) {
                alert(msg);
            },
            load: null,
            loadEnd: null,
            loadStart: null,
            progress: null
        },
        events: {
            change: function change(e) {
                if (!$(e.target).is('input[type="file"]')) {
                    return;
                }
                e.preventDefault();
                if (e.target.files) {
                    this.upload(e.target.files);
                }
                // Reset so selecting the same file again re-fires change.
                e.target.value = '';
            },
            drop: function drop(e) {
                e.preventDefault();
                e.stopPropagation();
                var transfer = e.originalEvent.dataTransfer;
                if (!transfer || !transfer.files) {
                    return;
                }
                this.$el.removeClass(this.clsDragover);
                this.upload(transfer.files);
            },
            dragenter: function dragenter(e) {
                e.preventDefault();
                e.stopPropagation();
            },
            dragover: function dragover(e) {
                e.preventDefault();
                e.stopPropagation();
                this.$el.addClass(this.clsDragover);
            },
            dragleave: function dragleave(e) {
                e.preventDefault();
                e.stopPropagation();
                this.$el.removeClass(this.clsDragover);
            }
        },
        methods: {
            upload: function upload(files) {
                var this$1 = this;
                if (!files.length) {
                    return;
                }
                this.$el.trigger('upload', [files]);
                // Validate every file name/MIME type before sending anything.
                for (var i = 0; i < files.length; i++) {
                    if (this$1.allow) {
                        if (!match(this$1.allow, files[i].name)) {
                            this$1.fail(this$1.msgInvalidName.replace(/%s/, this$1.allow));
                            return;
                        }
                    }
                    if (this$1.mime) {
                        if (!match(this$1.mime, files[i].type)) {
                            this$1.fail(this$1.msgInvalidMime.replace(/%s/, this$1.mime));
                            return;
                        }
                    }
                }
                if (!this.multiple) {
                    files = [files[0]];
                }
                this.beforeAll && this.beforeAll(this, files);
                // Send `concurrent` files per request; the complete callback
                // chains the next chunk until all are sent.
                var chunks = chunk(files, this.concurrent),
                    upload = function (files) {
                        var data = new FormData();
                        files.forEach(function (file) { return data.append(this$1.name, file); });
                        for (var key in this$1.params) {
                            data.append(key, this$1.params[key]);
                        }
                        ajax({
                            data: data,
                            url: this$1.url,
                            type: this$1.type,
                            dataType: this$1.dataType,
                            beforeSend: this$1.beforeSend,
                            complete: [this$1.complete, function (xhr, status) {
                                if (chunks.length) {
                                    upload(chunks.shift());
                                } else {
                                    this$1.completeAll && this$1.completeAll(xhr);
                                }
                                if (status === 'abort') {
                                    this$1.abort && this$1.abort(xhr);
                                }
                            }],
                            cache: false,
                            contentType: false,
                            processData: false,
                            xhr: function () {
                                // Hook progress/load/error callbacks onto the
                                // underlying XHR object.
                                var xhr = $.ajaxSettings.xhr();
                                xhr.upload && this$1.progress && on(xhr.upload, 'progress', this$1.progress);
                                ['loadStart', 'load', 'loadEnd', 'error', 'abort'].forEach(function (type) { return this$1[type] && on(xhr, type.toLowerCase(), this$1[type]); });
                                return xhr;
                            }
                        })
                    };
                upload(chunks.shift());
            }
        }
    });
function match(pattern, path) {
return path.match(new RegExp(("^" + (pattern.replace(/\//g, '\\/').replace(/\*\*/g, '(\\/[^\\/]+)*').replace(/\*/g, '[^\\/]+').replace(/((?!\\))\?/g, '$1.')) + "$"), 'i'));
}
function chunk(files, size) {
var chunks = [];
for (var i = 0; i < files.length; i += size) {
var chunk = [];
for (var j = 0; j < size; j++) {
chunk.push(files[i+j]);
}
chunks.push(chunk);
}
return chunks;
}
}))); | ahocevar/cdnjs | ajax/libs/uikit/3.0.0-beta.6/js/uikit.js | JavaScript | mit | 161,667 |
/*
* Copyright (c) 2007-2012 SlimDX Group
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include "stdafx.h"
#include "Direct3D11Exception.h"
#include "EffectPass11.h"
#include "EffectTechnique11.h"
#include "EffectTechniqueDescription11.h"
#include "StateBlockMask11.h"
#include "EffectVariable11.h"
using namespace System;
using namespace System::Globalization;
namespace SlimDX
{
namespace Direct3D11
{
        // Wrap an existing native technique interface. No AddRef is
        // performed here; the wrapper just stores the raw pointer.
        EffectTechnique::EffectTechnique( ID3DX11EffectTechnique* pointer )
        {
            m_Pointer = pointer;
        }

        // Managed-friendly overload taking the native interface as an IntPtr.
        EffectTechnique::EffectTechnique( IntPtr pointer )
        {
            m_Pointer = reinterpret_cast<ID3DX11EffectTechnique*>( pointer.ToPointer() );
        }
        // Gets the technique description; returns a default-initialized
        // value when the native GetDesc call fails (the failing HRESULT is
        // recorded via RECORD_D3D11).
        EffectTechniqueDescription EffectTechnique::Description::get()
        {
            D3DX11_TECHNIQUE_DESC nativeDescription;
            if (RECORD_D3D11( m_Pointer->GetDesc( &nativeDescription ) ).IsFailure)
                return EffectTechniqueDescription();
            return EffectTechniqueDescription( nativeDescription );
        }
// True if the underlying ID3DX11EffectTechnique is a valid technique.
bool EffectTechnique::IsValid::get()
{
	// The native IsValid() returns a BOOL (integer); compare against zero
	// directly instead of the redundant "? true : false" conversion.
	return m_Pointer->IsValid() != 0;
}
// Looks up the annotation at the given index; a null native pointer means
// no annotation exists there, which is surfaced as nullptr.
EffectVariable^ EffectTechnique::GetAnnotationByIndex( int index )
{
	ID3DX11EffectVariable* annotation = m_Pointer->GetAnnotationByIndex( index );
	return annotation == 0 ? nullptr : gcnew EffectVariable( annotation );
}
// Looks up an annotation by its ASCII name; returns nullptr when absent.
EffectVariable^ EffectTechnique::GetAnnotationByName( String^ name )
{
	// Encoding::GetBytes does not NUL-terminate, so the previous buffer was
	// not a valid C string (and pinning &nameBytes[0] threw for an empty
	// name). Copy into an array one byte longer; managed arrays are
	// zero-initialized, so the final byte is the terminator.
	array<unsigned char>^ ascii = System::Text::ASCIIEncoding::ASCII->GetBytes( name );
	array<unsigned char>^ nameBytes = gcnew array<unsigned char>( ascii->Length + 1 );
	ascii->CopyTo( nameBytes, 0 );
	pin_ptr<unsigned char> pinnedName = &nameBytes[ 0 ];
	ID3DX11EffectVariable* variable = m_Pointer->GetAnnotationByName( reinterpret_cast<LPCSTR>( pinnedName ) );
	if( variable == 0 )
		return nullptr;
	return gcnew EffectVariable( variable );
}
// Looks up the pass at the given index; a null native pointer means no such
// pass, surfaced as nullptr.
EffectPass^ EffectTechnique::GetPassByIndex( int index )
{
	ID3DX11EffectPass* pass = m_Pointer->GetPassByIndex( index );
	return pass == 0 ? nullptr : gcnew EffectPass( pass );
}
// Looks up a pass by its ASCII name; returns nullptr when absent.
EffectPass^ EffectTechnique::GetPassByName( String^ name )
{
	// Encoding::GetBytes does not NUL-terminate, so the previous buffer was
	// not a valid C string (and pinning &nameBytes[0] threw for an empty
	// name). Copy into an array one byte longer; managed arrays are
	// zero-initialized, so the final byte is the terminator.
	array<unsigned char>^ ascii = System::Text::ASCIIEncoding::ASCII->GetBytes( name );
	array<unsigned char>^ nameBytes = gcnew array<unsigned char>( ascii->Length + 1 );
	ascii->CopyTo( nameBytes, 0 );
	pin_ptr<unsigned char> pinnedName = &nameBytes[ 0 ];
	ID3DX11EffectPass* pass = m_Pointer->GetPassByName( reinterpret_cast<LPCSTR>( pinnedName ) );
	if( pass == 0 )
		return nullptr;
	return gcnew EffectPass( pass );
}
// Computes the technique's state block mask; records the HRESULT like the
// other wrappers and surfaces failure as nullptr.
StateBlockMask^ EffectTechnique::ComputeStateBlockMask()
{
	D3DX11_STATE_BLOCK_MASK mask;
	if( RECORD_D3D11( m_Pointer->ComputeStateBlockMask( &mask ) ).IsFailure )
		return nullptr;
	return gcnew StateBlockMask( mask );
}
}
}
| pandap/slimdx | source/direct3d11/EffectTechnique11.cpp | C++ | mit | 3,536 |
<?php
/*
* This file is part of Mustache.php.
*
* (c) 2012 Justin Hileman
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/**
 * Exercises Mustache_Logger_AbstractLogger: each PSR-3 style convenience
 * method must forward to log() with the matching level constant.
 *
 * @group unit
 */
class Mustache_Test_Logger_AbstractLoggerTest extends PHPUnit_Framework_TestCase
{
    public function testEverything()
    {
        $logger = new Mustache_Test_Logger_TestLogger;

        // Invoke every convenience method via dynamic dispatch, in severity
        // order, with a distinctive message each.
        $levels = array('emergency', 'alert', 'critical', 'error', 'warning', 'notice', 'info', 'debug');
        foreach ($levels as $level) {
            $logger->$level($level . ' message');
        }

        $expected = array(
            array(Mustache_Logger::EMERGENCY, 'emergency message', array()),
            array(Mustache_Logger::ALERT, 'alert message', array()),
            array(Mustache_Logger::CRITICAL, 'critical message', array()),
            array(Mustache_Logger::ERROR, 'error message', array()),
            array(Mustache_Logger::WARNING, 'warning message', array()),
            array(Mustache_Logger::NOTICE, 'notice message', array()),
            array(Mustache_Logger::INFO, 'info message', array()),
            array(Mustache_Logger::DEBUG, 'debug message', array()),
        );
        $this->assertEquals($expected, $logger->log);
    }
}
/**
 * Minimal in-memory logger used by the AbstractLogger test: records every
 * log() invocation instead of writing output anywhere.
 */
class Mustache_Test_Logger_TestLogger extends Mustache_Logger_AbstractLogger
{
    /**
     * Captured calls as array($level, $message, $context) triples, oldest first.
     */
    public $log = array();

    /**
     * Record a log entry with an arbitrary level.
     *
     * @param mixed  $level
     * @param string $message
     * @param array  $context
     */
    public function log($level, $message, array $context = array())
    {
        array_push($this->log, array($level, $message, $context));
    }
}
| spikeopaths/spikeopaths | wp-content/plugins/vimeography/vendor/mustache.php-master/test/Mustache/Test/Logger/AbstractLoggerTest.php | PHP | gpl-2.0 | 1,830 |
__author__ = 'bromix'

# NOTE: 'unittest' is imported for the (currently disabled) suite below.
import unittest

# The whole TestCipher suite is parked inside a module-level string so it is
# never collected: it depends on a local "html5player.js" fixture and on the
# YouTube cipher implementation of the time. Re-enable by removing the
# surrounding triple quotes and restoring the fixture file.
"""
class TestCipher(unittest.TestCase):
def setUp(self):
pass
def test_load_javascript(self):
cipher = Cipher()
java_script = ''
with open ("html5player.js", "r") as java_script_file:
java_script = java_script_file.read()
pass
json_script = cipher._load_java_script(java_script)
jse = JsonScriptEngine(json_script)
signature = jse.execute('299D15DC85986F6D8B7BC0E5655F758E6F14B1E33.50BCBEAE15DA02F131DAA96B640C57AAABAB20E20E2')
pass
pass
"""
/*******************************************************************************
* Copyright (c) 2013 Lectorius, Inc.
* Authors:
* Vijay Pandurangan (vijayp@mitro.co)
* Evan Jones (ej@mitro.co)
* Adam Hilss (ahilss@mitro.co)
*
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* You can contact the authors at inbound@mitro.co.
*******************************************************************************/
package co.mitro.core.server.data;
import java.util.Date;
import com.j256.ormlite.field.DataType;
import com.j256.ormlite.field.DatabaseField;
import com.j256.ormlite.table.DatabaseTable;
/**
 * ORMLite entity persisted to the {@code signups} table: one row per signup
 * request, capturing the email address, browser details, opaque tracking
 * data and the moment the row was created.
 */
@DatabaseTable(tableName="signups")
public class DBSignup {
  /** Creates a signup stamped with the current time. */
  public DBSignup() {
    timestamp = new Date();
  }

  /** Auto-generated primary key. */
  @DatabaseField(generatedId=true)
  private int id;

  /** Email address submitted with the signup. */
  @DatabaseField
  private String email;

  /** Browser name reported by the client. */
  @DatabaseField
  private String browser;

  /** Full User-Agent header; LONG_STRING because it can exceed varchar limits. */
  @DatabaseField(dataType=DataType.LONG_STRING)
  private String userAgent;

  /** Opaque tracking payload supplied by the client. */
  @DatabaseField(dataType=DataType.LONG_STRING)
  private String trackingData;

  /** Creation time, stored as epoch millis (DATE_LONG). */
  @DatabaseField(dataType=DataType.DATE_LONG)
  private Date timestamp;

  public int getId() {
    return id;
  }

  public void setId(int id) {
    this.id = id;
  }

  public String getEmail() {
    return email;
  }

  public void setEmail(String email) {
    this.email = email;
  }

  public String getBrowser() {
    return browser;
  }

  public void setBrowser(String browser) {
    this.browser = browser;
  }

  public String getUserAgent() {
    return userAgent;
  }

  public void setUserAgent(String userAgent) {
    this.userAgent = userAgent;
  }

  public String getTrackingData() {
    return trackingData;
  }

  public void setTrackingData(String trackingData) {
    this.trackingData = trackingData;
  }

  /**
   * Returns the creation time.
   *
   * @return a defensive copy (java.util.Date is mutable, so the internal
   *     value is never exposed directly), or {@code null} if unset
   */
  public Date getTimestamp() {
    return timestamp == null ? null : new Date(timestamp.getTime());
  }

  /**
   * Sets the creation time; the argument is defensively copied so later
   * mutation by the caller cannot change this entity.
   */
  public void setTimestamp(Date timestamp) {
    this.timestamp = timestamp == null ? null : new Date(timestamp.getTime());
  }
}
| imrehg/mitro | mitro-core/java/server/src/co/mitro/core/server/data/DBSignup.java | Java | gpl-3.0 | 2,541 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Dates on Sales Order',
'version': '1.1',
'category': 'Sales Management',
'description': """
Add additional date information to the sales order.
===================================================
You can add the following additional dates to a sales order:
------------------------------------------------------------
* Requested Date (will be used as the expected date on pickings)
* Commitment Date
* Effective Date
""",
'website': 'https://www.odoo.com/page/crm',
'depends': ['sale_stock'],
'data': ['sale_order_dates_view.xml'],
'demo': [],
'test': ['test/requested_date.yml'],
'installable': True,
'auto_install': False,
}
| minhphung171093/GreenERP_V9 | openerp/addons/sale_order_dates/__openerp__.py | Python | gpl-3.0 | 797 |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.watcher.actions.pagerduty;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.xpack.core.watcher.actions.Action;
import org.elasticsearch.xpack.core.watcher.actions.ExecutableAction;
import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext;
import org.elasticsearch.xpack.core.watcher.watch.Payload;
import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine;
import org.elasticsearch.xpack.watcher.notification.pagerduty.IncidentEvent;
import org.elasticsearch.xpack.watcher.notification.pagerduty.PagerDutyAccount;
import org.elasticsearch.xpack.watcher.notification.pagerduty.PagerDutyService;
import org.elasticsearch.xpack.watcher.notification.pagerduty.SentEvent;
import org.elasticsearch.xpack.watcher.support.Variables;
import java.util.Map;
public class ExecutablePagerDutyAction extends ExecutableAction<PagerDutyAction> {

    private final PagerDutyService pagerDutyService;
    private final TextTemplateEngine templateEngine;

    public ExecutablePagerDutyAction(PagerDutyAction action, Logger logger, PagerDutyService pagerDutyService,
                                     TextTemplateEngine templateEngine) {
        super(action, logger);
        this.templateEngine = templateEngine;
        this.pagerDutyService = pagerDutyService;
    }

    @Override
    public Action.Result execute(final String actionId, WatchExecutionContext ctx, Payload payload) throws Exception {
        PagerDutyAccount account = pagerDutyService.getAccount(action.event.account);
        if (account == null) {
            // the account configured for this action no longer exists
            throw new IllegalStateException("account [" + action.event.account + "] was not found. perhaps it was deleted");
        }

        // Render the incident event from the watch context plus account defaults.
        Map<String, Object> templateModel = Variables.createCtxParamsMap(ctx, payload);
        IncidentEvent incidentEvent = action.event.render(ctx.watch().id(), actionId, templateEngine, templateModel, account.getDefaults());

        // In simulation mode nothing is sent to PagerDuty.
        if (ctx.simulateAction(actionId)) {
            return new PagerDutyAction.Result.Simulated(incidentEvent);
        }

        SentEvent response = account.send(incidentEvent, payload, ctx.id().watchId());
        return new PagerDutyAction.Result.Executed(account.getName(), response);
    }
}
| gfyoung/elasticsearch | x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/actions/pagerduty/ExecutablePagerDutyAction.java | Java | apache-2.0 | 2,531 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.sling.maven.projectsupport;
import java.io.IOException;
import java.io.OutputStreamWriter;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.sling.maven.projectsupport.bundlelist.v1_0_0.io.xpp3.BundleListXpp3Writer;
/**
 * Maven goal that prints the initialized bundle list to the console as XML.
 */
@Mojo( name = "output-bundle-list", requiresDependencyResolution = ResolutionScope.TEST)
public class OutputBundleListMojo extends AbstractUsingBundleListMojo {

    @Override
    protected void executeWithArtifacts() throws MojoExecutionException, MojoFailureException {
        BundleListXpp3Writer writer = new BundleListXpp3Writer();
        try {
            // Flush (but do not close) the wrapper: OutputStreamWriter buffers
            // its output, so the original code could lose the tail of the
            // document; closing it would close System.out for the whole build.
            OutputStreamWriter out = new OutputStreamWriter(System.out);
            writer.write(out, getInitializedBundleList());
            out.flush();
        } catch (IOException e) {
            throw new MojoExecutionException("Unable to write bundle list", e);
        }
    }
}
| Nimco/sling | tooling/maven/maven-launchpad-plugin/src/main/java/org/apache/sling/maven/projectsupport/OutputBundleListMojo.java | Java | apache-2.0 | 1,853 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.applicationhistoryservice.webapp;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import org.apache.hadoop.yarn.server.webapp.AppBlock;
import org.apache.hadoop.yarn.server.webapp.WebPageUtils;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.YarnWebParams;
public class AppPage extends AHSView {

  @Override
  protected void preHead(Page.HTML<__> html) {
    commonPreHead(html);

    // Title is either an error hint (no app id on the request) or the app id.
    String appId = $(YarnWebParams.APPLICATION_ID);
    String title = appId.isEmpty()
        ? "Bad request: missing application ID"
        : join("Application ", appId);
    set(TITLE, title);

    // Configure the two datatables rendered by the app block.
    set(DATATABLES_ID, "attempts ResourceRequests");
    set(initID(DATATABLES, "attempts"), WebPageUtils.attemptsTableInit());
    setTableStyles(html, "attempts", ".queue {width:6em}", ".ui {width:8em}");
    setTableStyles(html, "ResourceRequests");

    set(YarnWebParams.WEB_UI_TYPE, YarnWebParams.APP_HISTORY_WEB_UI);
  }

  /** The application detail block renders the page body. */
  @Override
  protected Class<? extends SubView> content() {
    return AppBlock.class;
  }

  /**
   * Column definitions for the attempts table: natural sort with id
   * rendering for column 0, numeric sort with date rendering for column 1.
   */
  protected String getAttemptsTableColumnDefs() {
    return "[\n"
        + "{'sType':'natural', 'aTargets': [0]"
        + ", 'mRender': parseHadoopID }"
        + "\n, {'sType':'numeric', 'aTargets': [1]"
        + ", 'mRender': renderHadoopDate }]";
  }
}
| steveloughran/hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-applicationhistoryservice/src/main/java/org/apache/hadoop/yarn/server/applicationhistoryservice/webapp/AppPage.java | Java | apache-2.0 | 2,454 |
// Deep-import entry point: re-exports the InformationFilled32 icon component
// from the package root as this module's default export.
export { InformationFilled32 as default } from "../../";
| markogresak/DefinitelyTyped | types/carbon__icons-react/es/information--filled/32.d.ts | TypeScript | mit | 57 |
<?php
// Norwegian (bokmål) interface strings for the file manager.
// NOTE: the values below are user-visible runtime text and must not be
// altered here; only comments in this file are editable without changing
// behaviour. %s/%d placeholders are substituted at runtime.
define('lang_Select','Velg');
define('lang_Erase','Slett');
define('lang_Open','Åpne');
define('lang_Confirm_del','Er du sikker på at du vil slette denne filen?');
define('lang_All','Alle');
define('lang_Files','Filer');
define('lang_Images','Bilder');
define('lang_Archives','Arkiv');
define('lang_Error_Upload','Den opplastede filen overskrider maksimal tillatt størrelse.');
define('lang_Error_extension','Filtypen er ikke tillatt.');
define('lang_Upload_file','Last opp fil');
define('lang_Filters','Filter');
define('lang_Videos','Videoer');
define('lang_Music','Musikk');
define('lang_New_Folder','Ny mappe');
define('lang_Folder_Created','Mappe opprettet');
define('lang_Existing_Folder','Eksisterende mappe');
define('lang_Confirm_Folder_del','Er du sikker på at du vil slette mappen og alt innholdet?');
define('lang_Return_Files_List','Tilbake til filoversikten');
define('lang_Preview','Forhåndsvisning');
define('lang_Download','Last ned');
define('lang_Insert_Folder_Name','Gi mappen et navn:');
define('lang_Root','Rot');
define('lang_Rename','Gi nytt navn');
define('lang_Back','Tilbake');
define('lang_View','Visning');
define('lang_View_list','Listevisning');
define('lang_View_columns_list','Side ved side');
define('lang_View_boxes','Boksvisning');
define('lang_Toolbar','Verktøylinje');
define('lang_Actions','Gjøremål');
define('lang_Rename_existing_file','Filen er allerede opprettet');
define('lang_Rename_existing_folder','Mappen er allerede opprettet');
define('lang_Empty_name','Tomt navn');
define('lang_Text_filter','Tekst-filter');
define('lang_Swipe_help','Sveip filnavnet/mappenavnet for å vise alternativer');
// Upload dialog strings.
define('lang_Upload_base','Vanlig opplasting');
define('lang_Upload_java','Java-opplasting (store filer)');
define('lang_Upload_java_help',"Hvis java-appleten ikke lastes: 1. Sjekk om Java er installert, hvis ikke <a href='http://java.com/en/download/'>last ned Java</a> 2. Sjekk brannmur-innstillingene.");
define('lang_Upload_base_help',"Dra og slipp filen(e) i området over eller klikk (virker for moderne nettlesere). Ved bruk av gammel nettleser: Velg filen og klikk på knappen. Når opplastingen er ferdig, klikk på tilbake-knappen øverst.");
// File-listing column headers and metadata labels.
define('lang_Type_dir','Mappe');
define('lang_Type','Type');
define('lang_Dimension','Dimensjoner');
define('lang_Size','Størrelse');
define('lang_Date','Dato');
define('lang_Filename','Filnavn');
define('lang_Operations','Handlinger');
define('lang_Date_type','d.m.y');
define('lang_OK','OK');
define('lang_Cancel','Avbryt');
define('lang_Sorting','Sortering');
define('lang_Show_url','Vis URL');
define('lang_Extract','Pakk ut her');
define('lang_File_info','Fil-info');
define('lang_Edit_image','Rediger bilde');
define('lang_Duplicate','Duplikat');
define('lang_Folders', 'Mapper');
// Clipboard (copy/cut/paste) strings.
define('lang_Copy','Kopier');
define('lang_Cut','Klipp ut');
define('lang_Paste','Lim inn');
define('lang_CB', 'Utklippstavle'); // clipboard
define('lang_Paste_Here','Lim inn i denne mappen');
define('lang_Paste_Confirm','Er du sikker på at du vil lime inn i denne mappen? Dette vil overskrive eventuelle eksisterende filer eller mapper.');
define('lang_Paste_Failed','Lim inn feilet');
define('lang_Clear_Clipboard','Tøm utklippstavlen');
define('lang_Clear_Clipboard_Confirm','Er du sikker på at du vil tømme utklippstavlen?');
define('lang_Files_ON_Clipboard','Der er filer på utklippstavlen.');
define('lang_Copy_Cut_Size_Limit','De valgte filene/mappene er for store for %s. Grense: %d MB/operasjon'); // %s = cut or copy
define('lang_Copy_Cut_Count_Limit','Du valgte for mange filer/mapper for %s. Grense: %d filer/operasjon'); // %s = cut or copy
define('lang_Copy_Cut_Not_Allowed','Du har ikke lov til å %s filer.'); // %s(1) = cut or copy, %s(2) = files or folders
define('lang_Aviary_No_Save', 'Kunne ikke lagre bildet');
define('lang_Zip_No_Extract', 'Kunne ikke pakke ut. Filen er muligens ødelagt.');
define('lang_Zip_Invalid', 'Dette filetternavnet er ikke støttet. Gyldige filer: zip, gz, tar.');
define('lang_Dir_No_Write', 'Mappen du valgte er ikke skrivbar.');
define('lang_Function_Disabled', 'Funksjonen %s er blitt deaktivert av serveren.'); // %s = cut or copy
// File-permission dialog strings.
define('lang_File_Permission', 'Filrettigheter');
define('lang_File_Permission_Not_Allowed', 'Forandring av %s rettigheter er ikke tillatt.'); // %s = files or folders
define('lang_File_Permission_Recursive', 'Utfør rekursivt?');
define('lang_File_Permission_Wrong_Mode', "Filrettigheten er feil.");
define('lang_User', 'Bruker');
define('lang_Group', 'Gruppe');
define('lang_Yes', 'Ja');
define('lang_No', 'Nei');
// Language switching and file editing strings.
define('lang_Lang_Not_Found', 'Kunne ikke finne språk.');
define('lang_Lang_Change', 'Forandre språk');
define('lang_File_Not_Found', 'Fant ikke filen.');
define('lang_File_Open_Edit_Not_Allowed', 'Du har ikke tillatelse til å %s denne filen.'); // %s = open or edit
define('lang_Edit', 'Rediger');
define('lang_Edit_File', "Rediger filens innhold");
define('lang_File_Save_OK', "Filen ble lagret.");
define('lang_File_Save_Error', "Det oppstod en feil når filen ble lagret.");
define('lang_New_File','Ny fil');
define('lang_No_Extension','Du må legge til et fil-etternavn.');
define('lang_Valid_Extensions','Gyldige fil-etternavn: %s'); // %s = txt,log etc.
?>
// Deep-import entry point: re-exports the Train20 icon component from the
// package root as this module's default export.
export { Train20 as default } from "../../";
| georgemarshall/DefinitelyTyped | types/carbon__icons-react/es/train/20.d.ts | TypeScript | mit | 45 |
# -*- coding: utf-8 -*-
"""Parse database connection URLs (dj-database-url style) into Django
``DATABASES`` configuration dictionaries."""
import os

try:
    import urlparse  # Python 2
except ImportError:
    import urllib.parse as urlparse  # Python 3

# Register database schemes in URLs so urlparse recognises their netloc part.
urlparse.uses_netloc.append('postgres')
urlparse.uses_netloc.append('postgresql')
urlparse.uses_netloc.append('pgsql')
urlparse.uses_netloc.append('postgis')
urlparse.uses_netloc.append('mysql')
urlparse.uses_netloc.append('mysql2')
urlparse.uses_netloc.append('mysqlgis')
urlparse.uses_netloc.append('spatialite')
urlparse.uses_netloc.append('sqlite')

DEFAULT_ENV = 'DATABASE_URL'

# Maps URL schemes to Django database backend dotted paths.
SCHEMES = {
    'postgres': 'django.db.backends.postgresql_psycopg2',
    'postgresql': 'django.db.backends.postgresql_psycopg2',
    'pgsql': 'django.db.backends.postgresql_psycopg2',
    'postgis': 'django.contrib.gis.db.backends.postgis',
    'mysql': 'django.db.backends.mysql',
    'mysql2': 'django.db.backends.mysql',
    'mysqlgis': 'django.contrib.gis.db.backends.mysql',
    'spatialite': 'django.contrib.gis.db.backends.spatialite',
    'sqlite': 'django.db.backends.sqlite3',
}


def config(env=DEFAULT_ENV, default=None, engine=None):
    """Return a configured DATABASE dictionary from the named env variable.

    ``default`` is used when the variable is unset; ``engine`` overrides the
    backend derived from the URL scheme. Returns ``{}`` when no URL is found.
    """
    s = os.environ.get(env, default)
    if s:
        return parse(s, engine)
    return {}


def parse(url, engine=None):
    """Parse a database URL into a Django database settings dict.

    Percent-encoded characters in the username and password are decoded, so
    credentials containing e.g. '#', '@' or '/' round-trip correctly (the
    previous code returned them still encoded).
    """
    if url == 'sqlite://:memory:':
        # Special case: urlparse would choke trying to interpret "memory" as
        # a port number; no other settings are required for sqlite.
        return {
            'ENGINE': SCHEMES['sqlite'],
            'NAME': ':memory:',
        }

    config = {}
    url = urlparse.urlparse(url)

    # Remove query strings.
    path = url.path[1:]
    path = path.split('?', 2)[0]

    # sqlite with an empty path means an in-memory database (this matches
    # sqlalchemy's behaviour).
    if url.scheme == 'sqlite' and path == '':
        path = ':memory:'

    # Update with environment configuration. urlparse.unquote exists in both
    # the Python 2 urlparse module and Python 3's urllib.parse.
    config.update({
        'NAME': path or '',
        'USER': urlparse.unquote(url.username or ''),
        'PASSWORD': urlparse.unquote(url.password or ''),
        'HOST': url.hostname or '',
        'PORT': url.port or '',
    })

    if engine:
        config['ENGINE'] = engine
    elif url.scheme in SCHEMES:
        config['ENGINE'] = SCHEMES[url.scheme]

    return config
| ramcn/demo3 | venv/lib/python3.4/site-packages/dj_database_url.py | Python | mit | 2,475 |
// Deep-import entry point: re-exports the StarFilled32 icon component from
// the package root as this module's default export.
export { StarFilled32 as default } from "../../";
| georgemarshall/DefinitelyTyped | types/carbon__icons-react/es/star--filled/32.d.ts | TypeScript | mit | 50 |
// Deep-import entry point: re-exports the SkipBackOutlineFilled24 icon
// component from the package root as this module's default export.
export { SkipBackOutlineFilled24 as default } from "../../";
| markogresak/DefinitelyTyped | types/carbon__icons-react/es/skip--back--outline--filled/24.d.ts | TypeScript | mit | 61 |
<?php
/**
* @file
* Sample file for handling redirection from old to new URIs. Use an Apache
* rewrite rule (or equivalent) to map legacy requests to this file. To use,
* copy or symlink this file to the root of your drupal site. Customize this
* file to your needs.
*
* CREATE TABLE `migrate_source_uri_map` (
* `source_uri` varchar(255) NOT NULL DEFAULT '',
* `migration_name` varchar(255) NOT NULL,
* `source_id` int(11) NOT NULL, -- can be varchar for some migrations
* PRIMARY KEY (`source_uri`)
* )
*
*/
// For security, this script is disabled by default: as shipped, every
// request dies here. Remove the line below only after customizing the
// migration patterns and table names for your site.
die('Comment out this line when you are ready to use this script');
/**
 * Builds the absolute destination URL for a migrated item: looks up the
 * migration's URI pattern, substitutes the Drupal destination ID, and
 * prefers a path alias when one exists.
 */
function migrate_build_url($destid1, $migration_name) {
  global $base_url;

  // One ':source_id' pattern per migration that needs redirecting; extend
  // the 'migrate_patterns' variable as more legacy content is migrated.
  $patterns = variable_get('migrate_patterns', array(
    'BeerTerm' => 'taxonomy/term/:source_id',
    'BlogEntries' => 'node/:source_id',
    'Slideshows' => 'node/:source_id',
    'TagTerm' => 'taxonomy/term/:source_id',
  ));

  // Swap the destination ID into this migration's pattern.
  $destination_uri = str_replace(':source_id', $destid1, $patterns[$migration_name]);

  // For speed, consult the aliases table directly rather than bootstrapping
  // far enough to use the path API.
  $alias = db_query("SELECT alias FROM {url_alias} WHERE source = :destination_uri", array(':destination_uri' => $destination_uri))->fetchField();
  if ($alias) {
    $destination_uri = $alias;
  }

  // Absolute URL for the 301 redirect.
  return $base_url . '/' . $destination_uri;
}
define('DRUPAL_ROOT', getcwd());
require_once DRUPAL_ROOT . '/includes/bootstrap.inc';
// Only bootstrap to DB so we are as fast as possible. Much of the Drupal API
// is not available to us.
drupal_bootstrap(DRUPAL_BOOTSTRAP_DATABASE);
// You must populate this querystring param from a rewrite rule or $_SERVER
// On Apache, we could likely use _SERVER['REDIRECT_URL']. nginx?
if (!$source_uri = $_GET['migrate_source_uri']) {
print '$_GET[migrate_source_uri] was not found on the request.';
exit();
}
// This is a tall table mapping legacy URLs to source_id and migration_name.
// If you can already know the migration name and source_id based on the URI,
// then the first lookup is not needed.
$uri_table = variable_get('migrate_source_uri_table', 'migrate_source_uri_map');
if ($uri_map = db_query("SELECT migration_name, source_id FROM $uri_table WHERE source_uri = :source_uri", array(':source_uri' => $source_uri))->fetchObject()) {
// Hurray, we do recognize this URI.
// Consult migrate_map_x table to determine corresponding Drupal nid/tid/cid/etc.
$map_table = 'migrate_map_' . drupal_strtolower($uri_map->migration_name);
$sql = "SELECT destid1 FROM $map_table WHERE sourceid1 = :source_id";
if ($destid1 = $migrate_map = db_query($sql, array(':source_id' => $uri_map->source_id))->fetchField()) {
// Hurray. We already migrated this content. Go there.
// 301 = permanent redirect, so crawlers update their indexes.
header('Location: ' . migrate_build_url($destid1, $uri_map->migration_name), TRUE, 301);
}
else {
// We recognize URI but don't have the content in Drupal. Very unlikely.
// NOTE(review): falls through silently with no response body — presumably
// intentional for this sample, but a 404 here may be friendlier; confirm.
}
}
else {
// Can't find the source URI. TODO: Make nice 404 page.
header('Status=Not Found', TRUE, 404);
print 'Sorry folks. Park is closed.';
}
| abrege/lerouergue | sites/all/modules/migrate/uri_map_redirect.php | PHP | gpl-2.0 | 3,339 |
<?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* Quiz overview report upgrade script.
*
* @package quiz_overview
* @copyright 2008 Jamie Pratt
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
defined('MOODLE_INTERNAL') || die();
/**
 * Quiz overview report upgrade function.
 *
 * Standard Moodle plugin upgrade hook: performs any schema/data migration
 * steps needed when moving from $oldversion to the current plugin version.
 * This report currently has no steps, so only the release markers remain.
 *
 * @param number $oldversion the plugin version being upgraded from
 * @return bool true on success, as required by the upgrade API
 */
function xmldb_quiz_overview_upgrade($oldversion) {
global $CFG, $DB;

// Boilerplate kept for future steps: $dbman is the DDL manager that
// upgrade steps use to alter tables; it is intentionally unused right now.
$dbman = $DB->get_manager();

// Moodle v2.2.0 release upgrade line.
// Put any upgrade step following this.

// Moodle v2.3.0 release upgrade line
// Put any upgrade step following this

// Moodle v2.4.0 release upgrade line
// Put any upgrade step following this

// Moodle v2.5.0 release upgrade line.
// Put any upgrade step following this.

// Moodle v2.6.0 release upgrade line.
// Put any upgrade step following this.

// Moodle v2.7.0 release upgrade line.
// Put any upgrade step following this.

// Moodle v2.8.0 release upgrade line.
// Put any upgrade step following this.

// Moodle v2.9.0 release upgrade line.
// Put any upgrade step following this.

// Moodle v3.0.0 release upgrade line.
// Put any upgrade step following this.

return true;
}
| ernestovi/ups | moodle/mod/quiz/report/overview/db/upgrade.php | PHP | gpl-3.0 | 1,910 |
<?php
/**
* Zend Framework
*
* LICENSE
*
* This source file is subject to the new BSD license that is bundled
* with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* http://framework.zend.com/license/new-bsd
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@zend.com so we can send you a copy immediately.
*
* @category Zend
* @package Zend_Gdata
* @subpackage Media
* @copyright Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
* @license http://framework.zend.com/license/new-bsd New BSD License
* @version $Id: MediaDescription.php 20096 2010-01-06 02:05:09Z bkarwin $
*/
/**
* @see Zend_Gdata_App_Extension
*/
#require_once 'Zend/Gdata/App/Extension.php';
/**
 * Represents the media:description element
 *
 * @category   Zend
 * @package    Zend_Gdata
 * @subpackage Media
 * @copyright  Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 */
class Zend_Gdata_Media_Extension_MediaDescription extends Zend_Gdata_Extension
{
    protected $_rootElement = 'description';
    protected $_rootNamespace = 'media';

    /**
     * Value of the element's "type" attribute, or null when absent.
     *
     * @var string
     */
    protected $_type = null;

    /**
     * Creates a media:description element with optional text content and
     * type attribute.
     *
     * @param string $text
     * @param string $type
     */
    public function __construct($text = null, $type = null)
    {
        $this->registerAllNamespaces(Zend_Gdata_Media::$namespaces);
        parent::__construct();
        $this->_type = $type;
        $this->_text = $text;
    }

    /**
     * Builds a DOMElement representing this element and all child
     * properties, for serializing the entry back to XML (server updates or
     * application persistence).
     *
     * @param DOMDocument $doc The DOMDocument used to construct DOMElements
     * @return DOMElement The DOMElement representing this element and all
     *     child properties.
     */
    public function getDOM($doc = null, $majorVersion = 1, $minorVersion = null)
    {
        $element = parent::getDOM($doc, $majorVersion, $minorVersion);
        if ($this->_type !== null) {
            $element->setAttribute('type', $this->_type);
        }
        return $element;
    }

    /**
     * Maps a DOM attribute onto instance state; attributes other than
     * "type" are delegated to the parent implementation.
     *
     * @param DOMNode $attribute The DOMNode attribute needed to be handled
     */
    protected function takeAttributeFromDOM($attribute)
    {
        if ('type' === $attribute->localName) {
            $this->_type = $attribute->nodeValue;
        } else {
            parent::takeAttributeFromDOM($attribute);
        }
    }

    /**
     * @return string
     */
    public function getType()
    {
        return $this->_type;
    }

    /**
     * @param string $value
     * @return Zend_Gdata_Media_Extension_MediaDescription Provides a fluent interface
     */
    public function setType($value)
    {
        $this->_type = $value;
        return $this;
    }
}
| dbashyal/MagentoStarterBase | trunk/lib/Zend/Gdata/Media/Extension/MediaDescription.php | PHP | lgpl-3.0 | 3,353 |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using Microsoft.Azure;
using Microsoft.Azure.Management.Sql.LegacySdk.Models;
namespace Microsoft.Azure.Management.Sql.LegacySdk.Models
{
/// <summary>
/// Represents the response to an update recommended action request.
/// </summary>
/// <remarks>
/// NOTE: this class is tool-generated (see the file header); manual edits
/// will be overwritten when the client library is regenerated.
/// </remarks>
public partial class RecommendedActionUpdateResponse : AzureOperationResponse
{
// Backing field for Error.
private ErrorResponse _error;

/// <summary>
/// Optional. Error details if available.
/// </summary>
public ErrorResponse Error
{
get { return this._error; }
set { this._error = value; }
}

// Backing field for OperationStatusLink.
private string _operationStatusLink;

/// <summary>
/// Optional. Gets or sets the location header value.
/// </summary>
public string OperationStatusLink
{
get { return this._operationStatusLink; }
set { this._operationStatusLink = value; }
}

// Backing field for RecommendedAction.
private RecommendedAction _recommendedAction;

/// <summary>
/// Optional. Gets or sets the recommended action object.
/// </summary>
public RecommendedAction RecommendedAction
{
get { return this._recommendedAction; }
set { this._recommendedAction = value; }
}

// Backing field for RetryAfter. The unit is not stated here; presumably
// seconds (HTTP Retry-After convention) — confirm against the client that
// populates it.
private int _retryAfter;

/// <summary>
/// Optional. Gets or sets how long to wait before polling.
/// </summary>
public int RetryAfter
{
get { return this._retryAfter; }
set { this._retryAfter = value; }
}

// Backing field for Status.
private OperationStatus _status;

/// <summary>
/// Optional. Gets or sets the status of the operation.
/// </summary>
public OperationStatus Status
{
get { return this._status; }
set { this._status = value; }
}

/// <summary>
/// Initializes a new instance of the RecommendedActionUpdateResponse
/// class.
/// </summary>
public RecommendedActionUpdateResponse()
{
}
}
| atpham256/azure-powershell | src/ResourceManager/Sql/LegacySdk/Generated/Models/RecommendedActionUpdateResponse.cs | C# | apache-2.0 | 2,978 |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
/**
* Export Environment
*/
public enum ExportEnvironment {
    
    Citrix("citrix"),
    Vmware("vmware"),
    Microsoft("microsoft");

    /** The wire value sent to / received from the service for this constant. */
    private final String value;

    private ExportEnvironment(String value) {
        this.value = value;
    }

    @Override
    public String toString() {
        return this.value;
    }

    /**
     * Use this in place of valueOf.
     *
     * @param value
     *            real value
     * @return ExportEnvironment corresponding to the value
     * @throws IllegalArgumentException
     *             if the value is null, empty, or does not match any constant
     */
    public static ExportEnvironment fromValue(String value) {
        if (value == null || "".equals(value)) {
            throw new IllegalArgumentException("Value cannot be null or empty!");
        }
        // Scan the declared constants instead of a hand-maintained if-chain, so
        // new constants are picked up automatically.
        for (ExportEnvironment environment : values()) {
            if (environment.value.equals(value)) {
                return environment;
            }
        }
        throw new IllegalArgumentException("Cannot create enum from " + value + " value!");
    }
}
| mahaliachante/aws-sdk-java | aws-java-sdk-ec2/src/main/java/com/amazonaws/services/ec2/model/ExportEnvironment.java | Java | apache-2.0 | 1,740 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
/**
* Base class for responses that are node responses. These responses always contain the cluster
* name and the {@link NodesResponseHeader}.
*/
public abstract class NodesResponse {

    private final String clusterName;
    private final NodesResponseHeader header;

    protected NodesResponse(NodesResponseHeader header, String clusterName) {
        this.header = header;
        this.clusterName = clusterName;
    }

    /**
     * Returns the name of the cluster that all of the nodes belong to.
     *
     * @return Never {@code null}.
     */
    public String getClusterName() {
        return this.clusterName;
    }

    /**
     * Returns the header describing how many nodes the request ran on in
     * total, how many succeeded, and how many failed — including node-level
     * exceptions when relevant.
     */
    public NodesResponseHeader getHeader() {
        return this.header;
    }

    /**
     * Registers the response fields shared by every nodes response
     * ({@code _nodes} and {@code cluster_name}) on the given parser.
     * NOTE(review): the declaration order appears to map onto constructor
     * argument order — confirm before reordering these two calls.
     */
    public static <T extends NodesResponse> void declareCommonNodesResponseParsing(ConstructingObjectParser<T, Void> parser) {
        parser.declareObject(ConstructingObjectParser.constructorArg(), NodesResponseHeader::fromXContent, new ParseField("_nodes"));
        parser.declareString(ConstructingObjectParser.constructorArg(), new ParseField("cluster_name"));
    }
}
| gingerwizard/elasticsearch | client/rest-high-level/src/main/java/org/elasticsearch/client/NodesResponse.java | Java | apache-2.0 | 2,170 |
/*
* Copyright 2012-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.flow.core;
public abstract class Functor<T> extends Promise<T> {

    /** Holds, and forwards to callers, the promise produced by {@link #doExecute()}. */
    private final Settable<T> result = new Settable<T>();

    /**
     * Schedules a task that waits for the given promises to become ready and
     * then chains the promise returned by {@link #doExecute()} into this
     * functor's own result.
     *
     * @param waitFor promises that must be ready before {@link #doExecute()} runs
     */
    public Functor(Promise<?>... waitFor) {
        new Task(waitFor) {

            @Override
            protected void doExecute() throws Throwable {
                Promise<T> produced = Functor.this.doExecute();
                result.chain(produced);
            }
        };
    }

    /** Produces the promise whose readiness and value this functor mirrors. */
    protected abstract Promise<T> doExecute() throws Throwable;

    @Override
    public T get() {
        return result.get();
    }

    @Override
    public boolean isReady() {
        return result.isReady();
    }

    @Override
    protected void addCallback(Runnable callback) {
        result.addCallback(callback);
    }

    @Override
    protected void removeCallback(Runnable callback) {
        result.removeCallback(callback);
    }
}
| mahaliachante/aws-sdk-java | aws-java-sdk-swf-libraries/src/main/java/com/amazonaws/services/simpleworkflow/flow/core/Functor.java | Java | apache-2.0 | 1,448 |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cluster;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.cluster.node.stats.NodeStats;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsRequest;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.admin.cluster.node.stats.TransportNodesStatsAction;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.admin.indices.stats.ShardStats;
import org.elasticsearch.action.admin.indices.stats.TransportIndicesStatsAction;
import org.elasticsearch.cluster.block.ClusterBlockException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.node.settings.NodeSettingsService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.ReceiveTimeoutTransportException;
import java.util.*;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
/**
* InternalClusterInfoService provides the ClusterInfoService interface,
* routinely updated on a timer. The timer can be dynamically changed by
* setting the <code>cluster.info.update.interval</code> setting (defaulting
* to 30 seconds). The InternalClusterInfoService only runs on the master node.
* Listens for changes in the number of data nodes and immediately submits a
* ClusterInfoUpdateJob if a node has been added.
*
* Every time the timer runs, gathers information about the disk usage and
* shard sizes across the cluster.
*/
public class InternalClusterInfoService extends AbstractComponent implements ClusterInfoService, LocalNodeMasterListener, ClusterStateListener {

    public static final String INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL = "cluster.info.update.interval";
    public static final String INTERNAL_CLUSTER_INFO_TIMEOUT = "cluster.info.update.timeout";

    // How often a ClusterInfoUpdateJob runs; dynamically updatable (minimum of
    // 10s enforced in ApplySettings).
    private volatile TimeValue updateFrequency;

    // Latest snapshots of per-node disk usage and per-shard sizes. These maps
    // are replaced wholesale (never mutated in place), so concurrent readers
    // through getClusterInfo() always observe a consistent immutable snapshot.
    private volatile ImmutableMap<String, DiskUsage> usages;
    private volatile ImmutableMap<String, Long> shardSizes;
    private volatile boolean isMaster = false;
    private volatile boolean enabled;
    // Timeout applied to the node-stats and indices-stats requests of each update job.
    private volatile TimeValue fetchTimeout;
    private final TransportNodesStatsAction transportNodesStatsAction;
    private final TransportIndicesStatsAction transportIndicesStatsAction;
    private final ClusterService clusterService;
    private final ThreadPool threadPool;

    private final Set<Listener> listeners = Collections.synchronizedSet(new HashSet<Listener>());

    @Inject
    public InternalClusterInfoService(Settings settings, NodeSettingsService nodeSettingsService,
                                      TransportNodesStatsAction transportNodesStatsAction,
                                      TransportIndicesStatsAction transportIndicesStatsAction, ClusterService clusterService,
                                      ThreadPool threadPool) {
        super(settings);
        this.usages = ImmutableMap.of();
        this.shardSizes = ImmutableMap.of();
        this.transportNodesStatsAction = transportNodesStatsAction;
        this.transportIndicesStatsAction = transportIndicesStatsAction;
        this.clusterService = clusterService;
        this.threadPool = threadPool;
        this.updateFrequency = settings.getAsTime(INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, TimeValue.timeValueSeconds(30));
        this.fetchTimeout = settings.getAsTime(INTERNAL_CLUSTER_INFO_TIMEOUT, TimeValue.timeValueSeconds(15));
        this.enabled = settings.getAsBoolean(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, true);
        nodeSettingsService.addListener(new ApplySettings());

        // Add InternalClusterInfoService to listen for Master changes
        this.clusterService.add((LocalNodeMasterListener)this);
        // Add to listen for state changes (when nodes are added)
        this.clusterService.add((ClusterStateListener)this);
    }

    /**
     * Applies dynamic updates to the refresh interval, fetch timeout and
     * enabled flag. Rejects intervals below 10 seconds.
     */
    class ApplySettings implements NodeSettingsService.Listener {
        @Override
        public void onRefreshSettings(Settings settings) {
            TimeValue newUpdateFrequency = settings.getAsTime(INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, null);
            // ClusterInfoService is only enabled if the DiskThresholdDecider is enabled
            Boolean newEnabled = settings.getAsBoolean(DiskThresholdDecider.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED, null);

            if (newUpdateFrequency != null) {
                if (newUpdateFrequency.getMillis() < TimeValue.timeValueSeconds(10).getMillis()) {
                    logger.warn("[{}] set too low [{}] (< 10s)", INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, newUpdateFrequency);
                    throw new IllegalStateException("Unable to set " + INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL + " less than 10 seconds");
                } else {
                    logger.info("updating [{}] from [{}] to [{}]", INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL, updateFrequency, newUpdateFrequency);
                    InternalClusterInfoService.this.updateFrequency = newUpdateFrequency;
                }
            }

            TimeValue newFetchTimeout = settings.getAsTime(INTERNAL_CLUSTER_INFO_TIMEOUT, null);
            if (newFetchTimeout != null) {
                logger.info("updating fetch timeout [{}] from [{}] to [{}]", INTERNAL_CLUSTER_INFO_TIMEOUT, fetchTimeout, newFetchTimeout);
                InternalClusterInfoService.this.fetchTimeout = newFetchTimeout;
            }


            // We don't log about enabling it here, because the DiskThresholdDecider will already be logging about enable/disable
            if (newEnabled != null) {
                InternalClusterInfoService.this.enabled = newEnabled;
            }
        }
    }

    @Override
    public void onMaster() {
        this.isMaster = true;
        if (logger.isTraceEnabled()) {
            logger.trace("I have been elected master, scheduling a ClusterInfoUpdateJob");
        }
        try {
            // Submit a job that will start after DEFAULT_STARTING_INTERVAL, and reschedule itself after running
            threadPool.schedule(updateFrequency, executorName(), new SubmitReschedulingClusterInfoUpdatedJob());
            if (clusterService.state().getNodes().getDataNodes().size() > 1) {
                // Submit an info update job to be run immediately
                updateOnce();
            }
        } catch (EsRejectedExecutionException ex) {
            if (logger.isDebugEnabled()) {
                logger.debug("Couldn't schedule cluster info update task - node might be shutting down", ex);
            }
        }
    }

    // called from tests as well
    /**
     * will collect a fresh {@link ClusterInfo} from the nodes, without scheduling a future collection
     */
    void updateOnce() {
        threadPool.executor(executorName()).execute(new ClusterInfoUpdateJob(false));
    }

    @Override
    public void offMaster() {
        this.isMaster = false;
    }

    @Override
    public String executorName() {
        return ThreadPool.Names.MANAGEMENT;
    }

    @Override
    public void clusterChanged(ClusterChangedEvent event) {
        if (!this.enabled) {
            return;
        }

        // Check whether it was a data node that was added
        boolean dataNodeAdded = false;
        for (DiscoveryNode addedNode : event.nodesDelta().addedNodes()) {
            if (addedNode.dataNode()) {
                dataNodeAdded = true;
                break;
            }
        }

        if (this.isMaster && dataNodeAdded && clusterService.state().getNodes().getDataNodes().size() > 1) {
            if (logger.isDebugEnabled()) {
                logger.debug("data node was added, retrieving new cluster info");
            }
            updateOnce();
        }

        if (this.isMaster && event.nodesRemoved()) {
            for (DiscoveryNode removedNode : event.nodesDelta().removedNodes()) {
                if (removedNode.dataNode()) {
                    if (logger.isTraceEnabled()) {
                        logger.trace("Removing node from cluster info: {}", removedNode.getId());
                    }
                    // Copy-on-write removal so readers keep a consistent snapshot.
                    Map<String, DiskUsage> newUsages = new HashMap<>(usages);
                    newUsages.remove(removedNode.getId());
                    usages = ImmutableMap.copyOf(newUsages);
                }
            }
        }
    }

    @Override
    public ClusterInfo getClusterInfo() {
        return new ClusterInfo(usages, shardSizes);
    }

    @Override
    public void addListener(Listener listener) {
        this.listeners.add(listener);
    }

    /**
     * Class used to submit {@link ClusterInfoUpdateJob}s on the
     * {@link InternalClusterInfoService} threadpool, these jobs will
     * reschedule themselves by placing a new instance of this class onto the
     * scheduled threadpool.
     */
    public class SubmitReschedulingClusterInfoUpdatedJob implements Runnable {
        @Override
        public void run() {
            if (logger.isTraceEnabled()) {
                logger.trace("Submitting new rescheduling cluster info update job");
            }
            try {
                threadPool.executor(executorName()).execute(new ClusterInfoUpdateJob(true));
            } catch (EsRejectedExecutionException ex) {
                if (logger.isDebugEnabled()) {
                    logger.debug("Couldn't re-schedule cluster info update task - node might be shutting down", ex);
                }
            }
        }
    }

    /**
     * Retrieve the latest nodes stats, calling the listener when complete
     * @return a latch that can be used to wait for the nodes stats to complete if desired
     */
    protected CountDownLatch updateNodeStats(final ActionListener<NodesStatsResponse> listener) {
        final CountDownLatch latch = new CountDownLatch(1);
        final NodesStatsRequest nodesStatsRequest = new NodesStatsRequest("data:true");
        nodesStatsRequest.clear();
        nodesStatsRequest.fs(true);
        nodesStatsRequest.timeout(fetchTimeout);

        transportNodesStatsAction.execute(nodesStatsRequest, new LatchedActionListener<>(listener, latch));
        return latch;
    }

    /**
     * Retrieve the latest indices stats, calling the listener when complete
     * @return a latch that can be used to wait for the indices stats to complete if desired
     */
    protected CountDownLatch updateIndicesStats(final ActionListener<IndicesStatsResponse> listener) {
        final CountDownLatch latch = new CountDownLatch(1);
        final IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest();
        indicesStatsRequest.clear();
        indicesStatsRequest.store(true);

        transportIndicesStatsAction.execute(indicesStatsRequest, new LatchedActionListener<>(listener, latch));
        return latch;
    }

    /**
     * Runnable class that performs a {@link NodesStatsRequest} to retrieve
     * disk usages for nodes in the cluster and an {@link IndicesStatsRequest}
     * to retrieve the sizes of all shards to ensure they can fit on nodes
     * during shard balancing.
     */
    public class ClusterInfoUpdateJob implements Runnable {

        // This boolean is used to signal to the ClusterInfoUpdateJob that it
        // needs to reschedule itself to run again at a later time. It can be
        // set to false to only run once
        private final boolean reschedule;

        public ClusterInfoUpdateJob(boolean reschedule) {
            this.reschedule = reschedule;
        }

        @Override
        public void run() {
            if (logger.isTraceEnabled()) {
                logger.trace("Performing ClusterInfoUpdateJob");
            }

            if (isMaster && this.reschedule) {
                if (logger.isTraceEnabled()) {
                    logger.trace("Scheduling next run for updating cluster info in: {}", updateFrequency.toString());
                }
                try {
                    threadPool.schedule(updateFrequency, executorName(), new SubmitReschedulingClusterInfoUpdatedJob());
                } catch (EsRejectedExecutionException ex) {
                    logger.debug("Reschedule cluster info service was rejected", ex);
                }
            }

            if (!enabled) {
                // Short-circuit if not enabled
                if (logger.isTraceEnabled()) {
                    logger.trace("Skipping ClusterInfoUpdatedJob since it is disabled");
                }
                return;
            }

            CountDownLatch nodeLatch = updateNodeStats(new ActionListener<NodesStatsResponse>() {
                @Override
                public void onResponse(NodesStatsResponse nodeStatses) {
                    Map<String, DiskUsage> newUsages = new HashMap<>();
                    for (NodeStats nodeStats : nodeStatses.getNodes()) {
                        if (nodeStats.getFs() == null) {
                            logger.warn("Unable to retrieve node FS stats for {}", nodeStats.getNode().name());
                        } else {
                            // Sum available/total bytes across all data paths of the node.
                            long available = 0;
                            long total = 0;
                            for (FsInfo.Path info : nodeStats.getFs()) {
                                available += info.getAvailable().bytes();
                                total += info.getTotal().bytes();
                            }
                            String nodeId = nodeStats.getNode().id();
                            String nodeName = nodeStats.getNode().getName();
                            if (logger.isTraceEnabled()) {
                                logger.trace("node: [{}], total disk: {}, available disk: {}", nodeId, total, available);
                            }
                            newUsages.put(nodeId, new DiskUsage(nodeId, nodeName, total, available));
                        }
                    }
                    usages = ImmutableMap.copyOf(newUsages);
                }

                @Override
                public void onFailure(Throwable e) {
                    if (e instanceof ReceiveTimeoutTransportException) {
                        // NOTE(review): on a timeout the previous usages are deliberately
                        // retained; only non-timeout failures clear them — confirm intended.
                        logger.error("NodeStatsAction timed out for ClusterInfoUpdateJob (reason [{}])", e.getMessage());
                    } else {
                        if (e instanceof ClusterBlockException) {
                            if (logger.isTraceEnabled()) {
                                logger.trace("Failed to execute NodeStatsAction for ClusterInfoUpdateJob", e);
                            }
                        } else {
                            logger.warn("Failed to execute NodeStatsAction for ClusterInfoUpdateJob", e);
                        }
                        // we empty the usages list, to be safe - we don't know what's going on.
                        usages = ImmutableMap.of();
                    }
                }
            });

            CountDownLatch indicesLatch = updateIndicesStats(new ActionListener<IndicesStatsResponse>() {
                @Override
                public void onResponse(IndicesStatsResponse indicesStatsResponse) {
                    ShardStats[] stats = indicesStatsResponse.getShards();
                    HashMap<String, Long> newShardSizes = new HashMap<>();
                    for (ShardStats s : stats) {
                        long size = s.getStats().getStore().sizeInBytes();
                        String sid = ClusterInfo.shardIdentifierFromRouting(s.getShardRouting());
                        if (logger.isTraceEnabled()) {
                            logger.trace("shard: {} size: {}", sid, size);
                        }
                        newShardSizes.put(sid, size);
                    }
                    shardSizes = ImmutableMap.copyOf(newShardSizes);
                }

                @Override
                public void onFailure(Throwable e) {
                    if (e instanceof ReceiveTimeoutTransportException) {
                        logger.error("IndicesStatsAction timed out for ClusterInfoUpdateJob (reason [{}])", e.getMessage());
                    } else {
                        if (e instanceof ClusterBlockException) {
                            if (logger.isTraceEnabled()) {
                                logger.trace("Failed to execute IndicesStatsAction for ClusterInfoUpdateJob", e);
                            }
                        } else {
                            logger.warn("Failed to execute IndicesStatsAction for ClusterInfoUpdateJob", e);
                        }
                        // we empty the usages list, to be safe - we don't know what's going on.
                        shardSizes = ImmutableMap.of();
                    }
                }
            });

            try {
                nodeLatch.await(fetchTimeout.getMillis(), TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore interrupt status
                // Report the actual configured timeout instead of a hard-coded "15s",
                // which was misleading once cluster.info.update.timeout was changed.
                logger.warn("Failed to update node information for ClusterInfoUpdateJob within {} timeout", fetchTimeout);
            }

            try {
                indicesLatch.await(fetchTimeout.getMillis(), TimeUnit.MILLISECONDS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt(); // restore interrupt status
                logger.warn("Failed to update shard information for ClusterInfoUpdateJob within {} timeout", fetchTimeout);
            }

            // Notify listeners with the freshly built snapshot; a failing listener
            // must not prevent the others from being notified.
            for (Listener l : listeners) {
                try {
                    l.onNewInfo(getClusterInfo());
                } catch (Exception e) {
                    logger.info("Failed executing ClusterInfoService listener", e);
                }
            }
        }
    }
}
| ThalaivaStars/OrgRepo1 | core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java | Java | apache-2.0 | 19,259 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.index.lucene.util;
import java.io.Reader;
import java.util.Arrays;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.util.CharFilterFactory;
import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.analysis.util.TokenizerFactory;
/**
* An analyzer that uses a tokenizer and a list of token filters to
* create a TokenStream. Taken from org.apache.solr.analysis.TokenizerChain
*/
public final class TokenizerChain extends Analyzer {
    /** Char filter factories applied to the reader before tokenization; never null (may be empty). */
    private final CharFilterFactory[] charFilters;
    /** Factory producing the Tokenizer at the head of the chain. */
    private final TokenizerFactory tokenizer;
    /** Token filter factories applied, in order, after the tokenizer; never null (may be empty). */
    private final TokenFilterFactory[] filters;

    public TokenizerChain(TokenizerFactory tokenizer) {
        this(null, tokenizer, null);
    }

    public TokenizerChain(TokenizerFactory tokenizer, TokenFilterFactory[] filters) {
        this(null, tokenizer, filters);
    }

    public TokenizerChain(CharFilterFactory[] charFilters, TokenizerFactory tokenizer, TokenFilterFactory[] filters) {
        // Normalize null to an empty array (as was already done for filters):
        // the two convenience constructors pass null char filters, which made
        // getCharFilters() and toString() throw NullPointerException.
        this.charFilters = charFilters == null ? new CharFilterFactory[0] : charFilters;
        this.tokenizer = tokenizer;
        this.filters = filters == null ? new TokenFilterFactory[0] : filters;
    }

    @Override
    public Reader initReader(String fieldName, Reader reader) {
        if (charFilters.length > 0) {
            Reader cs = reader;
            for (CharFilterFactory charFilter : charFilters) {
                cs = charFilter.create(cs);
            }
            reader = cs;
        }
        return reader;
    }

    //Mostly required for testing purpose
    public CharFilterFactory[] getCharFilters() {
        // Defensive copy so callers cannot mutate internal state.
        return Arrays.copyOf(charFilters, charFilters.length);
    }

    public TokenizerFactory getTokenizer() {
        return tokenizer;
    }

    public TokenFilterFactory[] getFilters() {
        return Arrays.copyOf(filters, filters.length);
    }

    @Override
    protected TokenStreamComponents createComponents(String fieldName, Reader reader) {
        // Build the chain: tokenizer first, then each token filter in order.
        Tokenizer tk = tokenizer.create(reader);
        TokenStream ts = tk;
        for (TokenFilterFactory filter : filters) {
            ts = filter.create(ts);
        }
        return new TokenStreamComponents(tk, ts);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("TokenizerChain(");
        for (CharFilterFactory filter : charFilters) {
            sb.append(filter);
            sb.append(", ");
        }
        sb.append(tokenizer);
        for (TokenFilterFactory filter : filters) {
            sb.append(", ");
            sb.append(filter);
        }
        sb.append(')');
        return sb.toString();
    }
}
| anchela/jackrabbit-oak | oak-lucene/src/main/java/org/apache/jackrabbit/oak/plugins/index/lucene/util/TokenizerChain.java | Java | apache-2.0 | 3,587 |
/*
* Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.model;
import java.io.Serializable;
/**
* <p>
* Contains information about a workflow execution.
* </p>
*/
public class WorkflowExecutionInfo implements Serializable, Cloneable {
/**
* The workflow execution this information is about.
*/
private WorkflowExecution execution;
/**
* The type of the workflow execution.
*/
private WorkflowType workflowType;
/**
* The time when the execution was started.
*/
private java.util.Date startTimestamp;
/**
* The time when the workflow execution was closed. Set only if the
* execution status is CLOSED.
*/
private java.util.Date closeTimestamp;
/**
* The current status of the execution.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>OPEN, CLOSED
*/
private String executionStatus;
/**
* If the execution status is closed then this specifies how the
* execution was closed: <ul> <li><code>COMPLETED</code>: the execution
* was successfully completed.</li> <li><code>CANCELED</code>: the
* execution was canceled.Cancellation allows the implementation to
* gracefully clean up before the execution is closed.</li>
* <li><code>TERMINATED</code>: the execution was force terminated.</li>
* <li><code>FAILED</code>: the execution failed to complete.</li>
* <li><code>TIMED_OUT</code>: the execution did not complete in the
* alloted time and was automatically timed out.</li>
* <li><code>CONTINUED_AS_NEW</code>: the execution is logically
* continued. This means the current execution was completed and a new
* execution was started to carry on the workflow.</li> </ul>
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>COMPLETED, FAILED, CANCELED, TERMINATED, CONTINUED_AS_NEW, TIMED_OUT
*/
private String closeStatus;
/**
* If this workflow execution is a child of another execution then
* contains the workflow execution that started this execution.
*/
private WorkflowExecution parent;
/**
* The list of tags associated with the workflow execution. Tags can be
* used to identify and list workflow executions of interest through the
* visibility APIs. A workflow execution can have a maximum of 5 tags.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 5<br/>
*/
private com.amazonaws.internal.ListWithAutoConstructFlag<String> tagList;
/**
* Set to true if a cancellation is requested for this workflow
* execution.
*/
private Boolean cancelRequested;
/**
* The workflow execution this information is about.
*
* @return The workflow execution this information is about.
*/
public WorkflowExecution getExecution() {
return execution;
}
/**
* The workflow execution this information is about.
*
* @param execution The workflow execution this information is about.
*/
public void setExecution(WorkflowExecution execution) {
this.execution = execution;
}
/**
* The workflow execution this information is about.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param execution The workflow execution this information is about.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public WorkflowExecutionInfo withExecution(WorkflowExecution execution) {
this.execution = execution;
return this;
}
/**
* The type of the workflow execution.
*
* @return The type of the workflow execution.
*/
public WorkflowType getWorkflowType() {
return workflowType;
}
/**
* The type of the workflow execution.
*
* @param workflowType The type of the workflow execution.
*/
public void setWorkflowType(WorkflowType workflowType) {
this.workflowType = workflowType;
}
/**
* The type of the workflow execution.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param workflowType The type of the workflow execution.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public WorkflowExecutionInfo withWorkflowType(WorkflowType workflowType) {
this.workflowType = workflowType;
return this;
}
/**
* The time when the execution was started.
*
* @return The time when the execution was started.
*/
public java.util.Date getStartTimestamp() {
return startTimestamp;
}
/**
* The time when the execution was started.
*
* @param startTimestamp The time when the execution was started.
*/
public void setStartTimestamp(java.util.Date startTimestamp) {
this.startTimestamp = startTimestamp;
}
/**
* The time when the execution was started.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param startTimestamp The time when the execution was started.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public WorkflowExecutionInfo withStartTimestamp(java.util.Date startTimestamp) {
this.startTimestamp = startTimestamp;
return this;
}
/**
* The time when the workflow execution was closed. Set only if the
* execution status is CLOSED.
*
* @return The time when the workflow execution was closed. Set only if the
* execution status is CLOSED.
*/
public java.util.Date getCloseTimestamp() {
return closeTimestamp;
}
/**
* The time when the workflow execution was closed. Set only if the
* execution status is CLOSED.
*
* @param closeTimestamp The time when the workflow execution was closed. Set only if the
* execution status is CLOSED.
*/
public void setCloseTimestamp(java.util.Date closeTimestamp) {
this.closeTimestamp = closeTimestamp;
}
/**
* The time when the workflow execution was closed. Set only if the
* execution status is CLOSED.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param closeTimestamp The time when the workflow execution was closed. Set only if the
* execution status is CLOSED.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public WorkflowExecutionInfo withCloseTimestamp(java.util.Date closeTimestamp) {
this.closeTimestamp = closeTimestamp;
return this;
}
/**
* The current status of the execution.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>OPEN, CLOSED
*
* @return The current status of the execution.
*
* @see ExecutionStatus
*/
public String getExecutionStatus() {
return executionStatus;
}
/**
* The current status of the execution.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>OPEN, CLOSED
*
* @param executionStatus The current status of the execution.
*
* @see ExecutionStatus
*/
public void setExecutionStatus(String executionStatus) {
this.executionStatus = executionStatus;
}
/**
* The current status of the execution.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>OPEN, CLOSED
*
* @param executionStatus The current status of the execution.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see ExecutionStatus
*/
public WorkflowExecutionInfo withExecutionStatus(String executionStatus) {
this.executionStatus = executionStatus;
return this;
}
/**
* The current status of the execution.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>OPEN, CLOSED
*
* @param executionStatus The current status of the execution.
*
* @see ExecutionStatus
*/
public void setExecutionStatus(ExecutionStatus executionStatus) {
this.executionStatus = executionStatus.toString();
}
/**
* The current status of the execution.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>OPEN, CLOSED
*
* @param executionStatus The current status of the execution.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see ExecutionStatus
*/
public WorkflowExecutionInfo withExecutionStatus(ExecutionStatus executionStatus) {
this.executionStatus = executionStatus.toString();
return this;
}
/**
* If the execution status is closed then this specifies how the
* execution was closed: <ul> <li><code>COMPLETED</code>: the execution
* was successfully completed.</li> <li><code>CANCELED</code>: the
* execution was canceled.Cancellation allows the implementation to
* gracefully clean up before the execution is closed.</li>
* <li><code>TERMINATED</code>: the execution was force terminated.</li>
* <li><code>FAILED</code>: the execution failed to complete.</li>
* <li><code>TIMED_OUT</code>: the execution did not complete in the
* alloted time and was automatically timed out.</li>
* <li><code>CONTINUED_AS_NEW</code>: the execution is logically
* continued. This means the current execution was completed and a new
* execution was started to carry on the workflow.</li> </ul>
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>COMPLETED, FAILED, CANCELED, TERMINATED, CONTINUED_AS_NEW, TIMED_OUT
*
* @return If the execution status is closed then this specifies how the
* execution was closed: <ul> <li><code>COMPLETED</code>: the execution
* was successfully completed.</li> <li><code>CANCELED</code>: the
* execution was canceled.Cancellation allows the implementation to
* gracefully clean up before the execution is closed.</li>
* <li><code>TERMINATED</code>: the execution was force terminated.</li>
* <li><code>FAILED</code>: the execution failed to complete.</li>
* <li><code>TIMED_OUT</code>: the execution did not complete in the
* alloted time and was automatically timed out.</li>
* <li><code>CONTINUED_AS_NEW</code>: the execution is logically
* continued. This means the current execution was completed and a new
* execution was started to carry on the workflow.</li> </ul>
*
* @see CloseStatus
*/
public String getCloseStatus() {
return closeStatus;
}
/**
* If the execution status is closed then this specifies how the
* execution was closed: <ul> <li><code>COMPLETED</code>: the execution
* was successfully completed.</li> <li><code>CANCELED</code>: the
* execution was canceled.Cancellation allows the implementation to
* gracefully clean up before the execution is closed.</li>
* <li><code>TERMINATED</code>: the execution was force terminated.</li>
* <li><code>FAILED</code>: the execution failed to complete.</li>
* <li><code>TIMED_OUT</code>: the execution did not complete in the
* alloted time and was automatically timed out.</li>
* <li><code>CONTINUED_AS_NEW</code>: the execution is logically
* continued. This means the current execution was completed and a new
* execution was started to carry on the workflow.</li> </ul>
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>COMPLETED, FAILED, CANCELED, TERMINATED, CONTINUED_AS_NEW, TIMED_OUT
*
* @param closeStatus If the execution status is closed then this specifies how the
* execution was closed: <ul> <li><code>COMPLETED</code>: the execution
* was successfully completed.</li> <li><code>CANCELED</code>: the
* execution was canceled.Cancellation allows the implementation to
* gracefully clean up before the execution is closed.</li>
* <li><code>TERMINATED</code>: the execution was force terminated.</li>
* <li><code>FAILED</code>: the execution failed to complete.</li>
* <li><code>TIMED_OUT</code>: the execution did not complete in the
* alloted time and was automatically timed out.</li>
* <li><code>CONTINUED_AS_NEW</code>: the execution is logically
* continued. This means the current execution was completed and a new
* execution was started to carry on the workflow.</li> </ul>
*
* @see CloseStatus
*/
public void setCloseStatus(String closeStatus) {
this.closeStatus = closeStatus;
}
/**
* If the execution status is closed then this specifies how the
* execution was closed: <ul> <li><code>COMPLETED</code>: the execution
* was successfully completed.</li> <li><code>CANCELED</code>: the
* execution was canceled.Cancellation allows the implementation to
* gracefully clean up before the execution is closed.</li>
* <li><code>TERMINATED</code>: the execution was force terminated.</li>
* <li><code>FAILED</code>: the execution failed to complete.</li>
* <li><code>TIMED_OUT</code>: the execution did not complete in the
* alloted time and was automatically timed out.</li>
* <li><code>CONTINUED_AS_NEW</code>: the execution is logically
* continued. This means the current execution was completed and a new
* execution was started to carry on the workflow.</li> </ul>
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>COMPLETED, FAILED, CANCELED, TERMINATED, CONTINUED_AS_NEW, TIMED_OUT
*
* @param closeStatus If the execution status is closed then this specifies how the
* execution was closed: <ul> <li><code>COMPLETED</code>: the execution
* was successfully completed.</li> <li><code>CANCELED</code>: the
* execution was canceled.Cancellation allows the implementation to
* gracefully clean up before the execution is closed.</li>
* <li><code>TERMINATED</code>: the execution was force terminated.</li>
* <li><code>FAILED</code>: the execution failed to complete.</li>
* <li><code>TIMED_OUT</code>: the execution did not complete in the
* alloted time and was automatically timed out.</li>
* <li><code>CONTINUED_AS_NEW</code>: the execution is logically
* continued. This means the current execution was completed and a new
* execution was started to carry on the workflow.</li> </ul>
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see CloseStatus
*/
public WorkflowExecutionInfo withCloseStatus(String closeStatus) {
this.closeStatus = closeStatus;
return this;
}
/**
* If the execution status is closed then this specifies how the
* execution was closed: <ul> <li><code>COMPLETED</code>: the execution
* was successfully completed.</li> <li><code>CANCELED</code>: the
* execution was canceled.Cancellation allows the implementation to
* gracefully clean up before the execution is closed.</li>
* <li><code>TERMINATED</code>: the execution was force terminated.</li>
* <li><code>FAILED</code>: the execution failed to complete.</li>
* <li><code>TIMED_OUT</code>: the execution did not complete in the
* alloted time and was automatically timed out.</li>
* <li><code>CONTINUED_AS_NEW</code>: the execution is logically
* continued. This means the current execution was completed and a new
* execution was started to carry on the workflow.</li> </ul>
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>COMPLETED, FAILED, CANCELED, TERMINATED, CONTINUED_AS_NEW, TIMED_OUT
*
* @param closeStatus If the execution status is closed then this specifies how the
* execution was closed: <ul> <li><code>COMPLETED</code>: the execution
* was successfully completed.</li> <li><code>CANCELED</code>: the
* execution was canceled.Cancellation allows the implementation to
* gracefully clean up before the execution is closed.</li>
* <li><code>TERMINATED</code>: the execution was force terminated.</li>
* <li><code>FAILED</code>: the execution failed to complete.</li>
* <li><code>TIMED_OUT</code>: the execution did not complete in the
* alloted time and was automatically timed out.</li>
* <li><code>CONTINUED_AS_NEW</code>: the execution is logically
* continued. This means the current execution was completed and a new
* execution was started to carry on the workflow.</li> </ul>
*
* @see CloseStatus
*/
public void setCloseStatus(CloseStatus closeStatus) {
this.closeStatus = closeStatus.toString();
}
/**
* If the execution status is closed then this specifies how the
* execution was closed: <ul> <li><code>COMPLETED</code>: the execution
* was successfully completed.</li> <li><code>CANCELED</code>: the
* execution was canceled.Cancellation allows the implementation to
* gracefully clean up before the execution is closed.</li>
* <li><code>TERMINATED</code>: the execution was force terminated.</li>
* <li><code>FAILED</code>: the execution failed to complete.</li>
* <li><code>TIMED_OUT</code>: the execution did not complete in the
* alloted time and was automatically timed out.</li>
* <li><code>CONTINUED_AS_NEW</code>: the execution is logically
* continued. This means the current execution was completed and a new
* execution was started to carry on the workflow.</li> </ul>
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Allowed Values: </b>COMPLETED, FAILED, CANCELED, TERMINATED, CONTINUED_AS_NEW, TIMED_OUT
*
* @param closeStatus If the execution status is closed then this specifies how the
* execution was closed: <ul> <li><code>COMPLETED</code>: the execution
* was successfully completed.</li> <li><code>CANCELED</code>: the
* execution was canceled.Cancellation allows the implementation to
* gracefully clean up before the execution is closed.</li>
* <li><code>TERMINATED</code>: the execution was force terminated.</li>
* <li><code>FAILED</code>: the execution failed to complete.</li>
* <li><code>TIMED_OUT</code>: the execution did not complete in the
* alloted time and was automatically timed out.</li>
* <li><code>CONTINUED_AS_NEW</code>: the execution is logically
* continued. This means the current execution was completed and a new
* execution was started to carry on the workflow.</li> </ul>
*
* @return A reference to this updated object so that method calls can be chained
* together.
*
* @see CloseStatus
*/
public WorkflowExecutionInfo withCloseStatus(CloseStatus closeStatus) {
this.closeStatus = closeStatus.toString();
return this;
}
/**
* If this workflow execution is a child of another execution then
* contains the workflow execution that started this execution.
*
* @return If this workflow execution is a child of another execution then
* contains the workflow execution that started this execution.
*/
public WorkflowExecution getParent() {
return parent;
}
/**
* If this workflow execution is a child of another execution then
* contains the workflow execution that started this execution.
*
* @param parent If this workflow execution is a child of another execution then
* contains the workflow execution that started this execution.
*/
public void setParent(WorkflowExecution parent) {
this.parent = parent;
}
/**
* If this workflow execution is a child of another execution then
* contains the workflow execution that started this execution.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param parent If this workflow execution is a child of another execution then
* contains the workflow execution that started this execution.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public WorkflowExecutionInfo withParent(WorkflowExecution parent) {
this.parent = parent;
return this;
}
/**
* The list of tags associated with the workflow execution. Tags can be
* used to identify and list workflow executions of interest through the
* visibility APIs. A workflow execution can have a maximum of 5 tags.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 5<br/>
*
* @return The list of tags associated with the workflow execution. Tags can be
* used to identify and list workflow executions of interest through the
* visibility APIs. A workflow execution can have a maximum of 5 tags.
*/
public java.util.List<String> getTagList() {
return tagList;
}
/**
* The list of tags associated with the workflow execution. Tags can be
* used to identify and list workflow executions of interest through the
* visibility APIs. A workflow execution can have a maximum of 5 tags.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 5<br/>
*
* @param tagList The list of tags associated with the workflow execution. Tags can be
* used to identify and list workflow executions of interest through the
* visibility APIs. A workflow execution can have a maximum of 5 tags.
*/
public void setTagList(java.util.Collection<String> tagList) {
if (tagList == null) {
this.tagList = null;
return;
}
com.amazonaws.internal.ListWithAutoConstructFlag<String> tagListCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(tagList.size());
tagListCopy.addAll(tagList);
this.tagList = tagListCopy;
}
/**
* The list of tags associated with the workflow execution. Tags can be
* used to identify and list workflow executions of interest through the
* visibility APIs. A workflow execution can have a maximum of 5 tags.
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if
* any). Use {@link #setTagList(java.util.Collection)} or {@link
* #withTagList(java.util.Collection)} if you want to override the
* existing values.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 5<br/>
*
* @param tagList The list of tags associated with the workflow execution. Tags can be
* used to identify and list workflow executions of interest through the
* visibility APIs. A workflow execution can have a maximum of 5 tags.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public WorkflowExecutionInfo withTagList(String... tagList) {
if (getTagList() == null) setTagList(new java.util.ArrayList<String>(tagList.length));
for (String value : tagList) {
getTagList().add(value);
}
return this;
}
/**
* The list of tags associated with the workflow execution. Tags can be
* used to identify and list workflow executions of interest through the
* visibility APIs. A workflow execution can have a maximum of 5 tags.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Length: </b>0 - 5<br/>
*
* @param tagList The list of tags associated with the workflow execution. Tags can be
* used to identify and list workflow executions of interest through the
* visibility APIs. A workflow execution can have a maximum of 5 tags.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public WorkflowExecutionInfo withTagList(java.util.Collection<String> tagList) {
if (tagList == null) {
this.tagList = null;
} else {
com.amazonaws.internal.ListWithAutoConstructFlag<String> tagListCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(tagList.size());
tagListCopy.addAll(tagList);
this.tagList = tagListCopy;
}
return this;
}
/**
* Set to true if a cancellation is requested for this workflow
* execution.
*
* @return Set to true if a cancellation is requested for this workflow
* execution.
*/
public Boolean isCancelRequested() {
return cancelRequested;
}
/**
* Set to true if a cancellation is requested for this workflow
* execution.
*
* @param cancelRequested Set to true if a cancellation is requested for this workflow
* execution.
*/
public void setCancelRequested(Boolean cancelRequested) {
this.cancelRequested = cancelRequested;
}
/**
* Set to true if a cancellation is requested for this workflow
* execution.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param cancelRequested Set to true if a cancellation is requested for this workflow
* execution.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public WorkflowExecutionInfo withCancelRequested(Boolean cancelRequested) {
this.cancelRequested = cancelRequested;
return this;
}
/**
* Set to true if a cancellation is requested for this workflow
* execution.
*
* @return Set to true if a cancellation is requested for this workflow
* execution.
*/
public Boolean getCancelRequested() {
return cancelRequested;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getExecution() != null) sb.append("Execution: " + getExecution() + ",");
if (getWorkflowType() != null) sb.append("WorkflowType: " + getWorkflowType() + ",");
if (getStartTimestamp() != null) sb.append("StartTimestamp: " + getStartTimestamp() + ",");
if (getCloseTimestamp() != null) sb.append("CloseTimestamp: " + getCloseTimestamp() + ",");
if (getExecutionStatus() != null) sb.append("ExecutionStatus: " + getExecutionStatus() + ",");
if (getCloseStatus() != null) sb.append("CloseStatus: " + getCloseStatus() + ",");
if (getParent() != null) sb.append("Parent: " + getParent() + ",");
if (getTagList() != null) sb.append("TagList: " + getTagList() + ",");
if (isCancelRequested() != null) sb.append("CancelRequested: " + isCancelRequested() );
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getExecution() == null) ? 0 : getExecution().hashCode());
hashCode = prime * hashCode + ((getWorkflowType() == null) ? 0 : getWorkflowType().hashCode());
hashCode = prime * hashCode + ((getStartTimestamp() == null) ? 0 : getStartTimestamp().hashCode());
hashCode = prime * hashCode + ((getCloseTimestamp() == null) ? 0 : getCloseTimestamp().hashCode());
hashCode = prime * hashCode + ((getExecutionStatus() == null) ? 0 : getExecutionStatus().hashCode());
hashCode = prime * hashCode + ((getCloseStatus() == null) ? 0 : getCloseStatus().hashCode());
hashCode = prime * hashCode + ((getParent() == null) ? 0 : getParent().hashCode());
hashCode = prime * hashCode + ((getTagList() == null) ? 0 : getTagList().hashCode());
hashCode = prime * hashCode + ((isCancelRequested() == null) ? 0 : isCancelRequested().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof WorkflowExecutionInfo == false) return false;
WorkflowExecutionInfo other = (WorkflowExecutionInfo)obj;
if (other.getExecution() == null ^ this.getExecution() == null) return false;
if (other.getExecution() != null && other.getExecution().equals(this.getExecution()) == false) return false;
if (other.getWorkflowType() == null ^ this.getWorkflowType() == null) return false;
if (other.getWorkflowType() != null && other.getWorkflowType().equals(this.getWorkflowType()) == false) return false;
if (other.getStartTimestamp() == null ^ this.getStartTimestamp() == null) return false;
if (other.getStartTimestamp() != null && other.getStartTimestamp().equals(this.getStartTimestamp()) == false) return false;
if (other.getCloseTimestamp() == null ^ this.getCloseTimestamp() == null) return false;
if (other.getCloseTimestamp() != null && other.getCloseTimestamp().equals(this.getCloseTimestamp()) == false) return false;
if (other.getExecutionStatus() == null ^ this.getExecutionStatus() == null) return false;
if (other.getExecutionStatus() != null && other.getExecutionStatus().equals(this.getExecutionStatus()) == false) return false;
if (other.getCloseStatus() == null ^ this.getCloseStatus() == null) return false;
if (other.getCloseStatus() != null && other.getCloseStatus().equals(this.getCloseStatus()) == false) return false;
if (other.getParent() == null ^ this.getParent() == null) return false;
if (other.getParent() != null && other.getParent().equals(this.getParent()) == false) return false;
if (other.getTagList() == null ^ this.getTagList() == null) return false;
if (other.getTagList() != null && other.getTagList().equals(this.getTagList()) == false) return false;
if (other.isCancelRequested() == null ^ this.isCancelRequested() == null) return false;
if (other.isCancelRequested() != null && other.isCancelRequested().equals(this.isCancelRequested()) == false) return false;
return true;
}
@Override
public WorkflowExecutionInfo clone() {
try {
return (WorkflowExecutionInfo) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!",
e);
}
}
}
| priyatransbit/aws-sdk-java | aws-java-sdk-simpleworkflow/src/main/java/com/amazonaws/services/simpleworkflow/model/WorkflowExecutionInfo.java | Java | apache-2.0 | 33,107 |
import { DynamsoftEnums as Dynamsoft } from "./Dynamsoft.Enum";
import { WebTwainUtil } from "./WebTwain.Util";
export interface WebTwainIO extends WebTwainUtil {
    /**
     * The password used to log into the FTP server.
     */
    FTPPassword: string;
    /**
     * The port used to connect to the FTP server.
     */
    FTPPort: number;
    /**
     * The user name used to log into the FTP server.
     * NOTE(review): original comment said "password"; corrected to match the
     * property name.
     */
    FTPUserName: string;
/**
* Return or set whether to use passive mode when connect to the FTP.
*/
IfPASVMode: boolean;
/**
* Return or set the field name for the uploaded file.
* By default, it's "RemoteFile".
*/
HttpFieldNameOfUploadedImage: string;
/**
* [Deprecation] Return or set the password used to log into the HTTP server.
*/
HTTPPassword: string;
/**
* [Deprecation] Return or set the user name used to log into the HTTP server.
*/
HTTPUserName: string;
/**
* Return or set the HTTP Port.
*/
HTTPPort: number;
/**
* Return or set whether to use SSL in HTTP requests.
*/
IfSSL: boolean;
/**
* Return the response string of the latest HTTP Post request.
*/
readonly HTTPPostResponseString: string;
/**
* Return or set whether to show open/save file dialog when saving images in the buffer or loading images from a local directory.
*/
IfShowFileDialog: boolean;
/**
* Return or set whether to show the progress of an operation with a button to cancel it.
*/
IfShowCancelDialogWhenImageTransfer: boolean;
/**
* Return or set whether to show the progressbar.
*/
IfShowProgressBar: boolean;
/**
* Return or set the quality for JPEG compression.
* The values range from 0 to 100.
*/
JPEGQuality: number;
/**
* Return or set whether to insert or append images when they are scanned/loaded.
*/
IfAppendImage: boolean;
/**
* Return or set whether to append to or replace an existing TIFF file with the same name.
*/
IfTiffMultiPage: boolean;
/**
* Return or set the compression type for TIFF files.
*/
TIFFCompressionType: Dynamsoft.EnumDWT_TIFFCompressionType | number;
/**
* Return or set the name of the person who creates the PDF document.
*/
PDFAuthor: string;
/**
* Return or set the compression type of PDF files. This is a runtime property.
*/
PDFCompressionType: Dynamsoft.EnumDWT_PDFCompressionType;
/**
* Return or set the date when the PDF document is created.
*/
PDFCreationDate: string;
/**
* Return or set the name of the application that created the original document, if the PDF document is converted from another form.
*/
PDFCreator: string;
/**
* Return or set the keywords associated with the PDF document.
*/
PDFKeywords: string;
/**
* Return or set the date when the PDF document is last modified.
*/
PDFModifiedDate: string;
/**
* Return or set the name of the application that converted the PDF document from its native.
*/
PDFProducer: string;
/**
* Return or set the subject of the PDF document.
*/
PDFSubject: string;
/**
* Return or set the title of the PDF document.
*/
PDFTitle: string;
/**
* Return or set the value of the PDF version.
*/
PDFVersion: string;
/**
* Clear all the custom fields from the HTTP Post Form.
*/
ClearAllHTTPFormField(): boolean;
/**
* Clear the content of all custom tiff tags.
*/
ClearTiffCustomTag(): boolean;
/**
* Convert the specified images to a base64 string.
* @param indices Specify one or multiple images.
* @param type The file type.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument result The resulting base64 string.
* @argument indices The indices of the converted images.
* @argument type The file type.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
ConvertToBase64(
indices: number[],
type: Dynamsoft.EnumDWT_ImageType | number,
successCallback: (
result: Base64Result,
indices: number[],
type: number) => void,
failureCallBack: (
errorCode: number,
errorString: string) => void
): void;
/**
* Convert the specified images to a blob.
* @param indices Specify one or multiple images.
* @param type The file type.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument result The resulting blob.
* @argument indices The indices of the converted images.
* @argument type The file type.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
ConvertToBlob(
indices: number[],
type: Dynamsoft.EnumDWT_ImageType | number,
successCallback: (
result: Blob,
indices: number[],
type: number) => void,
failureCallBack: (
errorCode: number,
errorString: string) => void
): void;
/**
* Download the specified file via FTP
* @param host The FTP Host.
* @param path Specify the file to download.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
FTPDownload(
host: string,
path: string,
successCallback: () => void,
failureCallBack: (
errorCode: number,
errorString: string) => void
): void;
/**
* Download the specified file via FTP.
* @param host The FTP Host.
* @param path Specify the file to download.
* @param type The format of the file.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
FTPDownloadEx(
host: string,
path: string,
type: Dynamsoft.EnumDWT_ImageType | number,
successCallback: () => void,
failureCallBack: (
errorCode: number,
errorString: string) => void
): void;
/**
* Upload the specified image via FTP.
* @param host The FTP Host.
* @param index Specify the image.
* @param path The path to save the file.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
FTPUpload(
host: string,
index: number,
path: string,
successCallback: () => void,
failureCallback: (
errorCode: number,
errorString: string) => void
): void;
/**
* Upload the specified image via FTP.
* @param host The FTP Host.
* @param index Specify the image.
* @param path The path to save the file.
* @param type The format of the file.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
FTPUploadEx(
host: string,
index: number,
path: string,
type: Dynamsoft.EnumDWT_ImageType | number,
successCallback: () => void,
failureCallback: (
errorCode: number,
errorString: string) => void
): void;
/**
* Upload all images as a multi-page TIFF via FTP.
* @param host The FTP Host.
* @param path Specify the path to save the file.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
FTPUploadAllAsMultiPageTIFF(
host: string,
path: string,
successCallback: () => void,
failureCallback: (
errorCode: number,
errorString: string) => void
): void;
/**
* Upload all images as a multi-page PDF via FTP.
* @param host The FTP Host.
* @param path Specify the path to save the file.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
FTPUploadAllAsPDF(
host: string,
path: string,
successCallback: () => void,
failureCallback: (
errorCode: number,
errorString: string) => void
): void;
/**
* Upload selected images as a multi-page PDF via FTP.
* @param host The FTP Host.
* @param path Specify the path to save the file.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
FTPUploadAsMultiPagePDF(
host: string,
path: string,
successCallback: () => void,
failureCallback: (
errorCode: number,
errorString: string) => void
): void;
    /**
     * Upload selected images as a multi-page TIFF via FTP.
     * @param host The FTP Host.
     * @param path Specify the path to save the file.
     * @param type The format of the file.
     * @param successCallback A callback function that is executed if the request succeeds.
     * @param failureCallback A callback function that is executed if the request fails.
     * @argument errorCode The error code.
     * @argument errorString The error string.
     */
    FTPUploadAsMultiPageTIFF(
        host: string,
        path: string,
        type: Dynamsoft.EnumDWT_ImageType | number,
        successCallback: () => void,
        failureCallback: (
            errorCode: number,
            errorString: string) => void
    ): void;
/**
* Download the specified file via a HTTP Get request.
* @param host The HTTP Host.
* @param path Specify the path of the file to download.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
HTTPDownload(
host: string,
path: string,
successCallback: () => void,
failureCallback: (
errorCode: number,
errorString: string) => void
): void;
/**
* Download the specified file via a HTTP Get request.
* @param host The HTTP Host.
* @param path Specify the path of the file to download.
* @param type The format of the file.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
HTTPDownloadEx(
host: string,
path: string,
type: Dynamsoft.EnumDWT_ImageType | number,
successCallback: () => void,
failureCallback: (
errorCode: number,
errorString: string) => void
): void;
/**
* Download the specified file via a HTTP Post request.
* @param host The HTTP Host.
* @param path Specify the path of the file to download.
* @param type The format of the file.
* @param onEmptyResponse A callback function that is executed if the response is empty.
* @param onServerReturnedSomething A callback function that is executed if the response is not empty.
* @argument errorCode The error code.
* @argument errorString The error string.
* @argument response The response string.
*/
HTTPDownloadThroughPost(
host: string,
path: string,
type: Dynamsoft.EnumDWT_ImageType | number,
onEmptyResponse: () => void,
onServerReturnedSomething: (
errorCode: number,
errorString: string,
response: string) => void
): void;
/**
* Download the specified file via a HTTP Get request.
* @param host The HTTP Host.
* @param path Specify the path of the file to download.
* @param localPath Specify where to save the file.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
HTTPDownloadDirectly(
host: string,
path: string,
localPath: string,
successCallback: () => void,
failureCallback: (
errorCode: number,
errorString: string) => void
): void;
/**
* Upload the specified image(s) via a HTTP Post.
* @param URL The server-side script to receive the post.
* @param indices Specify the image(s).
* @param type The format of the file.
* @param dataFormat Whether to upload the file as binary or a base64 string.
* @param fileName The file name.
* @param onEmptyResponse A callback function that is executed if the response is empty.
* @param onServerReturnedSomething A callback function that is executed if the response is not empty.
* @argument errorCode The error code.
* @argument errorString The error string.
* @argument response The response string.
*/
HTTPUpload(
URL: string,
indices: number[],
type: Dynamsoft.EnumDWT_ImageType | number,
dataFormat: Dynamsoft.EnumDWT_UploadDataFormat | number,
fileName: string,
onEmptyResponse: () => void,
onServerReturnedSomething: (
errorCode: number,
errorString: string,
response: string) => void
): void;
HTTPUpload(
URL: string,
indices: number[],
type: Dynamsoft.EnumDWT_ImageType | number,
dataFormat: Dynamsoft.EnumDWT_UploadDataFormat | number,
onEmptyResponse: () => void,
onServerReturnedSomething: (
errorCode: number,
errorString: string,
response: string) => void
): void;
HTTPUpload(
URL: string,
onEmptyResponse: () => void,
onServerReturnedSomething: (
errorCode: number,
errorString: string,
response: string) => void
): void;
/**
* Upload the specified image via a HTTP Put request.
* @param host The HTTP Host.
* @param index Specify the image.
* @param path Specify the path to put the file.
* @param type The format of the file.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
HTTPUploadThroughPutEx(
host: string,
index: number,
path: string,
type: Dynamsoft.EnumDWT_ImageType | number,
successCallback: () => void,
failureCallback: (
errorCode: number,
errorString: string) => void
): void;
/**
* Upload the specified image via a HTTP Post request.
* @param host The HTTP Host.
* @param index Specify the image.
* @param target The target where the request is sent.
* @param type The format of the file.
* @param fileName The file name.
* @param onEmptyResponse A callback function that is executed if the response is empty.
* @param onServerReturnedSomething A callback function that is executed if the response is not empty.
* @argument errorCode The error code.
* @argument errorString The error string.
* @argument response The response string.
*/
HTTPUploadThroughPost(
host: string,
index: number,
target: string,
fileName: string,
onEmptyResponse: () => void,
onServerReturnedSomething: (
errorCode: number,
errorString: string,
response: string) => void
): void;
/**
* Upload the specified image via a HTTP Post request.
* @param host The HTTP Host.
* @param index Specify the image.
* @param target The target where the request is sent.
* @param fileName The file name.
* @param type The format of the file.
* @param onEmptyResponse A callback function that is executed if the response is empty.
* @param onServerReturnedSomething A callback function that is executed if the response is not empty.
* @argument errorCode The error code.
* @argument errorString The error string.
* @argument response The response string.
*/
HTTPUploadThroughPostEx(
host: string,
index: number,
target: string,
fileName: string,
type: Dynamsoft.EnumDWT_ImageType | number,
onEmptyResponse: () => void,
onServerReturnedSomething: (
errorCode: number,
errorString: string,
response: string) => void
): void;
/**
* Upload all images in the buffer as a TIFF file via a HTTP Post request.
* @param host The HTTP Host.
* @param target The target where the request is sent.
* @param fileName The file name.
* @param onEmptyResponse A callback function that is executed if the response is empty.
* @param onServerReturnedSomething A callback function that is executed if the response is not empty.
* @argument errorCode The error code.
* @argument errorString The error string.
* @argument response The response string.
*/
HTTPUploadAllThroughPostAsMultiPageTIFF(
host: string,
target: string,
fileName: string,
onEmptyResponse: () => void,
onServerReturnedSomething: (
errorCode: number,
errorString: string,
response: string) => void
): void;
/**
* Upload all images in the buffer as a PDF file via a HTTP Post request.
* @param host The HTTP Host.
* @param target The target where the request is sent.
* @param fileName The file name.
* @param onEmptyResponse A callback function that is executed if the response is empty.
* @param onServerReturnedSomething A callback function that is executed if the response is not empty.
* @argument errorCode The error code.
* @argument errorString The error string.
* @argument response The response string.
*/
HTTPUploadAllThroughPostAsPDF(
host: string,
target: string,
fileName: string,
onEmptyResponse: () => void,
onServerReturnedSomething: (
errorCode: number,
errorString: string,
response: string) => void
): void;
/**
* Upload all selected images in the buffer as a PDF file via a HTTP Post request.
* @param host The HTTP Host.
* @param target The target where the request is sent.
* @param fileName The file name.
* @param onEmptyResponse A callback function that is executed if the response is empty.
* @param onServerReturnedSomething A callback function that is executed if the response is not empty.
* @argument errorCode The error code.
* @argument errorString The error string.
* @argument response The response string.
*/
HTTPUploadThroughPostAsMultiPagePDF(
host: string,
target: string,
fileName: string,
onEmptyResponse: () => void,
onServerReturnedSomething: (
errorCode: number,
errorString: string,
response: string) => void
): void;
/**
* Upload all selected images in the buffer as a TIFF file via a HTTP Post request.
* @param host The HTTP Host.
* @param target The target where the request is sent.
* @param fileName The file name.
* @param onEmptyResponse A callback function that is executed if the response is empty.
* @param onServerReturnedSomething A callback function that is executed if the response is not empty.
* @argument errorCode The error code.
* @argument errorString The error string.
* @argument response The response string.
*/
HTTPUploadThroughPostAsMultiPageTIFF(
host: string,
target: string,
fileName: string,
onEmptyResponse: () => void,
onServerReturnedSomething: (
errorCode: number,
errorString: string,
response: string) => void
): void;
/**
* Upload the specified file via a HTTP Post request.
* @param host The HTTP Host.
* @param path Specify the file to upload.
* @param target The target where the request is sent.
* @param fileName The file name.
* @param onEmptyResponse A callback function that is executed if the response is empty.
* @param onServerReturnedSomething A callback function that is executed if the response is not empty.
* @argument errorCode The error code.
* @argument errorString The error string.
* @argument response The response string.
*/
HTTPUploadThroughPostDirectly(
host: string,
path: string,
target: string,
fileName: string,
onEmptyResponse: () => void,
onServerReturnedSomething: (
errorCode: number,
errorString: string,
response: string) => void
): void;
/**
* Load image(s) specified by its absolute path.
* @param fileName The path of the image to load.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
LoadImage(
fileName: string,
successCallback?: () => void,
failureCallback?: (
errorCode: number,
errorString: string) => void
): void | boolean;
/**
* Load image(s) specified by its absolute path.
* @param fileName The path of the image to load.
* @param type The format of the image.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
LoadImageEx(
fileName: string,
type: Dynamsoft.EnumDWT_ImageType | number,
successCallback?: () => void,
failureCallback?: (
errorCode: number,
errorString: string) => void
): void | boolean;
/**
* Load image(s) from a base64 string.
* @param imageData The image data which is a base64 string without the data URI scheme.
* @param imageType The format of the image.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
LoadImageFromBase64Binary(
imageData: string,
imageType: Dynamsoft.EnumDWT_ImageType,
successCallback?: () => void,
failureCallback?: (
errorCode: number,
errorString: string) => void
): void | boolean;
/**
* Load image(s) from a binary object (Blob | ArrayBuffer).
* @param imageData The image data.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
LoadImageFromBinary(
imageData: Blob | ArrayBuffer,
successCallback: () => void,
failureCallback: (
errorCode: number,
errorString: string) => void
): void;
/**
* Load an image from the system clipboard. The image must be in DIB format.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
LoadDibFromClipboard(
successCallback?: () => void,
failureCallback?: (
errorCode: number,
errorString: string) => void
): void | boolean;
/**
* [Deprecation] Return or set how many threads can be used when you upload files through POST.
*/
MaxInternetTransferThreads: number;
/**
* Return or set the maximum allowed size of a file to upload (in bytes).
*/
MaxUploadImageSize: number;
/**
* Export all image data in the buffer to a new browser window and use the browser's built-in print feature to print the image(s).
* @param useOSPrintWindow Whether to use the print feature of the operating system instead.
*/
Print(useOSPrintWindow?: boolean): boolean;
/**
* Save the specified image as a BMP file.
* @param fileName The name to save to.
* @param index The index which specifies the image to save.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
SaveAsBMP(
fileName: string,
index: number,
successCallback?: () => void,
failureCallback?: (errorCode: number, errorString: string) => void
): void | boolean;
/**
* Save the specified image as a JPEG file.
* @param fileName The name to save to.
* @param index The index which specifies the image to save.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
SaveAsJPEG(
fileName: string,
index: number,
successCallback?: () => void,
failureCallback?: (errorCode: number, errorString: string) => void
): void | boolean;
/**
* Save the specified image as a PDF file.
* @param fileName The name to save to.
* @param index The index which specifies the image to save.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
SaveAsPDF(
fileName: string,
index: number,
successCallback?: () => void,
failureCallback?: (errorCode: number, errorString: string) => void
): void | boolean;
/**
* Save the specified image as a PNG file.
* @param fileName The name to save to.
* @param index The index which specifies the image to save.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
SaveAsPNG(
fileName: string,
index: number,
successCallback?: () => void,
failureCallback?: (errorCode: number, errorString: string) => void
): void | boolean;
/**
* Save the specified image as a TIFF file.
* @param fileName The name to save to.
* @param index The index which specifies the image to save.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
SaveAsTIFF(
fileName: string,
index: number,
successCallback?: () => void,
failureCallback?: (errorCode: number, errorString: string) => void
): void | boolean;
/**
* Saves all the images in buffer as a multi-page TIFF file.
* @param fileName The name to save to.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
SaveAllAsMultiPageTIFF(
fileName: string,
successCallback?: () => void,
failureCallback?: (errorCode: number, errorString: string) => void
): void | boolean;
/**
* Saves all the images in buffer as a multi-page PDF file.
* @param fileName The name to save to.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
SaveAllAsPDF(
fileName: string,
successCallback?: () => void,
failureCallback?: (errorCode: number, errorString: string) => void
): void | boolean;
/**
* Saves all selected images in buffer as a multi-page PDF file.
* @param fileName The name to save to.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
SaveSelectedImagesAsMultiPagePDF(
fileName: string,
successCallback?: () => void,
failureCallback?: (errorCode: number, errorString: string) => void
): void | boolean;
/**
* Saves all selected images in buffer as a multi-page TIFF file.
* @param fileName The name to save to.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
SaveSelectedImagesAsMultiPageTIFF(
fileName: string,
successCallback?: () => void,
failureCallback?: (
errorCode: number,
errorString: string) => void
): void | boolean;
/**
* [Deprecation] Return an index from the selected indices array. Read SelectedImagesIndices instead.
* [Alternative] Read SelectedImagesIndices instead.
* @param indexOfIndices Specify the index of the specified image.
*/
SaveSelectedImagesToBase64Binary(indexOfIndices: number): number;
/**
* [Deprecation] Saves the selected images in the buffer to a base64 string.
* [Alternative] Use ConvertToBase64 instead.
* @param successCallback A callback function that is executed if the request succeeds.
* @param failureCallback A callback function that is executed if the request fails.
* @argument result The resulting array of strings.
* @argument errorCode The error code.
* @argument errorString The error string.
*/
SaveSelectedImagesToBase64Binary(
successCallback?: (result: string[]) => void,
failureCallback?: (errorCode: number, errorString: string) => void
): string | boolean;
/**
* Add a custom field to the HTTP Post Form.
* @param name The name of the field.
* @param value The value of the field.
*/
SetHTTPFormField(
name: string,
value: string
): boolean;
/**
* Add a binary file to the HTTP Post Form.
* @param name The name of the field.
* @param content The content of the file.
* @param fileName The name of the file.
*/
SetHTTPFormField(
name: string,
content: Blob,
fileName?: string
): boolean;
/**
* Add a custom header to the HTTP Post Form.
* @param name The name of the field.
* @param value The value of the field.
*/
SetHTTPHeader(
name: string,
value: string
): boolean;
/**
* Set the content of a custom TIFF tag.
* @param id The id of the custom tag.
* @param content The content of the tag.
* @param useBase64Encoding Whether the content is encoded.
*/
SetTiffCustomTag(
id: number,
content: string,
useBase64Encoding: boolean
): boolean;
/**
* Set the segmentation threshold and segment size.
* @param threshold Specify the threshold (in MB).
* @param size Specify the segment size (in KB).
*/
SetUploadSegment(
threshold: number,
size: number
): boolean;
/**
* Show the system's save-file dialog or open-file dialog.
* @param isSave Whether to show a save-file dialog or an open-file dialog
* @param filter The filter pattern like "JPG | *.jpg".
* @param filterIndex The order of the filter. Normally, just put 0.
* @param defaultExtension Extension to be appended to the file name. Only valid in a save-file dialog
* @param initialDirectory The initial directory that the dialog opens.
* @param allowMultiSelect Whether or not multiple files can be selected at the same time. Only valid in an open-file dialog.
* @param showOverwritePrompt Whether or not a prompt shows up when saving a file may overwrite an existing file.
* @param flag If set to 0, bAllowMultiSelect and bShowOverwritePrompt will be effective. Otherwise, these two parameters are ignored.
*/
ShowFileDialog(
isSave: boolean,
filter: string,
filterIndex: number,
defaultExtension: string,
initialDirectory: string,
allowMultiSelect: boolean,
showOverwritePrompt: boolean,
flag: number
): boolean;
/**
* [Deprecation] Set a cookie string into the Http Header to be used when uploading scanned images through POST.
* @param cookie The cookie.
*/
SetCookie(cookie: string): boolean;
}
export interface Base64Result {
/**
* Return the length of the result string.
*/
getLength(): number;
/**
* Return part of the string.
* @param offset The starting position.
* @param length The length of the expected string.
*/
getData(offset: number, length: number): string;
/**
* Return the MD5 value of the result.
*/
getMD5(): string;
}
/**
* Details for each license
*/
export interface LicenseDetailItem {
readonly Browser: string;
readonly EnumLicenseType: string;
readonly ExpireDate: string;
readonly LicenseType: string;
readonly OS: string;
readonly Trial: string;
readonly Version: string;
}
| markogresak/DefinitelyTyped | types/dwt/WebTwain.IO.d.ts | TypeScript | mit | 36,425 |
/*
* Copyright (c) 1997, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.xml.internal.bind.v2.model.core;
import java.util.Map;
import javax.xml.bind.JAXBException;
import javax.xml.bind.annotation.XmlSchema;
import javax.xml.bind.annotation.XmlNsForm;
import javax.xml.namespace.QName;
import javax.xml.transform.Result;
import com.sun.xml.internal.bind.v2.model.nav.Navigator;
/**
* Root of models. Set of {@link TypeInfo}s.
*
* @author Kohsuke Kawaguchi
*/
public interface TypeInfoSet<T,C,F,M> {
/**
* {@link Navigator} for this model.
*/
Navigator<T,C,F,M> getNavigator();
// turns out we can't have AnnotationReader in XJC, so it's impossible to have this here.
// perhaps we should revisit this in the future.
// /**
// * {@link AnnotationReader} for this model.
// */
// AnnotationReader<T,C,F,M> getReader();
/**
* Returns a {@link TypeInfo} for the given type.
*
* @return
* null if the specified type cannot be bound by JAXB, or
* not known to this set.
*/
NonElement<T,C> getTypeInfo( T type );
/**
* Gets the {@link TypeInfo} for the any type.
*/
NonElement<T,C> getAnyTypeInfo();
/**
* Returns a {@link ClassInfo}, {@link ArrayInfo}, or {@link LeafInfo}
* for the given bean.
*
* <p>
* This method is almost like refinement of {@link #getTypeInfo(Object)} except
* our C cannot derive from T.
*
* @return
* null if the specified type is not bound by JAXB or otherwise
* unknown to this set.
*/
NonElement<T,C> getClassInfo( C type );
/**
* Returns all the {@link ArrayInfo}s known to this set.
*/
Map<? extends T,? extends ArrayInfo<T,C>> arrays();
/**
* Returns all the {@link ClassInfo}s known to this set.
*/
Map<C,? extends ClassInfo<T,C>> beans();
/**
* Returns all the {@link BuiltinLeafInfo}s known to this set.
*/
Map<T,? extends BuiltinLeafInfo<T,C>> builtins();
/**
* Returns all the {@link EnumLeafInfo}s known to this set.
*/
Map<C,? extends EnumLeafInfo<T,C>> enums();
/**
* Returns a {@link ElementInfo} for the given element.
*
* @param scope
* if null, return the info about a global element.
* Otherwise return a local element in the given scope if available,
* then look for a global element next.
*/
ElementInfo<T,C> getElementInfo( C scope, QName name );
/**
* Returns a type information for the given reference.
*/
NonElement<T,C> getTypeInfo(Ref<T,C> ref);
/**
* Returns all {@link ElementInfo}s in the given scope.
*
* @param scope
* if non-null, this method only returns the local element mapping.
*/
Map<QName,? extends ElementInfo<T,C>> getElementMappings( C scope );
/**
* Returns all the {@link ElementInfo} known to this set.
*/
Iterable<? extends ElementInfo<T,C>> getAllElements();
/**
* Gets all {@link XmlSchema#xmlns()} found in this context for the given namespace URI.
*
* <p>
* This operation is expected to be only used in schema generator, so it can be slow.
*
* @return
* A map from prefixes to namespace URIs, which should be declared when generating a schema.
* Could be empty but never null.
*/
Map<String,String> getXmlNs(String namespaceUri);
/**
* Gets {@link XmlSchema#location()} found in this context.
*
* <p>
* This operation is expected to be only used in schema generator, so it can be slow.
*
* @return
* A map from namespace URI to the value of the location.
* If the entry is missing, that means a schema should be generated for that namespace.
* If the value is "", that means the schema location is implied
* (<xs:schema namespace="..."/> w/o schemaLocation.)
*/
Map<String,String> getSchemaLocations();
/**
* Gets the reasonable {@link XmlNsForm} for the given namespace URI.
*
* <p>
* The spec doesn't define very precisely what the {@link XmlNsForm} value
* for the given namespace would be, so this method is implemented in rather
* ad-hoc way. It should work as what most people expect for simple cases.
*
* @return never null.
*/
XmlNsForm getElementFormDefault(String nsUri);
/**
* Gets the reasonable {@link XmlNsForm} for the given namespace URI.
*
* <p>
* The spec doesn't define very precisely what the {@link XmlNsForm} value
* for the given namespace would be, so this method is implemented in rather
* ad-hoc way. It should work as what most people expect for simple cases.
*
* @return never null.
*/
XmlNsForm getAttributeFormDefault(String nsUri);
/**
* Dumps this model into XML.
*
* For debug only.
*
* TODO: not sure if this actually works. We don't really know what are T,C.
*/
public void dump( Result out ) throws JAXBException;
}
| rokn/Count_Words_2015 | testing/openjdk2/jaxws/src/share/jaxws_classes/com/sun/xml/internal/bind/v2/model/core/TypeInfoSet.java | Java | mit | 6,284 |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// =+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+=+
//
// DataflowBlockOptions.cs
//
//
// DataflowBlockOptions types for configuring dataflow blocks
//
// =-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-
using System;
using System.Diagnostics;
using System.Threading.Tasks;
namespace System.Threading.Tasks.Dataflow
{
/// <summary>
/// Provides options used to configure the processing performed by dataflow blocks.
/// </summary>
/// <remarks>
/// <see cref="DataflowBlockOptions"/> is mutable and can be configured through its properties.
/// When specific configuration options are not set, the following defaults are used:
/// <list type="table">
/// <listheader>
/// <term>Options</term>
/// <description>Default</description>
/// </listheader>
/// <item>
/// <term>TaskScheduler</term>
/// <description><see cref="System.Threading.Tasks.TaskScheduler.Default"/></description>
/// </item>
/// <item>
/// <term>MaxMessagesPerTask</term>
/// <description>DataflowBlockOptions.Unbounded (-1)</description>
/// </item>
/// <item>
/// <term>CancellationToken</term>
/// <description><see cref="System.Threading.CancellationToken.None"/></description>
/// </item>
/// <item>
/// <term>BoundedCapacity</term>
/// <description>DataflowBlockOptions.Unbounded (-1)</description>
/// </item>
/// <item>
/// <term>NameFormat</term>
/// <description>"{0} Id={1}"</description>
/// </item>
/// <item>
/// <term>EnsureOrdered</term>
/// <description>true</description>
/// </item>
/// </list>
/// Dataflow blocks capture the state of the options at their construction. Subsequent changes
/// to the provided <see cref="DataflowBlockOptions"/> instance should not affect the behavior
/// of a dataflow block.
/// </remarks>
[DebuggerDisplay("TaskScheduler = {TaskScheduler}, MaxMessagesPerTask = {MaxMessagesPerTask}, BoundedCapacity = {BoundedCapacity}")]
public class DataflowBlockOptions
{
/// <summary>
/// A constant used to specify an unlimited quantity for <see cref="DataflowBlockOptions"/> members
/// that provide an upper bound. This field is constant.
/// </summary>
public const int Unbounded = -1;
/// <summary>The scheduler to use for scheduling tasks to process messages.</summary>
private TaskScheduler _taskScheduler = TaskScheduler.Default;
/// <summary>The cancellation token to monitor for cancellation requests.</summary>
private CancellationToken _cancellationToken = CancellationToken.None;
/// <summary>The maximum number of messages that may be processed per task.</summary>
private int _maxMessagesPerTask = Unbounded;
/// <summary>The maximum number of messages that may be buffered by the block.</summary>
private int _boundedCapacity = Unbounded;
/// <summary>The name format to use for creating a name for a block.</summary>
private string _nameFormat = "{0} Id={1}"; // see NameFormat property for a description of format items
/// <summary>Whether to force ordered processing of messages.</summary>
private bool _ensureOrdered = true;
/// <summary>A default instance of <see cref="DataflowBlockOptions"/>.</summary>
/// <remarks>
/// Do not change the values of this instance. It is shared by all of our blocks when no options are provided by the user.
/// </remarks>
internal static readonly DataflowBlockOptions Default = new DataflowBlockOptions();
/// <summary>Returns this <see cref="DataflowBlockOptions"/> instance if it's the default instance or else a cloned instance.</summary>
/// <returns>An instance of the options that may be cached by the block.</returns>
internal DataflowBlockOptions DefaultOrClone()
{
return (this == Default) ?
this :
new DataflowBlockOptions
{
TaskScheduler = this.TaskScheduler,
CancellationToken = this.CancellationToken,
MaxMessagesPerTask = this.MaxMessagesPerTask,
BoundedCapacity = this.BoundedCapacity,
NameFormat = this.NameFormat,
EnsureOrdered = this.EnsureOrdered
};
}
/// <summary>Initializes the <see cref="DataflowBlockOptions"/>.</summary>
public DataflowBlockOptions() { }
/// <summary>Gets or sets the <see cref="System.Threading.Tasks.TaskScheduler"/> to use for scheduling tasks.</summary>
public TaskScheduler TaskScheduler
{
get { return _taskScheduler; }
set
{
Debug.Assert(this != Default, "Default instance is supposed to be immutable.");
if (value == null) throw new ArgumentNullException(nameof(value));
_taskScheduler = value;
}
}
/// <summary>Gets or sets the <see cref="System.Threading.CancellationToken"/> to monitor for cancellation requests.</summary>
public CancellationToken CancellationToken
{
get { return _cancellationToken; }
set
{
Debug.Assert(this != Default, "Default instance is supposed to be immutable.");
_cancellationToken = value;
}
}
/// <summary>Gets or sets the maximum number of messages that may be processed per task.</summary>
public int MaxMessagesPerTask
{
get { return _maxMessagesPerTask; }
set
{
Debug.Assert(this != Default, "Default instance is supposed to be immutable.");
if (value < 1 && value != Unbounded) throw new ArgumentOutOfRangeException(nameof(value));
_maxMessagesPerTask = value;
}
}
/// <summary>Gets a MaxMessagesPerTask value that may be used for comparison purposes.</summary>
/// <returns>The maximum value, usable for comparison purposes.</returns>
/// <remarks>Unlike MaxMessagesPerTask, this property will always return a positive value.</remarks>
internal int ActualMaxMessagesPerTask
{
get { return (_maxMessagesPerTask == Unbounded) ? int.MaxValue : _maxMessagesPerTask; }
}
/// <summary>Gets or sets the maximum number of messages that may be buffered by the block.</summary>
public int BoundedCapacity
{
    get { return _boundedCapacity; }
    set
    {
        Debug.Assert(this != Default, "Default instance is supposed to be immutable.");
        // Only positive capacities or the Unbounded sentinel (-1) are legal.
        if (value < 1 && value != Unbounded) throw new ArgumentOutOfRangeException(nameof(value));
        _boundedCapacity = value;
    }
}
/// <summary>
/// Gets or sets the format string to use when a block is queried for its name.
/// </summary>
/// <remarks>
/// The name format may contain up to two format items. {0} will be substituted
/// with the block's name. {1} will be substituted with the block's Id, as is
/// returned from the block's Completion.Id property.
/// </remarks>
public string NameFormat
{
    get { return _nameFormat; }
    set
    {
        Debug.Assert(this != Default, "Default instance is supposed to be immutable.");
        // A null format would break name rendering, so it is rejected eagerly.
        if (value == null) throw new ArgumentNullException(nameof(value));
        _nameFormat = value;
    }
}
/// <summary>Gets or sets whether ordered processing should be enforced on a block's handling of messages.</summary>
/// <remarks>
/// By default, dataflow blocks enforce ordering on the processing of messages. This means that a
/// block like <see cref="TransformBlock{TInput, TOutput}"/> will ensure that messages are output in the same
/// order they were input, even if parallelism is employed by the block and the processing of a message N finishes
/// after the processing of a subsequent message N+1 (the block will reorder the results to maintain the input
/// ordering prior to making those results available to a consumer). Some blocks may allow this to be relaxed,
/// however. Setting <see cref="EnsureOrdered"/> to false tells a block that it may relax this ordering if
/// it's able to do so. This can be beneficial if the immediacy of a processed result being made available
/// is more important than the input-to-output ordering being maintained.
/// </remarks>
public bool EnsureOrdered
{
    get { return _ensureOrdered; }
    // NOTE(review): unlike the sibling setters, there is no
    // Debug.Assert(this != Default) guard here — confirm whether intentional.
    set { _ensureOrdered = value; }
}
}
/// <summary>
/// Provides options used to configure the processing performed by dataflow blocks that
/// process each message through the invocation of a user-provided delegate, blocks such
/// as <see cref="ActionBlock{T}"/> and <see cref="TransformBlock{TInput,TOutput}"/>.
/// </summary>
/// <remarks>
/// <see cref="ExecutionDataflowBlockOptions"/> is mutable and can be configured through its properties.
/// When specific configuration options are not set, the following defaults are used:
/// <list type="table">
/// <listheader>
/// <term>Options</term>
/// <description>Default</description>
/// </listheader>
/// <item>
/// <term>TaskScheduler</term>
/// <description><see cref="System.Threading.Tasks.TaskScheduler.Default"/></description>
/// </item>
/// <item>
/// <term>CancellationToken</term>
/// <description><see cref="System.Threading.CancellationToken.None"/></description>
/// </item>
/// <item>
/// <term>MaxMessagesPerTask</term>
/// <description>DataflowBlockOptions.Unbounded (-1)</description>
/// </item>
/// <item>
/// <term>BoundedCapacity</term>
/// <description>DataflowBlockOptions.Unbounded (-1)</description>
/// </item>
/// <item>
/// <term>NameFormat</term>
/// <description>"{0} Id={1}"</description>
/// </item>
/// <item>
/// <term>EnsureOrdered</term>
/// <description>true</description>
/// </item>
/// <item>
/// <term>MaxDegreeOfParallelism</term>
/// <description>1</description>
/// </item>
/// <item>
/// <term>SingleProducerConstrained</term>
/// <description>false</description>
/// </item>
/// </list>
/// Dataflow blocks capture the state of the options at their construction. Subsequent changes
/// to the provided <see cref="ExecutionDataflowBlockOptions"/> instance should not affect the behavior
/// of a dataflow block.
/// </remarks>
[DebuggerDisplay("TaskScheduler = {TaskScheduler}, MaxMessagesPerTask = {MaxMessagesPerTask}, BoundedCapacity = {BoundedCapacity}, MaxDegreeOfParallelism = {MaxDegreeOfParallelism}")]
public class ExecutionDataflowBlockOptions : DataflowBlockOptions
{
    /// <summary>Backing field for <see cref="MaxDegreeOfParallelism"/>; 1 means serial processing.</summary>
    private int _maxDegreeOfParallelism = 1;
    /// <summary>Backing field for <see cref="SingleProducerConstrained"/>; false permits concurrent producers.</summary>
    private bool _singleProducerConstrained = false;

    /// <summary>A default instance of <see cref="DataflowBlockOptions"/>.</summary>
    /// <remarks>
    /// Do not change the values of this instance. It is shared by all of our blocks when no options are provided by the user.
    /// </remarks>
    internal static new readonly ExecutionDataflowBlockOptions Default = new ExecutionDataflowBlockOptions();

    /// <summary>Initializes the <see cref="ExecutionDataflowBlockOptions"/>.</summary>
    public ExecutionDataflowBlockOptions() { }

    /// <summary>Returns this <see cref="ExecutionDataflowBlockOptions"/> instance if it's the default instance or else a cloned instance.</summary>
    /// <returns>An instance of the options that may be cached by the block.</returns>
    internal new ExecutionDataflowBlockOptions DefaultOrClone()
    {
        // The shared default can be handed out as-is; anything else is
        // snapshotted so later caller mutations don't affect the block.
        if (this == Default)
        {
            return this;
        }

        return new ExecutionDataflowBlockOptions
        {
            TaskScheduler = this.TaskScheduler,
            CancellationToken = this.CancellationToken,
            MaxMessagesPerTask = this.MaxMessagesPerTask,
            BoundedCapacity = this.BoundedCapacity,
            NameFormat = this.NameFormat,
            EnsureOrdered = this.EnsureOrdered,
            MaxDegreeOfParallelism = this.MaxDegreeOfParallelism,
            SingleProducerConstrained = this.SingleProducerConstrained
        };
    }

    /// <summary>Gets the maximum number of messages that may be processed by the block concurrently.</summary>
    public int MaxDegreeOfParallelism
    {
        get => _maxDegreeOfParallelism;
        set
        {
            Debug.Assert(this != Default, "Default instance is supposed to be immutable.");
            // Only positive degrees or the Unbounded sentinel (-1) are legal.
            if (value < 1 && value != Unbounded) throw new ArgumentOutOfRangeException(nameof(value));
            _maxDegreeOfParallelism = value;
        }
    }

    /// <summary>
    /// Gets whether code using the dataflow block is constrained to one producer at a time.
    /// </summary>
    /// <remarks>
    /// This property defaults to false, such that the block may be used by multiple
    /// producers concurrently. This property should only be set to true if the code
    /// using the block can guarantee that it will only ever be used by one producer
    /// (e.g. a source linked to the block) at a time, meaning that methods like Post,
    /// Complete, Fault, and OfferMessage will never be called concurrently. Some blocks
    /// may choose to capitalize on the knowledge that there will only be one producer at a time
    /// in order to provide better performance.
    /// </remarks>
    public bool SingleProducerConstrained
    {
        get => _singleProducerConstrained;
        set
        {
            Debug.Assert(this != Default, "Default instance is supposed to be immutable.");
            _singleProducerConstrained = value;
        }
    }

    /// <summary>Gets a MaxDegreeOfParallelism value that may be used for comparison purposes.</summary>
    /// <returns>The maximum value, usable for comparison purposes.</returns>
    /// <remarks>Unlike MaxDegreeOfParallelism, this property will always return a positive value.</remarks>
    internal int ActualMaxDegreeOfParallelism =>
        (_maxDegreeOfParallelism == Unbounded) ? int.MaxValue : _maxDegreeOfParallelism;

    /// <summary>Gets whether these dataflow block options allow for parallel execution.</summary>
    internal bool SupportsParallelExecution =>
        _maxDegreeOfParallelism == Unbounded || _maxDegreeOfParallelism > 1;
}
/// <summary>
/// Provides options used to configure the processing performed by dataflow blocks that
/// group together multiple messages, blocks such as <see cref="JoinBlock{T1,T2}"/> and
/// <see cref="BatchBlock{T}"/>.
/// </summary>
/// <remarks>
/// <see cref="GroupingDataflowBlockOptions"/> is mutable and can be configured through its properties.
/// When specific configuration options are not set, the following defaults are used:
/// <list type="table">
/// <listheader>
/// <term>Options</term>
/// <description>Default</description>
/// </listheader>
/// <item>
/// <term>TaskScheduler</term>
/// <description><see cref="System.Threading.Tasks.TaskScheduler.Default"/></description>
/// </item>
/// <item>
/// <term>CancellationToken</term>
/// <description><see cref="System.Threading.CancellationToken.None"/></description>
/// </item>
/// <item>
/// <term>MaxMessagesPerTask</term>
/// <description>DataflowBlockOptions.Unbounded (-1)</description>
/// </item>
/// <item>
/// <term>BoundedCapacity</term>
/// <description>DataflowBlockOptions.Unbounded (-1)</description>
/// </item>
/// <item>
/// <term>NameFormat</term>
/// <description>"{0} Id={1}"</description>
/// </item>
/// <item>
/// <term>EnsureOrdered</term>
/// <description>true</description>
/// </item>
/// <item>
/// <term>MaxNumberOfGroups</term>
/// <description>GroupingDataflowBlockOptions.Unbounded (-1)</description>
/// </item>
/// <item>
/// <term>Greedy</term>
/// <description>true</description>
/// </item>
/// </list>
/// Dataflow blocks capture the state of the options at their construction. Subsequent changes
/// to the provided <see cref="GroupingDataflowBlockOptions"/> instance should not affect the behavior
/// of a dataflow block.
/// </remarks>
[DebuggerDisplay("TaskScheduler = {TaskScheduler}, MaxMessagesPerTask = {MaxMessagesPerTask}, BoundedCapacity = {BoundedCapacity}, Greedy = {Greedy}, MaxNumberOfGroups = {MaxNumberOfGroups}")]
public class GroupingDataflowBlockOptions : DataflowBlockOptions
{
    /// <summary>Backing field for <see cref="Greedy"/>; blocks consume greedily by default.</summary>
    private bool _greedy = true;
    /// <summary>Backing field for <see cref="MaxNumberOfGroups"/>; unbounded by default.</summary>
    private long _maxNumberOfGroups = Unbounded;

    /// <summary>A default instance of <see cref="DataflowBlockOptions"/>.</summary>
    /// <remarks>
    /// Do not change the values of this instance. It is shared by all of our blocks when no options are provided by the user.
    /// </remarks>
    internal static new readonly GroupingDataflowBlockOptions Default = new GroupingDataflowBlockOptions();

    /// <summary>Initializes the <see cref="GroupingDataflowBlockOptions"/>.</summary>
    public GroupingDataflowBlockOptions() { }

    /// <summary>Returns this <see cref="GroupingDataflowBlockOptions"/> instance if it's the default instance or else a cloned instance.</summary>
    /// <returns>An instance of the options that may be cached by the block.</returns>
    internal new GroupingDataflowBlockOptions DefaultOrClone()
    {
        // The shared default can be handed out as-is; anything else is
        // snapshotted so later caller mutations don't affect the block.
        if (this == Default)
        {
            return this;
        }

        return new GroupingDataflowBlockOptions
        {
            TaskScheduler = this.TaskScheduler,
            CancellationToken = this.CancellationToken,
            MaxMessagesPerTask = this.MaxMessagesPerTask,
            BoundedCapacity = this.BoundedCapacity,
            NameFormat = this.NameFormat,
            EnsureOrdered = this.EnsureOrdered,
            Greedy = this.Greedy,
            MaxNumberOfGroups = this.MaxNumberOfGroups
        };
    }

    /// <summary>Gets or sets the Boolean value to use to determine whether to greedily consume offered messages.</summary>
    public bool Greedy
    {
        get => _greedy;
        set
        {
            Debug.Assert(this != Default, "Default instance is supposed to be immutable.");
            _greedy = value;
        }
    }

    /// <summary>Gets or sets the maximum number of groups that should be generated by the block.</summary>
    public long MaxNumberOfGroups
    {
        get => _maxNumberOfGroups;
        set
        {
            Debug.Assert(this != Default, "Default instance is supposed to be immutable.");
            // Only positive counts or the Unbounded sentinel (-1) are legal.
            if (value <= 0 && value != Unbounded) throw new ArgumentOutOfRangeException(nameof(value));
            _maxNumberOfGroups = value;
        }
    }

    /// <summary>Gets a MaxNumberOfGroups value that may be used for comparison purposes.</summary>
    /// <returns>The maximum value, usable for comparison purposes.</returns>
    /// <remarks>Unlike MaxNumberOfGroups, this property will always return a positive value.</remarks>
    internal long ActualMaxNumberOfGroups =>
        (_maxNumberOfGroups == Unbounded) ? long.MaxValue : _maxNumberOfGroups;
}
}
| shimingsg/corefx | src/System.Threading.Tasks.Dataflow/src/Base/DataflowBlockOptions.cs | C# | mit | 21,643 |
// -*- C++ -*-
// Copyright (C) 2005-2016 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the terms
// of the GNU General Public License as published by the Free Software
// Foundation; either version 3, or (at your option) any later
// version.
// This library is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.
// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively. If not, see
// <http://www.gnu.org/licenses/>.
// Copyright (C) 2004 Ami Tavory and Vladimir Dreizin, IBM-HRL.
// Permission to use, copy, modify, sell, and distribute this software
// is hereby granted without fee, provided that the above copyright
// notice appears in all copies, and that both that copyright notice
// and this permission notice appear in supporting documentation. None
// of the above authors, nor IBM Haifa Research Laboratories, make any
// representation about the suitability of this software for any
// purpose. It is provided "as is" without express or implied
// warranty.
/**
* @file gp_hash_table_map_/debug_no_store_hash_fn_imps.hpp
* Contains implementations of gp_ht_map_'s debug-mode functions.
*/
#ifdef _GLIBCXX_DEBUG
PB_DS_CLASS_T_DEC
void
PB_DS_CLASS_C_DEC::
assert_entry_array_valid(const entry_array a_entries, false_type,
                         const char* __file, int __line) const
{
  // Walk the whole entry array, counting valid entries and checking that
  // every stored key is also registered with the debug-mode bookkeeping.
  size_type iterated_num_used_e = 0;
  for (size_type pos = 0; pos < m_num_e; ++pos)
    {
      const_entry_pointer p_e = &a_entries[pos];
      switch(p_e->m_stat)
        {
        case empty_entry_status:
        case erased_entry_status:
          break;
        case valid_entry_status:
          {
            key_const_reference r_key = PB_DS_V2F(p_e->m_value);
            debug_base::check_key_exists(r_key, __file, __line);
            ++iterated_num_used_e;
            break;
          }
        default:
          // Any other status value indicates a corrupted entry.
          PB_DS_DEBUG_VERIFY(0);
        };
    }
  // The incrementally maintained element count must match the actual count.
  PB_DS_DEBUG_VERIFY(iterated_num_used_e == m_num_used_e);
}
#endif
| ChangsoonKim/STM32F7DiscTutor | toolchain/osx/gcc-arm-none-eabi-6-2017-q1-update/arm-none-eabi/include/c++/6.3.1/ext/pb_ds/detail/gp_hash_table_map_/debug_no_store_hash_fn_imps.hpp | C++ | mit | 2,505 |
/*
* Copyright (C) 2015-2018 Team Kodi
* This file is part of Kodi - https://kodi.tv
*
* SPDX-License-Identifier: GPL-2.0-or-later
* See LICENSES/README.md for more information.
*/
#include "GameClientKeyboard.h"
#include "GameClientInput.h"
#include "addons/kodi-dev-kit/include/kodi/addon-instance/Game.h"
#include "games/addons/GameClient.h"
#include "games/addons/GameClientTranslator.h"
#include "input/keyboard/interfaces/IKeyboardInputProvider.h"
#include "utils/log.h"
#include <utility>
using namespace KODI;
using namespace GAME;
#define BUTTON_INDEX_MASK 0x01ff
// Constructs the keyboard adapter for a game client and subscribes to
// keyboard events from the given input provider.
CGameClientKeyboard::CGameClientKeyboard(CGameClient& gameClient,
                                         std::string controllerId,
                                         KEYBOARD::IKeyboardInputProvider* inputProvider)
  : m_gameClient(gameClient),
    m_controllerId(std::move(controllerId)),
    m_inputProvider(inputProvider)
{
  // NOTE(review): second argument presumably selects non-promiscuous
  // registration — confirm against IKeyboardInputProvider's interface.
  m_inputProvider->RegisterKeyboardHandler(this, false);
}
CGameClientKeyboard::~CGameClientKeyboard()
{
  // Balance the registration performed in the constructor.
  m_inputProvider->UnregisterKeyboardHandler(this);
}
// Returns the controller profile ID this keyboard handler represents.
std::string CGameClientKeyboard::ControllerID() const
{
  return m_controllerId;
}
// A key is supported when the controller profile declares it as a feature.
bool CGameClientKeyboard::HasKey(const KEYBOARD::KeyName& key) const
{
  return m_gameClient.Input().HasFeature(ControllerID(), key);
}
// Forwards a key-press to the game client as a GAME_INPUT_EVENT_KEY event.
// Returns the game client's handling result, or false if input is currently
// not accepted (not in fullscreen game).
bool CGameClientKeyboard::OnKeyPress(const KEYBOARD::KeyName& key,
                                     KEYBOARD::Modifier mod,
                                     uint32_t unicode)
{
  // Only allow activated input in fullscreen game
  if (!m_gameClient.Input().AcceptsInput())
  {
    CLog::Log(LOGDEBUG, "GAME: key press ignored, not in fullscreen game");
    return false;
  }

  game_input_event pressEvent;
  pressEvent.type = GAME_INPUT_EVENT_KEY;
  pressEvent.port_type = GAME_PORT_KEYBOARD;
  pressEvent.port_address = ""; // Not used
  pressEvent.controller_id = m_controllerId.c_str();
  pressEvent.feature_name = key.c_str();
  pressEvent.key.pressed = true;
  pressEvent.key.unicode = unicode;
  pressEvent.key.modifiers = CGameClientTranslator::GetModifiers(mod);

  return m_gameClient.Input().InputEvent(pressEvent);
}
// Forwards a key-release to the game client. Note: unlike OnKeyPress, the
// release is forwarded unconditionally (no AcceptsInput() gate).
void CGameClientKeyboard::OnKeyRelease(const KEYBOARD::KeyName& key,
                                       KEYBOARD::Modifier mod,
                                       uint32_t unicode)
{
  game_input_event releaseEvent;
  releaseEvent.type = GAME_INPUT_EVENT_KEY;
  releaseEvent.port_type = GAME_PORT_KEYBOARD;
  releaseEvent.port_address = ""; // Not used
  releaseEvent.controller_id = m_controllerId.c_str();
  releaseEvent.feature_name = key.c_str();
  releaseEvent.key.pressed = false;
  releaseEvent.key.unicode = unicode;
  releaseEvent.key.modifiers = CGameClientTranslator::GetModifiers(mod);

  m_gameClient.Input().InputEvent(releaseEvent);
}
| scbash/xbmc | xbmc/games/addons/input/GameClientKeyboard.cpp | C++ | gpl-2.0 | 2,683 |
# Seeds a read-restricted "meta" category (once) and records its id in the
# site_settings table under 'meta_category_id'.
class AddMetaCategory < ActiveRecord::Migration
  def up
    # Seeding is skipped entirely in the test environment.
    unless Rails.env.test?
      # Idempotence guard: only seed if the setting has not been created yet.
      result = Category.exec_sql "SELECT 1 FROM site_settings where name = 'meta_category_id'"
      if result.count == 0
        description = I18n.t('meta_category_description')
        name = I18n.t('meta_category_name')
        # Avoid a (case-insensitive) name collision with an existing category.
        if Category.exec_sql("SELECT 1 FROM categories where name ilike :name", name: name).count == 0
          result = Category.exec_sql "INSERT INTO categories
                  (name, color, text_color, created_at, updated_at, user_id, slug, description, read_restricted, position)
                  VALUES (:name, '808281', 'FFFFFF', now(), now(), -1, :slug, :description, true, 1)
                  RETURNING id", name: name, slug: Slug.for(name), description: description
          category_id = result[0]["id"].to_i
          # Remember the created category's id so the guard above fires next time.
          execute "INSERT INTO site_settings(name, data_type, value, created_at, updated_at)
                   VALUES ('meta_category_id', 3, #{category_id}, now(), now())"
        end
      end
    end
  end

  def down
    # Don't reverse this change. There is so much logic around deleting a category that it's messy
    # to try to do in sql. The up method will just make sure never to create the category twice.
  end
end
| fs/open-core | spec/dummy/db/migrate/20140122043508_add_meta_category.rb | Ruby | gpl-2.0 | 1,275 |
if (ign.ignited())
{
    // Assemble and solve the transport equation for the unburnt-gas energy
    // field heau (only meaningful once ignition has occurred).
    volScalarField& heau = thermo.heu();

    fvScalarMatrix heauEqn
    (
        fvm::ddt(rho, heau) + mvConvection->fvmDiv(phi, heau)
      + (fvc::ddt(rho, K) + fvc::div(phi, K))*rho/thermo.rhou()
      + (
            // Pressure-work term: its form depends on whether the equation
            // is written for unburnt internal energy ("eau") or enthalpy.
            heau.name() == "eau"
          ? fvc::div
            (
                fvc::absolute(phi/fvc::interpolate(rho), U),
                p,
                "div(phiv,p)"
            )*rho/thermo.rhou()
          : -dpdt*rho/thermo.rhou()
        )
      - fvm::laplacian(turbulence->alphaEff(), heau)

        // These terms cannot be used in partially-premixed combustion due to
        // the resultant inconsistency between ft and heau transport.
        // A possible solution would be to solve for ftu as well as ft.
        //- fvm::div(muEff*fvc::grad(b)/(b + 0.001), heau)
        //+ fvm::Sp(fvc::div(muEff*fvc::grad(b)/(b + 0.001)), heau)
     ==
        fvOptions(rho, heau)
    );

    // Apply any user-defined constraints before solving, and corrections after.
    fvOptions.constrain(heauEqn);

    heauEqn.solve();

    fvOptions.correct(heau);
}
| adrcad/OpenFOAM-2.3.x | applications/solvers/combustion/XiFoam/EauEqn.H | C++ | gpl-3.0 | 1,023 |
/*
MIT License http://www.opensource.org/licenses/mit-license.php
Author Tobias Koppers @sokra
*/
"use strict";
const Hook = require("./Hook");
const HookCodeFactory = require("./HookCodeFactory");
class AsyncSeriesWaterfallHookCodeFactory extends HookCodeFactory {
	// Generates the body of the compiled hook function. Taps run in series;
	// when a tap produces a defined (non-undefined) result, that result
	// replaces the hook's first argument before the next tap runs
	// (waterfall semantics).
	content({ onError, onResult, onDone }) {
		return this.callTapsSeries({
			// On error: report it and short-circuit the remaining taps.
			onError: (i, err, next, doneBreak) => onError(err) + doneBreak(true),
			onResult: (i, result, next) => {
				let code = "";
				code += `if(${result} !== undefined) {\n`;
				code += `${this._args[0]} = ${result};\n`;
				code += `}\n`;
				code += next();
				return code;
			},
			// After the last tap, the threaded first argument is the result.
			onDone: () => onResult(this._args[0])
		});
	}
}
const factory = new AsyncSeriesWaterfallHookCodeFactory();
class AsyncSeriesWaterfallHook extends Hook {
	// A waterfall hook threads its first argument through each tap in series,
	// so at least one argument must be declared.
	constructor(args) {
		super(args);
		if (args.length >= 1) return;
		throw new Error("Waterfall hooks must have at least one argument");
	}

	// Delegate code generation to the shared factory instance.
	compile(options) {
		factory.setup(this, options);
		return factory.create(options);
	}
}
// NOTE(review): presumably this disables the inherited synchronous `call`
// compile path, leaving only the async APIs — confirm against Hook.js.
Object.defineProperties(AsyncSeriesWaterfallHook.prototype, {
	_call: { value: undefined, configurable: true, writable: true }
});

module.exports = AsyncSeriesWaterfallHook;
| convox/convox.github.io | webpack/node_modules/tapable/lib/AsyncSeriesWaterfallHook.js | JavaScript | apache-2.0 | 1,187 |
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package set
import (
"github.com/spf13/cobra"
"k8s.io/cli-runtime/pkg/genericclioptions"
cmdutil "k8s.io/kubectl/pkg/cmd/util"
"k8s.io/kubectl/pkg/util/i18n"
"k8s.io/kubectl/pkg/util/templates"
)
var (
setLong = templates.LongDesc(i18n.T(`
Configure application resources
These commands help you make changes to existing application resources.`))
)
// NewCmdSet returns an initialized Command instance for 'set' sub command
func NewCmdSet(f cmdutil.Factory, streams genericclioptions.IOStreams) *cobra.Command {
cmd := &cobra.Command{
Use: "set SUBCOMMAND",
DisableFlagsInUseLine: true,
Short: i18n.T("Set specific features on objects"),
Long: setLong,
Run: cmdutil.DefaultSubCommandRun(streams.ErrOut),
}
// add subcommands
cmd.AddCommand(NewCmdImage(f, streams))
cmd.AddCommand(NewCmdResources(f, streams))
cmd.AddCommand(NewCmdSelector(f, streams))
cmd.AddCommand(NewCmdSubject(f, streams))
cmd.AddCommand(NewCmdServiceAccount(f, streams))
cmd.AddCommand(NewCmdEnv(f, streams))
return cmd
}
| fgimenez/kubernetes | staging/src/k8s.io/kubectl/pkg/cmd/set/set.go | GO | apache-2.0 | 1,670 |
/*
Copyright 2016 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package statefulset
import (
"reflect"
"sort"
"testing"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/tools/cache"
"k8s.io/kubernetes/pkg/api/v1"
apps "k8s.io/kubernetes/pkg/apis/apps/v1beta1"
"k8s.io/kubernetes/pkg/client/clientset_generated/clientset/fake"
informers "k8s.io/kubernetes/pkg/client/informers/informers_generated/externalversions"
"k8s.io/kubernetes/pkg/controller"
)
// alwaysReady always reports true (NOTE(review): presumably a stub for
// cache-synced checks in tests — confirm at call sites).
func alwaysReady() bool { return true }
// TestStatefulSetControllerCreates verifies that the controller scales a new
// StatefulSet up to its requested replica count.
func TestStatefulSetControllerCreates(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set := newStatefulSet(3)
	if err := scaleUpStatefulSetController(set, ssc, spc); err != nil {
		t.Errorf("Failed to turn up StatefulSet : %s", err)
	}
	// Re-read the set from the indexer to observe the controller's status update.
	if obj, _, err := spc.setsIndexer.Get(set); err != nil {
		t.Error(err)
	} else {
		set = obj.(*apps.StatefulSet)
	}
	if set.Status.Replicas != 3 {
		t.Error("Failed to scale statefulset to 3 replicas")
	}
}
// TestStatefulSetControllerDeletes verifies that the controller scales an
// existing StatefulSet up to 3 replicas and then back down to 0.
func TestStatefulSetControllerDeletes(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set := newStatefulSet(3)
	if err := scaleUpStatefulSetController(set, ssc, spc); err != nil {
		t.Errorf("Failed to turn up StatefulSet : %s", err)
	}
	if obj, _, err := spc.setsIndexer.Get(set); err != nil {
		t.Error(err)
	} else {
		set = obj.(*apps.StatefulSet)
	}
	if set.Status.Replicas != 3 {
		t.Error("Failed to scale statefulset to 3 replicas")
	}
	// Now request zero replicas and drive the controller back down.
	*set.Spec.Replicas = 0
	if err := scaleDownStatefulSetController(set, ssc, spc); err != nil {
		t.Errorf("Failed to turn down StatefulSet : %s", err)
	}
	if obj, _, err := spc.setsIndexer.Get(set); err != nil {
		t.Error(err)
	} else {
		set = obj.(*apps.StatefulSet)
	}
	if set.Status.Replicas != 0 {
		t.Error("Failed to scale statefulset to 0 replicas")
	}
}
// TestStatefulSetControllerRespectsTermination verifies that the controller
// leaves terminating Pods alone and only proceeds once they are removed.
func TestStatefulSetControllerRespectsTermination(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set := newStatefulSet(3)
	if err := scaleUpStatefulSetController(set, ssc, spc); err != nil {
		t.Errorf("Failed to turn up StatefulSet : %s", err)
	}
	if obj, _, err := spc.setsIndexer.Get(set); err != nil {
		t.Error(err)
	} else {
		set = obj.(*apps.StatefulSet)
	}
	if set.Status.Replicas != 3 {
		t.Error("Failed to scale statefulset to 3 replicas")
	}
	// Add two terminated pods beyond the desired replica count.
	_, err := spc.addTerminatedPod(set, 3)
	if err != nil {
		t.Error(err)
	}
	pods, err := spc.addTerminatedPod(set, 4)
	if err != nil {
		t.Error(err)
	}
	ssc.syncStatefulSet(set, pods)
	selector, err := metav1.LabelSelectorAsSelector(set.Spec.Selector)
	if err != nil {
		t.Error(err)
	}
	pods, err = spc.podsLister.Pods(set.Namespace).List(selector)
	if err != nil {
		t.Error(err)
	}
	// The terminating pods must not have been deleted by the sync.
	if len(pods) != 5 {
		t.Error("StatefulSet does not respect termination")
	}
	sort.Sort(ascendingOrdinal(pods))
	spc.DeleteStatefulPod(set, pods[3])
	spc.DeleteStatefulPod(set, pods[4])
	// With the terminating pods gone, scale-down can proceed to zero.
	*set.Spec.Replicas = 0
	if err := scaleDownStatefulSetController(set, ssc, spc); err != nil {
		t.Errorf("Failed to turn down StatefulSet : %s", err)
	}
	if obj, _, err := spc.setsIndexer.Get(set); err != nil {
		t.Error(err)
	} else {
		set = obj.(*apps.StatefulSet)
	}
	if set.Status.Replicas != 0 {
		t.Error("Failed to scale statefulset to 0 replicas")
	}
}
// TestStatefulSetControllerBlocksScaling verifies that scale-up is blocked
// while a pod is terminating, and resumes once that pod is removed.
func TestStatefulSetControllerBlocksScaling(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set := newStatefulSet(3)
	if err := scaleUpStatefulSetController(set, ssc, spc); err != nil {
		t.Errorf("Failed to turn up StatefulSet : %s", err)
	}
	if obj, _, err := spc.setsIndexer.Get(set); err != nil {
		t.Error(err)
	} else {
		set = obj.(*apps.StatefulSet)
	}
	if set.Status.Replicas != 3 {
		t.Error("Failed to scale statefulset to 3 replicas")
	}
	// Request 5 replicas, but mark the pod at ordinal 0 as terminating first.
	*set.Spec.Replicas = 5
	fakeResourceVersion(set)
	spc.setsIndexer.Update(set)
	_, err := spc.setPodTerminated(set, 0)
	if err != nil {
		t.Error("Failed to set pod terminated at ordinal 0")
	}
	ssc.enqueueStatefulSet(set)
	fakeWorker(ssc)
	selector, err := metav1.LabelSelectorAsSelector(set.Spec.Selector)
	if err != nil {
		t.Error(err)
	}
	pods, err := spc.podsLister.Pods(set.Namespace).List(selector)
	if err != nil {
		t.Error(err)
	}
	// Scaling must be blocked while a pod is terminating.
	if len(pods) != 3 {
		t.Error("StatefulSet does not block scaling")
	}
	sort.Sort(ascendingOrdinal(pods))
	spc.DeleteStatefulPod(set, pods[0])
	ssc.enqueueStatefulSet(set)
	fakeWorker(ssc)
	pods, err = spc.podsLister.Pods(set.Namespace).List(selector)
	if err != nil {
		t.Error(err)
	}
	if len(pods) != 3 {
		t.Error("StatefulSet does not resume when terminated Pod is removed")
	}
}
// TestStatefulSetControllerAddPod verifies that an add-pod event enqueues
// exactly the owning StatefulSet.
func TestStatefulSetControllerAddPod(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set1 := newStatefulSet(3)
	set2 := newStatefulSet(3)
	pod1 := newStatefulSetPod(set1, 0)
	pod2 := newStatefulSetPod(set2, 0)
	spc.setsIndexer.Add(set1)
	spc.setsIndexer.Add(set2)

	ssc.addPod(pod1)
	key, done := ssc.queue.Get()
	if key == nil || done {
		t.Error("failed to enqueue StatefulSet")
	} else if key, ok := key.(string); !ok {
		t.Error("key is not a string")
	} else if expectedKey, _ := controller.KeyFunc(set1); expectedKey != key {
		t.Errorf("expected StatefulSet key %s found %s", expectedKey, key)
	}
	ssc.queue.Done(key)

	ssc.addPod(pod2)
	key, done = ssc.queue.Get()
	if key == nil || done {
		t.Error("failed to enqueue StatefulSet")
	} else if key, ok := key.(string); !ok {
		t.Error("key is not a string")
	} else if expectedKey, _ := controller.KeyFunc(set2); expectedKey != key {
		t.Errorf("expected StatefulSet key %s found %s", expectedKey, key)
	}
	ssc.queue.Done(key)
}
// TestStatefulSetControllerAddPodOrphan verifies that adding an orphaned pod
// enqueues every StatefulSet whose selector matches the pod's labels.
func TestStatefulSetControllerAddPodOrphan(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set1 := newStatefulSet(3)
	set2 := newStatefulSet(3)
	set2.Name = "foo2"
	// set3's selector does not match the pod, so it must not be enqueued.
	set3 := newStatefulSet(3)
	set3.Name = "foo3"
	set3.Spec.Selector.MatchLabels = map[string]string{"foo3": "bar"}
	pod := newStatefulSetPod(set1, 0)
	spc.setsIndexer.Add(set1)
	spc.setsIndexer.Add(set2)
	spc.setsIndexer.Add(set3)

	// Make pod an orphan. Expect matching sets to be queued.
	pod.OwnerReferences = nil
	ssc.addPod(pod)
	if got, want := ssc.queue.Len(), 2; got != want {
		t.Errorf("queue.Len() = %v, want %v", got, want)
	}
}
// TestStatefulSetControllerAddPodNoSet verifies that a pod whose owning
// StatefulSet is not in the indexer enqueues nothing.
func TestStatefulSetControllerAddPodNoSet(t *testing.T) {
	ssc, _ := newFakeStatefulSetController()
	set := newStatefulSet(3)
	pod := newStatefulSetPod(set, 0)
	ssc.addPod(pod)
	ssc.queue.ShutDown()
	key, _ := ssc.queue.Get()
	if key != nil {
		t.Errorf("StatefulSet enqueued key for Pod with no Set %s", key)
	}
}
// TestStatefulSetControllerUpdatePod verifies that a pod update (with a new
// resource version) enqueues the owning StatefulSet.
func TestStatefulSetControllerUpdatePod(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set1 := newStatefulSet(3)
	set2 := newStatefulSet(3)
	set2.Name = "foo2"
	pod1 := newStatefulSetPod(set1, 0)
	pod2 := newStatefulSetPod(set2, 0)
	spc.setsIndexer.Add(set1)
	spc.setsIndexer.Add(set2)

	// Bump the resource version so the update is not treated as a no-op resync.
	prev := *pod1
	fakeResourceVersion(pod1)
	ssc.updatePod(&prev, pod1)
	key, done := ssc.queue.Get()
	if key == nil || done {
		t.Error("failed to enqueue StatefulSet")
	} else if key, ok := key.(string); !ok {
		t.Error("key is not a string")
	} else if expectedKey, _ := controller.KeyFunc(set1); expectedKey != key {
		t.Errorf("expected StatefulSet key %s found %s", expectedKey, key)
	}

	prev = *pod2
	fakeResourceVersion(pod2)
	ssc.updatePod(&prev, pod2)
	key, done = ssc.queue.Get()
	if key == nil || done {
		t.Error("failed to enqueue StatefulSet")
	} else if key, ok := key.(string); !ok {
		t.Error("key is not a string")
	} else if expectedKey, _ := controller.KeyFunc(set2); expectedKey != key {
		t.Errorf("expected StatefulSet key %s found %s", expectedKey, key)
	}
}
// TestStatefulSetControllerUpdatePodWithNoSet verifies that a pod update
// enqueues nothing when the owning StatefulSet is not in the indexer.
func TestStatefulSetControllerUpdatePodWithNoSet(t *testing.T) {
	ssc, _ := newFakeStatefulSetController()
	set := newStatefulSet(3)
	pod := newStatefulSetPod(set, 0)
	prev := *pod
	fakeResourceVersion(pod)
	ssc.updatePod(&prev, pod)
	ssc.queue.ShutDown()
	key, _ := ssc.queue.Get()
	if key != nil {
		t.Errorf("StatefulSet enqueued key for Pod with no Set %s", key)
	}
}
// TestStatefulSetControllerUpdatePodWithSameVersion verifies that an update
// with an unchanged resource version (a resync) enqueues nothing.
func TestStatefulSetControllerUpdatePodWithSameVersion(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set := newStatefulSet(3)
	pod := newStatefulSetPod(set, 0)
	spc.setsIndexer.Add(set)
	ssc.updatePod(pod, pod)
	ssc.queue.ShutDown()
	key, _ := ssc.queue.Get()
	if key != nil {
		t.Errorf("StatefulSet enqueued key for Pod with no Set %s", key)
	}
}
// TestStatefulSetControllerUpdatePodOrphanWithNewLabels verifies that when an
// orphaned pod's labels change, all StatefulSets matching either the old or
// the new labels are enqueued.
func TestStatefulSetControllerUpdatePodOrphanWithNewLabels(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set := newStatefulSet(3)
	pod := newStatefulSetPod(set, 0)
	pod.OwnerReferences = nil
	set2 := newStatefulSet(3)
	set2.Name = "foo2"
	spc.setsIndexer.Add(set)
	spc.setsIndexer.Add(set2)
	clone := *pod
	clone.Labels = map[string]string{"foo2": "bar2"}
	fakeResourceVersion(&clone)
	ssc.updatePod(&clone, pod)
	if got, want := ssc.queue.Len(), 2; got != want {
		t.Errorf("queue.Len() = %v, want %v", got, want)
	}
}
// TestStatefulSetControllerUpdatePodChangeControllerRef verifies that when a
// pod's controller owner reference changes, both the old and the new owning
// StatefulSets are enqueued.
func TestStatefulSetControllerUpdatePodChangeControllerRef(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set := newStatefulSet(3)
	set2 := newStatefulSet(3)
	set2.Name = "foo2"
	pod := newStatefulSetPod(set, 0)
	pod2 := newStatefulSetPod(set2, 0)
	spc.setsIndexer.Add(set)
	spc.setsIndexer.Add(set2)
	clone := *pod
	clone.OwnerReferences = pod2.OwnerReferences
	fakeResourceVersion(&clone)
	ssc.updatePod(&clone, pod)
	if got, want := ssc.queue.Len(), 2; got != want {
		t.Errorf("queue.Len() = %v, want %v", got, want)
	}
}
// TestStatefulSetControllerUpdatePodRelease verifies that releasing a pod
// (dropping its owner references) enqueues every matching StatefulSet, which
// may then try to adopt it.
func TestStatefulSetControllerUpdatePodRelease(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set := newStatefulSet(3)
	set2 := newStatefulSet(3)
	set2.Name = "foo2"
	pod := newStatefulSetPod(set, 0)
	spc.setsIndexer.Add(set)
	spc.setsIndexer.Add(set2)
	clone := *pod
	clone.OwnerReferences = nil
	fakeResourceVersion(&clone)
	ssc.updatePod(pod, &clone)
	if got, want := ssc.queue.Len(), 2; got != want {
		t.Errorf("queue.Len() = %v, want %v", got, want)
	}
}
// TestStatefulSetControllerDeletePod verifies that deleting a Pod enqueues
// exactly the StatefulSet that owns it, keyed by namespace/name.
func TestStatefulSetControllerDeletePod(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set1 := newStatefulSet(3)
	set2 := newStatefulSet(3)
	set2.Name = "foo2"
	pod1 := newStatefulSetPod(set1, 0)
	pod2 := newStatefulSetPod(set2, 0)
	spc.setsIndexer.Add(set1)
	spc.setsIndexer.Add(set2)
	// Deleting pod1 must enqueue set1's key (and nothing else).
	ssc.deletePod(pod1)
	key, done := ssc.queue.Get()
	if key == nil || done {
		t.Error("failed to enqueue StatefulSet")
	} else if key, ok := key.(string); !ok {
		// The queue stores string keys produced by controller.KeyFunc.
		t.Error("key is not a string")
	} else if expectedKey, _ := controller.KeyFunc(set1); expectedKey != key {
		t.Errorf("expected StatefulSet key %s found %s", expectedKey, key)
	}
	// Deleting pod2 must enqueue set2's key.
	ssc.deletePod(pod2)
	key, done = ssc.queue.Get()
	if key == nil || done {
		t.Error("failed to enqueue StatefulSet")
	} else if key, ok := key.(string); !ok {
		t.Error("key is not a string")
	} else if expectedKey, _ := controller.KeyFunc(set2); expectedKey != key {
		t.Errorf("expected StatefulSet key %s found %s", expectedKey, key)
	}
}
// TestStatefulSetControllerDeletePodOrphan verifies that deleting a Pod with
// no owner reference does not enqueue any StatefulSet.
func TestStatefulSetControllerDeletePodOrphan(t *testing.T) {
	ctrl, podControl := newFakeStatefulSetController()
	set1 := newStatefulSet(3)
	set2 := newStatefulSet(3)
	set2.Name = "foo2"
	orphan := newStatefulSetPod(set1, 0)
	podControl.setsIndexer.Add(set1)
	podControl.setsIndexer.Add(set2)
	orphan.OwnerReferences = nil
	ctrl.deletePod(orphan)
	if n := ctrl.queue.Len(); n != 0 {
		t.Errorf("queue.Len() = %v, want %v", n, 0)
	}
}
// TestStatefulSetControllerDeletePodTombstone verifies that a deletion
// delivered as a cache.DeletedFinalStateUnknown tombstone (i.e. the watch
// missed the delete event) still enqueues the owning StatefulSet.
func TestStatefulSetControllerDeletePodTombstone(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set := newStatefulSet(3)
	pod := newStatefulSetPod(set, 0)
	spc.setsIndexer.Add(set)
	// Wrap the pod in a tombstone, as the shared informer would on a
	// re-list that discovers the object is gone.
	tombstoneKey, _ := controller.KeyFunc(pod)
	tombstone := cache.DeletedFinalStateUnknown{Key: tombstoneKey, Obj: pod}
	ssc.deletePod(tombstone)
	key, done := ssc.queue.Get()
	if key == nil || done {
		t.Error("failed to enqueue StatefulSet")
	} else if key, ok := key.(string); !ok {
		t.Error("key is not a string")
	} else if expectedKey, _ := controller.KeyFunc(set); expectedKey != key {
		t.Errorf("expected StatefulSet key %s found %s", expectedKey, key)
	}
}
// TestStatefulSetControllerGetStatefulSetsForPod verifies that
// getStatefulSetsForPod returns every StatefulSet whose selector matches the
// Pod's labels, regardless of ownership.
func TestStatefulSetControllerGetStatefulSetsForPod(t *testing.T) {
	ctrl, podControl := newFakeStatefulSetController()
	set1 := newStatefulSet(3)
	set2 := newStatefulSet(3)
	set2.Name = "foo2"
	pod := newStatefulSetPod(set1, 0)
	podControl.setsIndexer.Add(set1)
	podControl.setsIndexer.Add(set2)
	podControl.podsIndexer.Add(pod)
	matching := ctrl.getStatefulSetsForPod(pod)
	if n := len(matching); n != 2 {
		t.Errorf("len(sets) = %v, want %v", n, 2)
	}
}
// TestGetPodsForStatefulSetAdopt verifies that getPodsForStatefulSet keeps
// owned pods (pod1), adopts matching orphans (pod2), and ignores orphans with
// non-matching labels (pod3) or a non-member name (pod4).
func TestGetPodsForStatefulSetAdopt(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set := newStatefulSet(5)
	pod1 := newStatefulSetPod(set, 1)
	// pod2 is an orphan with matching labels and name.
	pod2 := newStatefulSetPod(set, 2)
	pod2.OwnerReferences = nil
	// pod3 has wrong labels.
	pod3 := newStatefulSetPod(set, 3)
	pod3.OwnerReferences = nil
	pod3.Labels = nil
	// pod4 has wrong name.
	pod4 := newStatefulSetPod(set, 4)
	pod4.OwnerReferences = nil
	pod4.Name = "x" + pod4.Name
	spc.podsIndexer.Add(pod1)
	spc.podsIndexer.Add(pod2)
	spc.podsIndexer.Add(pod3)
	spc.podsIndexer.Add(pod4)
	selector, err := metav1.LabelSelectorAsSelector(set.Spec.Selector)
	if err != nil {
		t.Fatal(err)
	}
	pods, err := ssc.getPodsForStatefulSet(set, selector)
	if err != nil {
		t.Fatalf("getPodsForStatefulSet() error: %v", err)
	}
	var got []string
	for _, pod := range pods {
		got = append(got, pod.Name)
	}
	// pod2 should be claimed, pod3 and pod4 ignored
	want := []string{pod1.Name, pod2.Name}
	// Sort both slices so the comparison is order-independent.
	sort.Strings(got)
	sort.Strings(want)
	if !reflect.DeepEqual(got, want) {
		t.Errorf("getPodsForStatefulSet() = %v, want %v", got, want)
	}
}
// TestGetPodsForStatefulSetRelease verifies that getPodsForStatefulSet keeps
// only pods that are owned and still match (pod1), releases owned pods whose
// name or labels no longer match (pod2, pod3), and ignores non-matching
// orphans (pod4).
func TestGetPodsForStatefulSetRelease(t *testing.T) {
	ssc, spc := newFakeStatefulSetController()
	set := newStatefulSet(3)
	pod1 := newStatefulSetPod(set, 1)
	// pod2 is owned but has wrong name.
	pod2 := newStatefulSetPod(set, 2)
	pod2.Name = "x" + pod2.Name
	// pod3 is owned but has wrong labels.
	pod3 := newStatefulSetPod(set, 3)
	pod3.Labels = nil
	// pod4 is an orphan that doesn't match.
	pod4 := newStatefulSetPod(set, 4)
	pod4.OwnerReferences = nil
	pod4.Labels = nil
	spc.podsIndexer.Add(pod1)
	spc.podsIndexer.Add(pod2)
	spc.podsIndexer.Add(pod3)
	// BUG FIX: pod4 was constructed but never added to the indexer, so the
	// "orphan that doesn't match" case claimed by the comments above was
	// never actually exercised. The expected result is unchanged.
	spc.podsIndexer.Add(pod4)
	selector, err := metav1.LabelSelectorAsSelector(set.Spec.Selector)
	if err != nil {
		t.Fatal(err)
	}
	pods, err := ssc.getPodsForStatefulSet(set, selector)
	if err != nil {
		t.Fatalf("getPodsForStatefulSet() error: %v", err)
	}
	var got []string
	for _, pod := range pods {
		got = append(got, pod.Name)
	}
	// Expect only pod1 (pod2 and pod3 should be released, pod4 ignored).
	want := []string{pod1.Name}
	sort.Strings(got)
	sort.Strings(want)
	if !reflect.DeepEqual(got, want) {
		t.Errorf("getPodsForStatefulSet() = %v, want %v", got, want)
	}
}
// newFakeStatefulSetController wires up a StatefulSetController backed by a
// fake clientset plus a fakeStatefulPodControl for use in unit tests. Lister
// sync checks are stubbed out so tests never block waiting for caches.
func newFakeStatefulSetController() (*StatefulSetController, *fakeStatefulPodControl) {
	client := fake.NewSimpleClientset()
	factory := informers.NewSharedInformerFactory(client, controller.NoResyncPeriodFunc())
	podControl := newFakeStatefulPodControl(factory.Core().V1().Pods(), factory.Apps().V1beta1().StatefulSets())
	ssc := NewStatefulSetController(
		factory.Core().V1().Pods(),
		factory.Apps().V1beta1().StatefulSets(),
		factory.Core().V1().PersistentVolumeClaims(),
		client,
	)
	ssc.podListerSynced = alwaysReady
	ssc.setListerSynced = alwaysReady
	ssc.control = NewDefaultStatefulSetControl(podControl)
	return ssc, podControl
}
// fakeWorker pops a single key from the controller's work queue and runs one
// sync for it, standing in for the controller's real worker goroutine.
func fakeWorker(ssc *StatefulSetController) {
	obj, done := ssc.queue.Get()
	if done {
		return
	}
	ssc.sync(obj.(string))
	ssc.queue.Done(obj)
}
// getPodAtOrdinal sorts pods into ascending ordinal order and returns the pod
// at the requested position, or nil when the ordinal is out of range.
func getPodAtOrdinal(pods []*v1.Pod, ordinal int) *v1.Pod {
	if ordinal < 0 || ordinal >= len(pods) {
		return nil
	}
	sort.Sort(ascendingOrdinal(pods))
	return pods[ordinal]
}
// scaleUpStatefulSetController drives the controller through a scale-up of
// set: each round walks the newest pod through Pending -> Running -> Ready,
// delivering every transition to the controller and running one sync per
// event, until Status.Replicas reaches Spec.Replicas. Invariants are asserted
// after every round and once more at the end.
func scaleUpStatefulSetController(set *apps.StatefulSet, ssc *StatefulSetController, spc *fakeStatefulPodControl) error {
	spc.setsIndexer.Add(set)
	ssc.enqueueStatefulSet(set)
	fakeWorker(ssc)
	selector, err := metav1.LabelSelectorAsSelector(set.Spec.Selector)
	if err != nil {
		return err
	}
	for set.Status.Replicas < *set.Spec.Replicas {
		// NOTE(review): the List error is ignored here (err is shadowed and
		// only checked for the setPod* calls below) — presumably acceptable
		// against the fake lister, but worth confirming.
		pods, err := spc.podsLister.Pods(set.Namespace).List(selector)
		ord := len(pods) - 1
		pod := getPodAtOrdinal(pods, ord)
		// Move the newest pod to Pending and deliver the add event.
		if pods, err = spc.setPodPending(set, ord); err != nil {
			return err
		}
		pod = getPodAtOrdinal(pods, ord)
		ssc.addPod(pod)
		fakeWorker(ssc)
		pod = getPodAtOrdinal(pods, ord)
		prev := *pod
		// Transition Pending -> Running and deliver the update event.
		if pods, err = spc.setPodRunning(set, ord); err != nil {
			return err
		}
		pod = getPodAtOrdinal(pods, ord)
		ssc.updatePod(&prev, pod)
		fakeWorker(ssc)
		pod = getPodAtOrdinal(pods, ord)
		prev = *pod
		// Transition Running -> Ready and deliver the update event.
		if pods, err = spc.setPodReady(set, ord); err != nil {
			return err
		}
		pod = getPodAtOrdinal(pods, ord)
		ssc.updatePod(&prev, pod)
		fakeWorker(ssc)
		if err := assertInvariants(set, spc); err != nil {
			return err
		}
		// Re-read the set so the loop condition sees status updates made by
		// the sync just performed.
		if obj, _, err := spc.setsIndexer.Get(set); err != nil {
			return err
		} else {
			set = obj.(*apps.StatefulSet)
		}
	}
	return assertInvariants(set, spc)
}
// scaleDownStatefulSetController drives the controller through a scale-down
// of set: it terminates and then deletes the highest-ordinal pod, delivering
// each event to the controller and running one sync per event, until
// Status.Replicas drops to Spec.Replicas. Invariants are asserted at the end.
func scaleDownStatefulSetController(set *apps.StatefulSet, ssc *StatefulSetController, spc *fakeStatefulPodControl) error {
	selector, err := metav1.LabelSelectorAsSelector(set.Spec.Selector)
	if err != nil {
		return err
	}
	pods, err := spc.podsLister.Pods(set.Namespace).List(selector)
	if err != nil {
		return err
	}
	ord := len(pods) - 1
	pod := getPodAtOrdinal(pods, ord)
	prev := *pod
	fakeResourceVersion(set)
	spc.setsIndexer.Add(set)
	ssc.enqueueStatefulSet(set)
	fakeWorker(ssc)
	// Terminate the highest-ordinal pod and deliver the update event.
	// NOTE(review): the addTerminatedPod error is ignored here — confirm
	// this is intentional against the fake pod control.
	pods, err = spc.addTerminatedPod(set, ord)
	pod = getPodAtOrdinal(pods, ord)
	ssc.updatePod(&prev, pod)
	fakeWorker(ssc)
	spc.DeleteStatefulPod(set, pod)
	ssc.deletePod(pod)
	fakeWorker(ssc)
	for set.Status.Replicas > *set.Spec.Replicas {
		// NOTE(review): both errors below are ignored, and ord here is
		// len(pods) rather than len(pods)-1 as above — presumably deliberate
		// for the post-deletion re-listing, but worth confirming.
		pods, err = spc.podsLister.Pods(set.Namespace).List(selector)
		ord := len(pods)
		pods, err = spc.addTerminatedPod(set, ord)
		pod = getPodAtOrdinal(pods, ord)
		ssc.updatePod(&prev, pod)
		fakeWorker(ssc)
		spc.DeleteStatefulPod(set, pod)
		ssc.deletePod(pod)
		fakeWorker(ssc)
		// Re-read the set so the loop condition sees status updates made by
		// the sync just performed.
		if obj, _, err := spc.setsIndexer.Get(set); err != nil {
			return err
		} else {
			set = obj.(*apps.StatefulSet)
		}
	}
	return assertInvariants(set, spc)
}
| sp-borja-juncosa/kops | vendor/k8s.io/kubernetes/pkg/controller/statefulset/stateful_set_test.go | GO | apache-2.0 | 18,539 |
import os
import sys
import shutil
import tempfile
import contextlib
from ._compat import iteritems, PY2
# If someone wants to vendor click, we want to ensure the
# correct package is discovered. Ideally we could use a
# relative import here but unfortunately Python does not
# support that.
clickpkg = sys.modules[__name__.rsplit('.', 1)[0]]
if PY2:
from cStringIO import StringIO
else:
import io
from ._compat import _find_binary_reader
class EchoingStdin(object):
    """Proxy around a binary input stream that mirrors every byte read
    from it into an output stream, so prompt input shows up in the
    captured output just like it would on a terminal.
    """

    def __init__(self, input, output):
        self._input = input
        self._output = output

    def __getattr__(self, name):
        # Anything not overridden below is delegated to the real stream.
        return getattr(self._input, name)

    def _echo(self, data):
        # Copy what was read into the output stream, then hand it back.
        self._output.write(data)
        return data

    def read(self, n=-1):
        return self._echo(self._input.read(n))

    def readline(self, n=-1):
        return self._echo(self._input.readline(n))

    def readlines(self):
        return [self._echo(line) for line in self._input.readlines()]

    def __iter__(self):
        return iter(self._echo(line) for line in self._input)

    def __repr__(self):
        return repr(self._input)
def make_input_stream(input, charset):
    """Normalize *input* (``None``, text, bytes or a readable stream) into
    a binary stream suitable for installing as ``sys.stdin``.

    On Python 2 streams are returned as-is; on Python 3 a binary reader is
    extracted from text-mode streams.  Raises ``TypeError`` when no binary
    reader can be found for a stream argument.
    """
    # Is already an input stream.
    if hasattr(input, 'read'):
        if PY2:
            return input
        rv = _find_binary_reader(input)
        if rv is not None:
            return rv
        raise TypeError('Could not find binary reader for input stream.')

    # None means empty input; text is encoded with the given charset.
    if input is None:
        input = b''
    elif not isinstance(input, bytes):
        input = input.encode(charset)
    if PY2:
        return StringIO(input)
    return io.BytesIO(input)
class Result(object):
    """Holds the captured result of an invoked CLI script."""

    def __init__(self, runner, output_bytes, exit_code, exception,
                 exc_info=None):
        #: The runner that created the result
        self.runner = runner
        #: The output as bytes.
        self.output_bytes = output_bytes
        #: The exit code as integer.
        self.exit_code = exit_code
        #: The exception that happened, if any.
        self.exception = exception
        #: The traceback, if available.
        self.exc_info = exc_info

    @property
    def output(self):
        """The output as unicode string, with CRLF normalized to LF."""
        decoded = self.output_bytes.decode(self.runner.charset, 'replace')
        return decoded.replace('\r\n', '\n')

    def __repr__(self):
        status = repr(self.exception) if self.exception else 'okay'
        return '<Result %s>' % (status,)
class CliRunner(object):
    """The CLI runner provides functionality to invoke a Click command line
    script for unittesting purposes in a isolated environment.  This only
    works in single-threaded systems without any concurrency as it changes the
    global interpreter state.

    :param charset: the character set for the input and output data.  This is
                    UTF-8 by default and should not be changed currently as
                    the reporting to Click only works in Python 2 properly.
    :param env: a dictionary with environment variables for overriding.
    :param echo_stdin: if this is set to `True`, then reading from stdin writes
                       to stdout.  This is useful for showing examples in
                       some circumstances.  Note that regular prompts
                       will automatically echo the input.
    """

    def __init__(self, charset=None, env=None, echo_stdin=False):
        if charset is None:
            charset = 'utf-8'
        self.charset = charset
        self.env = env or {}
        self.echo_stdin = echo_stdin

    def get_default_prog_name(self, cli):
        """Given a command object it will return the default program name
        for it.  The default is the `name` attribute or ``"root"`` if not
        set.
        """
        return cli.name or 'root'

    def make_env(self, overrides=None):
        """Returns the environment overrides for invoking a script."""
        rv = dict(self.env)
        if overrides:
            rv.update(overrides)
        return rv

    @contextlib.contextmanager
    def isolation(self, input=None, env=None, color=False):
        """A context manager that sets up the isolation for invoking of a
        command line tool.  This sets up stdin with the given input data
        and `os.environ` with the overrides from the given dictionary.
        This also rebinds some internals in Click to be mocked (like the
        prompt functionality).

        This is automatically done in the :meth:`invoke` method.

        .. versionadded:: 4.0
           The ``color`` parameter was added.

        :param input: the input stream to put into sys.stdin.
        :param env: the environment overrides as dictionary.
        :param color: whether the output should contain color codes. The
                      application can still override this explicitly.
        """
        input = make_input_stream(input, self.charset)

        # Remember the real streams so they can be restored on exit.
        old_stdin = sys.stdin
        old_stdout = sys.stdout
        old_stderr = sys.stderr

        env = self.make_env(env)

        if PY2:
            # On Python 2 stdout/stderr are byte streams; a single StringIO
            # captures both.
            sys.stdout = sys.stderr = bytes_output = StringIO()
            if self.echo_stdin:
                input = EchoingStdin(input, bytes_output)
        else:
            # On Python 3 the captured streams are text wrappers over one
            # shared bytes buffer.
            bytes_output = io.BytesIO()
            if self.echo_stdin:
                input = EchoingStdin(input, bytes_output)
            input = io.TextIOWrapper(input, encoding=self.charset)
            sys.stdout = sys.stderr = io.TextIOWrapper(
                bytes_output, encoding=self.charset)

        sys.stdin = input

        def visible_input(prompt=None):
            # Stand-in for click's visible prompt: read a line and echo it.
            sys.stdout.write(prompt or '')
            val = input.readline().rstrip('\r\n')
            sys.stdout.write(val + '\n')
            sys.stdout.flush()
            return val

        def hidden_input(prompt=None):
            # Stand-in for click's hidden prompt: read without echoing.
            sys.stdout.write((prompt or '') + '\n')
            sys.stdout.flush()
            return input.readline().rstrip('\r\n')

        def _getchar(echo):
            char = sys.stdin.read(1)
            if echo:
                sys.stdout.write(char)
                sys.stdout.flush()
            return char

        default_color = color

        def should_strip_ansi(stream=None, color=None):
            if color is None:
                return not default_color
            return not color

        # Patch click's prompt/char/ANSI internals, keeping the originals
        # so they can be restored in the finally block below.
        old_visible_prompt_func = clickpkg.termui.visible_prompt_func
        old_hidden_prompt_func = clickpkg.termui.hidden_prompt_func
        old__getchar_func = clickpkg.termui._getchar
        old_should_strip_ansi = clickpkg.utils.should_strip_ansi
        clickpkg.termui.visible_prompt_func = visible_input
        clickpkg.termui.hidden_prompt_func = hidden_input
        clickpkg.termui._getchar = _getchar
        clickpkg.utils.should_strip_ansi = should_strip_ansi

        old_env = {}
        try:
            for key, value in iteritems(env):
                # BUG FIX: remember the *current* value of ``key``.  The
                # previous code did ``os.environ.get(value)``, which looked
                # up the wrong entry, so the original environment was never
                # restored correctly after the isolation ended.
                old_env[key] = os.environ.get(key)
                if value is None:
                    try:
                        del os.environ[key]
                    except Exception:
                        pass
                else:
                    os.environ[key] = value
            yield bytes_output
        finally:
            # Restore the environment exactly as it was before.
            for key, value in iteritems(old_env):
                if value is None:
                    try:
                        del os.environ[key]
                    except Exception:
                        pass
                else:
                    os.environ[key] = value
            sys.stdout = old_stdout
            sys.stderr = old_stderr
            sys.stdin = old_stdin
            clickpkg.termui.visible_prompt_func = old_visible_prompt_func
            clickpkg.termui.hidden_prompt_func = old_hidden_prompt_func
            clickpkg.termui._getchar = old__getchar_func
            clickpkg.utils.should_strip_ansi = old_should_strip_ansi

    def invoke(self, cli, args=None, input=None, env=None,
               catch_exceptions=True, color=False, **extra):
        """Invokes a command in an isolated environment.  The arguments are
        forwarded directly to the command line script, the `extra` keyword
        arguments are passed to the :meth:`~clickpkg.Command.main` function of
        the command.

        This returns a :class:`Result` object.

        .. versionadded:: 3.0
           The ``catch_exceptions`` parameter was added.

        .. versionchanged:: 3.0
           The result object now has an `exc_info` attribute with the
           traceback if available.

        .. versionadded:: 4.0
           The ``color`` parameter was added.

        :param cli: the command to invoke
        :param args: the arguments to invoke
        :param input: the input data for `sys.stdin`.
        :param env: the environment overrides.
        :param catch_exceptions: Whether to catch any other exceptions than
                                 ``SystemExit``.
        :param extra: the keyword arguments to pass to :meth:`main`.
        :param color: whether the output should contain color codes. The
                      application can still override this explicitly.
        """
        exc_info = None
        with self.isolation(input=input, env=env, color=color) as out:
            exception = None
            exit_code = 0

            try:
                cli.main(args=args or (),
                         prog_name=self.get_default_prog_name(cli), **extra)
            except SystemExit as e:
                if e.code != 0:
                    exception = e
                    exc_info = sys.exc_info()

                exit_code = e.code
                if not isinstance(exit_code, int):
                    # A non-integer exit code is printed and mapped to 1,
                    # mirroring the interpreter's behaviour.
                    sys.stdout.write(str(exit_code))
                    sys.stdout.write('\n')
                    exit_code = 1
            except Exception as e:
                if not catch_exceptions:
                    raise
                exception = e
                exit_code = -1
                exc_info = sys.exc_info()
            finally:
                sys.stdout.flush()
                output = out.getvalue()

        return Result(runner=self,
                      output_bytes=output,
                      exit_code=exit_code,
                      exception=exception,
                      exc_info=exc_info)

    @contextlib.contextmanager
    def isolated_filesystem(self):
        """A context manager that creates a temporary folder and changes
        the current working directory to it for isolated filesystem tests.
        """
        cwd = os.getcwd()
        t = tempfile.mkdtemp()
        os.chdir(t)
        try:
            yield t
        finally:
            os.chdir(cwd)
            try:
                shutil.rmtree(t)
            except (OSError, IOError):
                # Best effort cleanup; a locked file must not fail the test.
                pass
| gameduell/duell | pylib/click/testing.py | Python | bsd-2-clause | 10,834 |
--TEST--
Function -- array_walk_recursive
--SKIPIF--
<?php if (function_exists('array_walk_recursive')) { echo 'skip'; } ?>
--FILE--
<?php
// Regression test for the PHP_Compat userland implementation of
// array_walk_recursive(). The SKIPIF section above skips the test when the
// native function exists, since loadFunction() only defines it when missing.
require_once 'PHP/Compat.php';
PHP_Compat::loadFunction('array_walk_recursive');

$sweet = array('a' => 'apple', 'b' => 'banana');
$fruits = array('sweet' => $sweet, 'sour' => 'lemon');

// Callback: prints each leaf value with its key. Nested arrays (like
// $sweet) are descended into rather than passed to the callback, which is
// why no "sweet" line appears in the expected output.
function test_print($item, $key)
{
    echo "$key holds $item\n";
}

array_walk_recursive($fruits, 'test_print');
?>
--EXPECT--
a holds apple
b holds banana
sour holds lemon
/*
Copyright (c) 2007, 2014, Oracle and/or its affiliates. All rights reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
*/
/*
ndbapi_blob_ndbrecord
Illustrates the manipulation of BLOB (actually TEXT in this example).
This example uses the NdbRecord style way of accessing tuples.
Shows insert, read, and update, using both inline value buffer and
read/write methods.
*/
#ifdef _WIN32
#include <winsock2.h>
#endif
#include <mysql.h>
#include <mysqld_error.h>
#include <NdbApi.hpp>
/* Used for cout. */
#include <iostream>
#include <stdio.h>
#include <ctype.h>
#include <stdlib.h>
#include <stddef.h>
#include <string.h>
/**
 * Helper debugging macros: PRINT_ERROR reports the failing file/line plus
 * an error code and message on stdout; MYSQLERROR and APIERROR additionally
 * terminate the process with exit(-1), so they are only suitable for this
 * example program.
 */
#define PRINT_ERROR(code,msg) \
  std::cout << "Error in " << __FILE__ << ", line: " << __LINE__ \
            << ", code: " << code \
            << ", msg: " << msg << "." << std::endl

#define MYSQLERROR(mysql) { \
  PRINT_ERROR(mysql_errno(&mysql),mysql_error(&mysql)); \
  exit(-1); }

#define APIERROR(error) { \
  PRINT_ERROR(error.code,error.message); \
  exit(-1); }
/* Sample TEXT value written by populate() and manipulated by the update
   functions below. Quote taken from Project Gutenberg. */
const char *text_quote=
  "Just at this moment, somehow or other, they began to run.\n"
  "\n"
  "  Alice never could quite make out, in thinking it over\n"
  "afterwards, how it was that they began:  all she remembers is,\n"
  "that they were running hand in hand, and the Queen went so fast\n"
  "that it was all she could do to keep up with her:  and still the\n"
  "Queen kept crying 'Faster! Faster!' but Alice felt she COULD NOT\n"
  "go faster, though she had not breath left to say so.\n"
  "\n"
  "  The most curious part of the thing was, that the trees and the\n"
  "other things round them never changed their places at all:\n"
  "however fast they went, they never seemed to pass anything.  'I\n"
  "wonder if all the things move along with us?' thought poor\n"
  "puzzled Alice.  And the Queen seemed to guess her thoughts, for\n"
  "she cried, 'Faster! Don't try to talk!'\n"
  "\n"
  "  Not that Alice had any idea of doing THAT.  She felt as if she\n"
  "would never be able to talk again, she was getting so much out of\n"
  "breath:  and still the Queen cried 'Faster! Faster!' and dragged\n"
  "her along.  'Are we nearly there?'  Alice managed to pant out at\n"
  "last.\n"
  "\n"
  "  'Nearly there!'  the Queen repeated.  'Why, we passed it ten\n"
  "minutes ago!  Faster!'  And they ran on for a time in silence,\n"
  "with the wind whistling in Alice's ears, and almost blowing her\n"
  "hair off her head, she fancied.\n"
  "\n"
  "  'Now!  Now!'  cried the Queen.  'Faster!  Faster!'  And they\n"
  "went so fast that at last they seemed to skim through the air,\n"
  "hardly touching the ground with their feet, till suddenly, just\n"
  "as Alice was getting quite exhausted, they stopped, and she found\n"
  "herself sitting on the ground, breathless and giddy.\n"
  "\n"
  "  The Queen propped her up against a tree, and said kindly, 'You\n"
  "may rest a little now.'\n"
  "\n"
  "  Alice looked round her in great surprise.  'Why, I do believe\n"
  "we've been under this tree the whole time!  Everything's just as\n"
  "it was!'\n"
  "\n"
  "  'Of course it is,' said the Queen, 'what would you have it?'\n"
  "\n"
  "  'Well, in OUR country,' said Alice, still panting a little,\n"
  "'you'd generally get to somewhere else--if you ran very fast\n"
  "for a long time, as we've been doing.'\n"
  "\n"
  "  'A slow sort of country!' said the Queen. 'Now, HERE, you see,\n"
  "it takes all the running YOU can do, to keep in the same place.\n"
  "If you want to get somewhere else, you must run at least twice as\n"
  "fast as that!'\n"
  "\n"
  "  'I'd rather not try, please!' said Alice. 'I'm quite content\n"
  "to stay here--only I AM so hot and thirsty!'\n"
  "\n"
  "  -- Lewis Carroll, 'Through the Looking-Glass'.";
/* NdbRecord objects, built once by setup_records() and shared by all the
   operations in this example. */
const NdbRecord *key_record;       // For specifying table key
const NdbRecord *blob_record;      // For accessing blob
const NdbRecord *full_record;      // All columns, for insert

/* C struct representing the row layout */
struct MyRow
{
  unsigned int myId;

  /* Pointer to Blob handle for operations on the blob column
   * Space must be left for it in the row, but a pointer to the
   * blob handle can also be obtained via calls to
   * NdbOperation::getBlobHandle()
   */
  NdbBlob* myText;
};
/*
  setup_records builds the three NdbRecord objects used by this example:
  key_record (primary key column only), blob_record (key lookup plus the
  blob column) and full_record (all columns, for inserts). Any dictionary
  failure terminates the program via APIERROR.
*/
static void setup_records(Ndb *myNdb)
{
  NdbDictionary::RecordSpecification spec[2];

  NdbDictionary::Dictionary *myDict= myNdb->getDictionary();
  const NdbDictionary::Table *myTable= myDict->getTable("api_blob_ndbrecord");
  if (myTable == NULL)
    APIERROR(myDict->getNdbError());
  const NdbDictionary::Column *col1= myTable->getColumn("my_id");
  if (col1 == NULL)
    APIERROR(myDict->getNdbError());
  const NdbDictionary::Column *col2= myTable->getColumn("my_text");
  if (col2 == NULL)
    APIERROR(myDict->getNdbError());

  /* Map each column onto its field in struct MyRow; neither column is
     nullable here, so the nullbit positions are unused (zero). */
  spec[0].column= col1;
  spec[0].offset= offsetof(MyRow, myId);
  spec[0].nullbit_byte_offset= 0;
  spec[0].nullbit_bit_in_byte= 0;
  spec[1].column= col2;
  spec[1].offset= offsetof(MyRow, myText);
  spec[1].nullbit_byte_offset= 0;
  spec[1].nullbit_bit_in_byte= 0;

  key_record= myDict->createRecord(myTable, &spec[0], 1, sizeof(spec[0]));
  if (key_record == NULL)
    APIERROR(myDict->getNdbError());
  blob_record= myDict->createRecord(myTable, &spec[1], 1, sizeof(spec[0]));
  if (blob_record == NULL)
    APIERROR(myDict->getNdbError());
  full_record= myDict->createRecord(myTable, &spec[0], 2, sizeof(spec[0]));
  if (full_record == NULL)
    APIERROR(myDict->getNdbError());
}
/*
  Function to drop table. Exits via MYSQLERROR on failure.
*/
void drop_table(MYSQL &mysql)
{
  if (mysql_query(&mysql, "DROP TABLE api_blob_ndbrecord"))
    MYSQLERROR(mysql);
}
/*
  Functions to create table.
  try_create_table issues the CREATE TABLE and returns the raw
  mysql_query() result (0 on success, non-zero on failure) so the caller
  can distinguish "already exists" from other errors.
*/
int try_create_table(MYSQL &mysql)
{
  return mysql_query(&mysql,
                     "CREATE TABLE"
                     "  api_blob_ndbrecord"
                     "    (my_id INT UNSIGNED NOT NULL,"
                     "     my_text TEXT NOT NULL,"
                     "     PRIMARY KEY USING HASH (my_id))"
                     "  ENGINE=NDB");
}
/*
  create_table creates the example table, dropping and recreating it if a
  previous run left it behind. Any other MySQL error is fatal.
*/
void create_table(MYSQL &mysql)
{
  if (try_create_table(mysql))
  {
    if (mysql_errno(&mysql) != ER_TABLE_EXISTS_ERROR)
      MYSQLERROR(mysql);
    std::cout << "MySQL Cluster already has example table: api_blob_ndbrecord. "
              << "Dropping it..." << std::endl;

    /******************
     * Recreate table *
     ******************/
    drop_table(mysql);
    if (try_create_table(mysql))
      MYSQLERROR(mysql);
  }
}
/*
  populate inserts the single example row (my_id = 1) and stores the whole
  text_quote in its TEXT column. Returns non-zero on commit success.
*/
int populate(Ndb *myNdb)
{
  MyRow row;

  NdbTransaction *trans= myNdb->startTransaction();
  if (trans == NULL)
    APIERROR(myNdb->getNdbError());

  row.myId= 1;
  const NdbOperation *insertOp= trans->insertTuple(full_record, (const char*) &row);
  if (insertOp == NULL)
    APIERROR(trans->getNdbError());

  /* Attach a blob handle to the operation and stage the text value. */
  NdbBlob *blob= insertOp->getBlobHandle("my_text");
  if (blob == NULL)
    APIERROR(insertOp->getNdbError());
  blob->setValue(text_quote, strlen(text_quote));

  int execResult= trans->execute(NdbTransaction::Commit);
  trans->close();
  return execResult != -1;
}
/*
  update_key uppercases every character of the TEXT field using a primary
  key operation. The blob is read and written piece-wise (from the end
  backwards, to illustrate seeking) so the entire value never has to be
  held in memory at once. Returns 1 on success; fatal errors exit via
  APIERROR.
*/
int update_key(Ndb *myNdb)
{
  MyRow row;

  NdbTransaction *myTrans= myNdb->startTransaction();
  if (myTrans == NULL)
    APIERROR(myNdb->getNdbError());

  row.myId= 1;
  const NdbOperation *myNdbOperation=
    myTrans->updateTuple(key_record,
                         (const char*) &row,
                         blob_record,
                         (const char*) &row);
  if (myNdbOperation == NULL)
    APIERROR(myTrans->getNdbError());
  NdbBlob *myBlobHandle= myNdbOperation->getBlobHandle("my_text");
  if (myBlobHandle == NULL)
    APIERROR(myNdbOperation->getNdbError());

  /* Execute NoCommit to make the blob handle active so
   * that we can determine the actual Blob length
   */
  if (-1 == myTrans->execute(NdbTransaction::NoCommit))
    APIERROR(myTrans->getNdbError());

  Uint64 length= 0;
  if (-1 == myBlobHandle->getLength(length))
    APIERROR(myBlobHandle->getNdbError());

  /*
    A real application should use a much larger chunk size for
    efficiency, preferably much larger than the part size, which
    defaults to 2000. 64000 might be a good value.
  */
#define CHUNK_SIZE 100
  int chunk;
  char buffer[CHUNK_SIZE];
  /*
    BUG FIX: the loop previously started at (length-1)/CHUNK_SIZE, which
    underflows the unsigned length when the blob is empty and yields a
    huge chunk index. ceil(length/CHUNK_SIZE)-1 is identical for
    length > 0 and correctly produces -1 (loop skipped) for length == 0.
  */
  for (chunk= (int)((length + CHUNK_SIZE - 1) / CHUNK_SIZE) - 1; chunk >= 0; chunk--)
  {
    Uint64 pos= chunk*CHUNK_SIZE;
    Uint32 chunk_length= CHUNK_SIZE;
    /* The final (first-read) chunk may be shorter than CHUNK_SIZE. */
    if (pos + chunk_length > length)
      chunk_length= length - pos;

    /* Read from the end back, to illustrate seeking. */
    if (-1 == myBlobHandle->setPos(pos))
      APIERROR(myBlobHandle->getNdbError());
    if (-1 == myBlobHandle->readData(buffer, chunk_length))
      APIERROR(myBlobHandle->getNdbError());
    int res= myTrans->execute(NdbTransaction::NoCommit);
    if (-1 == res)
      APIERROR(myTrans->getNdbError());

    /* Uppercase everything. */
    for (Uint64 j= 0; j < chunk_length; j++)
      buffer[j]= toupper(buffer[j]);

    if (-1 == myBlobHandle->setPos(pos))
      APIERROR(myBlobHandle->getNdbError());
    if (-1 == myBlobHandle->writeData(buffer, chunk_length))
      APIERROR(myBlobHandle->getNdbError());

    /* Commit on the final update. */
    if (-1 == myTrans->execute(chunk ?
                               NdbTransaction::NoCommit :
                               NdbTransaction::Commit))
      APIERROR(myTrans->getNdbError());
  }

  myNdb->closeTransaction(myTrans);

  return 1;
}
/*
  update_scan lowercases all characters in the TEXT field of every row,
  using an exclusive scan and updateCurrentTuple() to take over each row
  lock for the update. Returns 1 on success.
*/
int update_scan(Ndb *myNdb)
{
  /*
    Lowercase all characters in TEXT field, using a scan with
    updateCurrentTuple().
  */
  /* NOTE(review): getValue() below reads each blob into this fixed
     10000-byte buffer; behaviour for blobs larger than the buffer should
     be confirmed against the NdbBlob documentation. */
  char buffer[10000];

  NdbTransaction *myTrans= myNdb->startTransaction();
  if (myTrans == NULL)
    APIERROR(myNdb->getNdbError());

  NdbScanOperation *myScanOp=
    myTrans->scanTable(blob_record, NdbOperation::LM_Exclusive);
  if (myScanOp == NULL)
    APIERROR(myTrans->getNdbError());
  NdbBlob *myBlobHandle= myScanOp->getBlobHandle("my_text");
  if (myBlobHandle == NULL)
    APIERROR(myScanOp->getNdbError());
  if (myBlobHandle->getValue(buffer, sizeof(buffer)))
    APIERROR(myBlobHandle->getNdbError());

  /* Start the scan. */
  if (-1 == myTrans->execute(NdbTransaction::NoCommit))
    APIERROR(myTrans->getNdbError());

  const MyRow *out_row;
  int res;
  for (;;)
  {
    /* res == 1 means end of scan; any other non-zero value is an error. */
    res= myScanOp->nextResult((const char**)&out_row, true, false);
    if (res==1)
      break;                                    // Scan done.
    else if (res)
      APIERROR(myScanOp->getNdbError());

    Uint64 length= 0;
    if (myBlobHandle->getLength(length) == -1)
      APIERROR(myBlobHandle->getNdbError());

    /* Lowercase everything. */
    for (Uint64 j= 0; j < length; j++)
      buffer[j]= tolower(buffer[j]);

    /* 'Take over' the row locks from the scan to a separate
     * operation for updating the tuple
     */
    const NdbOperation *myUpdateOp=
      myScanOp->updateCurrentTuple(myTrans,
                                   blob_record,
                                   (const char*)out_row);
    if (myUpdateOp == NULL)
      APIERROR(myTrans->getNdbError());
    NdbBlob *myBlobHandle2= myUpdateOp->getBlobHandle("my_text");
    if (myBlobHandle2 == NULL)
      APIERROR(myUpdateOp->getNdbError());
    if (myBlobHandle2->setValue(buffer, length))
      APIERROR(myBlobHandle2->getNdbError());

    if (-1 == myTrans->execute(NdbTransaction::NoCommit))
      APIERROR(myTrans->getNdbError());
  }

  if (-1 == myTrans->execute(NdbTransaction::Commit))
    APIERROR(myTrans->getNdbError());
  myNdb->closeTransaction(myTrans);
  return 1;
}
/* State shared with the blob ActiveHook callback (myFetchHook). */
struct ActiveHookData {
  char buffer[10000];   /* destination for the blob bytes */
  Uint32 readLength;    /* number of bytes requested/read by the hook */
};
/*
  myFetchHook is invoked when the blob handle becomes active. It requests
  up to sizeof(buffer)-1 bytes into the caller-supplied ActiveHookData,
  leaving room for the NUL terminator the caller appends afterwards.
*/
int myFetchHook(NdbBlob* myBlobHandle, void* arg)
{
  ActiveHookData *ahd= (ActiveHookData *)arg;

  ahd->readLength= sizeof(ahd->buffer) - 1;
  return myBlobHandle->readData(ahd->buffer, ahd->readLength);
}
/*
  fetch_key reads the example row's blob by primary key and prints it,
  using the ActiveHook mechanism so the read happens in a single
  execute(). Returns 1 on success.
*/
int fetch_key(Ndb *myNdb)
{
  /* Fetch a blob without specifying how many bytes
   * to read up front, in one execution using
   * the 'ActiveHook' mechanism.
   * The supplied ActiveHook procedure is called when
   * the Blob handle becomes 'active'.  At that point
   * the length of the Blob can be obtained, and buffering
   * arranged, and the data read requested.
   */

  /* Separate rows used to specify key and hold result */
  MyRow key_row;
  MyRow out_row;

  /*
    Fetch and show the blob field, using setActiveHook().
  */
  NdbTransaction *myTrans= myNdb->startTransaction();
  if (myTrans == NULL)
    APIERROR(myNdb->getNdbError());

  key_row.myId= 1;
  out_row.myText= NULL;
  const NdbOperation *myNdbOperation=
    myTrans->readTuple(key_record,
                       (const char*) &key_row,
                       blob_record,
                       (char*) &out_row);
  if (myNdbOperation == NULL)
    APIERROR(myTrans->getNdbError());

  /* This time, we'll get the blob handle from the row, because
   * we can.  Alternatively, we could use the normal mechanism
   * of calling getBlobHandle().
   */
  NdbBlob *myBlobHandle= out_row.myText;
  if (myBlobHandle == NULL)
    APIERROR(myNdbOperation->getNdbError());
  struct ActiveHookData ahd;
  if (myBlobHandle->setActiveHook(myFetchHook, &ahd) == -1)
    APIERROR(myBlobHandle->getNdbError());

  /*
    Execute Commit, but calling our callback set up in setActiveHook()
    before actually committing.
  */
  if (-1 == myTrans->execute(NdbTransaction::Commit))
    APIERROR(myTrans->getNdbError());
  myNdb->closeTransaction(myTrans);

  /* Our fetch callback will have been called during the execute().
     Terminate the bytes it read so they can be printed as a C string. */
  ahd.buffer[ahd.readLength]= '\0';
  std::cout << "Fetched data:" << std::endl << ahd.buffer << std::endl;
  return 1;
}
/*
  update2_key overwrites the example row's TEXT column with 10000 space
  characters, using a simple setValue() issued before the blob handle
  becomes active. Returns 1 on success.
*/
int update2_key(Ndb *myNdb)
{
  char buffer[10000];
  MyRow row;

  NdbTransaction *trans= myNdb->startTransaction();
  if (trans == NULL)
    APIERROR(myNdb->getNdbError());

  row.myId= 1;
  const NdbOperation *updateOp=
    trans->updateTuple(key_record,
                       (const char*)&row,
                       blob_record,
                       (char*) &row);
  if (updateOp == NULL)
    APIERROR(trans->getNdbError());
  NdbBlob *blob= updateOp->getBlobHandle("my_text");
  if (blob == NULL)
    APIERROR(updateOp->getNdbError());

  /* Fill the buffer with spaces and stage it as the new blob value. */
  memset(buffer, ' ', sizeof(buffer));
  if (blob->setValue(buffer, sizeof(buffer)) == -1)
    APIERROR(blob->getNdbError());

  if (-1 == trans->execute(NdbTransaction::Commit))
    APIERROR(trans->getNdbError());
  myNdb->closeTransaction(trans);

  return 1;
}
/*
  delete_key removes the example row (and thereby its blob) via a primary
  key delete. Returns 1 on success.
*/
int delete_key(Ndb *myNdb)
{
  MyRow row;

  NdbTransaction *trans= myNdb->startTransaction();
  if (trans == NULL)
    APIERROR(myNdb->getNdbError());

  row.myId= 1;
  const NdbOperation *deleteOp= trans->deleteTuple(key_record,
                                                   (const char*)&row,
                                                   full_record);
  if (deleteOp == NULL)
    APIERROR(trans->getNdbError());

  if (-1 == trans->execute(NdbTransaction::Commit))
    APIERROR(trans->getNdbError());
  myNdb->closeTransaction(trans);

  return 1;
}
/*
  main expects two arguments: the mysqld socket path and the cluster
  connect string. It creates the example table through the MySQL server,
  connects to the cluster via the NDB API and runs each blob operation in
  turn, reporting success after each step.
*/
int main(int argc, char**argv)
{
  if (argc != 3)
  {
    std::cout << "Arguments are <socket mysqld> <connect_string cluster>.\n";
    exit(-1);
  }
  char *mysqld_sock  = argv[1];
  const char *connectstring = argv[2];
  ndb_init();
  MYSQL mysql;

  /* Connect to mysql server and create table. */
  {
    if ( !mysql_init(&mysql) ) {
      std::cout << "mysql_init failed.\n";
      exit(-1);
    }
    if ( !mysql_real_connect(&mysql, "localhost", "root", "", "",
                             0, mysqld_sock, 0) )
      MYSQLERROR(mysql);

    /* The database may already exist; only the USE must succeed. */
    mysql_query(&mysql, "CREATE DATABASE ndb_examples");
    if (mysql_query(&mysql, "USE ndb_examples") != 0)
      MYSQLERROR(mysql);

    create_table(mysql);
  }

  /* Connect to ndb cluster. */
  Ndb_cluster_connection cluster_connection(connectstring);
  if (cluster_connection.connect(4, 5, 1))
  {
    std::cout << "Unable to connect to cluster within 30 secs." << std::endl;
    exit(-1);
  }
  /* Optionally connect and wait for the storage nodes (ndbd's). */
  if (cluster_connection.wait_until_ready(30,0) < 0)
  {
    std::cout << "Cluster was not ready within 30 secs.\n";
    exit(-1);
  }

  Ndb myNdb(&cluster_connection,"ndb_examples");
  if (myNdb.init(1024) == -1) {      // Set max 1024 parallel transactions
    APIERROR(myNdb.getNdbError());
    exit(-1);
  }

  /* Build the shared NdbRecord objects, then run each operation. */
  setup_records(&myNdb);

  if(populate(&myNdb) > 0)
    std::cout << "populate: Success!" << std::endl;

  if(update_key(&myNdb) > 0)
    std::cout << "update_key: Success!" << std::endl;

  if(update_scan(&myNdb) > 0)
    std::cout << "update_scan: Success!" << std::endl;

  if(fetch_key(&myNdb) > 0)
    std::cout << "fetch_key: Success!" << std::endl;

  if(update2_key(&myNdb) > 0)
    std::cout << "update2_key: Success!" << std::endl;

  if(delete_key(&myNdb) > 0)
    std::cout << "delete_key: Success!" << std::endl;

  return 0;
}
/*
* Copyright (c) 1999, 2010, Oracle and/or its affiliates. All rights reserved.
* Copyright 2009 Red Hat, Inc.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#include "precompiled.hpp"
#include "shark/llvmHeaders.hpp"
#include "shark/sharkEntry.hpp"
#include "shark/sharkMemoryManager.hpp"
using namespace llvm;
// Forward global-offset-table allocation to the wrapped memory manager.
void SharkMemoryManager::AllocateGOT() {
  mm()->AllocateGOT();
}
// Return the base address of the GOT from the wrapped memory manager.
unsigned char* SharkMemoryManager::getGOTBase() const {
  return mm()->getGOTBase();
}
// Allocate a stub for global value F; delegates to the wrapped manager.
unsigned char* SharkMemoryManager::allocateStub(const GlobalValue* F,
                                                unsigned StubSize,
                                                unsigned Alignment) {
  return mm()->allocateStub(F, StubSize, Alignment);
}
// Begin emitting code for F; ActualSize is updated by the wrapped manager
// with the size of the region actually reserved.
unsigned char* SharkMemoryManager::startFunctionBody(const Function* F,
                                                     uintptr_t& ActualSize) {
  return mm()->startFunctionBody(F, ActualSize);
}
// Finish emitting code for F.  In addition to delegating, record the end
// of the emitted code in the SharkEntry associated with F (if any) so the
// entry knows its code limit.
void SharkMemoryManager::endFunctionBody(const Function* F,
                                         unsigned char* FunctionStart,
                                         unsigned char* FunctionEnd) {
  mm()->endFunctionBody(F, FunctionStart, FunctionEnd);

  SharkEntry *entry = get_entry_for_function(F);
  if (entry != NULL)
    entry->set_code_limit(FunctionEnd);
}
// Begin emitting the exception table for F; delegates to the wrapped
// manager, which updates ActualSize with the reserved region's size.
unsigned char* SharkMemoryManager::startExceptionTable(const Function* F,
                                                       uintptr_t& ActualSize) {
  return mm()->startExceptionTable(F, ActualSize);
}
// Finish emitting the exception table for F; pure delegation.
void SharkMemoryManager::endExceptionTable(const Function* F,
                                           unsigned char* TableStart,
                                           unsigned char* TableEnd,
                                           unsigned char* FrameRegister) {
  mm()->endExceptionTable(F, TableStart, TableEnd, FrameRegister);
}
// Make the managed code memory writable; delegates to the wrapped manager.
void SharkMemoryManager::setMemoryWritable() {
  mm()->setMemoryWritable();
}
// Make the managed code memory executable; delegates to the wrapped manager.
void SharkMemoryManager::setMemoryExecutable() {
  mm()->setMemoryExecutable();
}
#if SHARK_LLVM_VERSION >= 27
// LLVM >= 2.7 splits deallocation into separate entry points for
// exception tables and function bodies; forward each to the wrapped
// memory manager.
void SharkMemoryManager::deallocateExceptionTable(void *ptr) {
  mm()->deallocateExceptionTable(ptr);
}

void SharkMemoryManager::deallocateFunctionBody(void *ptr) {
  mm()->deallocateFunctionBody(ptr);
}
#else
// Older LLVM releases expose a single per-function deallocation hook.
void SharkMemoryManager::deallocateMemForFunction(const Function* F) {
  // Dropped the redundant 'return' on a void expression, for consistency
  // with the other void forwarding wrappers in this file.
  mm()->deallocateMemForFunction(F);
}
#endif
// Allocate memory for a global variable; delegates to the wrapped manager.
uint8_t* SharkMemoryManager::allocateGlobal(uintptr_t Size,
                                            unsigned int Alignment) {
  return mm()->allocateGlobal(Size, Alignment);
}
#if SHARK_LLVM_VERSION < 27
// Pre-2.7 LLVM exposes a dlsym table on the memory manager; forward the
// getter and setter to the wrapped manager.
void* SharkMemoryManager::getDlsymTable() const {
  return mm()->getDlsymTable();
}

void SharkMemoryManager::SetDlsymTable(void *ptr) {
  mm()->SetDlsymTable(ptr);
}
#endif
// Enable/disable poisoning of freed memory; delegates to the wrapped manager.
void SharkMemoryManager::setPoisonMemory(bool poison) {
  mm()->setPoisonMemory(poison);
}
// Allocate raw aligned space; delegates to the wrapped memory manager.
unsigned char *SharkMemoryManager::allocateSpace(intptr_t Size,
                                                 unsigned int Alignment) {
  return mm()->allocateSpace(Size, Alignment);
}
| ikeji/openjdk7-hotspot | src/share/vm/shark/sharkMemoryManager.cpp | C++ | gpl-2.0 | 3,985 |
// Boost.Geometry
// Copyright (c) 2017-2019, Oracle and/or its affiliates.
// Contributed and/or modified by Vissarion Fysikopoulos, on behalf of Oracle
// Contributed and/or modified by Adam Wulkiewicz, on behalf of Oracle
// Licensed under the Boost Software License version 1.0.
// http://www.boost.org/users/license.html
#ifndef BOOST_GEOMETRY_STRATEGIES_SPHERICAL_DENSIFY_HPP
#define BOOST_GEOMETRY_STRATEGIES_SPHERICAL_DENSIFY_HPP
#include <boost/geometry/algorithms/detail/convert_point_to_point.hpp>
#include <boost/geometry/algorithms/detail/signed_size_type.hpp>
#include <boost/geometry/arithmetic/arithmetic.hpp>
#include <boost/geometry/arithmetic/cross_product.hpp>
#include <boost/geometry/arithmetic/dot_product.hpp>
#include <boost/geometry/arithmetic/normalize.hpp>
#include <boost/geometry/core/assert.hpp>
#include <boost/geometry/core/coordinate_dimension.hpp>
#include <boost/geometry/core/coordinate_type.hpp>
#include <boost/geometry/core/radian_access.hpp>
#include <boost/geometry/formulas/spherical.hpp>
#include <boost/geometry/formulas/interpolate_point_spherical.hpp>
#include <boost/geometry/geometries/point.hpp>
#include <boost/geometry/srs/sphere.hpp>
#include <boost/geometry/strategies/densify.hpp>
#include <boost/geometry/strategies/spherical/get_radius.hpp>
#include <boost/geometry/util/math.hpp>
#include <boost/geometry/util/select_most_precise.hpp>
namespace boost { namespace geometry
{
namespace strategy { namespace densify
{
/*!
\brief Densification of spherical segment.
\ingroup strategies
\tparam RadiusTypeOrSphere \tparam_radius_or_sphere
\tparam CalculationType \tparam_calculation
\qbk{
[heading See also]
[link geometry.reference.algorithms.densify.densify_4_with_strategy densify (with strategy)]
}
*/
template
<
    typename RadiusTypeOrSphere = double,
    typename CalculationType = void
>
class spherical
{
public:
    // For consistency with area strategy the radius is set to 1
    inline spherical()
        : m_radius(1.0)
    {}

    // Construct from either a plain radius value or a sphere model;
    // strategy_detail::get_radius extracts the radius in both cases.
    template <typename RadiusOrSphere>
    explicit inline spherical(RadiusOrSphere const& radius_or_sphere)
        : m_radius(strategy_detail::get_radius
                    <
                        RadiusOrSphere
                    >::apply(radius_or_sphere))
    {}

    // Densify the segment p0-p1: emit via policy.apply() the interior
    // points that split the great-circle arc into pieces whose length
    // (in the same units as the radius) does not exceed length_threshold.
    // The endpoints themselves are not emitted.
    template <typename Point, typename AssignPolicy, typename T>
    inline void apply(Point const& p0, Point const& p1, AssignPolicy & policy, T const& length_threshold) const
    {
        typedef typename AssignPolicy::point_type out_point_t;
        // Calculate using the most precise of the input, output and
        // user-requested coordinate types.
        typedef typename select_most_precise
            <
                typename coordinate_type<Point>::type,
                typename coordinate_type<out_point_t>::type,
                CalculationType
            >::type calc_t;

        calc_t angle01;

        formula::interpolate_point_spherical<calc_t> formula;
        formula.compute_angle(p0, p1, angle01);

        BOOST_GEOMETRY_ASSERT(length_threshold > T(0));

        // Number of interior points needed so that each piece's arc
        // length (angle * radius) stays within the threshold.
        signed_size_type n = signed_size_type(angle01 * m_radius / length_threshold);
        if (n <= 0)
            return;

        formula.compute_axis(p0, angle01);

        calc_t step = angle01 / (n + 1);

        calc_t a = step;
        for (signed_size_type i = 0 ; i < n ; ++i, a += step)
        {
            out_point_t p;
            formula.compute_point(a, p);

            // Copy any dimensions beyond the first two unchanged from p0
            // into the output point.
            geometry::detail::conversion::point_to_point
                <
                    Point, out_point_t,
                    2, dimension<out_point_t>::value
                >::apply(p0, p);

            policy.apply(p);
        }
    }

private:
    typename strategy_detail::get_radius
        <
            RadiusTypeOrSphere
        >::type m_radius;
};
#ifndef DOXYGEN_NO_STRATEGY_SPECIALIZATIONS
namespace services
{

// Register spherical<> as the default densify strategy for
// spherical-equatorial coordinate systems.
template <>
struct default_strategy<spherical_equatorial_tag>
{
    typedef strategy::densify::spherical<> type;
};

} // namespace services
#endif // DOXYGEN_NO_STRATEGY_SPECIALIZATIONS
}} // namespace strategy::densify
}} // namespace boost::geometry
#endif // BOOST_GEOMETRY_STRATEGIES_SPHERICAL_DENSIFY_HPP
| zcobell/QADCModules | thirdparty/boost_1_75_0/boost/geometry/strategies/spherical/densify.hpp | C++ | gpl-3.0 | 4,079 |