code stringlengths 3 1.01M | repo_name stringlengths 5 116 | path stringlengths 3 311 | language stringclasses 30
values | license stringclasses 15
values | size int64 3 1.01M |
|---|---|---|---|---|---|
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this file,
* You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_BASEPIN_H_
#define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_BASEPIN_H_
#include "BaseFilter.h"
#include "MediaType.h"
#include "dshow.h"
#include "strmif.h"
#include <string>
namespace mozilla {
namespace media {
_COM_SMARTPTR_TYPEDEF(IPin, __uuidof(IPin));
// Base class for DirectShow filter pins.
//
// Implements:
// * IPin
// * IQualityControl
// * IUnknown
//
class DECLSPEC_UUID("199669c6-672a-4130-b13e-57aa830eae55")
BasePin
  : public IPin
  , public IQualityControl
{
public:
  // aFilter: the owning filter (not AddRef()ed here; see refcount note below).
  // aLock: the filter's state lock, shared with the pin.
  // aName: the pin name reported via QueryPinInfo().
  // aDirection: whether this pin is an input or an output pin.
  BasePin(BaseFilter* aFilter,
          CriticalSection* aLock,
          const wchar_t* aName,
          PIN_DIRECTION aDirection);

  virtual ~BasePin() {}

  // Reference count of the pin is actually stored on the owning filter.
  // So don't AddRef() the filter from the pin, else you'll create a cycle.
  STDMETHODIMP QueryInterface(REFIID aIId, void **aInterface);
  STDMETHODIMP_(ULONG) AddRef() { return mFilter->AddRef(); }
  STDMETHODIMP_(ULONG) Release() { return mFilter->Release(); }

  // IPin overrides.

  // Connects the pin to another pin. The aMediaType parameter can be NULL or
  // a partial media type.
  STDMETHODIMP Connect(IPin* aReceivePin,
                       const AM_MEDIA_TYPE* aMediaType);

  // Accepts a connection from another pin.
  STDMETHODIMP ReceiveConnection(IPin* aConnector,
                                 const AM_MEDIA_TYPE* aMediaType);

  // Breaks the current pin connection.
  STDMETHODIMP Disconnect();

  // Retrieves the pin connected to this pin.
  STDMETHODIMP ConnectedTo(IPin** aPin);

  // Retrieves the media type for the current pin connection.
  STDMETHODIMP ConnectionMediaType(AM_MEDIA_TYPE* aMediaType);

  // Retrieves information about the pin, such as the name, the owning filter,
  // and the direction.
  STDMETHODIMP QueryPinInfo(PIN_INFO* aInfo);

  // Retrieves the direction of the pin (input or output).
  STDMETHODIMP QueryDirection(PIN_DIRECTION* aDirection);

  // Retrieves the pin identifier.
  STDMETHODIMP QueryId(LPWSTR* Id);

  // Determines whether the pin accepts a specified media type.
  STDMETHODIMP QueryAccept(const AM_MEDIA_TYPE* aMediaType);

  // Enumerates the pin's preferred media types.
  STDMETHODIMP EnumMediaTypes(IEnumMediaTypes** aEnum);

  // Retrieves the pins that are connected internally to this pin
  // (within the filter).
  STDMETHODIMP QueryInternalConnections(IPin** apPin,
                                        ULONG* aPin);

  // Notifies the pin that no additional data is expected.
  STDMETHODIMP EndOfStream(void);

  // IPin::BeginFlush() and IPin::EndFlush() are still pure virtual,
  // and must be implemented in a subclass.

  // Notifies the pin that media samples received after this call
  // are grouped as a segment.
  STDMETHODIMP NewSegment(
      REFERENCE_TIME aStartTime,
      REFERENCE_TIME aStopTime,
      double aRate);

  // IQualityControl overrides.

  // Notifies the recipient that a quality change is requested.
  STDMETHODIMP Notify(IBaseFilter * aSender, Quality aQuality);

  // Sets the IQualityControl object that will receive quality messages.
  STDMETHODIMP SetSink(IQualityControl* aQualitySink);

  // Other methods.

  // Sets the media type of the connection.
  virtual HRESULT SetMediaType(const MediaType *aMediaType);

  // Checks if the pin can support this specific proposed type and format.
  // Pure virtual; subclasses decide which media types they accept.
  virtual HRESULT CheckMediaType(const MediaType *) = 0;

  // This is called to release any resources needed for a connection.
  virtual HRESULT BreakConnect();

  // Called when we've made a connection to another pin. Returning failure
  // triggers the caller to break the connection. Subclasses may want to
  // override this.
  virtual HRESULT CompleteConnect(IPin *pReceivePin);

  // Checks if this pin can connect to |aPin|. We expect sub classes to
  // override this method to support their own needs. Default implementation
  // simply checks that the directions of the pins do not match.
  virtual HRESULT CheckConnect(IPin *);

  // Check if our filter is currently stopped.
  BOOL IsStopped() {
    return mFilter->mState == State_Stopped;
  };

  // Moves pin to active state (running or paused). Subclasses will
  // override to prepare to handle data.
  virtual HRESULT Active(void);

  // Moves pin into inactive state (stopped). Releases resources associated
  // with handling data. Subclasses should override this.
  virtual HRESULT Inactive(void);

  // Called when Run() is called on the parent filter. Subclasses may want to
  // override this.
  virtual HRESULT Run(REFERENCE_TIME aStartTime);

  // Gets the supported media types for this pin.
  virtual HRESULT GetMediaType(int aIndex, MediaType *aMediaType);

  // Access name.
  const std::wstring& Name() { return mName; };

  // True while this pin is connected to a peer pin.
  bool IsConnected() { return mConnectedPin != NULL; }

  // Returns the connected peer pin as a non-owning raw pointer.
  IPin* GetConnected() { return mConnectedPin; }

protected:
  // The pin's name, as returned by QueryPinInfo().
  std::wstring mName;

  // Event sink for quality messages.
  IQualityControl *mQualitySink;

  // The pin which this one is connected to.
  IPinPtr mConnectedPin;

  // Direction of data flow through this pin.
  PIN_DIRECTION mDirection;

  // Media type of the pin's connection.
  MediaType mMediaType;

  // Our state lock. All state should be accessed while this is locked.
  mozilla::CriticalSection *mLock;

  // Our owning filter. Deliberately a raw (non-owning) pointer; the filter
  // holds our reference count, so owning it here would create a cycle.
  BaseFilter *mFilter;

  // This pin attempts to connect to |aPin| with media type |aMediaType|.
  // If |aMediaType| is fully specified, we must attempt to connect with
  // that, else we just enumerate our types, then the other pin's type and
  // try them, filtering them using |aMediaType| if it's partially
  // specified. Used by Connect().
  HRESULT AttemptConnection(IPin* aPin, const MediaType* aMediaType);

  // Tries to form a connection using all media types in the enumeration.
  HRESULT TryMediaTypes(IPin *aPin,
                        const MediaType *aMediaType,
                        IEnumMediaTypes *aEnum);
};
_COM_SMARTPTR_TYPEDEF(BasePin, __uuidof(BasePin));
}
}
#endif
| sergecodd/FireFox-OS | B2G/gecko/media/webrtc/trunk/src/modules/video_capture/main/source/Windows/BasePin.h | C | apache-2.0 | 6,461 |
'use strict';
/* global describe, it */
var fs = require('fs');
var expect = require('chai').expect;
var bigrig = require('../');
describe('Big Rig', function () {
  // Reads a trace fixture from ./test/data, hands its contents to |onData|,
  // and then completes the async test by calling |done|. A read error fails
  // the test by throwing from the callback. Extracted because every async
  // test below repeated this exact fs.readFile + err-check boilerplate.
  function withTrace (name, done, onData) {
    fs.readFile('./test/data/' + name, 'utf8', function (err, data) {
      if (err) {
        throw err;
      }
      onData(data);
      done();
    });
  }

  it('throws if no processes are found', function () {
    expect(function () {
      bigrig.analyze(null);
    }).to.throw('Zero processes (tabs) found.');
  });

  it('throws if given invalid input data is given', function () {
    expect(function () {
      bigrig.analyze('wobble');
    }).to.throw('Invalid trace contents; not JSON');
  });

  it('throws if given a trace with extensions and strict mode is enabled',
    function (done) {
      withTrace('load-extensions.json', done, function (data) {
        var error = 'Extensions running during capture; ' +
          'see http://bit.ly/bigrig-extensions';
        expect(function () {
          bigrig.analyze(data, { strict: true });
        }).to.throw(error);
      });
    });

  // TODO(paullewis) Add multiprocess test.
  it('returns JSON for a file with a single process', function (done) {
    withTrace('load.json', done, function (data) {
      var jsonData = bigrig.analyze(data);
      expect(jsonData).to.be.an('array');
      expect(jsonData[0]).to.be.an('object');
    });
  });

  it('generates valid JSON', function (done) {
    withTrace('load.json', done, function (data) {
      // Round-trip through stringify/parse to prove the output serializes.
      var jsonData = JSON.parse(JSON.stringify(bigrig.analyze(data)));
      expect(jsonData).to.be.an('array');
    });
  });

  it('supports timed ranges', function (done) {
    withTrace('animation.json', done, function (data) {
      var jsonData = bigrig.analyze(data);
      expect(jsonData[0]).to.be.an('object');
      expect(jsonData[0].title).to.equal('sideNavAnimation');
      expect(jsonData[0].start).to.be.above(0);
      expect(jsonData[0].end).to.be.within(1179, 1180);
    });
  });

  it('correctly applies RAIL type when time range is specified',
    function (done) {
      withTrace('animation.json', done, function (data) {
        var jsonData = bigrig.analyze(data, {
          types: {
            'sideNavAnimation': bigrig.ANIMATION
          }
        });
        expect(jsonData[0].type).to.equal(bigrig.ANIMATION);
      });
    });

  it('correctly infers RAIL Load when time range not specified',
    function (done) {
      withTrace('load.json', done, function (data) {
        var jsonData = bigrig.analyze(data);
        expect(jsonData[0].type).to.equal(bigrig.LOAD);
        expect(jsonData[0].title).to.equal('Load');
      });
    });

  it('correctly infers RAIL Response when time range not specified',
    function (done) {
      withTrace('response.json', done, function (data) {
        var jsonData = bigrig.analyze(data);
        expect(jsonData[0].type).to.equal(bigrig.RESPONSE);
        expect(jsonData[0].title).to.equal('sideNavResponse');
      });
    });

  it('correctly infers RAIL Animation when time range not specified',
    function (done) {
      withTrace('animation.json', done, function (data) {
        var jsonData = bigrig.analyze(data);
        expect(jsonData[0].type).to.equal(bigrig.ANIMATION);
        expect(jsonData[0].title).to.equal('sideNavAnimation');
      });
    });

  it('correctly infers multiple RAIL regions', function (done) {
    withTrace('response-animation.json', done, function (data) {
      var jsonData = bigrig.analyze(data);
      expect(jsonData.length).to.equal(2);
      expect(jsonData[0].type).to.equal(bigrig.RESPONSE);
      expect(jsonData[0].title).to.equal('sideNavResponse');
      expect(jsonData[1].type).to.equal(bigrig.ANIMATION);
      expect(jsonData[1].title).to.equal('sideNavAnimation');
    });
  });

  it('returns the correct fps for animations', function (done) {
    withTrace('animation.json', done, function (data) {
      expect(bigrig.analyze(data)[0].fps).to.be.within(59, 61);
    });
  });

  it('returns the correct JS breakdown', function (done) {
    withTrace('load.json', done, function (data) {
      var js = bigrig.analyze(data)[0].extendedInfo.javaScript;
      expect(js['localhost:11080']).to.be.within(245, 246);
      expect(js['www.google-analytics.com']).to.be.within(59, 60);
    });
  });

  it('correctly captures forced layouts and recalcs', function (done) {
    withTrace('forced-recalc-layout.json', done, function (data) {
      var info = bigrig.analyze(data)[0].extendedInfo;
      expect(info.forcedRecalcs).to.equal(1);
      expect(info.forcedLayouts).to.equal(1);
    });
  });
});
| googlearchive/node-big-rig | test/bigrig_tests.js | JavaScript | apache-2.0 | 6,062 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use :mod:`airflow.providers.qubole.operators.qubole`."""
import warnings
# pylint: disable=unused-import
from airflow.providers.qubole.operators.qubole import QuboleOperator # noqa
warnings.warn(
"This module is deprecated. Please use `airflow.providers.qubole.operators.qubole`.",
DeprecationWarning,
stacklevel=2,
)
| nathanielvarona/airflow | airflow/contrib/operators/qubole_operator.py | Python | apache-2.0 | 1,158 |
package io.katharsis.jpa.meta;
import java.io.Serializable;
import java.util.UUID;
import org.junit.Assert;
import org.junit.Test;
import io.katharsis.meta.model.MetaPrimitiveType;
public class MetaPrimitiveTypeTest {

  /**
   * Creates a fresh MetaPrimitiveType for the given implementation class.
   * Each smoke test below verifies that setImplementationType() accepts its
   * class without throwing; the shared helper removes the duplicated
   * construct-and-set boilerplate from every test.
   */
  private static void assertAccepts(Class<?> implementationType) {
    MetaPrimitiveType type = new MetaPrimitiveType();
    type.setImplementationType(implementationType);
  }

  @Test
  public void testString() {
    assertAccepts(String.class);
  }

  @Test
  public void testInteger() {
    assertAccepts(Integer.class);
  }

  @Test
  public void testShort() {
    assertAccepts(Short.class);
  }

  @Test
  public void testLong() {
    assertAccepts(Long.class);
  }

  @Test
  public void testFloat() {
    assertAccepts(Float.class);
  }

  @Test
  public void testDouble() {
    assertAccepts(Double.class);
  }

  @Test
  public void testBoolean() {
    assertAccepts(Boolean.class);
  }

  @Test
  public void testByte() {
    assertAccepts(Byte.class);
  }

  @Test
  public void testUUID() {
    // Only the class itself is needed; the previously generated random UUID
    // instance was never used.
    assertAccepts(UUID.class);
  }

  enum TestEnum {
    A
  }

  @Test
  public void testEnum() {
    assertAccepts(TestEnum.class);
  }

  /** Test type exposing a static parse(String) factory method. */
  public static class TestObjectWithParse {

    int value;

    public static TestObjectWithParse parse(String value) {
      TestObjectWithParse parser = new TestObjectWithParse();
      parser.value = Integer.parseInt(value);
      return parser;
    }

    // hashCode() is overridden alongside equals() to honor the
    // java.lang.Object contract (equal objects must share a hash code).
    @Override
    public int hashCode() {
      return value;
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null || getClass() != obj.getClass()) {
        return false;
      }
      return value == ((TestObjectWithParse) obj).value;
    }
  }

  /** Test type exposing a String-accepting constructor. */
  public static class TestObjectWithConstructor implements Serializable {

    int value;

    public TestObjectWithConstructor() {
    }

    public TestObjectWithConstructor(String value) {
      this.value = Integer.parseInt(value);
    }

    // Overridden with equals() to honor the java.lang.Object contract.
    @Override
    public int hashCode() {
      return value;
    }

    @Override
    public boolean equals(Object obj) {
      if (this == obj) {
        return true;
      }
      if (obj == null || getClass() != obj.getClass()) {
        return false;
      }
      return value == ((TestObjectWithConstructor) obj).value;
    }
  }

  @Test
  public void testParse() {
    // The locally constructed instance in the original test was unused; the
    // registration only needs the class.
    assertAccepts(TestObjectWithParse.class);
  }

  @Test
  public void testOther() {
    assertAccepts(TestObjectWithConstructor.class);
  }
}
| apetrucci/katharsis-framework | katharsis-jpa/src/test/java/io/katharsis/jpa/meta/MetaPrimitiveTypeTest.java | Java | apache-2.0 | 3,187 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.clustering
import org.apache.hadoop.fs.Path
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import org.json4s.jackson.JsonMethods._
import org.apache.spark.annotation.{DeveloperApi, Since}
import org.apache.spark.internal.Logging
import org.apache.spark.ml.{Estimator, Model}
import org.apache.spark.ml.linalg.{Matrix, Vector, Vectors, VectorUDT}
import org.apache.spark.ml.param._
import org.apache.spark.ml.param.shared.{HasCheckpointInterval, HasFeaturesCol, HasMaxIter, HasSeed}
import org.apache.spark.ml.util._
import org.apache.spark.ml.util.DefaultParamsReader.Metadata
import org.apache.spark.mllib.clustering.{DistributedLDAModel => OldDistributedLDAModel,
EMLDAOptimizer => OldEMLDAOptimizer, LDA => OldLDA, LDAModel => OldLDAModel,
LDAOptimizer => OldLDAOptimizer, LocalLDAModel => OldLocalLDAModel,
OnlineLDAOptimizer => OldOnlineLDAOptimizer}
import org.apache.spark.mllib.impl.PeriodicCheckpointer
import org.apache.spark.mllib.linalg.{Vector => OldVector, Vectors => OldVectors}
import org.apache.spark.mllib.linalg.MatrixImplicits._
import org.apache.spark.mllib.linalg.VectorImplicits._
import org.apache.spark.mllib.util.MLUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Dataset, Row, SparkSession}
import org.apache.spark.sql.functions.{col, monotonically_increasing_id, udf}
import org.apache.spark.sql.types.StructType
import org.apache.spark.util.VersionUtils
private[clustering] trait LDAParams extends Params with HasFeaturesCol with HasMaxIter
with HasSeed with HasCheckpointInterval {
/**
* Param for the number of topics (clusters) to infer. Must be > 1. Default: 10.
*
* @group param
*/
@Since("1.6.0")
final val k = new IntParam(this, "k", "The number of topics (clusters) to infer. " +
"Must be > 1.", ParamValidators.gt(1))
/** @group getParam */
@Since("1.6.0")
def getK: Int = $(k)
/**
* Concentration parameter (commonly named "alpha") for the prior placed on documents'
* distributions over topics ("theta").
*
* This is the parameter to a Dirichlet distribution, where larger values mean more smoothing
* (more regularization).
*
* If not set by the user, then docConcentration is set automatically. If set to
* singleton vector [alpha], then alpha is replicated to a vector of length k in fitting.
* Otherwise, the [[docConcentration]] vector must be length k.
* (default = automatic)
*
* Optimizer-specific parameter settings:
* - EM
* - Currently only supports symmetric distributions, so all values in the vector should be
* the same.
* - Values should be greater than 1.0
* - default = uniformly (50 / k) + 1, where 50/k is common in LDA libraries and +1 follows
* from Asuncion et al. (2009), who recommend a +1 adjustment for EM.
* - Online
* - Values should be greater than or equal to 0
* - default = uniformly (1.0 / k), following the implementation from
* <a href="https://github.com/Blei-Lab/onlineldavb">here</a>.
*
* @group param
*/
@Since("1.6.0")
final val docConcentration = new DoubleArrayParam(this, "docConcentration",
"Concentration parameter (commonly named \"alpha\") for the prior placed on documents'" +
" distributions over topics (\"theta\").", (alpha: Array[Double]) => alpha.forall(_ >= 0.0))
/** @group getParam */
@Since("1.6.0")
def getDocConcentration: Array[Double] = $(docConcentration)
/** Get docConcentration used by spark.mllib LDA */
protected def getOldDocConcentration: Vector = {
if (isSet(docConcentration)) {
Vectors.dense(getDocConcentration)
} else {
Vectors.dense(-1.0)
}
}
/**
* Concentration parameter (commonly named "beta" or "eta") for the prior placed on topics'
* distributions over terms.
*
* This is the parameter to a symmetric Dirichlet distribution.
*
* Note: The topics' distributions over terms are called "beta" in the original LDA paper
* by Blei et al., but are called "phi" in many later papers such as Asuncion et al., 2009.
*
* If not set by the user, then topicConcentration is set automatically.
* (default = automatic)
*
* Optimizer-specific parameter settings:
* - EM
* - Value should be greater than 1.0
* - default = 0.1 + 1, where 0.1 gives a small amount of smoothing and +1 follows
* Asuncion et al. (2009), who recommend a +1 adjustment for EM.
* - Online
* - Value should be greater than or equal to 0
* - default = (1.0 / k), following the implementation from
* <a href="https://github.com/Blei-Lab/onlineldavb">here</a>.
*
* @group param
*/
@Since("1.6.0")
final val topicConcentration = new DoubleParam(this, "topicConcentration",
"Concentration parameter (commonly named \"beta\" or \"eta\") for the prior placed on topic'" +
" distributions over terms.", ParamValidators.gtEq(0))
/** @group getParam */
@Since("1.6.0")
def getTopicConcentration: Double = $(topicConcentration)
/** Get topicConcentration used by spark.mllib LDA */
protected def getOldTopicConcentration: Double = {
if (isSet(topicConcentration)) {
getTopicConcentration
} else {
-1.0
}
}
/** Supported values for Param [[optimizer]]. */
@Since("1.6.0")
final val supportedOptimizers: Array[String] = Array("online", "em")
/**
* Optimizer or inference algorithm used to estimate the LDA model.
* Currently supported (case-insensitive):
* - "online": Online Variational Bayes (default)
* - "em": Expectation-Maximization
*
* For details, see the following papers:
* - Online LDA:
* Hoffman, Blei and Bach. "Online Learning for Latent Dirichlet Allocation."
* Neural Information Processing Systems, 2010.
* See <a href="http://www.cs.columbia.edu/~blei/papers/HoffmanBleiBach2010b.pdf">here</a>
* - EM:
* Asuncion et al. "On Smoothing and Inference for Topic Models."
* Uncertainty in Artificial Intelligence, 2009.
* See <a href="http://arxiv.org/pdf/1205.2662.pdf">here</a>
*
* @group param
*/
@Since("1.6.0")
final val optimizer = new Param[String](this, "optimizer", "Optimizer or inference" +
" algorithm used to estimate the LDA model. Supported: " + supportedOptimizers.mkString(", "),
(o: String) => ParamValidators.inArray(supportedOptimizers).apply(o.toLowerCase))
/** @group getParam */
@Since("1.6.0")
def getOptimizer: String = $(optimizer)
/**
* Output column with estimates of the topic mixture distribution for each document (often called
* "theta" in the literature). Returns a vector of zeros for an empty document.
*
* This uses a variational approximation following Hoffman et al. (2010), where the approximate
* distribution is called "gamma." Technically, this method returns this approximation "gamma"
* for each document.
*
* @group param
*/
@Since("1.6.0")
final val topicDistributionCol = new Param[String](this, "topicDistributionCol", "Output column" +
" with estimates of the topic mixture distribution for each document (often called \"theta\"" +
" in the literature). Returns a vector of zeros for an empty document.")
setDefault(topicDistributionCol -> "topicDistribution")
/** @group getParam */
@Since("1.6.0")
def getTopicDistributionCol: String = $(topicDistributionCol)
/**
* For Online optimizer only: [[optimizer]] = "online".
*
* A (positive) learning parameter that downweights early iterations. Larger values make early
* iterations count less.
* This is called "tau0" in the Online LDA paper (Hoffman et al., 2010)
* Default: 1024, following Hoffman et al.
*
* @group expertParam
*/
@Since("1.6.0")
final val learningOffset = new DoubleParam(this, "learningOffset", "(For online optimizer)" +
" A (positive) learning parameter that downweights early iterations. Larger values make early" +
" iterations count less.",
ParamValidators.gt(0))
/** @group expertGetParam */
@Since("1.6.0")
def getLearningOffset: Double = $(learningOffset)
/**
* For Online optimizer only: [[optimizer]] = "online".
*
* Learning rate, set as an exponential decay rate.
* This should be between (0.5, 1.0] to guarantee asymptotic convergence.
* This is called "kappa" in the Online LDA paper (Hoffman et al., 2010).
* Default: 0.51, based on Hoffman et al.
*
* @group expertParam
*/
@Since("1.6.0")
final val learningDecay = new DoubleParam(this, "learningDecay", "(For online optimizer)" +
" Learning rate, set as an exponential decay rate. This should be between (0.5, 1.0] to" +
" guarantee asymptotic convergence.", ParamValidators.gt(0))
/** @group expertGetParam */
@Since("1.6.0")
def getLearningDecay: Double = $(learningDecay)
/**
* For Online optimizer only: [[optimizer]] = "online".
*
* Fraction of the corpus to be sampled and used in each iteration of mini-batch gradient descent,
* in range (0, 1].
*
* Note that this should be adjusted in synch with `LDA.maxIter`
* so the entire corpus is used. Specifically, set both so that
* maxIterations * miniBatchFraction greater than or equal to 1.
*
* Note: This is the same as the `miniBatchFraction` parameter in
* [[org.apache.spark.mllib.clustering.OnlineLDAOptimizer]].
*
* Default: 0.05, i.e., 5% of total documents.
*
* @group param
*/
@Since("1.6.0")
final val subsamplingRate = new DoubleParam(this, "subsamplingRate", "(For online optimizer)" +
" Fraction of the corpus to be sampled and used in each iteration of mini-batch" +
" gradient descent, in range (0, 1].",
ParamValidators.inRange(0.0, 1.0, lowerInclusive = false, upperInclusive = true))
/** @group getParam */
@Since("1.6.0")
def getSubsamplingRate: Double = $(subsamplingRate)
/**
* For Online optimizer only (currently): [[optimizer]] = "online".
*
* Indicates whether the docConcentration (Dirichlet parameter for
* document-topic distribution) will be optimized during training.
* Setting this to true will make the model more expressive and fit the training data better.
* Default: false
*
* @group expertParam
*/
@Since("1.6.0")
final val optimizeDocConcentration = new BooleanParam(this, "optimizeDocConcentration",
"(For online optimizer only, currently) Indicates whether the docConcentration" +
" (Dirichlet parameter for document-topic distribution) will be optimized during training.")
/** @group expertGetParam */
@Since("1.6.0")
def getOptimizeDocConcentration: Boolean = $(optimizeDocConcentration)
/**
* For EM optimizer only: [[optimizer]] = "em".
*
* If using checkpointing, this indicates whether to keep the last
* checkpoint. If false, then the checkpoint will be deleted. Deleting the checkpoint can
* cause failures if a data partition is lost, so set this bit with care.
* Note that checkpoints will be cleaned up via reference counting, regardless.
*
* See `DistributedLDAModel.getCheckpointFiles` for getting remaining checkpoints and
* `DistributedLDAModel.deleteCheckpointFiles` for removing remaining checkpoints.
*
* Default: true
*
* @group expertParam
*/
@Since("2.0.0")
final val keepLastCheckpoint = new BooleanParam(this, "keepLastCheckpoint",
"(For EM optimizer) If using checkpointing, this indicates whether to keep the last" +
" checkpoint. If false, then the checkpoint will be deleted. Deleting the checkpoint can" +
" cause failures if a data partition is lost, so set this bit with care.")
/** @group expertGetParam */
@Since("2.0.0")
def getKeepLastCheckpoint: Boolean = $(keepLastCheckpoint)
/**
* Validates and transforms the input schema.
*
* @param schema input schema
* @return output schema
*/
protected def validateAndTransformSchema(schema: StructType): StructType = {
if (isSet(docConcentration)) {
if (getDocConcentration.length != 1) {
require(getDocConcentration.length == getK, s"LDA docConcentration was of length" +
s" ${getDocConcentration.length}, but k = $getK. docConcentration must be an array of" +
s" length either 1 (scalar) or k (num topics).")
}
getOptimizer match {
case "online" =>
require(getDocConcentration.forall(_ >= 0),
"For Online LDA optimizer, docConcentration values must be >= 0. Found values: " +
getDocConcentration.mkString(","))
case "em" =>
require(getDocConcentration.forall(_ >= 0),
"For EM optimizer, docConcentration values must be >= 1. Found values: " +
getDocConcentration.mkString(","))
}
}
if (isSet(topicConcentration)) {
getOptimizer match {
case "online" =>
require(getTopicConcentration >= 0, s"For Online LDA optimizer, topicConcentration" +
s" must be >= 0. Found value: $getTopicConcentration")
case "em" =>
require(getTopicConcentration >= 0, s"For EM optimizer, topicConcentration" +
s" must be >= 1. Found value: $getTopicConcentration")
}
}
SchemaUtils.checkColumnType(schema, $(featuresCol), new VectorUDT)
SchemaUtils.appendColumn(schema, $(topicDistributionCol), new VectorUDT)
}
private[clustering] def getOldOptimizer: OldLDAOptimizer = getOptimizer match {
case "online" =>
new OldOnlineLDAOptimizer()
.setTau0($(learningOffset))
.setKappa($(learningDecay))
.setMiniBatchFraction($(subsamplingRate))
.setOptimizeDocConcentration($(optimizeDocConcentration))
case "em" =>
new OldEMLDAOptimizer()
.setKeepLastCheckpoint($(keepLastCheckpoint))
}
}
private object LDAParams {

  /**
   * Equivalent to [[DefaultParamsReader.getAndSetParams()]], but handles [[LDA]] and [[LDAModel]]
   * formats saved with Spark 1.6, which differ from the formats in Spark 2.0+.
   *
   * @param model [[LDA]] or [[LDAModel]] instance. This instance will be modified with
   *              [[Param]] values extracted from metadata.
   * @param metadata Loaded model metadata
   */
  def getAndSetParams(model: LDAParams, metadata: Metadata): Unit = {
    VersionUtils.majorMinorVersion(metadata.sparkVersion) match {
      case (1, 6) =>
        implicit val format = DefaultFormats
        metadata.params match {
          case JObject(pairs) =>
            pairs.foreach { case (paramName, jsonValue) =>
              // Spark 1.6 saved this param under the name "topicDistribution";
              // it was renamed to "topicDistributionCol" in 2.0, so translate
              // the old name before looking the param up on the model.
              val origParam =
                if (paramName == "topicDistribution") "topicDistributionCol" else paramName
              val param = model.getParam(origParam)
              val value = param.jsonDecode(compact(render(jsonValue)))
              model.set(param, value)
            }
          case _ =>
            // Params metadata must be a JSON object mapping param name -> value.
            throw new IllegalArgumentException(
              s"Cannot recognize JSON metadata: ${metadata.metadataJson}.")
        }
      case _ => // 2.0+ formats are understood by the standard reader directly.
        DefaultParamsReader.getAndSetParams(model, metadata)
    }
  }
}
/**
* Model fitted by [[LDA]].
*
* @param vocabSize Vocabulary size (number of terms or words in the vocabulary)
* @param sparkSession Used to construct local DataFrames for returning query results
*/
@Since("1.6.0")
abstract class LDAModel private[ml] (
    @Since("1.6.0") override val uid: String,
    @Since("1.6.0") val vocabSize: Int,
    @Since("1.6.0") @transient private[ml] val sparkSession: SparkSession)
  extends Model[LDAModel] with LDAParams with Logging with MLWritable {
  // NOTE to developers:
  // This abstraction should contain all important functionality for basic LDA usage.
  // Specializations of this class can contain expert-only functionality.
  /**
   * Underlying spark.mllib model.
   * If this model was produced by Online LDA, then this is the only model representation.
   * If this model was produced by EM, then this local representation may be built lazily.
   */
  @Since("1.6.0")
  private[clustering] def oldLocalModel: OldLocalLDAModel
  /** Returns underlying spark.mllib model, which may be local or distributed */
  @Since("1.6.0")
  private[clustering] def getModel: OldLDAModel
  /** Doc-concentration (alpha) values actually used by the underlying spark.mllib model. */
  private[ml] def getEffectiveDocConcentration: Array[Double] = getModel.docConcentration.toArray
  /** Topic-concentration value actually used by the underlying spark.mllib model. */
  private[ml] def getEffectiveTopicConcentration: Double = getModel.topicConcentration
  /**
   * The features for LDA should be a `Vector` representing the word counts in a document.
   * The vector should be of length vocabSize, with counts for each term (word).
   *
   * @group setParam
   */
  @Since("1.6.0")
  def setFeaturesCol(value: String): this.type = set(featuresCol, value)
  /** @group setParam */
  @Since("2.2.0")
  def setTopicDistributionCol(value: String): this.type = set(topicDistributionCol, value)
  /** @group setParam */
  @Since("1.6.0")
  def setSeed(value: Long): this.type = set(seed, value)
  /**
   * Transforms the input dataset.
   *
   * WARNING: If this model is an instance of [[DistributedLDAModel]] (produced when [[optimizer]]
   * is set to "em"), this involves collecting a large [[topicsMatrix]] to the driver.
   * This implementation may be changed in the future.
   */
  @Since("2.0.0")
  override def transform(dataset: Dataset[_]): DataFrame = {
    if ($(topicDistributionCol).nonEmpty) {
      // TODO: Make the transformer natively in ml framework to avoid extra conversion.
      val transformer = oldLocalModel.getTopicDistributionMethod(sparkSession.sparkContext)
      // Wrap the old-API transformer in a UDF, converting ml.Vector <-> mllib.Vector.
      val t = udf { (v: Vector) => transformer(OldVectors.fromML(v)).asML }
      dataset.withColumn($(topicDistributionCol), t(col($(featuresCol)))).toDF()
    } else {
      // No output column configured: the call is a no-op on the data.
      logWarning("LDAModel.transform was called without any output columns. Set an output column" +
        " such as topicDistributionCol to produce results.")
      dataset.toDF()
    }
  }
  @Since("1.6.0")
  override def transformSchema(schema: StructType): StructType = {
    validateAndTransformSchema(schema)
  }
  /**
   * Value for [[docConcentration]] estimated from data.
   * If Online LDA was used and [[optimizeDocConcentration]] was set to false,
   * then this returns the fixed (given) value for the [[docConcentration]] parameter.
   */
  @Since("2.0.0")
  def estimatedDocConcentration: Vector = getModel.docConcentration
  /**
   * Inferred topics, where each topic is represented by a distribution over terms.
   * This is a matrix of size vocabSize x k, where each column is a topic.
   * No guarantees are given about the ordering of the topics.
   *
   * WARNING: If this model is actually a [[DistributedLDAModel]] instance produced by
   * the Expectation-Maximization ("em") [[optimizer]], then this method could involve
   * collecting a large amount of data to the driver (on the order of vocabSize x k).
   */
  @Since("2.0.0")
  def topicsMatrix: Matrix = oldLocalModel.topicsMatrix.asML
  /** Indicates whether this instance is of type [[DistributedLDAModel]] */
  @Since("1.6.0")
  def isDistributed: Boolean
  /**
   * Calculates a lower bound on the log likelihood of the entire corpus.
   *
   * See Equation (16) in the Online LDA paper (Hoffman et al., 2010).
   *
   * WARNING: If this model is an instance of [[DistributedLDAModel]] (produced when [[optimizer]]
   * is set to "em"), this involves collecting a large [[topicsMatrix]] to the driver.
   * This implementation may be changed in the future.
   *
   * @param dataset test corpus to use for calculating log likelihood
   * @return variational lower bound on the log likelihood of the entire corpus
   */
  @Since("2.0.0")
  def logLikelihood(dataset: Dataset[_]): Double = {
    val oldDataset = LDA.getOldDataset(dataset, $(featuresCol))
    oldLocalModel.logLikelihood(oldDataset)
  }
  /**
   * Calculate an upper bound on perplexity. (Lower is better.)
   * See Equation (16) in the Online LDA paper (Hoffman et al., 2010).
   *
   * WARNING: If this model is an instance of [[DistributedLDAModel]] (produced when [[optimizer]]
   * is set to "em"), this involves collecting a large [[topicsMatrix]] to the driver.
   * This implementation may be changed in the future.
   *
   * @param dataset test corpus to use for calculating perplexity
   * @return Variational upper bound on log perplexity per token.
   */
  @Since("2.0.0")
  def logPerplexity(dataset: Dataset[_]): Double = {
    val oldDataset = LDA.getOldDataset(dataset, $(featuresCol))
    oldLocalModel.logPerplexity(oldDataset)
  }
  /**
   * Return the topics described by their top-weighted terms.
   *
   * @param maxTermsPerTopic Maximum number of terms to collect for each topic.
   *                         Default value of 10.
   * @return Local DataFrame with one topic per Row, with columns:
   *          - "topic": IntegerType: topic index
   *          - "termIndices": ArrayType(IntegerType): term indices, sorted in order of decreasing
   *            term importance
   *          - "termWeights": ArrayType(DoubleType): corresponding sorted term weights
   */
  @Since("1.6.0")
  def describeTopics(maxTermsPerTopic: Int): DataFrame = {
    // zipWithIndex supplies the topic index for each (termIndices, termWeights) pair.
    val topics = getModel.describeTopics(maxTermsPerTopic).zipWithIndex.map {
      case ((termIndices, termWeights), topic) =>
        (topic, termIndices.toSeq, termWeights.toSeq)
    }
    sparkSession.createDataFrame(topics).toDF("topic", "termIndices", "termWeights")
  }
  /** Return the topics described by their top 10 weighted terms. */
  @Since("1.6.0")
  def describeTopics(): DataFrame = describeTopics(10)
}
/**
*
* Local (non-distributed) model fitted by [[LDA]].
*
* This model stores the inferred topics only; it does not store info about the training dataset.
*/
@Since("1.6.0")
class LocalLDAModel private[ml] (
    uid: String,
    vocabSize: Int,
    @Since("1.6.0") override private[clustering] val oldLocalModel: OldLocalLDAModel,
    sparkSession: SparkSession)
  extends LDAModel(uid, vocabSize, sparkSession) {
  @Since("1.6.0")
  override def copy(extra: ParamMap): LocalLDAModel = {
    // The copy shares the same underlying spark.mllib model; only params are copied over.
    val copied = new LocalLDAModel(uid, vocabSize, oldLocalModel, sparkSession)
    copyValues(copied, extra).setParent(parent).asInstanceOf[LocalLDAModel]
  }
  /** For a local model the underlying representation is the local model itself. */
  override private[clustering] def getModel: OldLDAModel = oldLocalModel
  @Since("1.6.0")
  override def isDistributed: Boolean = false
  @Since("1.6.0")
  override def write: MLWriter = new LocalLDAModel.LocalLDAModelWriter(this)
}
@Since("1.6.0")
object LocalLDAModel extends MLReadable[LocalLDAModel] {
  /** Writer that saves the params metadata plus the model's topic data as Parquet. */
  private[LocalLDAModel]
  class LocalLDAModelWriter(instance: LocalLDAModel) extends MLWriter {
    // Schema of the single-row "data" Parquet file written next to the metadata.
    private case class Data(
        vocabSize: Int,
        topicsMatrix: Matrix,
        docConcentration: Vector,
        topicConcentration: Double,
        gammaShape: Double)
    override protected def saveImpl(path: String): Unit = {
      DefaultParamsWriter.saveMetadata(instance, path, sc)
      val oldModel = instance.oldLocalModel
      val data = Data(instance.vocabSize, oldModel.topicsMatrix, oldModel.docConcentration,
        oldModel.topicConcentration, oldModel.gammaShape)
      val dataPath = new Path(path, "data").toString
      // repartition(1) keeps the tiny model in a single Parquet file.
      sparkSession.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath)
    }
  }
  /** Reader that reverses [[LocalLDAModelWriter]]. */
  private class LocalLDAModelReader extends MLReader[LocalLDAModel] {
    private val className = classOf[LocalLDAModel].getName
    override def load(path: String): LocalLDAModel = {
      val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
      val dataPath = new Path(path, "data").toString
      val data = sparkSession.read.parquet(dataPath)
      // Models saved by older Spark versions may contain mllib vector/matrix columns;
      // convert them to their spark.ml equivalents before pattern matching below.
      val vectorConverted = MLUtils.convertVectorColumnsToML(data, "docConcentration")
      val matrixConverted = MLUtils.convertMatrixColumnsToML(vectorConverted, "topicsMatrix")
      val Row(vocabSize: Int, topicsMatrix: Matrix, docConcentration: Vector,
          topicConcentration: Double, gammaShape: Double) =
        matrixConverted.select("vocabSize", "topicsMatrix", "docConcentration",
          "topicConcentration", "gammaShape").head()
      val oldModel = new OldLocalLDAModel(topicsMatrix, docConcentration, topicConcentration,
        gammaShape)
      val model = new LocalLDAModel(metadata.uid, vocabSize, oldModel, sparkSession)
      LDAParams.getAndSetParams(model, metadata)
      model
    }
  }
  @Since("1.6.0")
  override def read: MLReader[LocalLDAModel] = new LocalLDAModelReader
  @Since("1.6.0")
  override def load(path: String): LocalLDAModel = super.load(path)
}
/**
*
* Distributed model fitted by [[LDA]].
* This type of model is currently only produced by Expectation-Maximization (EM).
*
* This model stores the inferred topics, the full training dataset, and the topic distribution
* for each training document.
*
* @param oldLocalModelOption Used to implement [[oldLocalModel]] as a lazy val, but keeping
* `copy()` cheap.
*/
@Since("1.6.0")
class DistributedLDAModel private[ml] (
    uid: String,
    vocabSize: Int,
    private val oldDistributedModel: OldDistributedLDAModel,
    sparkSession: SparkSession,
    private var oldLocalModelOption: Option[OldLocalLDAModel])
  extends LDAModel(uid, vocabSize, sparkSession) {
  // Converts the distributed model to a local one on first access and caches the result
  // in oldLocalModelOption (which is why that constructor arg is a var).
  // NOTE(review): this mutation is not synchronized — presumably only driver-side,
  // single-threaded access is expected; confirm before relying on it concurrently.
  override private[clustering] def oldLocalModel: OldLocalLDAModel = {
    if (oldLocalModelOption.isEmpty) {
      oldLocalModelOption = Some(oldDistributedModel.toLocal)
    }
    oldLocalModelOption.get
  }
  override private[clustering] def getModel: OldLDAModel = oldDistributedModel
  /**
   * Convert this distributed model to a local representation.  This discards info about the
   * training dataset.
   *
   * WARNING: This involves collecting a large [[topicsMatrix]] to the driver.
   */
  @Since("1.6.0")
  def toLocal: LocalLDAModel = new LocalLDAModel(uid, vocabSize, oldLocalModel, sparkSession)
  @Since("1.6.0")
  override def copy(extra: ParamMap): DistributedLDAModel = {
    // Passing oldLocalModelOption along keeps copy() cheap: the lazily-built local
    // representation (if any) is shared rather than recomputed.
    val copied = new DistributedLDAModel(
      uid, vocabSize, oldDistributedModel, sparkSession, oldLocalModelOption)
    copyValues(copied, extra).setParent(parent)
    copied
  }
  @Since("1.6.0")
  override def isDistributed: Boolean = true
  /**
   * Log likelihood of the observed tokens in the training set,
   * given the current parameter estimates:
   *  log P(docs | topics, topic distributions for docs, Dirichlet hyperparameters)
   *
   * Notes:
   *  - This excludes the prior; for that, use [[logPrior]].
   *  - Even with [[logPrior]], this is NOT the same as the data log likelihood given the
   *    hyperparameters.
   *  - This is computed from the topic distributions computed during training. If you call
   *    `logLikelihood()` on the same training dataset, the topic distributions will be computed
   *    again, possibly giving different results.
   */
  @Since("1.6.0")
  lazy val trainingLogLikelihood: Double = oldDistributedModel.logLikelihood
  /**
   * Log probability of the current parameter estimate:
   * log P(topics, topic distributions for docs | Dirichlet hyperparameters)
   */
  @Since("1.6.0")
  lazy val logPrior: Double = oldDistributedModel.logPrior
  // Snapshot of the training checkpoint file paths; cleared by deleteCheckpointFiles().
  private var _checkpointFiles: Array[String] = oldDistributedModel.checkpointFiles
  /**
   * :: DeveloperApi ::
   *
   * If using checkpointing and `LDA.keepLastCheckpoint` is set to true, then there may be
   * saved checkpoint files.  This method is provided so that users can manage those files.
   *
   * Note that removing the checkpoints can cause failures if a partition is lost and is needed
   * by certain [[DistributedLDAModel]] methods.  Reference counting will clean up the checkpoints
   * when this model and derivative data go out of scope.
   *
   * @return  Checkpoint files from training
   */
  @DeveloperApi
  @Since("2.0.0")
  def getCheckpointFiles: Array[String] = _checkpointFiles
  /**
   * :: DeveloperApi ::
   *
   * Remove any remaining checkpoint files from training.
   *
   * @see [[getCheckpointFiles]]
   */
  @DeveloperApi
  @Since("2.0.0")
  def deleteCheckpointFiles(): Unit = {
    val hadoopConf = sparkSession.sparkContext.hadoopConfiguration
    _checkpointFiles.foreach(PeriodicCheckpointer.removeCheckpointFile(_, hadoopConf))
    _checkpointFiles = Array.empty[String]
  }
  @Since("1.6.0")
  override def write: MLWriter = new DistributedLDAModel.DistributedWriter(this)
}
@Since("1.6.0")
object DistributedLDAModel extends MLReadable[DistributedLDAModel] {
  /** Writer that saves the params metadata and delegates model data to spark.mllib's save. */
  private[DistributedLDAModel]
  class DistributedWriter(instance: DistributedLDAModel) extends MLWriter {
    override protected def saveImpl(path: String): Unit = {
      DefaultParamsWriter.saveMetadata(instance, path, sc)
      val modelPath = new Path(path, "oldModel").toString
      instance.oldDistributedModel.save(sc, modelPath)
    }
  }
  /** Reader that reverses [[DistributedWriter]]. */
  private class DistributedLDAModelReader extends MLReader[DistributedLDAModel] {
    private val className = classOf[DistributedLDAModel].getName
    override def load(path: String): DistributedLDAModel = {
      val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
      val modelPath = new Path(path, "oldModel").toString
      val oldModel = OldDistributedLDAModel.load(sc, modelPath)
      // The local representation is rebuilt lazily on demand, hence None here.
      val model = new DistributedLDAModel(metadata.uid, oldModel.vocabSize,
        oldModel, sparkSession, None)
      LDAParams.getAndSetParams(model, metadata)
      model
    }
  }
  @Since("1.6.0")
  override def read: MLReader[DistributedLDAModel] = new DistributedLDAModelReader
  @Since("1.6.0")
  override def load(path: String): DistributedLDAModel = super.load(path)
}
/**
*
* Latent Dirichlet Allocation (LDA), a topic model designed for text documents.
*
* Terminology:
* - "term" = "word": an element of the vocabulary
* - "token": instance of a term appearing in a document
* - "topic": multinomial distribution over terms representing some concept
* - "document": one piece of text, corresponding to one row in the input data
*
* Original LDA paper (journal version):
* Blei, Ng, and Jordan. "Latent Dirichlet Allocation." JMLR, 2003.
*
* Input data (featuresCol):
* LDA is given a collection of documents as input data, via the featuresCol parameter.
* Each document is specified as a `Vector` of length vocabSize, where each entry is the
* count for the corresponding term (word) in the document. Feature transformers such as
* [[org.apache.spark.ml.feature.Tokenizer]] and [[org.apache.spark.ml.feature.CountVectorizer]]
* can be useful for converting text to word count vectors.
*
* @see <a href="http://en.wikipedia.org/wiki/Latent_Dirichlet_allocation">
* Latent Dirichlet allocation (Wikipedia)</a>
*/
@Since("1.6.0")
class LDA @Since("1.6.0") (
    @Since("1.6.0") override val uid: String)
  extends Estimator[LDAModel] with LDAParams with DefaultParamsWritable {
  @Since("1.6.0")
  def this() = this(Identifiable.randomUID("lda"))
  // Defaults chosen for the "online" optimizer; see the individual param docs in LDAParams.
  setDefault(maxIter -> 20, k -> 10, optimizer -> "online", checkpointInterval -> 10,
    learningOffset -> 1024, learningDecay -> 0.51, subsamplingRate -> 0.05,
    optimizeDocConcentration -> true, keepLastCheckpoint -> true)
  /**
   * The features for LDA should be a `Vector` representing the word counts in a document.
   * The vector should be of length vocabSize, with counts for each term (word).
   *
   * @group setParam
   */
  @Since("1.6.0")
  def setFeaturesCol(value: String): this.type = set(featuresCol, value)
  /** @group setParam */
  @Since("1.6.0")
  def setMaxIter(value: Int): this.type = set(maxIter, value)
  /** @group setParam */
  @Since("1.6.0")
  def setSeed(value: Long): this.type = set(seed, value)
  /** @group setParam */
  @Since("1.6.0")
  def setCheckpointInterval(value: Int): this.type = set(checkpointInterval, value)
  /** @group setParam */
  @Since("1.6.0")
  def setK(value: Int): this.type = set(k, value)
  /** @group setParam */
  @Since("1.6.0")
  def setDocConcentration(value: Array[Double]): this.type = set(docConcentration, value)
  /** Sets a single symmetric doc-concentration value. @group setParam */
  @Since("1.6.0")
  def setDocConcentration(value: Double): this.type = set(docConcentration, Array(value))
  /** @group setParam */
  @Since("1.6.0")
  def setTopicConcentration(value: Double): this.type = set(topicConcentration, value)
  /** @group setParam */
  @Since("1.6.0")
  def setOptimizer(value: String): this.type = set(optimizer, value)
  /** @group setParam */
  @Since("1.6.0")
  def setTopicDistributionCol(value: String): this.type = set(topicDistributionCol, value)
  /** @group expertSetParam */
  @Since("1.6.0")
  def setLearningOffset(value: Double): this.type = set(learningOffset, value)
  /** @group expertSetParam */
  @Since("1.6.0")
  def setLearningDecay(value: Double): this.type = set(learningDecay, value)
  /** @group setParam */
  @Since("1.6.0")
  def setSubsamplingRate(value: Double): this.type = set(subsamplingRate, value)
  /** @group expertSetParam */
  @Since("1.6.0")
  def setOptimizeDocConcentration(value: Boolean): this.type = set(optimizeDocConcentration, value)
  /** @group expertSetParam */
  @Since("2.0.0")
  def setKeepLastCheckpoint(value: Boolean): this.type = set(keepLastCheckpoint, value)
  @Since("1.6.0")
  override def copy(extra: ParamMap): LDA = defaultCopy(extra)
  /**
   * Fits an LDA model by delegating to the spark.mllib implementation, then wraps the
   * result in a [[LocalLDAModel]] (online) or [[DistributedLDAModel]] (em).
   */
  @Since("2.0.0")
  override def fit(dataset: Dataset[_]): LDAModel = {
    transformSchema(dataset.schema, logging = true)
    val instr = Instrumentation.create(this, dataset)
    instr.logParams(featuresCol, topicDistributionCol, k, maxIter, subsamplingRate,
      checkpointInterval, keepLastCheckpoint, optimizeDocConcentration, topicConcentration,
      learningDecay, optimizer, learningOffset, seed)
    val oldLDA = new OldLDA()
      .setK($(k))
      .setDocConcentration(getOldDocConcentration)
      .setTopicConcentration(getOldTopicConcentration)
      .setMaxIterations($(maxIter))
      .setSeed($(seed))
      .setCheckpointInterval($(checkpointInterval))
      .setOptimizer(getOldOptimizer)
    // TODO: persist here, or in old LDA?
    val oldData = LDA.getOldDataset(dataset, $(featuresCol))
    val oldModel = oldLDA.run(oldData)
    // Wrap the fitted spark.mllib model in the matching spark.ml model class.
    val newModel = oldModel match {
      case m: OldLocalLDAModel =>
        new LocalLDAModel(uid, m.vocabSize, m, dataset.sparkSession)
      case m: OldDistributedLDAModel =>
        new DistributedLDAModel(uid, m.vocabSize, m, dataset.sparkSession, None)
    }
    instr.logNumFeatures(newModel.vocabSize)
    val model = copyValues(newModel).setParent(this)
    instr.logSuccess(model)
    model
  }
  @Since("1.6.0")
  override def transformSchema(schema: StructType): StructType = {
    validateAndTransformSchema(schema)
  }
}
@Since("2.0.0")
object LDA extends MLReadable[LDA] {

  /**
   * Get dataset for spark.mllib LDA: converts the featuresCol of ml vectors into the
   * (docId, mllib Vector) pair RDD expected by the old API.  Document ids are generated
   * with monotonically_increasing_id, so they are unique but not contiguous.
   */
  private[clustering] def getOldDataset(
      dataset: Dataset[_],
      featuresCol: String): RDD[(Long, OldVector)] = {
    dataset
      .withColumn("docId", monotonically_increasing_id())
      .select("docId", featuresCol)
      .rdd
      .map { case Row(docId: Long, features: Vector) =>
        (docId, OldVectors.fromML(features))
      }
  }

  /** Reader that restores an (unfitted) LDA estimator together with its params. */
  private class LDAReader extends MLReader[LDA] {
    private val className = classOf[LDA].getName
    override def load(path: String): LDA = {
      val metadata = DefaultParamsReader.loadMetadata(path, sc, className)
      val model = new LDA(metadata.uid)
      LDAParams.getAndSetParams(model, metadata)
      model
    }
  }

  // Fix: add the missing @Since annotation, matching the companion objects of
  // LocalLDAModel and DistributedLDAModel, where read and load are both annotated.
  @Since("2.0.0")
  override def read: MLReader[LDA] = new LDAReader

  @Since("2.0.0")
  override def load(path: String): LDA = super.load(path)
}
| jianran/spark | mllib/src/main/scala/org/apache/spark/ml/clustering/LDA.scala | Scala | apache-2.0 | 36,552 |
/**
 * Builds the "Torneo Golf" window: a title bar with close button and a table
 * with two navigation rows (schedule, map).  Returns the Titanium window.
 *
 * Fixes vs. the previous version: all UI handles are declared with `var`
 * (they used to leak into the global namespace and be overwritten on every
 * call), the duplicate `var row` declaration is gone, the dead `var Window;`
 * locals are removed, and the identical close/back handlers are shared.
 *
 * @param {*} Window unused legacy parameter, kept for caller compatibility
 */
function TorneoGolfWindow(Window) {
	var window1 = Titanium.UI.createWindow({
		tabBarHidden : true,
		backgroundColor : "white",
		width : '100%',
		height : '100%',
		layout : 'vertical'
	});
	var table = Ti.UI.createTableView({
		width : '90%',
		height : '100%'
	});
	var scrollView_1 = Titanium.UI.createView({
		id : "scrollView_1",
		backgroundImage : '/images/background.png',
		height : '100%',
		width : '100%',
		layout : 'vertical'
	});
	scrollView_1.add(table);
	var imageViewBar = Titanium.UI.createView({
		id : "imageViewBar",
		backgroundColor : Ti.App.Properties.getString('viewcolor'),
		height : 80,
		left : 0,
		top : 0,
		width : '100%',
		layout : 'horizontal'
	});
	var imageView = Titanium.UI.createImageView({
		id : "imageView",
		image : "/images/icongolf.png",
		width : 60,
		height : 60,
		top : 7,
		right : 3
	});
	imageViewBar.add(imageView);
	var labelTitulo = Titanium.UI.createLabel({
		id : "labelTitulo",
		height : 'auto',
		width : '70%',
		text : L('golf'),
		font : {
			fontSize : '22dp'
		},
		color : 'white',
		textAlign : Ti.UI.TEXT_ALIGNMENT_CENTER
	});
	imageViewBar.add(labelTitulo);
	var buttonClose = Titanium.UI.createImageView({
		id : "buttonClose",
		image : "/images/close.png",
		width : 30,
		height : 30,
		top : 25
	});
	imageViewBar.add(buttonClose);
	window1.add(imageViewBar);
	window1.add(scrollView_1);

	// Builds the two navigation rows; the row ids are matched in the click handler.
	function populateTable() {
		var data = [];
		data.push(Titanium.UI.createTableViewRow({
			id : 2,
			title : 'Horarios',
			leftImage : '/images/horarios.png',
			isparent : true,
			opened : false,
			hasChild : false,
			font : {
				fontSize : '22dp'
			},
			color : 'black'
		}));
		data.push(Titanium.UI.createTableViewRow({
			id : 3,
			title : 'Mapa',
			leftImage : '/images/mapa.png',
			isparent : true,
			opened : false,
			hasChild : false,
			font : {
				fontSize : '22dp'
			},
			color : 'black'
		}));
		table.setData(data);
	}

	populateTable();

	// Opens the window that corresponds to the clicked row.
	table.addEventListener('click', function(e) {
		if (e.rowData.id == 2) {
			var HorariosWindow = require("ui/handheld/golf/HorariosWindow");
			new HorariosWindow().open();
		} else if (e.rowData.id == 3) {
			var MapaWindow = require("ui/handheld/mapa/MapaWindow");
			new MapaWindow().open();
		}
	});

	// Close button and Android back both vibrate and return to the main window.
	function goToMainWindow() {
		Ti.Media.vibrate();
		var MainWindow = require("ui/handheld/MainWindow");
		new MainWindow().open();
	}

	buttonClose.addEventListener('click', goToMainWindow);
	window1.addEventListener('android:back', goToMainWindow);

	return window1;
}

module.exports = TorneoGolfWindow;
| emobile/expomobile_mobile | Resources/ui/tablet/TorneoGolfWindow.js | JavaScript | apache-2.0 | 2,863 |
/*
 * File:   NetWork.h
 * Author: guoxinhua
 *
 * Created on September 24, 2014, 5:14 PM
 */
#ifndef CP_NETWORK_H
#define CP_NETWORK_H
#ifdef __cplusplus
extern "C" {
#endif
/* Event-loop capacity limits. */
#define CP_REACTOR_MAXEVENTS 4096
#define CP_MAX_EVENT 1024
/* Per-connection I/O buffer size: 1 MiB. */
#define CP_BUFFER_SIZE (1024*1024)
/* Maximum value of an unsigned 32-bit integer (2^32 - 1). */
#define CP_MAX_UINT 4294967295
/* NOTE(review): appears to be a custom event code used to request a close — confirm usage. */
#define EPOLL_CLOSE 10
/* Wire-protocol sentinel marking the end of a client message. */
#define CP_CLIENT_EOF_STR "\r\n^CON^eof\r\n"
/* Responses sent back when the connection pool is exhausted or misused. */
#define CP_TOO_MANY_CON "not enough con"
#define CP_TOO_MANY_CON_ERR "ERROR!not enough con"
#define CP_MULTI_PROCESS_ERR "ERROR!the connection object create in parent process and use in multi process,please create in every process"
#define CP_CLIENT_EOF_LEN strlen(CP_CLIENT_EOF_STR)
/* Header prefixes used in the wire protocol. */
#define CP_HEADER_CON_SUCCESS "CON_SUCCESS!"
#define CP_HEADER_ERROR "ERROR!"
#define CP_PDO_HEADER_STATE "PDOStatement!"
#define CP_RELEASE_HEADER "r"
#define CP_RELEASE_HEADER_LEN 1
/* Callback invoked for a ready file descriptor returned by the event loop. */
typedef int (*epoll_wait_handle)(int fd);
/* Register fd with the epoll instance epfd for events of fdtype. */
int cpEpoll_add(int epfd, int fd, int fdtype);
/* Change the event mask of an already-registered fd. */
int cpEpoll_set(int fd, int fdtype);
/* Remove fd from the epoll instance epfd. */
int cpEpoll_del(int epfd, int fd);
/* Wait for events on epfd and dispatch them to the supplied handlers. */
int cpEpoll_wait(epoll_wait_handle*, struct timeval *timeo, int epfd);
/* Release the event-loop state. */
void cpEpoll_free();
/* Translate an fdtype into the corresponding epoll event mask. */
CPINLINE int cpEpoll_event_set(int fdtype);
#ifdef __cplusplus
}
#endif
#endif /* CP_NETWORK_H */
| swoole/php-cp | include/cpNetWork.h | C | apache-2.0 | 1,356 |
<div id="failover_confirmation_dialog" style="width:711px">
<h1 class="dialog_corner_radius">Confirm Node Fail Over for {{serversFailOverDialogCtl.node.hostname}}</h1>
<div>
<div mn-spinner="serversFailOverDialogCtl.viewLoading">
<div class="pas_20">
<div class="failover_warning pat_20">
<div ng-show="!serversFailOverDialogCtl.status.down && !serversFailOverDialogCtl.status.dataless">
<h2>Fail Over Options</h2>
<label>
<input
type="radio"
name="failOver"
ng-model="serversFailOverDialogCtl.status.failOver"
value="startGracefulFailover"
ng-disabled="!serversFailOverDialogCtl.status.gracefulFailoverPossible">
<span>Graceful Fail Over (default).</span>
</label>
<label>
<input
type="radio"
name="failOver"
ng-model="serversFailOverDialogCtl.status.failOver"
value="failOver">
<span>Hard Fail Over - If you use hard failover option on a functioning node it may result in data loss. This is because failover will immediately remove the node from the cluster and any data that has not yet been replicated to other nodes may be permanently lost if it had not been persisted to disk.</span>
</label>
<div
class="warning js_gracefull_failover_message"
style="margin-bottom: 15px;"
ng-if="!serversFailOverDialogCtl.status.gracefulFailoverPossible">
<strong>Attention</strong> – Graceful fail over option is not available either because node is unreachable or replica vbucket cannot be activated gracefully.
</div>
<div
class="warning js_warning"
style="margin-top: 15px;"
ng-show="serversFailOverDialogCtl.status.backfill && (serversFailOverDialogCtl.status.failOver === 'failOver')">
<strong>Attention</strong> – A significant amount of data stored on this node
does not yet have replica (backup) copies! Failing over the node now will
irrecoverably lose that data when the incomplete replica is
activated and this node is removed from the cluster. It is
recommended to select "Remove Server" and rebalance to
safely remove the node without any data loss.
<label>
<input
type="checkbox"
name="confirmation"
ng-model="serversFailOverDialogCtl.status.confirmation">
Please confirm Failover.
</label>
</div>
<div
class="warning js_warning"
style="margin-top: 15px;"
ng-show="!serversFailOverDialogCtl.status.backfill && (serversFailOverDialogCtl.status.failOver === 'failOver')">
<strong>Warning</strong> – Failing over the node will remove it from the cluster and activate a replica.
Operations currently in flight and not yet replicated, will be lost. Rebalancing will be required
to add the node back into the cluster. Consider using "Remove from Cluster" and rebalancing instead of Failover,
to avoid any loss of data. Please confirm Failover.
</div>
</div>
<div ng-show="serversFailOverDialogCtl.status.down && !serversFailOverDialogCtl.status.dataless">
<div
class="warning"
ng-show="serversFailOverDialogCtl.status.backfill">
            <strong>Attention</strong> – Not all of the data stored on this node
            has replica (backup) copies! Failing over the node now will irrecoverably
lose that data when the incomplete replica is activated and
this node is removed from the cluster. If the node might
come back online, it is recommended to wait.
Check this box if you want to failover the node, despite the resulting data loss
<label>
<input
type="checkbox"
name="confirmation"
ng-model="serversFailOverDialogCtl.status.confirmation"> Please confirm Failover.</label>
</div>
<div class="warning" ng-show="!serversFailOverDialogCtl.status.backfill">
<strong>Warning</strong> – Failing over the node will remove it from the cluster and activate a replica.
Operations not replicated before the node became unresponsive, will be lost.
Rebalancing will be required to add the node back into the cluster. Please confirm Failover.
</div>
</div>
<div class="failover_warning pat_20" ng-if="serversFailOverDialogCtl.status.dataless">
<div class="warning">
<strong>Note</strong> – Failing over this node (which has no data) will remove it from the cluster.
Rebalancing will be required to add the node back into the cluster. Please confirm Failover.
</div>
</div>
</div>
</div>
<div class="right save_cancel">
<button
type="submit"
class="save_button float_right"
ng-click="serversFailOverDialogCtl.onSubmit()"
ng-model="button"
ng-disabled="serversFailOverDialogCtl.isFailOverBtnDisabled()">Fail Over</button>
<a class="close casper_close_failover_confirmation_dialog cancel_button float_right" ng-click="$dismiss()">Cancel</a>
</div>
</div>
</div>
</div> | ceejatec/ns_server | priv/public/ui/app-classic/mn_admin/mn_servers/failover_dialog/mn_servers_failover_dialog.html | HTML | apache-2.0 | 5,732 |
package eu.atos.sla.dao.jpa;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.EntityNotFoundException;
import javax.persistence.NoResultException;
import javax.persistence.PersistenceContext;
import javax.persistence.Query;
import javax.persistence.TypedQuery;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import eu.atos.sla.dao.ITemplateDAO;
import eu.atos.sla.datamodel.ITemplate;
import eu.atos.sla.datamodel.bean.Template;
/**
 * JPA implementation of {@link ITemplateDAO} backed by the "slarepositoryDB"
 * persistence unit.
 *
 * Cleanups vs. the previous version: removed the dead {@code new ArrayList}
 * allocations that were immediately overwritten, the redundant casts on
 * {@code TypedQuery#getResultList()} (which already returns
 * {@code List<ITemplate>}), the declare-null-then-assign patterns, and the
 * unreachable null checks after {@code getResultList()}.
 */
@Repository("TemplateRepository")
public class TemplateDAOJpa implements ITemplateDAO {

    private static Logger logger = LoggerFactory.getLogger(TemplateDAOJpa.class);

    private EntityManager entityManager;

    /** Injects the JPA entity manager for the "slarepositoryDB" persistence unit. */
    @PersistenceContext(unitName = "slarepositoryDB")
    public void setEntityManager(EntityManager entityManager) {
        this.entityManager = entityManager;
    }

    public EntityManager getEntityManager() {
        return entityManager;
    }

    /** Looks a template up by primary key; returns null when it does not exist. */
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public Template getById(Long id) {
        return entityManager.find(Template.class, id);
    }

    /** Looks a template up by its UUID; returns null when no row matches. */
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public Template getByUuid(String uuid) {
        try {
            Query query = entityManager.createNamedQuery(Template.QUERY_FIND_BY_UUID);
            query.setParameter("uuid", uuid);
            return (Template) query.getSingleResult();
        } catch (NoResultException e) {
            logger.debug("No Result found: " + e);
            return null;
        }
    }

    /**
     * Searches templates by provider and (optionally) service ids.
     * A null serviceIds array disables the service filter; the "flag" parameter
     * tells the named query whether the filter is active.
     */
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public List<ITemplate> search(String providerId, String[] serviceIds) {
        TypedQuery<ITemplate> query = entityManager.createNamedQuery(
                Template.QUERY_SEARCH, ITemplate.class);
        List<String> serviceIdList = (serviceIds != null) ? Arrays.asList(serviceIds) : null;
        query.setParameter("providerId", providerId);
        query.setParameter("serviceIds", serviceIdList);
        query.setParameter("flagServiceIds", (serviceIds != null) ? "flag" : null);
        logger.debug("providerId:{} - serviceIds:{}", providerId, serviceIdList);
        List<ITemplate> templates = query.getResultList();
        logger.debug("Number of templates:" + templates.size());
        return templates;
    }

    /** Returns the templates associated with the given agreement. */
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public List<ITemplate> getByAgreement(String agreement) {
        TypedQuery<ITemplate> query = entityManager.createNamedQuery(
                Template.QUERY_FIND_BY_AGREEMENT, ITemplate.class);
        query.setParameter("agreement", agreement);
        List<ITemplate> templates = query.getResultList();
        logger.debug("Number of templates:" + templates.size());
        return templates;
    }

    /** Returns all stored templates. */
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public List<ITemplate> getAll() {
        TypedQuery<ITemplate> query = entityManager.createNamedQuery(
                Template.QUERY_FIND_ALL, ITemplate.class);
        List<ITemplate> templates = query.getResultList();
        logger.debug("Number of templates:" + templates.size());
        return templates;
    }

    /** Persists a new template, flushing immediately so constraint errors surface here. */
    @Override
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public ITemplate save(ITemplate template) {
        logger.info("template.getUuid() " + template.getUuid());
        entityManager.persist(template);
        entityManager.flush();
        return template;
    }

    /**
     * Updates the template identified by uuid with the given data.
     * @return false when no template with that uuid exists, true otherwise.
     */
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public boolean update(String uuid, ITemplate template) {
        Template templateDB;
        try {
            Query query = entityManager.createNamedQuery(Template.QUERY_FIND_BY_UUID);
            query.setParameter("uuid", uuid);
            templateDB = (Template) query.getSingleResult();
        } catch (NoResultException e) {
            logger.debug("No Result found: " + e);
            return false;
        }
        // Reuse the persistent id so merge() updates the existing row.
        template.setId(templateDB.getId());
        logger.info("template to update with id" + template.getId());
        entityManager.merge(template);
        entityManager.flush();
        return true;
    }

    /** Deletes the given template; returns false when it is no longer present. */
    @Transactional(readOnly = false, propagation = Propagation.REQUIRED)
    public boolean delete(ITemplate template) {
        try {
            Template templateDeleted = entityManager.getReference(Template.class, template.getId());
            entityManager.remove(templateDeleted);
            entityManager.flush();
            return true;
        } catch (EntityNotFoundException e) {
            logger.debug("Template[{}] not found", template.getId());
            return false;
        }
    }
}
| Atos-FiwareOps/sla-framework | sla-core/sla-repository/src/main/java/eu/atos/sla/dao/jpa/TemplateDAOJpa.java | Java | apache-2.0 | 5,164 |
/*--------------------------------------------------------------------------
* linq.js - LINQ for JavaScript
* ver 3.0.3-Beta4 (Oct. 9th, 2012)
*
* created and maintained by neuecc <ils@neue.cc>
* licensed under MIT License
* http://linqjs.codeplex.com/
*------------------------------------------------------------------------*/
(function (root, undefined) {
// ReadOnly Function
var Functions = {
Identity: function (x) { return x; },
True: function () { return true; },
Blank: function () { }
};
// const Type
var Types = {
Boolean: typeof true,
Number: typeof 0,
String: typeof "",
Object: typeof {},
Undefined: typeof undefined,
Function: typeof function () { }
};
// private utility methods
var Utils = {
// Create anonymous function from lambda expression string
createLambda: function (expression) {
if (expression == null) return Functions.Identity;
if (typeof expression == Types.String) {
if (expression == "") {
return Functions.Identity;
}
else if (expression.indexOf("=>") == -1) {
var regexp = new RegExp("[$]+", "g");
var maxLength = 0;
var match;
while (match = regexp.exec(expression)) {
var paramNumber = match[0].length;
if (paramNumber > maxLength) {
maxLength = paramNumber;
}
}
var argArray = [];
for (var i = 1; i <= maxLength; i++) {
var dollar = "";
for (var j = 0; j < i; j++) {
dollar += "$";
}
argArray.push(dollar);
}
var args = Array.prototype.join.call(argArray, ",");
return new Function(args, "return " + expression);
}
else {
var expr = expression.match(/^[(\s]*([^()]*?)[)\s]*=>(.*)/);
return new Function(expr[1], "return " + expr[2]);
}
}
return expression;
},
isIEnumerable: function (obj) {
if (typeof Enumerator !== Types.Undefined) {
try {
new Enumerator(obj); // check JScript(IE)'s Enumerator
return true;
}
catch (e) { }
}
return false;
},
// IE8's defineProperty is defined but cannot use, therefore check defineProperties
defineProperty: (Object.defineProperties != null)
? function (target, methodName, value) {
Object.defineProperty(target, methodName, {
enumerable: false,
configurable: true,
writable: true,
value: value
})
}
: function (target, methodName, value) {
target[methodName] = value;
},
compare: function (a, b) {
return (a === b) ? 0
: (a > b) ? 1
: -1;
},
dispose: function (obj) {
if (obj != null) obj.dispose();
}
};
// IEnumerator State
var State = { Before: 0, Running: 1, After: 2 };
// "Enumerator" is conflict JScript's "Enumerator"
var IEnumerator = function (initialize, tryGetNext, dispose) {
var yielder = new Yielder();
var state = State.Before;
this.current = yielder.current;
this.moveNext = function () {
try {
switch (state) {
case State.Before:
state = State.Running;
initialize();
// fall through
case State.Running:
if (tryGetNext.apply(yielder)) {
return true;
}
else {
this.dispose();
return false;
}
case State.After:
return false;
}
}
catch (e) {
this.dispose();
throw e;
}
};
this.dispose = function () {
if (state != State.Running) return;
try {
dispose();
}
finally {
state = State.After;
}
};
};
// for tryGetNext
var Yielder = function () {
var current = null;
this.current = function () { return current; };
this.yieldReturn = function (value) {
current = value;
return true;
};
this.yieldBreak = function () {
return false;
};
};
// Enumerable constuctor
var Enumerable = function (getEnumerator) {
this.getEnumerator = getEnumerator;
};
// Utility
Enumerable.Utils = {}; // container
Enumerable.Utils.createLambda = function (expression) {
return Utils.createLambda(expression);
};
Enumerable.Utils.createEnumerable = function (getEnumerator) {
return new Enumerable(getEnumerator);
};
Enumerable.Utils.createEnumerator = function (initialize, tryGetNext, dispose) {
return new IEnumerator(initialize, tryGetNext, dispose);
};
Enumerable.Utils.extendTo = function (type) {
var typeProto = type.prototype;
var enumerableProto;
if (type === Array) {
enumerableProto = ArrayEnumerable.prototype;
Utils.defineProperty(typeProto, "getSource", function () {
return this;
});
}
else {
enumerableProto = Enumerable.prototype;
Utils.defineProperty(typeProto, "getEnumerator", function () {
return Enumerable.from(this).getEnumerator();
});
}
for (var methodName in enumerableProto) {
var func = enumerableProto[methodName];
// already extended
if (typeProto[methodName] == func) continue;
// already defined(example Array#reverse/join/forEach...)
if (typeProto[methodName] != null) {
methodName = methodName + "ByLinq";
if (typeProto[methodName] == func) continue; // recheck
}
if (func instanceof Function) {
Utils.defineProperty(typeProto, methodName, func);
}
}
};
// Generator
Enumerable.choice = function () // variable argument
{
var args = arguments;
return new Enumerable(function () {
return new IEnumerator(
function () {
args = (args[0] instanceof Array) ? args[0]
: (args[0].getEnumerator != null) ? args[0].toArray()
: args;
},
function () {
return this.yieldReturn(args[Math.floor(Math.random() * args.length)]);
},
Functions.Blank);
});
};
Enumerable.cycle = function () // variable argument
{
var args = arguments;
return new Enumerable(function () {
var index = 0;
return new IEnumerator(
function () {
args = (args[0] instanceof Array) ? args[0]
: (args[0].getEnumerator != null) ? args[0].toArray()
: args;
},
function () {
if (index >= args.length) index = 0;
return this.yieldReturn(args[index++]);
},
Functions.Blank);
});
};
Enumerable.empty = function () {
return new Enumerable(function () {
return new IEnumerator(
Functions.Blank,
function () { return false; },
Functions.Blank);
});
};
Enumerable.from = function (obj) {
if (obj == null) {
return Enumerable.empty();
}
if (obj instanceof Enumerable) {
return obj;
}
if (typeof obj == Types.Number || typeof obj == Types.Boolean) {
return Enumerable.repeat(obj, 1);
}
if (typeof obj == Types.String) {
return new Enumerable(function () {
var index = 0;
return new IEnumerator(
Functions.Blank,
function () {
return (index < obj.length) ? this.yieldReturn(obj.charAt(index++)) : false;
},
Functions.Blank);
});
}
if (typeof obj != Types.Function) {
// array or array like object
if (typeof obj.length == Types.Number) {
return new ArrayEnumerable(obj);
}
// JScript's IEnumerable
if (!(obj instanceof Object) && Utils.isIEnumerable(obj)) {
return new Enumerable(function () {
var isFirst = true;
var enumerator;
return new IEnumerator(
function () { enumerator = new Enumerator(obj); },
function () {
if (isFirst) isFirst = false;
else enumerator.moveNext();
return (enumerator.atEnd()) ? false : this.yieldReturn(enumerator.item());
},
Functions.Blank);
});
}
// WinMD IIterable<T>
if (typeof Windows === Types.Object && typeof obj.first === Types.Function) {
return new Enumerable(function () {
var isFirst = true;
var enumerator;
return new IEnumerator(
function () { enumerator = obj.first(); },
function () {
if (isFirst) isFirst = false;
else enumerator.moveNext();
return (enumerator.hasCurrent) ? this.yieldReturn(enumerator.current) : this.yieldBreak();
},
Functions.Blank);
});
}
}
// case function/object : Create keyValuePair[]
return new Enumerable(function () {
var array = [];
var index = 0;
return new IEnumerator(
function () {
for (var key in obj) {
var value = obj[key];
if (!(value instanceof Function) && Object.prototype.hasOwnProperty.call(obj, key)) {
array.push({ key: key, value: value });
}
}
},
function () {
return (index < array.length)
? this.yieldReturn(array[index++])
: false;
},
Functions.Blank);
});
},
Enumerable.make = function (element) {
return Enumerable.repeat(element, 1);
};
// Overload:function(input, pattern)
// Overload:function(input, pattern, flags)
Enumerable.matches = function (input, pattern, flags) {
if (flags == null) flags = "";
if (pattern instanceof RegExp) {
flags += (pattern.ignoreCase) ? "i" : "";
flags += (pattern.multiline) ? "m" : "";
pattern = pattern.source;
}
if (flags.indexOf("g") === -1) flags += "g";
return new Enumerable(function () {
var regex;
return new IEnumerator(
function () { regex = new RegExp(pattern, flags); },
function () {
var match = regex.exec(input);
return (match) ? this.yieldReturn(match) : false;
},
Functions.Blank);
});
};
// Overload:function(start, count)
// Overload:function(start, count, step)
Enumerable.range = function (start, count, step) {
if (step == null) step = 1;
return new Enumerable(function () {
var value;
var index = 0;
return new IEnumerator(
function () { value = start - step; },
function () {
return (index++ < count)
? this.yieldReturn(value += step)
: this.yieldBreak();
},
Functions.Blank);
});
};
// Overload:function(start, count)
// Overload:function(start, count, step)
Enumerable.rangeDown = function (start, count, step) {
if (step == null) step = 1;
return new Enumerable(function () {
var value;
var index = 0;
return new IEnumerator(
function () { value = start + step; },
function () {
return (index++ < count)
? this.yieldReturn(value -= step)
: this.yieldBreak();
},
Functions.Blank);
});
};
// Overload:function(start, to)
// Overload:function(start, to, step)
Enumerable.rangeTo = function (start, to, step) {
if (step == null) step = 1;
if (start < to) {
return new Enumerable(function () {
var value;
return new IEnumerator(
function () { value = start - step; },
function () {
var next = value += step;
return (next <= to)
? this.yieldReturn(next)
: this.yieldBreak();
},
Functions.Blank);
});
}
else {
return new Enumerable(function () {
var value;
return new IEnumerator(
function () { value = start + step; },
function () {
var next = value -= step;
return (next >= to)
? this.yieldReturn(next)
: this.yieldBreak();
},
Functions.Blank);
});
}
};
// Overload:function(element)
// Overload:function(element, count)
Enumerable.repeat = function (element, count) {
if (count != null) return Enumerable.repeat(element).take(count);
return new Enumerable(function () {
return new IEnumerator(
Functions.Blank,
function () { return this.yieldReturn(element); },
Functions.Blank);
});
};
Enumerable.repeatWithFinalize = function (initializer, finalizer) {
initializer = Utils.createLambda(initializer);
finalizer = Utils.createLambda(finalizer);
return new Enumerable(function () {
var element;
return new IEnumerator(
function () { element = initializer(); },
function () { return this.yieldReturn(element); },
function () {
if (element != null) {
finalizer(element);
element = null;
}
});
});
};
// Overload:function(func)
// Overload:function(func, count)
Enumerable.generate = function (func, count) {
if (count != null) return Enumerable.generate(func).take(count);
func = Utils.createLambda(func);
return new Enumerable(function () {
return new IEnumerator(
Functions.Blank,
function () { return this.yieldReturn(func()); },
Functions.Blank);
});
};
// Overload:function()
// Overload:function(start)
// Overload:function(start, step)
Enumerable.toInfinity = function (start, step) {
if (start == null) start = 0;
if (step == null) step = 1;
return new Enumerable(function () {
var value;
return new IEnumerator(
function () { value = start - step; },
function () { return this.yieldReturn(value += step); },
Functions.Blank);
});
};
// Overload:function()
// Overload:function(start)
// Overload:function(start, step)
Enumerable.toNegativeInfinity = function (start, step) {
if (start == null) start = 0;
if (step == null) step = 1;
return new Enumerable(function () {
var value;
return new IEnumerator(
function () { value = start + step; },
function () { return this.yieldReturn(value -= step); },
Functions.Blank);
});
};
Enumerable.unfold = function (seed, func) {
func = Utils.createLambda(func);
return new Enumerable(function () {
var isFirst = true;
var value;
return new IEnumerator(
Functions.Blank,
function () {
if (isFirst) {
isFirst = false;
value = seed;
return this.yieldReturn(value);
}
value = func(value);
return this.yieldReturn(value);
},
Functions.Blank);
});
};
Enumerable.defer = function (enumerableFactory) {
return new Enumerable(function () {
var enumerator;
return new IEnumerator(
function () { enumerator = Enumerable.from(enumerableFactory()).getEnumerator(); },
function () {
return (enumerator.moveNext())
? this.yieldReturn(enumerator.current())
: this.yieldBreak();
},
function () {
Utils.dispose(enumerator);
});
});
};
// Extension Methods
/* Projection and Filtering Methods */
// Overload:function(func)
// Overload:function(func, resultSelector<element>)
// Overload:function(func, resultSelector<element, nestLevel>)
Enumerable.prototype.traverseBreadthFirst = function (func, resultSelector) {
var source = this;
func = Utils.createLambda(func);
resultSelector = Utils.createLambda(resultSelector);
return new Enumerable(function () {
var enumerator;
var nestLevel = 0;
var buffer = [];
return new IEnumerator(
function () { enumerator = source.getEnumerator(); },
function () {
while (true) {
if (enumerator.moveNext()) {
buffer.push(enumerator.current());
return this.yieldReturn(resultSelector(enumerator.current(), nestLevel));
}
var next = Enumerable.from(buffer).selectMany(function (x) { return func(x); });
if (!next.any()) {
return false;
}
else {
nestLevel++;
buffer = [];
Utils.dispose(enumerator);
enumerator = next.getEnumerator();
}
}
},
function () { Utils.dispose(enumerator); });
});
};
// Overload:function(func)
// Overload:function(func, resultSelector<element>)
// Overload:function(func, resultSelector<element, nestLevel>)
Enumerable.prototype.traverseDepthFirst = function (func, resultSelector) {
var source = this;
func = Utils.createLambda(func);
resultSelector = Utils.createLambda(resultSelector);
return new Enumerable(function () {
var enumeratorStack = [];
var enumerator;
return new IEnumerator(
function () { enumerator = source.getEnumerator(); },
function () {
while (true) {
if (enumerator.moveNext()) {
var value = resultSelector(enumerator.current(), enumeratorStack.length);
enumeratorStack.push(enumerator);
enumerator = Enumerable.from(func(enumerator.current())).getEnumerator();
return this.yieldReturn(value);
}
if (enumeratorStack.length <= 0) return false;
Utils.dispose(enumerator);
enumerator = enumeratorStack.pop();
}
},
function () {
try {
Utils.dispose(enumerator);
}
finally {
Enumerable.from(enumeratorStack).forEach(function (s) { s.dispose(); });
}
});
});
};
Enumerable.prototype.flatten = function () {
var source = this;
return new Enumerable(function () {
var enumerator;
var middleEnumerator = null;
return new IEnumerator(
function () { enumerator = source.getEnumerator(); },
function () {
while (true) {
if (middleEnumerator != null) {
if (middleEnumerator.moveNext()) {
return this.yieldReturn(middleEnumerator.current());
}
else {
middleEnumerator = null;
}
}
if (enumerator.moveNext()) {
if (enumerator.current() instanceof Array) {
Utils.dispose(middleEnumerator);
middleEnumerator = Enumerable.from(enumerator.current())
.selectMany(Functions.Identity)
.flatten()
.getEnumerator();
continue;
}
else {
return this.yieldReturn(enumerator.current());
}
}
return false;
}
},
function () {
try {
Utils.dispose(enumerator);
}
finally {
Utils.dispose(middleEnumerator);
}
});
});
};
Enumerable.prototype.pairwise = function (selector) {
var source = this;
selector = Utils.createLambda(selector);
return new Enumerable(function () {
var enumerator;
return new IEnumerator(
function () {
enumerator = source.getEnumerator();
enumerator.moveNext();
},
function () {
var prev = enumerator.current();
return (enumerator.moveNext())
? this.yieldReturn(selector(prev, enumerator.current()))
: false;
},
function () { Utils.dispose(enumerator); });
});
};
// Overload:function(func)
// Overload:function(seed,func<value,element>)
Enumerable.prototype.scan = function (seed, func) {
var isUseSeed;
if (func == null) {
func = Utils.createLambda(seed); // arguments[0]
isUseSeed = false;
} else {
func = Utils.createLambda(func);
isUseSeed = true;
}
var source = this;
return new Enumerable(function () {
var enumerator;
var value;
var isFirst = true;
return new IEnumerator(
function () { enumerator = source.getEnumerator(); },
function () {
if (isFirst) {
isFirst = false;
if (!isUseSeed) {
if (enumerator.moveNext()) {
return this.yieldReturn(value = enumerator.current());
}
}
else {
return this.yieldReturn(value = seed);
}
}
return (enumerator.moveNext())
? this.yieldReturn(value = func(value, enumerator.current()))
: false;
},
function () { Utils.dispose(enumerator); });
});
};
// Overload:function(selector<element>)
// Overload:function(selector<element,index>)
Enumerable.prototype.select = function (selector) {
selector = Utils.createLambda(selector);
if (selector.length <= 1) {
return new WhereSelectEnumerable(this, null, selector);
}
else {
var source = this;
return new Enumerable(function () {
var enumerator;
var index = 0;
return new IEnumerator(
function () { enumerator = source.getEnumerator(); },
function () {
return (enumerator.moveNext())
? this.yieldReturn(selector(enumerator.current(), index++))
: false;
},
function () { Utils.dispose(enumerator); });
});
}
};
// Overload:function(collectionSelector<element>)
// Overload:function(collectionSelector<element,index>)
// Overload:function(collectionSelector<element>,resultSelector)
// Overload:function(collectionSelector<element,index>,resultSelector)
Enumerable.prototype.selectMany = function (collectionSelector, resultSelector) {
var source = this;
collectionSelector = Utils.createLambda(collectionSelector);
if (resultSelector == null) resultSelector = function (a, b) { return b; };
resultSelector = Utils.createLambda(resultSelector);
return new Enumerable(function () {
var enumerator;
var middleEnumerator = undefined;
var index = 0;
return new IEnumerator(
function () { enumerator = source.getEnumerator(); },
function () {
if (middleEnumerator === undefined) {
if (!enumerator.moveNext()) return false;
}
do {
if (middleEnumerator == null) {
var middleSeq = collectionSelector(enumerator.current(), index++);
middleEnumerator = Enumerable.from(middleSeq).getEnumerator();
}
if (middleEnumerator.moveNext()) {
return this.yieldReturn(resultSelector(enumerator.current(), middleEnumerator.current()));
}
Utils.dispose(middleEnumerator);
middleEnumerator = null;
} while (enumerator.moveNext());
return false;
},
function () {
try {
Utils.dispose(enumerator);
}
finally {
Utils.dispose(middleEnumerator);
}
});
});
};
// Overload:function(predicate<element>)
// Overload:function(predicate<element,index>)
Enumerable.prototype.where = function (predicate) {
predicate = Utils.createLambda(predicate);
if (predicate.length <= 1) {
return new WhereEnumerable(this, predicate);
}
else {
var source = this;
return new Enumerable(function () {
var enumerator;
var index = 0;
return new IEnumerator(
function () { enumerator = source.getEnumerator(); },
function () {
while (enumerator.moveNext()) {
if (predicate(enumerator.current(), index++)) {
return this.yieldReturn(enumerator.current());
}
}
return false;
},
function () { Utils.dispose(enumerator); });
});
}
};
// Overload:function(selector<element>)
// Overload:function(selector<element,index>)
Enumerable.prototype.choose = function (selector) {
selector = Utils.createLambda(selector);
var source = this;
return new Enumerable(function () {
var enumerator;
var index = 0;
return new IEnumerator(
function () { enumerator = source.getEnumerator(); },
function () {
while (enumerator.moveNext()) {
var result = selector(enumerator.current(), index++);
if (result != null) {
return this.yieldReturn(result);
}
}
return this.yieldBreak();
},
function () { Utils.dispose(enumerator); });
});
};
Enumerable.prototype.ofType = function (type) {
var typeName;
switch (type) {
case Number:
typeName = Types.Number;
break;
case String:
typeName = Types.String;
break;
case Boolean:
typeName = Types.Boolean;
break;
case Function:
typeName = Types.Function;
break;
default:
typeName = null;
break;
}
return (typeName === null)
? this.where(function (x) { return x instanceof type; })
: this.where(function (x) { return typeof x === typeName; });
};
// mutiple arguments, last one is selector, others are enumerable
Enumerable.prototype.zip = function () {
var args = arguments;
var selector = Utils.createLambda(arguments[arguments.length - 1]);
var source = this;
// optimized case:argument is 2
if (arguments.length == 2) {
var second = arguments[0];
return new Enumerable(function () {
var firstEnumerator;
var secondEnumerator;
var index = 0;
return new IEnumerator(
function () {
firstEnumerator = source.getEnumerator();
secondEnumerator = Enumerable.from(second).getEnumerator();
},
function () {
if (firstEnumerator.moveNext() && secondEnumerator.moveNext()) {
return this.yieldReturn(selector(firstEnumerator.current(), secondEnumerator.current(), index++));
}
return false;
},
function () {
try {
Utils.dispose(firstEnumerator);
} finally {
Utils.dispose(secondEnumerator);
}
});
});
}
else {
return new Enumerable(function () {
var enumerators;
var index = 0;
return new IEnumerator(
function () {
var array = Enumerable.make(source)
.concat(Enumerable.from(args).takeExceptLast().select(Enumerable.from))
.select(function (x) { return x.getEnumerator() })
.toArray();
enumerators = Enumerable.from(array);
},
function () {
if (enumerators.all(function (x) { return x.moveNext() })) {
var array = enumerators
.select(function (x) { return x.current() })
.toArray();
array.push(index++);
return this.yieldReturn(selector.apply(null, array));
}
else {
return this.yieldBreak();
}
},
function () {
Enumerable.from(enumerators).forEach(Utils.dispose);
});
});
}
};
// mutiple arguments
Enumerable.prototype.merge = function () {
var args = arguments;
var source = this;
return new Enumerable(function () {
var enumerators;
var index = -1;
return new IEnumerator(
function () {
enumerators = Enumerable.make(source)
.concat(Enumerable.from(args).select(Enumerable.from))
.select(function (x) { return x.getEnumerator() })
.toArray();
},
function () {
while (enumerators.length > 0) {
index = (index >= enumerators.length - 1) ? 0 : index + 1;
var enumerator = enumerators[index];
if (enumerator.moveNext()) {
return this.yieldReturn(enumerator.current());
}
else {
enumerator.dispose();
enumerators.splice(index--, 1);
}
}
return this.yieldBreak();
},
function () {
Enumerable.from(enumerators).forEach(Utils.dispose);
});
});
};
/* Join Methods */
// Overload:function (inner, outerKeySelector, innerKeySelector, resultSelector)
// Overload:function (inner, outerKeySelector, innerKeySelector, resultSelector, compareSelector)
Enumerable.prototype.join = function (inner, outerKeySelector, innerKeySelector, resultSelector, compareSelector) {
outerKeySelector = Utils.createLambda(outerKeySelector);
innerKeySelector = Utils.createLambda(innerKeySelector);
resultSelector = Utils.createLambda(resultSelector);
compareSelector = Utils.createLambda(compareSelector);
var source = this;
return new Enumerable(function () {
var outerEnumerator;
var lookup;
var innerElements = null;
var innerCount = 0;
return new IEnumerator(
function () {
outerEnumerator = source.getEnumerator();
lookup = Enumerable.from(inner).toLookup(innerKeySelector, Functions.Identity, compareSelector);
},
function () {
while (true) {
if (innerElements != null) {
var innerElement = innerElements[innerCount++];
if (innerElement !== undefined) {
return this.yieldReturn(resultSelector(outerEnumerator.current(), innerElement));
}
innerElement = null;
innerCount = 0;
}
if (outerEnumerator.moveNext()) {
var key = outerKeySelector(outerEnumerator.current());
innerElements = lookup.get(key).toArray();
} else {
return false;
}
}
},
function () { Utils.dispose(outerEnumerator); });
});
};
// Overload:function (inner, outerKeySelector, innerKeySelector, resultSelector)
// Overload:function (inner, outerKeySelector, innerKeySelector, resultSelector, compareSelector)
Enumerable.prototype.groupJoin = function (inner, outerKeySelector, innerKeySelector, resultSelector, compareSelector) {
outerKeySelector = Utils.createLambda(outerKeySelector);
innerKeySelector = Utils.createLambda(innerKeySelector);
resultSelector = Utils.createLambda(resultSelector);
compareSelector = Utils.createLambda(compareSelector);
var source = this;
return new Enumerable(function () {
var enumerator = source.getEnumerator();
var lookup = null;
return new IEnumerator(
function () {
enumerator = source.getEnumerator();
lookup = Enumerable.from(inner).toLookup(innerKeySelector, Functions.Identity, compareSelector);
},
function () {
if (enumerator.moveNext()) {
var innerElement = lookup.get(outerKeySelector(enumerator.current()));
return this.yieldReturn(resultSelector(enumerator.current(), innerElement));
}
return false;
},
function () { Utils.dispose(enumerator); });
});
};
/* Set Methods */
Enumerable.prototype.all = function (predicate) {
predicate = Utils.createLambda(predicate);
var result = true;
this.forEach(function (x) {
if (!predicate(x)) {
result = false;
return false; // break
}
});
return result;
};
// Overload:function()
// Overload:function(predicate)
Enumerable.prototype.any = function (predicate) {
predicate = Utils.createLambda(predicate);
var enumerator = this.getEnumerator();
try {
if (arguments.length == 0) return enumerator.moveNext(); // case:function()
while (enumerator.moveNext()) // case:function(predicate)
{
if (predicate(enumerator.current())) return true;
}
return false;
}
finally {
Utils.dispose(enumerator);
}
};
Enumerable.prototype.isEmpty = function () {
return !this.any();
};
// multiple arguments
Enumerable.prototype.concat = function () {
var source = this;
if (arguments.length == 1) {
var second = arguments[0];
return new Enumerable(function () {
var firstEnumerator;
var secondEnumerator;
return new IEnumerator(
function () { firstEnumerator = source.getEnumerator(); },
function () {
if (secondEnumerator == null) {
if (firstEnumerator.moveNext()) return this.yieldReturn(firstEnumerator.current());
secondEnumerator = Enumerable.from(second).getEnumerator();
}
if (secondEnumerator.moveNext()) return this.yieldReturn(secondEnumerator.current());
return false;
},
function () {
try {
Utils.dispose(firstEnumerator);
}
finally {
Utils.dispose(secondEnumerator);
}
});
});
}
else {
var args = arguments;
return new Enumerable(function () {
var enumerators;
return new IEnumerator(
function () {
enumerators = Enumerable.make(source)
.concat(Enumerable.from(args).select(Enumerable.from))
.select(function (x) { return x.getEnumerator() })
.toArray();
},
function () {
while (enumerators.length > 0) {
var enumerator = enumerators[0];
if (enumerator.moveNext()) {
return this.yieldReturn(enumerator.current());
}
else {
enumerator.dispose();
enumerators.splice(0, 1);
}
}
return this.yieldBreak();
},
function () {
Enumerable.from(enumerators).forEach(Utils.dispose);
});
});
}
};
Enumerable.prototype.insert = function (index, second) {
var source = this;
return new Enumerable(function () {
var firstEnumerator;
var secondEnumerator;
var count = 0;
var isEnumerated = false;
return new IEnumerator(
function () {
firstEnumerator = source.getEnumerator();
secondEnumerator = Enumerable.from(second).getEnumerator();
},
function () {
if (count == index && secondEnumerator.moveNext()) {
isEnumerated = true;
return this.yieldReturn(secondEnumerator.current());
}
if (firstEnumerator.moveNext()) {
count++;
return this.yieldReturn(firstEnumerator.current());
}
if (!isEnumerated && secondEnumerator.moveNext()) {
return this.yieldReturn(secondEnumerator.current());
}
return false;
},
function () {
try {
Utils.dispose(firstEnumerator);
}
finally {
Utils.dispose(secondEnumerator);
}
});
});
};
Enumerable.prototype.alternate = function (alternateValueOrSequence) {
var source = this;
return new Enumerable(function () {
var buffer;
var enumerator;
var alternateSequence;
var alternateEnumerator;
return new IEnumerator(
function () {
if (alternateValueOrSequence instanceof Array || alternateValueOrSequence.getEnumerator != null) {
alternateSequence = Enumerable.from(Enumerable.from(alternateValueOrSequence).toArray()); // freeze
}
else {
alternateSequence = Enumerable.make(alternateValueOrSequence);
}
enumerator = source.getEnumerator();
if (enumerator.moveNext()) buffer = enumerator.current();
},
function () {
while (true) {
if (alternateEnumerator != null) {
if (alternateEnumerator.moveNext()) {
return this.yieldReturn(alternateEnumerator.current());
}
else {
alternateEnumerator = null;
}
}
if (buffer == null && enumerator.moveNext()) {
buffer = enumerator.current(); // hasNext
alternateEnumerator = alternateSequence.getEnumerator();
continue; // GOTO
}
else if (buffer != null) {
var retVal = buffer;
buffer = null;
return this.yieldReturn(retVal);
}
return this.yieldBreak();
}
},
function () {
try {
Utils.dispose(enumerator);
}
finally {
Utils.dispose(alternateEnumerator);
}
});
});
};
// Overload:function(value)
// Overload:function(value, compareSelector)
Enumerable.prototype.contains = function (value, compareSelector) {
compareSelector = Utils.createLambda(compareSelector);
var enumerator = this.getEnumerator();
try {
while (enumerator.moveNext()) {
if (compareSelector(enumerator.current()) === value) return true;
}
return false;
}
finally {
Utils.dispose(enumerator);
}
};
Enumerable.prototype.defaultIfEmpty = function (defaultValue) {
var source = this;
if (defaultValue === undefined) defaultValue = null;
return new Enumerable(function () {
var enumerator;
var isFirst = true;
return new IEnumerator(
function () { enumerator = source.getEnumerator(); },
function () {
if (enumerator.moveNext()) {
isFirst = false;
return this.yieldReturn(enumerator.current());
}
else if (isFirst) {
isFirst = false;
return this.yieldReturn(defaultValue);
}
return false;
},
function () { Utils.dispose(enumerator); });
});
};
// Overload:function()
// Overload:function(compareSelector)
Enumerable.prototype.distinct = function (compareSelector) {
return this.except(Enumerable.empty(), compareSelector);
};
Enumerable.prototype.distinctUntilChanged = function (compareSelector) {
compareSelector = Utils.createLambda(compareSelector);
var source = this;
return new Enumerable(function () {
var enumerator;
var compareKey;
var initial;
return new IEnumerator(
function () {
enumerator = source.getEnumerator();
},
function () {
while (enumerator.moveNext()) {
var key = compareSelector(enumerator.current());
if (initial) {
initial = false;
compareKey = key;
return this.yieldReturn(enumerator.current());
}
if (compareKey === key) {
continue;
}
compareKey = key;
return this.yieldReturn(enumerator.current());
}
return this.yieldBreak();
},
function () { Utils.dispose(enumerator); });
});
};
// Overload:function(second)
// Overload:function(second, compareSelector)
Enumerable.prototype.except = function (second, compareSelector) {
compareSelector = Utils.createLambda(compareSelector);
var source = this;
return new Enumerable(function () {
var enumerator;
var keys;
return new IEnumerator(
function () {
enumerator = source.getEnumerator();
keys = new Dictionary(compareSelector);
Enumerable.from(second).forEach(function (key) { keys.add(key); });
},
function () {
while (enumerator.moveNext()) {
var current = enumerator.current();
if (!keys.contains(current)) {
keys.add(current);
return this.yieldReturn(current);
}
}
return false;
},
function () { Utils.dispose(enumerator); });
});
};
// Overload:function(second)
// Overload:function(second, compareSelector)
Enumerable.prototype.intersect = function (second, compareSelector) {
compareSelector = Utils.createLambda(compareSelector);
var source = this;
return new Enumerable(function () {
var enumerator;
var keys;
var outs;
return new IEnumerator(
function () {
enumerator = source.getEnumerator();
keys = new Dictionary(compareSelector);
Enumerable.from(second).forEach(function (key) { keys.add(key); });
outs = new Dictionary(compareSelector);
},
function () {
while (enumerator.moveNext()) {
var current = enumerator.current();
if (!outs.contains(current) && keys.contains(current)) {
outs.add(current);
return this.yieldReturn(current);
}
}
return false;
},
function () { Utils.dispose(enumerator); });
});
};
// Overload:function(second)
// Overload:function(second, compareSelector)
Enumerable.prototype.sequenceEqual = function (second, compareSelector) {
compareSelector = Utils.createLambda(compareSelector);
var firstEnumerator = this.getEnumerator();
try {
var secondEnumerator = Enumerable.from(second).getEnumerator();
try {
while (firstEnumerator.moveNext()) {
if (!secondEnumerator.moveNext()
|| compareSelector(firstEnumerator.current()) !== compareSelector(secondEnumerator.current())) {
return false;
}
}
if (secondEnumerator.moveNext()) return false;
return true;
}
finally {
Utils.dispose(secondEnumerator);
}
}
finally {
Utils.dispose(firstEnumerator);
}
};
Enumerable.prototype.union = function (second, compareSelector) {
compareSelector = Utils.createLambda(compareSelector);
var source = this;
return new Enumerable(function () {
var firstEnumerator;
var secondEnumerator;
var keys;
return new IEnumerator(
function () {
firstEnumerator = source.getEnumerator();
keys = new Dictionary(compareSelector);
},
function () {
var current;
if (secondEnumerator === undefined) {
while (firstEnumerator.moveNext()) {
current = firstEnumerator.current();
if (!keys.contains(current)) {
keys.add(current);
return this.yieldReturn(current);
}
}
secondEnumerator = Enumerable.from(second).getEnumerator();
}
while (secondEnumerator.moveNext()) {
current = secondEnumerator.current();
if (!keys.contains(current)) {
keys.add(current);
return this.yieldReturn(current);
}
}
return false;
},
function () {
try {
Utils.dispose(firstEnumerator);
}
finally {
Utils.dispose(secondEnumerator);
}
});
});
};
/* Ordering Methods */
// Deferred stable ascending sort by key; refine with thenBy/thenByDescending.
Enumerable.prototype.orderBy = function (keySelector) {
    return new OrderedEnumerable(this, keySelector, false);
};
// Deferred stable descending sort by key.
Enumerable.prototype.orderByDescending = function (keySelector) {
    return new OrderedEnumerable(this, keySelector, true);
};
// Lazily yields the sequence in reverse order; the source is materialized
// once on the first moveNext().
Enumerable.prototype.reverse = function () {
    var source = this;
    return new Enumerable(function () {
        var buffer;
        var index;
        return new IEnumerator(
            function () {
                buffer = source.toArray();
                index = buffer.length;
            },
            function () {
                return (index > 0)
                    ? this.yieldReturn(buffer[--index])
                    : false;
            },
            Functions.Blank);
    });
};
// Lazily yields the elements in uniformly random order. The source is
// materialized once on the first moveNext().
Enumerable.prototype.shuffle = function () {
    var source = this;
    return new Enumerable(function () {
        var buffer;
        return new IEnumerator(
            function () { buffer = source.toArray(); },
            function () {
                // Incremental Fisher-Yates: pick a uniformly random element,
                // fill its slot with the last element, and shrink the array.
                // O(1) per draw instead of the O(n) cost of Array#splice,
                // with the same uniform-permutation distribution.
                if (buffer.length > 0) {
                    var i = Math.floor(Math.random() * buffer.length);
                    var value = buffer[i];
                    buffer[i] = buffer[buffer.length - 1];
                    buffer.pop();
                    return this.yieldReturn(value);
                }
                return false;
            },
            Functions.Blank);
    });
};
// Infinite lazy sequence of elements drawn at random with replacement, each
// element's probability proportional to weightSelector(x). Elements with
// weight <= 0 are excluded entirely.
// NOTE(review): the draw `floor(random()*totalWeight)+1` assumes integer
// weights; fractional weights would skew the distribution — confirm intended
// usage.
Enumerable.prototype.weightedSample = function (weightSelector) {
    weightSelector = Utils.createLambda(weightSelector);
    var source = this;
    return new Enumerable(function () {
        var sortedByBound;
        var totalWeight = 0;
        return new IEnumerator(
            function () {
                // Build cumulative bounds: element i owns the draw range
                // (previous bound, bound[i]].
                sortedByBound = source
                    .choose(function (x) {
                        var weight = weightSelector(x);
                        if (weight <= 0) return null; // ignore 0
                        totalWeight += weight;
                        return { value: x, bound: totalWeight };
                    })
                    .toArray();
            },
            function () {
                if (sortedByBound.length > 0) {
                    // Binary-search for the first element whose bound covers
                    // the draw.
                    var draw = Math.floor(Math.random() * totalWeight) + 1;
                    var lower = -1;
                    var upper = sortedByBound.length;
                    while (upper - lower > 1) {
                        var index = Math.floor((lower + upper) / 2);
                        if (sortedByBound[index].bound >= draw) {
                            upper = index;
                        }
                        else {
                            lower = index;
                        }
                    }
                    return this.yieldReturn(sortedByBound[upper].value);
                }
                return this.yieldBreak(); // nothing has positive weight
            },
            Functions.Blank);
    });
};
/* Grouping Methods */
// Overload:function(keySelector)
// Overload:function(keySelector,elementSelector)
// Overload:function(keySelector,elementSelector,resultSelector)
// Overload:function(keySelector,elementSelector,resultSelector,compareSelector)
// Groups ALL elements by key (the whole source is drained into a Lookup on
// first moveNext), then lazily yields one group — or resultSelector(key,
// group) — per distinct key, in first-seen key order.
Enumerable.prototype.groupBy = function (keySelector, elementSelector, resultSelector, compareSelector) {
    var source = this;
    keySelector = Utils.createLambda(keySelector);
    elementSelector = Utils.createLambda(elementSelector);
    if (resultSelector != null) resultSelector = Utils.createLambda(resultSelector);
    compareSelector = Utils.createLambda(compareSelector);
    return new Enumerable(function () {
        var enumerator;
        return new IEnumerator(
            function () {
                enumerator = source.toLookup(keySelector, elementSelector, compareSelector)
                    .toEnumerable()
                    .getEnumerator();
            },
            function () {
                while (enumerator.moveNext()) {
                    return (resultSelector == null)
                        ? this.yieldReturn(enumerator.current())
                        : this.yieldReturn(resultSelector(enumerator.current().key(), enumerator.current()));
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};
// Overload:function(keySelector)
// Overload:function(keySelector,elementSelector)
// Overload:function(keySelector,elementSelector,resultSelector)
// Overload:function(keySelector,elementSelector,resultSelector,compareSelector)
// Like groupBy but only groups ADJACENT elements sharing a key (a run-length
// style partition), so it stays fully lazy: one group is buffered at a time.
Enumerable.prototype.partitionBy = function (keySelector, elementSelector, resultSelector, compareSelector) {
    var source = this;
    keySelector = Utils.createLambda(keySelector);
    elementSelector = Utils.createLambda(elementSelector);
    compareSelector = Utils.createLambda(compareSelector);
    var hasResultSelector;
    if (resultSelector == null) {
        hasResultSelector = false;
        resultSelector = function (key, group) { return new Grouping(key, group); };
    }
    else {
        hasResultSelector = true;
        resultSelector = Utils.createLambda(resultSelector);
    }
    return new Enumerable(function () {
        var enumerator;
        var key;        // key of the group currently being accumulated
        var compareKey; // projected key used for run comparison
        var group = [];
        return new IEnumerator(
            function () {
                // Prime the first group with the first element, if any.
                enumerator = source.getEnumerator();
                if (enumerator.moveNext()) {
                    key = keySelector(enumerator.current());
                    compareKey = compareSelector(key);
                    group.push(elementSelector(enumerator.current()));
                }
            },
            function () {
                var hasNext;
                // Extend the current run while the key keeps matching.
                while ((hasNext = enumerator.moveNext()) == true) {
                    if (compareKey === compareSelector(keySelector(enumerator.current()))) {
                        group.push(elementSelector(enumerator.current()));
                    }
                    else break;
                }
                if (group.length > 0) {
                    var result = (hasResultSelector)
                        ? resultSelector(key, Enumerable.from(group))
                        : resultSelector(key, group);
                    if (hasNext) {
                        // The element that broke the run starts the next group.
                        key = keySelector(enumerator.current());
                        compareKey = compareSelector(key);
                        group = [elementSelector(enumerator.current())];
                    }
                    else group = [];
                    return this.yieldReturn(result);
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};
// Lazily yields arrays of up to `count` consecutive elements; the final
// chunk may be shorter. (A non-positive `count` degenerates to 1-element
// chunks because of the post-increment test.)
Enumerable.prototype.buffer = function (count) {
    var source = this;
    return new Enumerable(function () {
        var enumerator;
        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                var array = [];
                var index = 0;
                while (enumerator.moveNext()) {
                    array.push(enumerator.current());
                    if (++index >= count) return this.yieldReturn(array);
                }
                if (array.length > 0) return this.yieldReturn(array);
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};
/* Aggregate Methods */
// Overload:function(func)
// Overload:function(seed,func)
// Overload:function(seed,func,resultSelector)
// Folds the sequence via scan() and applies resultSelector (identity when
// omitted) to the final accumulated value.
// NOTE(review): resultSelector is also forwarded as scan()'s third argument;
// if scan ever honors a third parameter the selector would be applied twice —
// confirm against scan's signature (defined earlier in this file).
Enumerable.prototype.aggregate = function (seed, func, resultSelector) {
    resultSelector = Utils.createLambda(resultSelector);
    return resultSelector(this.scan(seed, func, resultSelector).last());
};
// Overload:function()
// Overload:function(selector)
// Arithmetic mean of the (optionally projected) elements.
// NOTE: an empty sequence returns NaN (0/0) rather than throwing.
Enumerable.prototype.average = function (selector) {
    selector = Utils.createLambda(selector);
    var sum = 0;
    var count = 0;
    this.forEach(function (x) {
        sum += selector(x);
        ++count;
    });
    return sum / count;
};
// Overload:function()
// Overload:function(predicate)
// Eagerly counts the elements, optionally only those matching `predicate`.
Enumerable.prototype.count = function (predicate) {
    predicate = (predicate == null) ? Functions.True : Utils.createLambda(predicate);
    var total = 0;
    this.forEach(function (item, index) {
        if (predicate(item, index)) total += 1;
    });
    return total;
};
// Overload:function()
// Overload:function(selector)
// Largest (optionally projected) value, found by folding with a pairwise
// comparison. NOTE(review): empty-sequence behavior depends on scan()/last()
// inside aggregate — presumably throws; confirm.
Enumerable.prototype.max = function (selector) {
    if (selector == null) selector = Functions.Identity;
    return this.select(selector).aggregate(function (a, b) { return (a > b) ? a : b; });
};
// Overload:function()
// Overload:function(selector)
// Smallest (optionally projected) value.
Enumerable.prototype.min = function (selector) {
    if (selector == null) selector = Functions.Identity;
    return this.select(selector).aggregate(function (a, b) { return (a < b) ? a : b; });
};
// Element whose key is largest. keySelector runs twice per comparison, so it
// should be cheap and side-effect free.
Enumerable.prototype.maxBy = function (keySelector) {
    keySelector = Utils.createLambda(keySelector);
    return this.aggregate(function (a, b) { return (keySelector(a) > keySelector(b)) ? a : b; });
};
// Element whose key is smallest.
Enumerable.prototype.minBy = function (keySelector) {
    keySelector = Utils.createLambda(keySelector);
    return this.aggregate(function (a, b) { return (keySelector(a) < keySelector(b)) ? a : b; });
};
// Overload:function()
// Overload:function(selector)
// Adds up the (optionally projected) elements, starting from 0, so an empty
// sequence sums to 0.
Enumerable.prototype.sum = function (selector) {
    selector = (selector == null) ? Functions.Identity : selector;
    return this.select(selector).aggregate(0, function (acc, value) { return acc + value; });
};
/* Paging Methods */
// Returns the element at position `index`, throwing when out of range.
Enumerable.prototype.elementAt = function (index) {
    var result;
    var hit = false;
    this.forEach(function (item, i) {
        if (i == index) {
            result = item;
            hit = true;
            return false; // stop enumerating
        }
    });
    if (hit) return result;
    throw new Error("index is less than 0 or greater than or equal to the number of elements in source.");
};
// Like elementAt, but returns `defaultValue` (null when omitted) instead of
// throwing when `index` is out of range.
Enumerable.prototype.elementAtOrDefault = function (index, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    var result = defaultValue;
    this.forEach(function (item, i) {
        if (i == index) {
            result = item;
            return false; // stop enumerating
        }
    });
    return result;
};
// Overload:function()
// Overload:function(predicate)
// First element (optionally the first matching `predicate`); throws when no
// such element exists.
Enumerable.prototype.first = function (predicate) {
    if (predicate != null) return this.where(predicate).first();
    var result;
    var hit = false;
    this.forEach(function (item) {
        result = item;
        hit = true;
        return false; // stop after the first element
    });
    if (!hit) throw new Error("first:No element satisfies the condition.");
    return result;
};
// Overload:function(predicate)
// Overload:function(predicate, defaultValue)
// First element (optionally matching `predicate`), or `defaultValue`
// (null when omitted) for an empty/filtered-out sequence.
Enumerable.prototype.firstOrDefault = function (predicate, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    if (predicate != null) return this.where(predicate).firstOrDefault(null, defaultValue);
    var result = defaultValue;
    var hit = false;
    this.forEach(function (item) {
        result = item;
        hit = true;
        return false; // stop after the first element
    });
    return hit ? result : defaultValue;
};
// Overload:function()
// Overload:function(predicate)
// Last element (optionally the last matching `predicate`); throws when no
// such element exists. Enumerates the whole sequence.
Enumerable.prototype.last = function (predicate) {
    if (predicate != null) return this.where(predicate).last();
    var result;
    var hit = false;
    this.forEach(function (item) {
        hit = true;
        result = item;
    });
    if (!hit) throw new Error("last:No element satisfies the condition.");
    return result;
};
// Overload:function(predicate)
// Overload:function(predicate, defaultValue)
// Last element (optionally matching `predicate`), or `defaultValue`
// (null when omitted) for an empty/filtered-out sequence.
Enumerable.prototype.lastOrDefault = function (predicate, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    if (predicate != null) return this.where(predicate).lastOrDefault(null, defaultValue);
    var result = defaultValue;
    var hit = false;
    this.forEach(function (item) {
        hit = true;
        result = item;
    });
    return hit ? result : defaultValue;
};
// Overload:function()
// Overload:function(predicate)
// The one and only element (optionally matching `predicate`); throws when
// the sequence is empty or contains more than one element.
Enumerable.prototype.single = function (predicate) {
    if (predicate != null) return this.where(predicate).single();
    var result;
    var hit = false;
    this.forEach(function (item) {
        if (hit) throw new Error("single:sequence contains more than one element.");
        hit = true;
        result = item;
    });
    if (!hit) throw new Error("single:No element satisfies the condition.");
    return result;
};
// Overload:function(predicate)
// Overload:function(predicate, defaultValue)
// Like single(), but an empty sequence yields `defaultValue` (null when
// omitted) instead of throwing; more than one element still throws.
Enumerable.prototype.singleOrDefault = function (predicate, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    if (predicate != null) return this.where(predicate).singleOrDefault(null, defaultValue);
    var result = defaultValue;
    var hit = false;
    this.forEach(function (item) {
        if (hit) throw new Error("single:sequence contains more than one element.");
        hit = true;
        result = item;
    });
    return hit ? result : defaultValue;
};
// Lazily skips the first `count` elements of the sequence.
Enumerable.prototype.skip = function (count) {
    var source = this;
    return new Enumerable(function () {
        var enumerator;
        var index = 0;
        return new IEnumerator(
            function () {
                enumerator = source.getEnumerator();
                // Advance past the skipped prefix once, on first moveNext.
                while (index++ < count && enumerator.moveNext()) {
                }
                ;
            },
            function () {
                return (enumerator.moveNext())
                    ? this.yieldReturn(enumerator.current())
                    : false;
            },
            function () { Utils.dispose(enumerator); });
    });
};
// Overload:function(predicate<element>)
// Overload:function(predicate<element,index>)
// Skips elements while `predicate` holds, then yields everything after —
// starting with the first non-matching element itself.
Enumerable.prototype.skipWhile = function (predicate) {
    predicate = Utils.createLambda(predicate);
    var source = this;
    return new Enumerable(function () {
        var enumerator;
        var index = 0;
        var isSkipEnd = false; // set once the skipping phase finishes
        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                while (!isSkipEnd) {
                    if (enumerator.moveNext()) {
                        if (!predicate(enumerator.current(), index++)) {
                            isSkipEnd = true;
                            return this.yieldReturn(enumerator.current());
                        }
                        continue;
                    } else return false;
                }
                return (enumerator.moveNext())
                    ? this.yieldReturn(enumerator.current())
                    : false;
            },
            function () { Utils.dispose(enumerator); });
    });
};
// Lazily yields at most the first `count` elements.
Enumerable.prototype.take = function (count) {
    var source = this;
    return new Enumerable(function () {
        var enumerator;
        var index = 0;
        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                return (index++ < count && enumerator.moveNext())
                    ? this.yieldReturn(enumerator.current())
                    : false;
            },
            function () { Utils.dispose(enumerator); }
        );
    });
};
// Overload:function(predicate<element>)
// Overload:function(predicate<element,index>)
// Yields elements while `predicate` holds; the first failing element ends
// the sequence and is not yielded.
Enumerable.prototype.takeWhile = function (predicate) {
    predicate = Utils.createLambda(predicate);
    var source = this;
    return new Enumerable(function () {
        var enumerator;
        var index = 0;
        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                return (enumerator.moveNext() && predicate(enumerator.current(), index++))
                    ? this.yieldReturn(enumerator.current())
                    : false;
            },
            function () { Utils.dispose(enumerator); });
    });
};
// Overload:function()
// Overload:function(count)
// Yields all but the last `count` (default 1) elements, using a sliding
// queue of size `count` so the source is enumerated only once.
Enumerable.prototype.takeExceptLast = function (count) {
    if (count == null) count = 1;
    var source = this;
    return new Enumerable(function () {
        if (count <= 0) return source.getEnumerator(); // do nothing
        var enumerator;
        var q = [];
        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                while (enumerator.moveNext()) {
                    if (q.length == count) {
                        // Queue full: the oldest buffered element can no
                        // longer be one of the last `count`, so emit it.
                        q.push(enumerator.current());
                        return this.yieldReturn(q.shift());
                    }
                    q.push(enumerator.current());
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};
// Yields only the last `count` elements. The whole source is drained into a
// bounded queue on the first moveNext, then the queue is replayed.
Enumerable.prototype.takeFromLast = function (count) {
    if (count <= 0 || count == null) return Enumerable.empty();
    var source = this;
    return new Enumerable(function () {
        var sourceEnumerator;
        var enumerator;
        var q = [];
        return new IEnumerator(
            function () { sourceEnumerator = source.getEnumerator(); },
            function () {
                // Keep only the most recent `count` elements.
                while (sourceEnumerator.moveNext()) {
                    if (q.length == count) q.shift();
                    q.push(sourceEnumerator.current());
                }
                if (enumerator == null) {
                    enumerator = Enumerable.from(q).getEnumerator();
                }
                return (enumerator.moveNext())
                    ? this.yieldReturn(enumerator.current())
                    : false;
            },
            function () { Utils.dispose(enumerator); });
    });
};
// Overload:function(item)
// Overload:function(predicate)
// Index of the first element strict-equal to `item` — or, when `item` is a
// function, the first element satisfying it. Returns -1 when absent.
Enumerable.prototype.indexOf = function (item) {
    var result = -1;
    // Normalize both overloads into a single (x, i) test.
    var test = (typeof (item) === Types.Function)
        ? item
        : function (x) { return x === item; };
    this.forEach(function (x, i) {
        if (test(x, i)) {
            result = i;
            return false; // stop at the first hit
        }
    });
    return result;
};
// Overload:function(item)
// Overload:function(predicate)
// Index of the last element strict-equal to `item` — or, when `item` is a
// function, the last element satisfying it. Returns -1 when absent.
Enumerable.prototype.lastIndexOf = function (item) {
    var result = -1;
    var test = (typeof (item) === Types.Function)
        ? item
        : function (x) { return x === item; };
    this.forEach(function (x, i) {
        if (test(x, i)) result = i; // keep overwriting; last hit wins
    });
    return result;
};
/* Convert Methods */
// Wraps this sequence in a plain Enumerable facade.
Enumerable.prototype.asEnumerable = function () {
    return Enumerable.from(this);
};
// Eagerly materializes the sequence into a new array.
Enumerable.prototype.toArray = function () {
    var result = [];
    this.forEach(function (item) {
        result.push(item);
    });
    return result;
};
// Overload:function(keySelector)
// Overload:function(keySelector, elementSelector)
// Overload:function(keySelector, elementSelector, compareSelector)
// Eagerly groups elements into a Lookup (key -> array of projected elements),
// preserving first-seen key order via the insertion-ordered Dictionary.
Enumerable.prototype.toLookup = function (keySelector, elementSelector, compareSelector) {
    keySelector = Utils.createLambda(keySelector);
    elementSelector = Utils.createLambda(elementSelector);
    compareSelector = Utils.createLambda(compareSelector);
    var dict = new Dictionary(compareSelector);
    this.forEach(function (x) {
        var key = keySelector(x);
        var element = elementSelector(x);
        var array = dict.get(key);
        if (array !== undefined) array.push(element);
        else dict.add(key, [element]);
    });
    return new Lookup(dict);
};
// Eagerly builds a plain object mapping keySelector(x) to elementSelector(x).
// A later element with the same key overwrites the earlier one.
Enumerable.prototype.toObject = function (keySelector, elementSelector) {
    keySelector = Utils.createLambda(keySelector);
    elementSelector = Utils.createLambda(elementSelector);
    var result = {};
    this.forEach(function (item) {
        result[keySelector(item)] = elementSelector(item);
    });
    return result;
};
// Overload:function(keySelector, elementSelector)
// Overload:function(keySelector, elementSelector, compareSelector)
// Eagerly builds a Dictionary keyed by keySelector(x). Duplicate keys are
// not rejected: Dictionary.add replaces the earlier entry in place.
Enumerable.prototype.toDictionary = function (keySelector, elementSelector, compareSelector) {
    keySelector = Utils.createLambda(keySelector);
    elementSelector = Utils.createLambda(elementSelector);
    compareSelector = Utils.createLambda(compareSelector);
    var dict = new Dictionary(compareSelector);
    this.forEach(function (x) {
        dict.add(keySelector(x), elementSelector(x));
    });
    return dict;
};
// Overload:function()
// Overload:function(replacer)
// Overload:function(replacer, space)
// Serializes the materialized sequence with the native JSON.stringify
// (or a json2.js shim); throws when neither is available.
Enumerable.prototype.toJSONString = function (replacer, space) {
    if (typeof JSON === Types.Undefined || JSON.stringify == null) {
        throw new Error("toJSONString can't find JSON.stringify. This works native JSON support Browser or include json2.js");
    }
    return JSON.stringify(this.toArray(), replacer, space);
};
// Overload:function()
// Overload:function(separator)
// Overload:function(separator,selector)
// Eagerly joins the (optionally projected) elements into one string.
Enumerable.prototype.toJoinedString = function (separator, selector) {
    separator = (separator == null) ? "" : separator;
    selector = (selector == null) ? Functions.Identity : selector;
    return this.select(selector).toArray().join(separator);
};
/* Action Methods */
// Overload:function(action<element>)
// Overload:function(action<element,index>)
// Lazy pass-through that invokes `action(element, index)` on each element as
// it flows by, then yields the element unchanged.
Enumerable.prototype.doAction = function (action) {
    var source = this;
    action = Utils.createLambda(action);
    return new Enumerable(function () {
        var enumerator;
        var index = 0;
        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                if (enumerator.moveNext()) {
                    action(enumerator.current(), index++);
                    return this.yieldReturn(enumerator.current());
                }
                return false;
            },
            function () { Utils.dispose(enumerator); });
    });
};
// Overload:function(action<element>)
// Overload:function(action<element,index>)
// Overload:function(func<element,bool>)
// Overload:function(func<element,index,bool>)
// Eagerly enumerates the sequence, calling `action(element, index)` on each
// element. Returning `false` (strictly) from the callback stops early. The
// enumerator is always disposed.
Enumerable.prototype.forEach = function (action) {
    action = Utils.createLambda(action);
    var enumerator = this.getEnumerator();
    var index = 0;
    try {
        for (;;) {
            if (!enumerator.moveNext()) break;
            if (action(enumerator.current(), index++) === false) break;
        }
    } finally {
        Utils.dispose(enumerator);
    }
};
// Overload:function()
// Overload:function(separator)
// Overload:function(separator,selector)
// document.write()s each (optionally projected) element, separated by
// `separator`. Browser-only: requires a global `document`.
Enumerable.prototype.write = function (separator, selector) {
    if (separator == null) separator = "";
    selector = Utils.createLambda(selector);
    var isFirst = true;
    this.forEach(function (item) {
        if (isFirst) isFirst = false;
        else document.write(separator);
        document.write(selector(item));
    });
};
// Overload:function()
// Overload:function(selector)
// document.writeln()s each element followed by an HTML line break.
Enumerable.prototype.writeLine = function (selector) {
    selector = Utils.createLambda(selector);
    this.forEach(function (item) {
        document.writeln(selector(item) + "<br />");
    });
};
// Drains the sequence purely for its side effects (e.g. after doAction),
// then disposes the enumerator.
Enumerable.prototype.force = function () {
    var enumerator = this.getEnumerator();
    try {
        while (enumerator.moveNext()); // discard every element
    }
    finally {
        Utils.dispose(enumerator);
    }
};
/* Functional Methods */
// Passes the whole sequence to `func` and lazily enumerates whatever
// sequence-like value `func` returns (a "let" binding over the source).
Enumerable.prototype.letBind = function (func) {
    func = Utils.createLambda(func);
    var source = this;
    return new Enumerable(function () {
        var enumerator;
        return new IEnumerator(
            function () {
                enumerator = Enumerable.from(func(source)).getEnumerator();
            },
            function () {
                return (enumerator.moveNext())
                    ? this.yieldReturn(enumerator.current())
                    : false;
            },
            function () { Utils.dispose(enumerator); });
    });
};
// Returns an Enumerable whose consumers all share ONE underlying enumerator:
// each getEnumerator() continues from the shared position instead of
// restarting the source. dispose() invalidates it for every consumer.
Enumerable.prototype.share = function () {
    var source = this;
    var sharedEnumerator;
    var disposed = false;
    return new DisposableEnumerable(function () {
        return new IEnumerator(
            function () {
                // Lazily create the single shared enumerator.
                if (sharedEnumerator == null) {
                    sharedEnumerator = source.getEnumerator();
                }
            },
            function () {
                if (disposed) throw new Error("enumerator is disposed");
                return (sharedEnumerator.moveNext())
                    ? this.yieldReturn(sharedEnumerator.current())
                    : false;
            },
            Functions.Blank
        );
    }, function () {
        disposed = true;
        Utils.dispose(sharedEnumerator);
    });
};
// Caches elements as they are first pulled from the source so later (or
// interleaved) enumerations replay from the cache instead of re-running the
// source. dispose() drops the cache and source enumerator.
Enumerable.prototype.memoize = function () {
    var source = this;
    var cache;
    var enumerator;
    var disposed = false;
    return new DisposableEnumerable(function () {
        var index = -1; // per-consumer position; cache/enumerator are shared
        return new IEnumerator(
            function () {
                if (enumerator == null) {
                    enumerator = source.getEnumerator();
                    cache = [];
                }
            },
            function () {
                if (disposed) throw new Error("enumerator is disposed");
                index++;
                if (cache.length <= index) {
                    // Past the cached prefix: pull one more from the source.
                    return (enumerator.moveNext())
                        ? this.yieldReturn(cache[index] = enumerator.current())
                        : false;
                }
                return this.yieldReturn(cache[index]);
            },
            Functions.Blank
        );
    }, function () {
        disposed = true;
        Utils.dispose(enumerator);
        cache = null;
    });
};
/* Error Handling Methods */
// Wraps enumeration in try/catch: when moveNext/current throws, `handler`
// receives the exception and the sequence simply ends (error is swallowed).
Enumerable.prototype.catchError = function (handler) {
    handler = Utils.createLambda(handler);
    var source = this;
    return new Enumerable(function () {
        var enumerator;
        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                try {
                    return (enumerator.moveNext())
                        ? this.yieldReturn(enumerator.current())
                        : false;
                } catch (e) {
                    handler(e);
                    return false;
                }
            },
            function () { Utils.dispose(enumerator); });
    });
};
// Runs `finallyAction` when the enumerator is disposed, even if disposing
// the source enumerator itself throws.
Enumerable.prototype.finallyAction = function (finallyAction) {
    finallyAction = Utils.createLambda(finallyAction);
    var source = this;
    return new Enumerable(function () {
        var enumerator;
        return new IEnumerator(
            function () { enumerator = source.getEnumerator(); },
            function () {
                return (enumerator.moveNext())
                    ? this.yieldReturn(enumerator.current())
                    : false;
            },
            function () {
                try {
                    Utils.dispose(enumerator);
                } finally {
                    finallyAction();
                }
            });
    });
};
/* For Debug Methods */
// Overload:function()
// Overload:function(selector)
// Lazy pass-through that console.logs each (optionally projected) element;
// silently does nothing where `console` is undefined.
Enumerable.prototype.log = function (selector) {
    selector = Utils.createLambda(selector);
    return this.doAction(function (item) {
        if (typeof console !== Types.Undefined) {
            console.log(selector(item));
        }
    });
};
// Overload:function()
// Overload:function(message)
// Overload:function(message,selector)
// Lazy pass-through that console.logs each element prefixed with `message`
// (default "Trace").
Enumerable.prototype.trace = function (message, selector) {
    if (message == null) message = "Trace";
    selector = Utils.createLambda(selector);
    return this.doAction(function (item) {
        if (typeof console !== Types.Undefined) {
            console.log(message, selector(item));
        }
    });
};
// private
// Deferred ordered sequence. Chained thenBy/thenByDescending calls build a
// parent chain of OrderedEnumerables; sorting happens only when
// getEnumerator() is called.
var OrderedEnumerable = function (source, keySelector, descending, parent) {
    this.source = source;
    this.keySelector = Utils.createLambda(keySelector);
    this.descending = descending;
    this.parent = parent;
};
OrderedEnumerable.prototype = new Enumerable();
OrderedEnumerable.prototype.createOrderedEnumerable = function (keySelector, descending) {
    return new OrderedEnumerable(this.source, keySelector, descending, this);
};
// Secondary ascending sort key.
OrderedEnumerable.prototype.thenBy = function (keySelector) {
    return this.createOrderedEnumerable(keySelector, false);
};
// Secondary descending sort key.
OrderedEnumerable.prototype.thenByDescending = function (keySelector) {
    return this.createOrderedEnumerable(keySelector, true);
};
// Buffers the source and sorts an index array using the composed SortContext;
// ties are finally broken by original index, which makes the sort stable.
OrderedEnumerable.prototype.getEnumerator = function () {
    var self = this;
    var buffer;
    var indexes;
    var index = 0;
    return new IEnumerator(
        function () {
            buffer = [];
            indexes = [];
            self.source.forEach(function (item, index) {
                buffer.push(item);
                indexes.push(index);
            });
            var sortContext = SortContext.create(self, null);
            sortContext.GenerateKeys(buffer);
            indexes.sort(function (a, b) { return sortContext.compare(a, b); });
        },
        function () {
            return (index < indexes.length)
                ? this.yieldReturn(buffer[indexes[index++]])
                : false;
        },
        Functions.Blank
    );
};
// Linked comparison context: one node per orderBy/thenBy level, primary key
// at the root, each thenBy as a child.
var SortContext = function (keySelector, descending, child) {
    this.keySelector = keySelector;
    this.descending = descending;
    this.child = child;
    this.keys = null;
};
// Walks the OrderedEnumerable parent chain so the outermost orderBy becomes
// the root context.
SortContext.create = function (orderedEnumerable, currentContext) {
    var context = new SortContext(orderedEnumerable.keySelector, orderedEnumerable.descending, currentContext);
    if (orderedEnumerable.parent != null) return SortContext.create(orderedEnumerable.parent, context);
    return context;
};
// Precomputes every element's sort key at this level (and recursively at
// child levels) so keySelector runs once per element, not per comparison.
SortContext.prototype.GenerateKeys = function (source) {
    var len = source.length;
    var keySelector = this.keySelector;
    var keys = new Array(len);
    for (var i = 0; i < len; i++) keys[i] = keySelector(source[i]);
    this.keys = keys;
    if (this.child != null) this.child.GenerateKeys(source);
};
// Compares by this level's key, delegating ties to the child level and
// finally to the original index (stability).
SortContext.prototype.compare = function (index1, index2) {
    var comparison = Utils.compare(this.keys[index1], this.keys[index2]);
    if (comparison == 0) {
        if (this.child != null) return this.child.compare(index1, index2);
        return Utils.compare(index1, index2);
    }
    return (this.descending) ? -comparison : comparison;
};
// An Enumerable carrying an explicit dispose() hook (used by share/memoize
// to tear down their shared enumerator/cache).
var DisposableEnumerable = function (getEnumerator, dispose) {
    this.dispose = dispose;
    Enumerable.call(this, getEnumerator);
};
DisposableEnumerable.prototype = new Enumerable();
// optimize array or arraylike object
// ArrayEnumerable wraps an array (or array-like) and overrides the methods
// that can exploit direct length/index access instead of full enumeration.
var ArrayEnumerable = function (source) {
    this.getSource = function () { return source; };
};
ArrayEnumerable.prototype = new Enumerable();
// any(): with no predicate, just test length.
ArrayEnumerable.prototype.any = function (predicate) {
    return (predicate == null)
        ? (this.getSource().length > 0)
        : Enumerable.prototype.any.apply(this, arguments);
};
// count(): with no predicate, just return length.
ArrayEnumerable.prototype.count = function (predicate) {
    return (predicate == null)
        ? this.getSource().length
        : Enumerable.prototype.count.apply(this, arguments);
};
// elementAt(): O(1) index access; out of range falls back to the base
// implementation (which throws).
ArrayEnumerable.prototype.elementAt = function (index) {
    var source = this.getSource();
    return (0 <= index && index < source.length)
        ? source[index]
        : Enumerable.prototype.elementAt.apply(this, arguments);
};
ArrayEnumerable.prototype.elementAtOrDefault = function (index, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    var source = this.getSource();
    return (0 <= index && index < source.length)
        ? source[index]
        : defaultValue;
};
// first()/last(): direct access to the ends when no predicate is given.
ArrayEnumerable.prototype.first = function (predicate) {
    var source = this.getSource();
    return (predicate == null && source.length > 0)
        ? source[0]
        : Enumerable.prototype.first.apply(this, arguments);
};
ArrayEnumerable.prototype.firstOrDefault = function (predicate, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    if (predicate != null) {
        return Enumerable.prototype.firstOrDefault.apply(this, arguments);
    }
    var source = this.getSource();
    return source.length > 0 ? source[0] : defaultValue;
};
ArrayEnumerable.prototype.last = function (predicate) {
    var source = this.getSource();
    return (predicate == null && source.length > 0)
        ? source[source.length - 1]
        : Enumerable.prototype.last.apply(this, arguments);
};
ArrayEnumerable.prototype.lastOrDefault = function (predicate, defaultValue) {
    if (defaultValue === undefined) defaultValue = null;
    if (predicate != null) {
        return Enumerable.prototype.lastOrDefault.apply(this, arguments);
    }
    var source = this.getSource();
    return source.length > 0 ? source[source.length - 1] : defaultValue;
};
// skip(): start indexing at `count` instead of advancing an enumerator.
ArrayEnumerable.prototype.skip = function (count) {
    var source = this.getSource();
    return new Enumerable(function () {
        var index;
        return new IEnumerator(
            function () { index = (count < 0) ? 0 : count; },
            function () {
                return (index < source.length)
                    ? this.yieldReturn(source[index++])
                    : false;
            },
            Functions.Blank);
    });
};
// takeExceptLast()/takeFromLast(): computed directly from the known length.
ArrayEnumerable.prototype.takeExceptLast = function (count) {
    if (count == null) count = 1;
    return this.take(this.getSource().length - count);
};
ArrayEnumerable.prototype.takeFromLast = function (count) {
    return this.skip(this.getSource().length - count);
};
// reverse(): index backwards over the source without copying it.
ArrayEnumerable.prototype.reverse = function () {
    var source = this.getSource();
    return new Enumerable(function () {
        var index;
        return new IEnumerator(
            function () {
                index = source.length;
            },
            function () {
                return (index > 0)
                    ? this.yieldReturn(source[--index])
                    : false;
            },
            Functions.Blank);
    });
};
// sequenceEqual(): short-circuit on length mismatch when both sides are
// array-backed and no compareSelector is involved.
ArrayEnumerable.prototype.sequenceEqual = function (second, compareSelector) {
    if ((second instanceof ArrayEnumerable || second instanceof Array)
        && compareSelector == null
        && Enumerable.from(second).count() != this.count()) {
        return false;
    }
    return Enumerable.prototype.sequenceEqual.apply(this, arguments);
};
// toJoinedString(): delegate to Array#join when there is no selector.
ArrayEnumerable.prototype.toJoinedString = function (separator, selector) {
    var source = this.getSource();
    if (selector != null || !(source instanceof Array)) {
        return Enumerable.prototype.toJoinedString.apply(this, arguments);
    }
    if (separator == null) separator = "";
    return source.join(separator);
};
// Minimal hand-rolled enumerator; cheaper than the generic IEnumerator state
// machine for plain index iteration.
ArrayEnumerable.prototype.getEnumerator = function () {
    var source = this.getSource();
    var index = -1;
    // fast and simple enumerator
    return {
        current: function () { return source[index]; },
        moveNext: function () {
            return ++index < source.length;
        },
        dispose: Functions.Blank
    };
};
// optimization for multiple where and multiple select and whereselect
// WhereEnumerable fuses chained where() calls into one composed predicate so
// a single enumerator pass evaluates all of them.
var WhereEnumerable = function (source, predicate) {
    this.prevSource = source;
    this.prevPredicate = predicate; // predicate.length always <= 1
};
WhereEnumerable.prototype = new Enumerable();
WhereEnumerable.prototype.where = function (predicate) {
    predicate = Utils.createLambda(predicate);
    if (predicate.length <= 1) {
        // Compose with the accumulated predicate (short-circuiting &&).
        var prevPredicate = this.prevPredicate;
        var composedPredicate = function (x) { return prevPredicate(x) && predicate(x); };
        return new WhereEnumerable(this.prevSource, composedPredicate);
    }
    else {
        // if predicate use index, can't compose
        return Enumerable.prototype.where.call(this, predicate);
    }
};
WhereEnumerable.prototype.select = function (selector) {
    selector = Utils.createLambda(selector);
    return (selector.length <= 1)
        ? new WhereSelectEnumerable(this.prevSource, this.prevPredicate, selector)
        : Enumerable.prototype.select.call(this, selector);
};
// One pass over the original source, applying the fused predicate.
WhereEnumerable.prototype.getEnumerator = function () {
    var predicate = this.prevPredicate;
    var source = this.prevSource;
    var enumerator;
    return new IEnumerator(
        function () { enumerator = source.getEnumerator(); },
        function () {
            while (enumerator.moveNext()) {
                if (predicate(enumerator.current())) {
                    return this.yieldReturn(enumerator.current());
                }
            }
            return false;
        },
        function () { Utils.dispose(enumerator); });
};
// WhereSelectEnumerable fuses a where() filter followed by select()
// projections into a single enumerator pass.
var WhereSelectEnumerable = function (source, predicate, selector) {
    this.prevSource = source;
    this.prevPredicate = predicate; // predicate.length always <= 1 or null
    this.prevSelector = selector; // selector.length always <= 1
};
WhereSelectEnumerable.prototype = new Enumerable();
WhereSelectEnumerable.prototype.where = function (predicate) {
    predicate = Utils.createLambda(predicate);
    return (predicate.length <= 1)
        ? new WhereEnumerable(this, predicate)
        : Enumerable.prototype.where.call(this, predicate);
};
WhereSelectEnumerable.prototype.select = function (selector) {
    selector = Utils.createLambda(selector);
    if (selector.length <= 1) {
        // Compose the projections: new selector runs on the previous result.
        var prevSelector = this.prevSelector;
        var composedSelector = function (x) { return selector(prevSelector(x)); };
        return new WhereSelectEnumerable(this.prevSource, this.prevPredicate, composedSelector);
    }
    else {
        // if selector use index, can't compose
        return Enumerable.prototype.select.call(this, selector);
    }
};
// One pass over the original source: filter (when present), then project.
WhereSelectEnumerable.prototype.getEnumerator = function () {
    var predicate = this.prevPredicate;
    var selector = this.prevSelector;
    var source = this.prevSource;
    var enumerator;
    return new IEnumerator(
        function () { enumerator = source.getEnumerator(); },
        function () {
            while (enumerator.moveNext()) {
                if (predicate == null || predicate(enumerator.current())) {
                    return this.yieldReturn(selector(enumerator.current()));
                }
            }
            return false;
        },
        function () { Utils.dispose(enumerator); });
};
// Collections
// Dictionary: a hash-backed key/value store that additionally preserves
// insertion order via a doubly-linked list of entries, so enumeration
// yields pairs in the order they were added.
var Dictionary = (function () {
    // static utility methods

    // Safe hasOwnProperty lookup that works even when `target` shadows the method.
    var callHasOwnProperty = function (target, key) {
        return Object.prototype.hasOwnProperty.call(target, key);
    };

    // Derives the string bucket key for an arbitrary value: fixed sentinels
    // for null/undefined, otherwise the value's own toString() when present,
    // falling back to Object.prototype.toString.
    var computeHashCode = function (obj) {
        if (obj === null) return "null";
        if (obj === undefined) return "undefined";
        return (typeof obj.toString === Types.Function)
            ? obj.toString()
            : Object.prototype.toString.call(obj);
    };

    // LinkedList for Dictionary

    // One node of the insertion-order list; the same objects are stored in
    // the hash buckets, so list and buckets always reference shared entries.
    var HashEntry = function (key, value) {
        this.key = key;
        this.value = value;
        this.prev = null;
        this.next = null;
    };

    // Doubly-linked list of HashEntry nodes tracking insertion order.
    var EntryList = function () {
        this.first = null;
        this.last = null;
    };
    EntryList.prototype =
    {
        // Appends `entry` at the tail (or makes it the sole element of an empty list).
        addLast: function (entry) {
            if (this.last != null) {
                this.last.next = entry;
                entry.prev = this.last;
                this.last = entry;
            } else this.first = this.last = entry;
        },
        // Splices `newEntry` into the position currently held by `entry`,
        // so the original insertion order is preserved on replacement.
        replace: function (entry, newEntry) {
            if (entry.prev != null) {
                entry.prev.next = newEntry;
                newEntry.prev = entry.prev;
            } else this.first = newEntry;
            if (entry.next != null) {
                entry.next.prev = newEntry;
                newEntry.next = entry.next;
            } else this.last = newEntry;
        },
        // Unlinks `entry`, patching head/tail pointers as needed.
        remove: function (entry) {
            if (entry.prev != null) entry.prev.next = entry.next;
            else this.first = entry.next;
            if (entry.next != null) entry.next.prev = entry.prev;
            else this.last = entry.prev;
        }
    };

    // Overload:function()
    // Overload:function(compareSelector)
    // compareSelector maps a key to the value that is actually hashed and
    // compared (identity by default).
    var Dictionary = function (compareSelector) {
        this.countField = 0;
        this.entryList = new EntryList();
        this.buckets = {}; // as Dictionary<string,List<object>>
        this.compareSelector = (compareSelector == null) ? Functions.Identity : compareSelector;
    };
    Dictionary.prototype =
    {
        // Inserts key/value. If the key already exists, its entry is replaced
        // in place (count unchanged, insertion order preserved); otherwise the
        // entry is appended to its bucket and to the tail of the order list.
        add: function (key, value) {
            var compareKey = this.compareSelector(key);
            var hash = computeHashCode(compareKey);
            var entry = new HashEntry(key, value);
            if (callHasOwnProperty(this.buckets, hash)) {
                var array = this.buckets[hash];
                for (var i = 0; i < array.length; i++) {
                    if (this.compareSelector(array[i].key) === compareKey) {
                        this.entryList.replace(array[i], entry);
                        array[i] = entry;
                        return;
                    }
                }
                array.push(entry);
            } else {
                this.buckets[hash] = [entry];
            }
            this.countField++;
            this.entryList.addLast(entry);
        },
        // Returns the value stored under `key`, or undefined when absent.
        get: function (key) {
            var compareKey = this.compareSelector(key);
            var hash = computeHashCode(compareKey);
            if (!callHasOwnProperty(this.buckets, hash)) return undefined;
            var array = this.buckets[hash];
            for (var i = 0; i < array.length; i++) {
                var entry = array[i];
                if (this.compareSelector(entry.key) === compareKey) return entry.value;
            }
            return undefined;
        },
        // Updates an EXISTING key's value (preserving its position) and
        // returns true; returns false without inserting when the key is absent.
        set: function (key, value) {
            var compareKey = this.compareSelector(key);
            var hash = computeHashCode(compareKey);
            if (callHasOwnProperty(this.buckets, hash)) {
                var array = this.buckets[hash];
                for (var i = 0; i < array.length; i++) {
                    if (this.compareSelector(array[i].key) === compareKey) {
                        var newEntry = new HashEntry(key, value);
                        this.entryList.replace(array[i], newEntry);
                        array[i] = newEntry;
                        return true;
                    }
                }
            }
            return false;
        },
        // True when `key` is present.
        contains: function (key) {
            var compareKey = this.compareSelector(key);
            var hash = computeHashCode(compareKey);
            if (!callHasOwnProperty(this.buckets, hash)) return false;
            var array = this.buckets[hash];
            for (var i = 0; i < array.length; i++) {
                if (this.compareSelector(array[i].key) === compareKey) return true;
            }
            return false;
        },
        // Drops all entries and resets the count.
        clear: function () {
            this.countField = 0;
            this.buckets = {};
            this.entryList = new EntryList();
        },
        // Removes `key` if present; empty buckets are deleted so the bucket
        // object does not accumulate dead hash slots.
        remove: function (key) {
            var compareKey = this.compareSelector(key);
            var hash = computeHashCode(compareKey);
            if (!callHasOwnProperty(this.buckets, hash)) return;
            var array = this.buckets[hash];
            for (var i = 0; i < array.length; i++) {
                if (this.compareSelector(array[i].key) === compareKey) {
                    this.entryList.remove(array[i]);
                    array.splice(i, 1);
                    if (array.length == 0) delete this.buckets[hash];
                    this.countField--;
                    return;
                }
            }
        },
        // Number of stored entries.
        count: function () {
            return this.countField;
        },
        // Lazily enumerates {key, value} pairs in insertion order by walking
        // the entry list; each getEnumerator() call starts from the head.
        toEnumerable: function () {
            var self = this;
            return new Enumerable(function () {
                var currentEntry;
                return new IEnumerator(
                    function () { currentEntry = self.entryList.first; },
                    function () {
                        if (currentEntry != null) {
                            var result = { key: currentEntry.key, value: currentEntry.value };
                            currentEntry = currentEntry.next;
                            return this.yieldReturn(result);
                        }
                        return false;
                    },
                    Functions.Blank);
            });
        }
    };
    return Dictionary;
})();
// dictionary = Dictionary<TKey, TValue[]>
// dictionary = Dictionary<TKey, TValue[]>
// Read-only grouping view over a Dictionary whose values are element arrays.
var Lookup = function (dictionary) {
    // Keep the backing dictionary in a named local; all accessors close over it.
    var store = dictionary;

    // Number of distinct keys in the lookup.
    this.count = function () {
        return store.count();
    };

    // True when at least one element is filed under `key`.
    this.contains = function (key) {
        return store.contains(key);
    };

    // Elements filed under `key`, wrapped as an Enumerable.
    this.get = function (key) {
        var elements = store.get(key);
        return Enumerable.from(elements);
    };

    // Lazily projects every dictionary entry into a Grouping(key, elements).
    this.toEnumerable = function () {
        var toGrouping = function (kvp) {
            return new Grouping(kvp.key, kvp.value);
        };
        return store.toEnumerable().select(toGrouping);
    };
};
// A key together with its elements; behaves as an ArrayEnumerable over the
// elements while exposing the group key via key().
var Grouping = function (groupKey, elements) {
    // The shared key of this group, captured in a closure.
    this.key = function () {
        return groupKey;
    };
    // Initialize the ArrayEnumerable part of this instance with the elements.
    ArrayEnumerable.call(this, elements);
};
// Inherit enumeration behavior from ArrayEnumerable.
Grouping.prototype = new ArrayEnumerable();
// module export
// module export
// Publish Enumerable under whichever module system is present:
// AMD define, CommonJS module.exports, or a plain global on `root`.
if (typeof define === Types.Function && define.amd) { // AMD
    define("linqjs", [], function () { return Enumerable; });
}
else if (typeof module !== Types.Undefined && module.exports) { // Node
    module.exports = Enumerable;
}
else {
    // Browser (or unknown) environment: attach to the captured root object.
    root.Enumerable = Enumerable;
}
})(this); | codemonkeychris/rainbow | linqjs/linq.js | JavaScript | apache-2.0 | 107,573 |
////////////////////////////////////////////////////////////////////////////
// Module : movement_manager.h
// Created : 02.10.2001
// Modified : 12.11.2003
// Author : Dmitriy Iassenev
// Description : Movement manager
////////////////////////////////////////////////////////////////////////////
#pragma once
#include "ai_monster_space.h"
#include "graph_engine_space.h"
#include "game_graph_space.h"
// Forward declarations only — full definitions live in their own headers;
// this keeps movement_manager.h cheap to include.

namespace MovementManager {
    enum EPathType;
};

namespace DetailPathManager {
    enum EDetailPathType;
};

// Generic location selector over a graph (game- or level-level).
template <
    typename _Graph,
    typename _VertexEvaluator,
    typename _vertex_id_type
>
class CBaseLocationSelector;

// Generic path manager over a graph.
template <
    typename _Graph,
    typename _VertexEvaluator,
    typename _vertex_id_type,
    typename _index_type
>
class CBasePathManager;

template <
    typename _dist_type,
    typename _index_type,
    typename _iteration_type
>
struct SVertexType;

template <
    typename _dist_type,
    typename _index_type,
    typename _iteration_type
>
struct SBaseParameters;

template <
    typename _dist_type,
    typename _index_type,
    typename _iteration_type
>
struct SGameVertex;

class CEnemyLocationPredictor;
class CPatrolPathManager;
class CDetailPathManager;
class CPHMovementControl;
class CGameGraph;
class CLevelGraph;
class CRestrictedObject;
class CLocationManager;
class CCustomMonster;

namespace DetailPathManager {
    struct STravelPathPoint;
};

class CLevelPathBuilder;
class CDetailPathBuilder;
// Coordinates all path planning for a monster: game-graph (cross-level),
// level-graph, detail and patrol paths, and drives actual movement each frame.
class CMovementManager {
private:
    // Builders need access to internal state while constructing paths.
    friend class CLevelPathBuilder;
    friend class CDetailPathBuilder;

protected:
    typedef MonsterSpace::SBoneRotation CBoneRotation;
    typedef MovementManager::EPathType EPathType;
    typedef DetailPathManager::STravelPathPoint CTravelPathPoint;
    typedef GraphEngineSpace::CBaseParameters CBaseParameters;
    typedef GraphEngineSpace::CGameVertexParams CGameVertexParams;

    // Selector/manager instantiations over the game graph (cross-level)…
    typedef CBaseLocationSelector<
        CGameGraph,
        SGameVertex<
            float,
            u32,
            u32
        >,
        u32
    > CGameLocationSelector;

    typedef CBasePathManager<
        CGameGraph,
        SGameVertex<
            float,
            u32,
            u32
        >,
        u32,
        u32
    > CGamePathManager;

    // …and over the level graph (within the current level).
    typedef CBasePathManager<
        CLevelGraph,
        SBaseParameters<
            float,
            u32,
            u32
        >,
        u32,
        u32
    > CLevelPathManager;

private:
    // State machine for the staged path-building pipeline
    // (game vertex -> game path -> level path -> detail path -> completion).
    enum EPathState {
        ePathStateSelectGameVertex = u32(0),
        ePathStateBuildGamePath,
        ePathStateContinueGamePath,
        ePathStateSelectPatrolPoint,
        ePathStateBuildLevelPath,
        ePathStateContinueLevelPath,
        ePathStateBuildDetailPath,
        ePathStatePathVerification,
        ePathStatePathCompleted,
        ePathStateTeleport,
        ePathStateDummy = u32(-1),
    };

protected:
    typedef xr_vector<CObject*> NEAREST_OBJECTS;

protected:
    // Scratch list of nearby objects (purpose of population not visible here).
    NEAREST_OBJECTS m_nearest_objects;

protected:
    float m_speed;

public:
    // Current body/bone orientation of the owner.
    CBoneRotation m_body;

protected:
    // False when the current path must be rebuilt.
    bool m_path_actuality;

private:
    EPathState m_path_state;
    EPathType m_path_type;
    bool m_enabled;
    // Object position remembered when movement was disabled.
    Fvector m_on_disable_object_position;
    float m_old_desirable_speed;
    bool m_extrapolate_path;
    bool m_build_at_once;
    bool m_wait_for_distributed_computation;

public:
    // Owned sub-systems (raw pointers; ownership/lifetime managed by this
    // class's implementation — see the .cpp / inline header).
    CGameVertexParams *m_base_game_selector;
    CBaseParameters *m_base_level_selector;
    CGameLocationSelector *m_game_location_selector;
    CGamePathManager *m_game_path_manager;
    CLevelPathManager *m_level_path_manager;
    CDetailPathManager *m_detail_path_manager;
    CPatrolPathManager *m_patrol_path_manager;
    CRestrictedObject *m_restricted_object;
    CLocationManager *m_location_manager;
    CLevelPathBuilder *m_level_path_builder;
    CDetailPathBuilder *m_detail_path_builder;
    // The monster this manager moves (not owned).
    CCustomMonster *m_object;

private:
    // Per-stage processing of the path state machine.
    void process_game_path ();
    void process_level_path ();
    void process_patrol_path ();
#ifdef USE_FREE_IN_RESTRICTIONS
    void verify_detail_path ();
#endif // USE_FREE_IN_RESTRICTIONS
    void apply_collision_hit (CPHMovementControl *movement_control);

protected:
    virtual void teleport (u32 game_vertex_id);

public:
    CMovementManager (CCustomMonster *object);
    virtual ~CMovementManager ();
    virtual void Load (LPCSTR caSection);
    virtual void reinit ();
    virtual void reload (LPCSTR caSection);
    virtual BOOL net_Spawn (CSE_Abstract* data);
    virtual void net_Destroy ();
    // Per-frame update entry point.
    virtual void on_frame (CPHMovementControl *movement_control, Fvector &dest_position);
    IC bool actual () const;
    bool actual_all () const;
    IC void set_path_type (EPathType path_type);
    // Destination setters at game-graph / level-graph granularity.
    void set_game_dest_vertex (const GameGraph::_GRAPH_ID &game_vertex_id);
    void set_level_dest_vertex (const u32 level_vertex_id);
    IC void set_build_path_at_once ();
    void enable_movement (bool enabled);
    EPathType path_type () const;
    GameGraph::_GRAPH_ID game_dest_vertex_id () const;
    u32 level_dest_vertex_id () const;
    IC bool enabled () const;
    IC bool path_completed () const;
    IC float old_desirable_speed () const;
    IC void set_desirable_speed (float speed);
    const xr_vector<CTravelPathPoint> &path () const;
    IC void set_body_orientation (const MonsterSpace::SBoneRotation &orientation);
    IC const CBoneRotation &body_orientation() const;
    void update_path ();
    virtual void move_along_path (CPHMovementControl *movement_control, Fvector &dest_position, float time_delta);
    IC float speed () const;
    float speed (CPHMovementControl *movement_control) const;
    virtual void on_travel_point_change (const u32 &previous_travel_point_index);
    virtual void on_build_path () {}
    // Accepts either a position or a vertex id (resolved by overload/traits in
    // the implementation).
    template <typename T>
    IC bool accessible (T position_or_vertex_id, float radius = EPS_L) const;
    IC void extrapolate_path (bool value);
    IC bool extrapolate_path () const;
    bool distance_to_destination_greater (const float &distance_to_check) const;
    IC bool wait_for_distributed_computation () const;
    // NOTE: "compuations" is a historical typo kept for ABI/source compatibility.
    virtual bool can_use_distributed_compuations (u32 option) const;
    void clear_path ();

public:
    // Accessors for the owned sub-systems.
    IC CGameVertexParams *base_game_params () const;
    IC CBaseParameters *base_level_params () const;
    IC CGameLocationSelector &game_selector () const;
    IC CGamePathManager &game_path () const;
    IC CLevelPathManager &level_path () const;
    IC CDetailPathManager &detail () const;
    IC CPatrolPathManager &patrol () const;
    IC CRestrictedObject &restrictions () const;
    IC CLocationManager &locations () const;
    IC CCustomMonster &object () const;
    IC CLevelPathBuilder &level_path_builder () const;
    IC CDetailPathBuilder &detail_path_builder () const;

public:
    virtual void on_restrictions_change ();
};
#include "movement_manager_inline.h" | OLR-xray/OLR-3.0 | src/xray/xr_3da/xrGame/movement_manager.h | C | apache-2.0 | 6,619 |
import sbt._
object Version {
  // Pinned third-party versions, referenced from the dependency definitions.
  val logbackVer = "1.2.3"
  val mUnitVer = "0.7.25"
  val scalaVersion = "3.0.0-RC3"
}
object Dependencies {
  // Logging runtime: logback-classic, pinned to Version.logbackVer.
  private val loggingDeps: Seq[ModuleID] =
    Seq("ch.qos.logback" % "logback-classic" % Version.logbackVer)

  // Test-only dependencies: MUnit.
  private val testDeps: Seq[ModuleID] =
    Seq("org.scalameta" %% "munit" % Version.mUnitVer % Test)

  // Dependencies applied to the build.
  val dependencies: Seq[ModuleID] = loggingDeps ++ testDeps

  // Cross-built dependencies; none at the moment.
  val crossDependencies: Seq[ModuleID] = Seq.empty
}
| lightbend-training/course-management-tools | course-templates/scala3-cmt-template-no-common/project/Dependencies.scala | Scala | apache-2.0 | 511 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.0 Frameset//EN" "http://www.w3.org/TR/REC-html40/frameset.dtd">
<HTML>
<HEAD>
<meta name="generator" content="JDiff v1.0.9">
<!-- Generated by the JDiff Javadoc doclet -->
<!-- (http://www.jdiff.org) -->
<meta name="description" content="JDiff is a Javadoc doclet which generates an HTML report of all the packages, classes, constructors, methods, and fields which have been removed, added or changed in any way, including their documentation, when two APIs are compared.">
<meta name="keywords" content="diff, jdiff, javadiff, java diff, java difference, API difference, difference between two APIs, API diff, Javadoc, doclet">
<TITLE>
Constructor Additions Index
</TITLE>
<LINK REL="stylesheet" TYPE="text/css" HREF="../stylesheet-jdiff.css" TITLE="Style">
</HEAD>
<BODY>
<a NAME="topheader"></a>
<table summary="Index for Constructors" width="100%" border="0" cellspacing="0" cellpadding="0">
<tr>
<td bgcolor="#FFFFCC">
<font size="+1"><a href="constructors_index_all.html" class="staysblack">All Constructors</a></font>
</td>
</tr>
<tr>
<td bgcolor="#FFFFFF">
<FONT SIZE="-1">
<A HREF="constructors_index_removals.html" class="hiddenlink">Removals</A>
</FONT>
</td>
</tr>
<tr>
<td bgcolor="#FFFFFF">
<FONT SIZE="-1">
<b>Additions</b>
</FONT>
</td>
</tr>
<tr>
<td bgcolor="#FFFFFF">
<FONT SIZE="-1">
<A HREF="constructors_index_changes.html" class="hiddenlink">Changes</A>
</FONT>
</td>
</tr>
<tr>
<td>
<font size="-2"><b>Bold</b> is New, <strike>strike</strike> is deleted</font>
</td>
</tr>
</table><br>
<A NAME="C"></A>
<br><font size="+2">C</font>
<a href="#F"><font size="-2">F</font></a>
<a href="#H"><font size="-2">H</font></a>
<a href="#I"><font size="-2">I</font></a>
<a href="#P"><font size="-2">P</font></a>
<a href="#R"><font size="-2">R</font></a>
<a href="#S"><font size="-2">S</font></a>
<a href="#Z"><font size="-2">Z</font></a>
<a href="#topheader"><font size="-2">TOP</font></a>
<br>
<nobr><A HREF="org.apache.hadoop.io.compress.bzip2.CBZip2InputStream.html#org.apache.hadoop.io.compress.bzip2.CBZip2InputStream.ctor_added(java.io.InputStream, org.apache.hadoop.io.compress.SplittableCompressionCodec.READ_MODE)" class="hiddenlink" target="rightframe"><b>CBZip2InputStream</b>
(<code>InputStream, READ_MODE</code>)</A></nobr> constructor<br>
<A NAME="F"></A>
<br><font size="+2">F</font>
<a href="#C"><font size="-2">C</font></a>
<a href="#H"><font size="-2">H</font></a>
<a href="#I"><font size="-2">I</font></a>
<a href="#P"><font size="-2">P</font></a>
<a href="#R"><font size="-2">R</font></a>
<a href="#S"><font size="-2">S</font></a>
<a href="#Z"><font size="-2">Z</font></a>
<a href="#topheader"><font size="-2">TOP</font></a>
<br>
<i>FsPermission</i><br>
<nobr><A HREF="org.apache.hadoop.fs.permission.FsPermission.html#org.apache.hadoop.fs.permission.FsPermission.ctor_added(java.lang.String)" class="hiddenlink" target="rightframe"><b>FsPermission</b>
(<code>String</code>)</A></nobr> constructor<br>
<nobr><A HREF="org.apache.hadoop.fs.permission.FsPermission.html#org.apache.hadoop.fs.permission.FsPermission.ctor_added(org.apache.hadoop.fs.permission.FsAction, org.apache.hadoop.fs.permission.FsAction, org.apache.hadoop.fs.permission.FsAction, boolean)" class="hiddenlink" target="rightframe"><b>FsPermission</b>
(<code>FsAction, FsAction, FsAction, boolean</code>)</A></nobr> constructor<br>
<A NAME="H"></A>
<br><font size="+2">H</font>
<a href="#C"><font size="-2">C</font></a>
<a href="#F"><font size="-2">F</font></a>
<a href="#I"><font size="-2">I</font></a>
<a href="#P"><font size="-2">P</font></a>
<a href="#R"><font size="-2">R</font></a>
<a href="#S"><font size="-2">S</font></a>
<a href="#Z"><font size="-2">Z</font></a>
<a href="#topheader"><font size="-2">TOP</font></a>
<br>
<i>HttpServer</i><br>
<nobr><A HREF="org.apache.hadoop.http.HttpServer.html#org.apache.hadoop.http.HttpServer.ctor_added(java.lang.String, java.lang.String, int, boolean, org.apache.hadoop.conf.Configuration, org.apache.hadoop.security.authorize.AccessControlList)" class="hiddenlink" target="rightframe"><b>HttpServer</b>
(<code>String, String, int, boolean, Configuration, AccessControlList</code>)</A></nobr> constructor<br>
<nobr><A HREF="org.apache.hadoop.http.HttpServer.html#org.apache.hadoop.http.HttpServer.ctor_added(java.lang.String, java.lang.String, int, boolean, org.apache.hadoop.conf.Configuration, org.apache.hadoop.security.authorize.AccessControlList, org.mortbay.jetty.Connector)" class="hiddenlink" target="rightframe"><b>HttpServer</b>
(<code>String, String, int, boolean, Configuration, AccessControlList, Connector</code>)</A></nobr> constructor<br>
<nobr><A HREF="org.apache.hadoop.http.HttpServer.html#org.apache.hadoop.http.HttpServer.ctor_added(java.lang.String, java.lang.String, int, boolean, org.apache.hadoop.conf.Configuration, org.mortbay.jetty.Connector)" class="hiddenlink" target="rightframe"><b>HttpServer</b>
(<code>String, String, int, boolean, Configuration, Connector</code>)</A></nobr> constructor<br>
<A NAME="I"></A>
<br><font size="+2">I</font>
<a href="#C"><font size="-2">C</font></a>
<a href="#F"><font size="-2">F</font></a>
<a href="#H"><font size="-2">H</font></a>
<a href="#P"><font size="-2">P</font></a>
<a href="#R"><font size="-2">R</font></a>
<a href="#S"><font size="-2">S</font></a>
<a href="#Z"><font size="-2">Z</font></a>
<a href="#topheader"><font size="-2">TOP</font></a>
<br>
<i>InvalidJobConfException</i><br>
<nobr><A HREF="org.apache.hadoop.mapred.InvalidJobConfException.html#org.apache.hadoop.mapred.InvalidJobConfException.ctor_added(java.lang.String, java.lang.Throwable)" class="hiddenlink" target="rightframe"><b>InvalidJobConfException</b>
(<code>String, Throwable</code>)</A></nobr> constructor<br>
<nobr><A HREF="org.apache.hadoop.mapred.InvalidJobConfException.html#org.apache.hadoop.mapred.InvalidJobConfException.ctor_added(java.lang.Throwable)" class="hiddenlink" target="rightframe"><b>InvalidJobConfException</b>
(<code>Throwable</code>)</A></nobr> constructor<br>
<A NAME="P"></A>
<br><font size="+2">P</font>
<a href="#C"><font size="-2">C</font></a>
<a href="#F"><font size="-2">F</font></a>
<a href="#H"><font size="-2">H</font></a>
<a href="#I"><font size="-2">I</font></a>
<a href="#R"><font size="-2">R</font></a>
<a href="#S"><font size="-2">S</font></a>
<a href="#Z"><font size="-2">Z</font></a>
<a href="#topheader"><font size="-2">TOP</font></a>
<br>
<nobr><A HREF="org.apache.hadoop.fs.Path.html#org.apache.hadoop.fs.Path.ctor_added(java.net.URI)" class="hiddenlink" target="rightframe"><b>Path</b>
(<code>URI</code>)</A></nobr> constructor<br>
<nobr><A HREF="org.apache.hadoop.util.ProcfsBasedProcessTree.html#org.apache.hadoop.util.ProcfsBasedProcessTree.ctor_added(java.lang.String, boolean)" class="hiddenlink" target="rightframe"><b>ProcfsBasedProcessTree</b>
(<code>String, boolean</code>)</A></nobr> constructor<br>
<A NAME="R"></A>
<br><font size="+2">R</font>
<a href="#C"><font size="-2">C</font></a>
<a href="#F"><font size="-2">F</font></a>
<a href="#H"><font size="-2">H</font></a>
<a href="#I"><font size="-2">I</font></a>
<a href="#P"><font size="-2">P</font></a>
<a href="#S"><font size="-2">S</font></a>
<a href="#Z"><font size="-2">Z</font></a>
<a href="#topheader"><font size="-2">TOP</font></a>
<br>
<nobr><A HREF="org.apache.hadoop.ipc.RPC.Server.html#org.apache.hadoop.ipc.RPC.Server.ctor_added(java.lang.Object, org.apache.hadoop.conf.Configuration, java.lang.String, int, int, boolean, org.apache.hadoop.security.token.SecretManager)" class="hiddenlink" target="rightframe"><b>RPC.Server</b>
(<code>Object, Configuration, String, int, int, boolean, SecretManager</code>)</A></nobr> constructor<br>
<A NAME="S"></A>
<br><font size="+2">S</font>
<a href="#C"><font size="-2">C</font></a>
<a href="#F"><font size="-2">F</font></a>
<a href="#H"><font size="-2">H</font></a>
<a href="#I"><font size="-2">I</font></a>
<a href="#P"><font size="-2">P</font></a>
<a href="#R"><font size="-2">R</font></a>
<a href="#Z"><font size="-2">Z</font></a>
<a href="#topheader"><font size="-2">TOP</font></a>
<br>
<nobr><A HREF="org.apache.hadoop.ipc.Server.html#org.apache.hadoop.ipc.Server.ctor_added(java.lang.String, int, java.lang.Class, int, org.apache.hadoop.conf.Configuration, java.lang.String, org.apache.hadoop.security.token.SecretManager)" class="hiddenlink" target="rightframe"><b>Server</b>
(<code>String, int, Class, int, Configuration, String, SecretManager</code>)</A></nobr> constructor<br>
<nobr><A HREF="org.apache.hadoop.util.Shell.ShellCommandExecutor.html#org.apache.hadoop.util.Shell.ShellCommandExecutor.ctor_added(java.lang.String[], java.io.File, java.util.Map, long)" class="hiddenlink" target="rightframe"><b>Shell.ShellCommandExecutor</b>
(<code>String[], File, Map, long</code>)</A></nobr> constructor<br>
<A NAME="Z"></A>
<br><font size="+2">Z</font>
<a href="#C"><font size="-2">C</font></a>
<a href="#F"><font size="-2">F</font></a>
<a href="#H"><font size="-2">H</font></a>
<a href="#I"><font size="-2">I</font></a>
<a href="#P"><font size="-2">P</font></a>
<a href="#R"><font size="-2">R</font></a>
<a href="#S"><font size="-2">S</font></a>
<a href="#topheader"><font size="-2">TOP</font></a>
<br>
<nobr><A HREF="org.apache.hadoop.io.compress.zlib.ZlibCompressor.html#org.apache.hadoop.io.compress.zlib.ZlibCompressor.ctor_added(org.apache.hadoop.conf.Configuration)" class="hiddenlink" target="rightframe"><b>ZlibCompressor</b>
(<code>Configuration</code>)</A></nobr> constructor<br>
</BODY>
</HTML>
| simplegeo/hadoop | docs/jdiff-cloudera/changes/constructors_index_additions.html | HTML | apache-2.0 | 9,849 |
package org.apereo.cas.ticket.code;
import org.apereo.cas.authentication.Authentication;
import org.apereo.cas.authentication.principal.Service;
import org.apereo.cas.ticket.ExpirationPolicy;
import org.apereo.cas.ticket.Ticket;
import org.apereo.cas.ticket.TicketFactory;
import org.apereo.cas.ticket.UniqueTicketIdGenerator;
import org.apereo.cas.util.DefaultUniqueTicketIdGenerator;
/**
 * Default OAuth code factory.
 * <p>
 * Produces {@link OAuthCode} tickets whose ids come from the configured
 * {@link UniqueTicketIdGenerator} and whose lifetime is governed by the
 * configured {@link ExpirationPolicy}.
 *
 * @author Jerome Leleu
 * @since 5.0.0
 */
public class DefaultOAuthCodeFactory implements OAuthCodeFactory {

    /** Default instance for the ticket id generator. */
    protected final UniqueTicketIdGenerator oAuthCodeIdGenerator;

    /** ExpirationPolicy for refresh tokens. */
    protected final ExpirationPolicy expirationPolicy;

    /**
     * Creates a factory using a {@link DefaultUniqueTicketIdGenerator}.
     *
     * @param expirationPolicy expiration policy applied to every created code
     */
    public DefaultOAuthCodeFactory(final ExpirationPolicy expirationPolicy) {
        this(new DefaultUniqueTicketIdGenerator(), expirationPolicy);
    }

    /**
     * Creates a factory with an explicit id generator.
     *
     * @param refreshTokenIdGenerator generator used to produce unique code ids
     * @param expirationPolicy        expiration policy applied to every created code
     */
    public DefaultOAuthCodeFactory(final UniqueTicketIdGenerator refreshTokenIdGenerator, final ExpirationPolicy expirationPolicy) {
        this.oAuthCodeIdGenerator = refreshTokenIdGenerator;
        this.expirationPolicy = expirationPolicy;
    }

    @Override
    public OAuthCode create(final Service service, final Authentication authentication) {
        // Id is prefixed with OAuthCode.PREFIX so the ticket registry can route it.
        final String codeId = this.oAuthCodeIdGenerator.getNewTicketId(OAuthCode.PREFIX);
        return new OAuthCodeImpl(codeId, service, authentication, this.expirationPolicy);
    }

    @Override
    @SuppressWarnings("unchecked")
    public <T extends TicketFactory> T get(final Class<? extends Ticket> clazz) {
        // This factory serves every ticket class it supports itself, so the
        // unchecked cast to the requested factory type is intentional.
        return (T) this;
    }
}
| gabedwrds/cas | support/cas-server-support-oauth/src/main/java/org/apereo/cas/ticket/code/DefaultOAuthCodeFactory.java | Java | apache-2.0 | 1,592 |
// Copyright 2000-2021 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.daemon.impl;
import com.intellij.codeHighlighting.EditorBoundHighlightingPass;
import com.intellij.codeHighlighting.HighlightingPass;
import com.intellij.codeHighlighting.TextEditorHighlightingPass;
import com.intellij.codeHighlighting.TextEditorHighlightingPassRegistrar;
import com.intellij.concurrency.Job;
import com.intellij.concurrency.JobLauncher;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.application.ex.ApplicationManagerEx;
import com.intellij.openapi.application.ex.ApplicationUtil;
import com.intellij.openapi.application.impl.ApplicationImpl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditor;
import com.intellij.openapi.fileEditor.TextEditor;
import com.intellij.openapi.fileTypes.FileType;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiManager;
import com.intellij.util.Functions;
import com.intellij.util.containers.CollectionFactory;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.HashingStrategy;
import com.intellij.util.ui.UIUtil;
import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
final class PassExecutorService implements Disposable {
static final Logger LOG = Logger.getInstance(PassExecutorService.class);
private static final boolean CHECK_CONSISTENCY = ApplicationManager.getApplication().isUnitTestMode();
private final Map<ScheduledPass, Job<Void>> mySubmittedPasses = new ConcurrentHashMap<>();
private final Project myProject;
private volatile boolean isDisposed;
private final AtomicInteger nextAvailablePassId; // used to assign random id to a pass if not set
PassExecutorService(@NotNull Project project) {
    myProject = project;
    // Seed local pass-id generation from the project's registrar so ids handed
    // out here never collide with ids the registrar has already assigned.
    nextAvailablePassId = ((TextEditorHighlightingPassRegistrarImpl)TextEditorHighlightingPassRegistrar.getInstance(myProject)).getNextAvailableId();
}
@Override
public void dispose() {
    // Cancel all submitted passes and wait for them, so nothing outlives this service.
    cancelAll(true);
    // some workers could, although idle, still retain some thread references for some time causing leak hunter to frown
    ForkJoinPool.commonPool().awaitQuiescence(1, TimeUnit.SECONDS);
    // Mark disposed only after all teardown work has completed.
    isDisposed = true;
}
/**
 * Cancels every submitted pass (both its progress indicator and its job).
 *
 * @param waitForTermination when true, blocks until all submitted passes have
 *                           terminated, polling {@code waitFor} in 50 ms slices.
 */
void cancelAll(boolean waitForTermination) {
    for (Map.Entry<ScheduledPass, Job<Void>> entry : mySubmittedPasses.entrySet()) {
        Job<Void> job = entry.getValue();
        ScheduledPass pass = entry.getKey();
        pass.myUpdateProgress.cancel();
        job.cancel();
    }
    try {
        if (waitForTermination) {
            // Poll until all jobs have finished; waitFor does the bounded
            // waiting, so the loop body is intentionally empty.
            // (Removed a dead `int i = 0;` local that served no purpose.)
            while (!waitFor(50)) {
            }
        }
    }
    catch (ProcessCanceledException ignored) {
        // Cancellation while cancelling is expected and harmless.
    }
    catch (Error | RuntimeException e) {
        // Programming errors must propagate.
        throw e;
    }
    catch (Throwable throwable) {
        // Checked throwables from the wait machinery: log, don't crash teardown.
        LOG.error(throwable);
    }
    finally {
        mySubmittedPasses.clear();
    }
}
/**
 * Builds the dependency graph of highlighting passes for the given editors and
 * submits the ones with no unfinished predecessors; the rest are recorded and
 * started later as their predecessors complete.
 */
void submitPasses(@NotNull Map<FileEditor, HighlightingPass[]> passesMap,
                  // a list of opened FileEditors for each Document. The first FileEditor in the list is the preferred one
                  @NotNull Map<Document, List<FileEditor>> documentToEditors,
                  @NotNull DaemonProgressIndicator updateProgress) {
    if (isDisposed()) return;

    Map<FileEditor, List<TextEditorHighlightingPass>> documentBoundPasses = new HashMap<>();
    Map<FileEditor, List<EditorBoundHighlightingPass>> editorBoundPasses = new HashMap<>();
    // fileEditor -> (passId -> pass); used to assign ids unique per editor.
    Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass = new HashMap<>();
    List<ScheduledPass> freePasses = new ArrayList<>(documentToEditors.size() * 5);
    AtomicInteger threadsToStartCountdown = new AtomicInteger(0);

    // Phase 1: bucket each pass by kind (editor-bound, document-bound, generic).
    for (Map.Entry<FileEditor, HighlightingPass[]> entry : passesMap.entrySet()) {
        FileEditor fileEditor = entry.getKey();
        HighlightingPass[] passes = entry.getValue();
        for (HighlightingPass pass : passes) {
            Int2ObjectMap<TextEditorHighlightingPass> thisEditorId2Pass = id2Pass.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(30));
            if (pass instanceof EditorBoundHighlightingPass) {
                EditorBoundHighlightingPass editorPass = (EditorBoundHighlightingPass)pass;
                // have to make ids unique for this document
                assignUniqueId(editorPass, thisEditorId2Pass);
                editorBoundPasses.computeIfAbsent(fileEditor, __->new ArrayList<>()).add(editorPass);
            }
            else if (pass instanceof TextEditorHighlightingPass) {
                TextEditorHighlightingPass tePass = (TextEditorHighlightingPass)pass;
                assignUniqueId(tePass, thisEditorId2Pass);
                documentBoundPasses.computeIfAbsent(fileEditor, __->new ArrayList<>()).add(tePass);
            }
            else {
                // generic HighlightingPass, run all of them concurrently
                freePasses.add(new ScheduledPass(fileEditor, pass, updateProgress, threadsToStartCountdown));
            }
        }
    }

    List<ScheduledPass> dependentPasses = new ArrayList<>(documentToEditors.size() * 10);
    // fileEditor-> (passId -> created pass)
    Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted = new HashMap<>(passesMap.size());

    // Phase 2: schedule document-bound passes, only for each document's
    // preferred editor, in id order.
    for (Map.Entry<Document, List<FileEditor>> entry : documentToEditors.entrySet()) {
        List<FileEditor> fileEditors = entry.getValue();
        FileEditor preferredFileEditor = fileEditors.get(0); // assumption: the preferred fileEditor is stored first
        List<TextEditorHighlightingPass> passes = documentBoundPasses.get(preferredFileEditor);
        if (passes == null || passes.isEmpty()) {
            continue;
        }
        sortById(passes);
        for (TextEditorHighlightingPass pass : passes) {
            createScheduledPass(preferredFileEditor, pass, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress,
                                threadsToStartCountdown);
        }
    }

    // Phase 3: schedule editor-bound passes for every editor individually.
    for (Map.Entry<FileEditor, List<EditorBoundHighlightingPass>> entry : editorBoundPasses.entrySet()) {
        FileEditor fileEditor = entry.getKey();
        Collection<EditorBoundHighlightingPass> createdEditorBoundPasses = entry.getValue();
        for (EditorBoundHighlightingPass pass : createdEditorBoundPasses) {
            createScheduledPass(fileEditor, pass, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown);
        }
    }

    // Self-check of the scheduling graph (unit-test mode only, skipped in stress tests).
    if (CHECK_CONSISTENCY && !ApplicationManagerEx.isInStressTest()) {
        assertConsistency(freePasses, toBeSubmitted, threadsToStartCountdown);
    }

    if (LOG.isDebugEnabled()) {
        Set<VirtualFile> vFiles = ContainerUtil.map2Set(passesMap.keySet(), FileEditor::getFile);
        log(updateProgress, null, vFiles + " ----- starting " + threadsToStartCountdown.get(), freePasses);
    }

    // Passes with pending predecessors are parked with a null job; free passes start now.
    for (ScheduledPass dependentPass : dependentPasses) {
        mySubmittedPasses.put(dependentPass, Job.nullJob());
    }
    for (ScheduledPass freePass : freePasses) {
        submit(freePass);
    }
}
/**
 * Ensures {@code pass} carries a valid id that is unique within {@code id2Pass}:
 * passes reporting -1 or 0 receive a freshly generated id, then the pass is
 * registered in the map; an id clash with another pass is reported as an error.
 */
private void assignUniqueId(@NotNull TextEditorHighlightingPass pass, @NotNull Int2ObjectMap<TextEditorHighlightingPass> id2Pass) {
    int passId = pass.getId();
    boolean unassigned = passId == -1 || passId == 0;
    if (unassigned) {
        passId = nextAvailablePassId.incrementAndGet();
        pass.setId(passId);
    }
    TextEditorHighlightingPass clash = id2Pass.put(passId, pass);
    if (clash == null) {
        return;
    }
    LOG.error("Duplicate pass id found: "+passId+". Both passes returned the same getId(): "+clash+" ("+clash.getClass() +") and "+pass+" ("+pass.getClass()+")");
}
/**
 * Test-mode sanity check of the scheduling graph: verifies that walking the
 * successor edges from the free passes visits every scheduled
 * TextEditorHighlightingPass exactly once and drives every predecessor
 * counter precisely to zero.
 */
private void assertConsistency(@NotNull List<ScheduledPass> freePasses,
                               @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted,
                               @NotNull AtomicInteger threadsToStartCountdown) {
    assert threadsToStartCountdown.get() == toBeSubmitted.values().stream().mapToInt(m->m.size()).sum();
    // Identity of a scheduled pass here is (pass id, file editor), hence the
    // custom hashing strategy instead of default object identity.
    Map<ScheduledPass, Pair<ScheduledPass, Integer>> id2Visits = CollectionFactory.createCustomHashingStrategyMap(new HashingStrategy<>() {
        @Override
        public int hashCode(@Nullable PassExecutorService.ScheduledPass sp) {
            if (sp == null) return 0;
            return ((TextEditorHighlightingPass)sp.myPass).getId() * 31 + sp.myFileEditor.hashCode();
        }

        @Override
        public boolean equals(@Nullable PassExecutorService.ScheduledPass sp1, @Nullable PassExecutorService.ScheduledPass sp2) {
            if (sp1 == null || sp2 == null) return sp1 == sp2;
            int id1 = ((TextEditorHighlightingPass)sp1.myPass).getId();
            int id2 = ((TextEditorHighlightingPass)sp2.myPass).getId();
            return id1 == id2 && sp1.myFileEditor == sp2.myFileEditor;
        }
    });
    // Seed the walk with the free passes (those with no predecessors).
    for (ScheduledPass freePass : freePasses) {
        HighlightingPass pass = freePass.myPass;
        if (pass instanceof TextEditorHighlightingPass) {
            id2Visits.put(freePass, Pair.create(freePass, 0));
            checkConsistency(freePass, id2Visits);
        }
    }
    // After the walk every remaining predecessor count must be exactly zero…
    for (Map.Entry<ScheduledPass, Pair<ScheduledPass, Integer>> entry : id2Visits.entrySet()) {
        int count = entry.getValue().second;
        assert count == 0 : entry.getKey();
    }
    // …and the number of visited passes must match the scheduled-pass count.
    assert id2Visits.size() == threadsToStartCountdown.get() : "Expected "+threadsToStartCountdown+" but got "+id2Visits.size()+": "+id2Visits;
}
/**
 * Recursive helper for {@code assertConsistency}: decrements the remaining
 * predecessor count of each successor of {@code pass} (both on-completion and
 * on-submit edges) and recurses into successors whose count reaches zero.
 * {@code id2Visits} accumulates the (pass, remaining-predecessors) pairs seen.
 */
private void checkConsistency(@NotNull ScheduledPass pass, Map<ScheduledPass, Pair<ScheduledPass, Integer>> id2Visits) {
    for (ScheduledPass succ : ContainerUtil.concat(pass.mySuccessorsOnCompletion, pass.mySuccessorsOnSubmit)) {
        Pair<ScheduledPass, Integer> succPair = id2Visits.get(succ);
        if (succPair == null) {
            // First visit: start from the successor's live predecessor counter.
            succPair = Pair.create(succ, succ.myRunningPredecessorsCount.get());
            id2Visits.put(succ, succPair);
        }
        int newPred = succPair.second - 1;
        id2Visits.put(succ, Pair.create(succ, newPred));
        // Going negative would mean more edges point at succ than it expects.
        assert newPred >= 0;
        if (newPred == 0) {
            checkConsistency(succ, id2Visits);
        }
    }
}
/**
 * Creates (or returns the already-created) ScheduledPass for {@code pass} in
 * {@code fileEditor}, recursively creating its predecessors and wiring
 * successor edges. The new pass lands in {@code freePasses} when it has no
 * running predecessors, otherwise in {@code dependentPasses}. If the pass
 * requests it, a follow-up ShowIntentionsPass is scheduled after it.
 */
@NotNull
private ScheduledPass createScheduledPass(@NotNull FileEditor fileEditor,
                                          @NotNull TextEditorHighlightingPass pass,
                                          @NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted,
                                          @NotNull Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass,
                                          @NotNull List<ScheduledPass> freePasses,
                                          @NotNull List<ScheduledPass> dependentPasses,
                                          @NotNull DaemonProgressIndicator updateProgress,
                                          @NotNull AtomicInteger threadsToStartCountdown) {
    Int2ObjectMap<ScheduledPass> thisEditorId2ScheduledPass = toBeSubmitted.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(20));
    Int2ObjectMap<TextEditorHighlightingPass> thisEditorId2Pass = id2Pass.computeIfAbsent(fileEditor, __ -> new Int2ObjectOpenHashMap<>(20));
    int passId = pass.getId();
    // Memoized: each (editor, passId) pair is scheduled at most once.
    ScheduledPass scheduledPass = thisEditorId2ScheduledPass.get(passId);
    if (scheduledPass != null) return scheduledPass;
    scheduledPass = new ScheduledPass(fileEditor, pass, updateProgress, threadsToStartCountdown);
    threadsToStartCountdown.incrementAndGet();
    // Register before recursing so predecessor cycles cannot recurse forever.
    thisEditorId2ScheduledPass.put(passId, scheduledPass);
    // Wire "run after predecessor completes" edges.
    for (int predecessorId : pass.getCompletionPredecessorIds()) {
        ScheduledPass predecessor = findOrCreatePredecessorPass(fileEditor, toBeSubmitted, id2Pass, freePasses, dependentPasses,
                                                                updateProgress, threadsToStartCountdown, predecessorId,
                                                                thisEditorId2ScheduledPass, thisEditorId2Pass);
        if (predecessor != null) {
            predecessor.addSuccessorOnCompletion(scheduledPass);
        }
    }
    // Wire "run after predecessor is submitted" edges.
    for (int predecessorId : pass.getStartingPredecessorIds()) {
        ScheduledPass predecessor = findOrCreatePredecessorPass(fileEditor, toBeSubmitted, id2Pass, freePasses, dependentPasses,
                                                                updateProgress, threadsToStartCountdown, predecessorId,
                                                                thisEditorId2ScheduledPass, thisEditorId2Pass);
        if (predecessor != null) {
            predecessor.addSuccessorOnSubmit(scheduledPass);
        }
    }
    // No live predecessors -> can start immediately; otherwise parked as dependent.
    if (scheduledPass.myRunningPredecessorsCount.get() == 0 && !freePasses.contains(scheduledPass)) {
        freePasses.add(scheduledPass);
    }
    else if (!dependentPasses.contains(scheduledPass)) {
        dependentPasses.add(scheduledPass);
    }
    // Optionally chain a ShowIntentionsPass to run after this pass completes.
    if (pass.isRunIntentionPassAfter() && fileEditor instanceof TextEditor) {
        Editor editor = ((TextEditor)fileEditor).getEditor();
        VirtualFile virtualFile = fileEditor.getFile();
        PsiFile psiFile = virtualFile == null ? null : ReadAction.compute(() -> PsiManager.getInstance(myProject).findFile(virtualFile));
        if (psiFile != null) {
            ShowIntentionsPass ip = new ShowIntentionsPass(psiFile, editor, false);
            assignUniqueId(ip, thisEditorId2Pass);
            ip.setCompletionPredecessorIds(new int[]{passId});
            createScheduledPass(fileEditor, ip, toBeSubmitted, id2Pass, freePasses, dependentPasses, updateProgress, threadsToStartCountdown);
        }
    }
    return scheduledPass;
}
private ScheduledPass findOrCreatePredecessorPass(@NotNull FileEditor fileEditor,
@NotNull Map<FileEditor, Int2ObjectMap<ScheduledPass>> toBeSubmitted,
@NotNull Map<FileEditor, Int2ObjectMap<TextEditorHighlightingPass>> id2Pass,
@NotNull List<ScheduledPass> freePasses,
@NotNull List<ScheduledPass> dependentPasses,
@NotNull DaemonProgressIndicator updateProgress,
@NotNull AtomicInteger myThreadsToStartCountdown,
int predecessorId,
@NotNull Int2ObjectMap<ScheduledPass> thisEditorId2ScheduledPass,
@NotNull Int2ObjectMap<? extends TextEditorHighlightingPass> thisEditorId2Pass) {
ScheduledPass predecessor = thisEditorId2ScheduledPass.get(predecessorId);
if (predecessor == null) {
TextEditorHighlightingPass textEditorPass = thisEditorId2Pass.get(predecessorId);
predecessor = textEditorPass == null ? null : createScheduledPass(fileEditor, textEditorPass, toBeSubmitted,
id2Pass, freePasses,
dependentPasses, updateProgress, myThreadsToStartCountdown);
}
return predecessor;
}
  /**
   * Hands the pass to the shared background job pool (unless its progress is already canceled)
   * and records the resulting job in {@code mySubmittedPasses} for later waiting/cancellation.
   */
  private void submit(@NotNull ScheduledPass pass) {
    if (!pass.myUpdateProgress.isCanceled()) {
      Job<Void> job = JobLauncher.getInstance().submitToJobThread(pass, future -> {
        // completion callback: surface only unexpected failures
        try {
          if (!future.isCancelled()) { // for canceled task .get() generates CancellationException which is expensive
            future.get();
          }
        }
        catch (CancellationException | InterruptedException ignored) {
        }
        catch (ExecutionException e) {
          LOG.error(e.getCause());
        }
      });
      mySubmittedPasses.put(pass, job);
    }
  }
  /**
   * One highlighting pass scheduled for one file editor, together with its dependency bookkeeping.
   * Runs on a background job thread (see {@code submit}), collects information under a read action,
   * then schedules applying the results in the EDT.
   */
  private final class ScheduledPass implements Runnable {
    private final FileEditor myFileEditor;
    private final HighlightingPass myPass;
    // shared countdown of passes in this session which have not applied their results yet
    private final AtomicInteger myThreadsToStartCountdown;
    // number of predecessors still pending; this pass may be submitted only when it reaches 0
    private final AtomicInteger myRunningPredecessorsCount = new AtomicInteger(0);
    // passes to submit after this pass completed AND applied its results
    private final List<ScheduledPass> mySuccessorsOnCompletion = new ArrayList<>();
    // passes to submit as soon as this pass starts running
    private final List<ScheduledPass> mySuccessorsOnSubmit = new ArrayList<>();
    @NotNull private final DaemonProgressIndicator myUpdateProgress;
    private ScheduledPass(@NotNull FileEditor fileEditor,
                          @NotNull HighlightingPass pass,
                          @NotNull DaemonProgressIndicator progressIndicator,
                          @NotNull AtomicInteger threadsToStartCountdown) {
      myFileEditor = fileEditor;
      myPass = pass;
      myThreadsToStartCountdown = threadsToStartCountdown;
      myUpdateProgress = progressIndicator;
    }
    @Override
    public void run() {
      ((ApplicationImpl)ApplicationManager.getApplication()).executeByImpatientReader(() -> {
        try {
          doRun();
        }
        catch (ApplicationUtil.CannotRunReadActionException e) {
          // could not get the read lock (impatient reader): cancel this highlighting session
          myUpdateProgress.cancel();
        }
        catch (RuntimeException | Error e) {
          // remember the first failure on the indicator so tests/diagnostics can retrieve it
          saveException(e, myUpdateProgress);
          throw e;
        }
      });
    }
    private void doRun() {
      if (myUpdateProgress.isCanceled()) return;
      log(myUpdateProgress, myPass, "Started. ");
      // "on submit" successors only require this pass to have started, not finished
      for (ScheduledPass successor : mySuccessorsOnSubmit) {
        int predecessorsToRun = successor.myRunningPredecessorsCount.decrementAndGet();
        if (predecessorsToRun == 0) {
          submit(successor);
        }
      }
      ProgressManager.getInstance().executeProcessUnderProgress(() -> {
        boolean success = ApplicationManagerEx.getApplicationEx().tryRunReadAction(() -> {
          try {
            // dumb-unaware passes must not run while indices are being rebuilt
            if (DumbService.getInstance(myProject).isDumb() && !DumbService.isDumbAware(myPass)) {
              return;
            }
            if (!myUpdateProgress.isCanceled() && !myProject.isDisposed()) {
              myPass.collectInformation(myUpdateProgress);
            }
          }
          catch (ProcessCanceledException e) {
            log(myUpdateProgress, myPass, "Canceled ");
            if (!myUpdateProgress.isCanceled()) {
              myUpdateProgress.cancel(e); //in case when some smart asses throw PCE just for fun
            }
          }
          catch (RuntimeException | Error e) {
            myUpdateProgress.cancel(e);
            LOG.error(e);
            throw e;
          }
        });
        if (!success) {
          // read action could not start (presumably a write action was pending) — cancel the session
          myUpdateProgress.cancel();
        }
      }, myUpdateProgress);
      log(myUpdateProgress, myPass, "Finished. ");
      if (!myUpdateProgress.isCanceled()) {
        applyInformationToEditorsLater(myFileEditor, myPass, myUpdateProgress, myThreadsToStartCountdown, ()->{
          // "on completion" successors may start only after the results were applied in the EDT
          for (ScheduledPass successor : mySuccessorsOnCompletion) {
            int predecessorsToRun = successor.myRunningPredecessorsCount.decrementAndGet();
            if (predecessorsToRun == 0) {
              submit(successor);
            }
          }
        });
      }
    }
    @NonNls
    @Override
    public String toString() {
      return "SP: " + myPass;
    }
    private void addSuccessorOnCompletion(@NotNull ScheduledPass successor) {
      mySuccessorsOnCompletion.add(successor);
      successor.myRunningPredecessorsCount.incrementAndGet();
    }
    private void addSuccessorOnSubmit(@NotNull ScheduledPass successor) {
      mySuccessorsOnSubmit.add(successor);
      successor.myRunningPredecessorsCount.incrementAndGet();
    }
  }
  /**
   * Schedules applying the pass results on the EDT. On success decrements {@code threadsToStartCountdown}
   * and, when it hits zero, finishes the whole highlighting session; finally runs {@code callbackOnApplied}.
   */
  private void applyInformationToEditorsLater(@NotNull FileEditor fileEditor,
                                              @NotNull HighlightingPass pass,
                                              @NotNull DaemonProgressIndicator updateProgress,
                                              @NotNull AtomicInteger threadsToStartCountdown,
                                              @NotNull Runnable callbackOnApplied) {
    ApplicationManager.getApplication().invokeLater(() -> {
      if (isDisposed() || !fileEditor.isValid()) {
        updateProgress.cancel();
      }
      if (updateProgress.isCanceled()) {
        // NOTE(review): on cancellation callbackOnApplied is skipped and the countdown is not
        // decremented — successors never run; confirm the canceled session is restarted elsewhere
        log(updateProgress, pass, " is canceled during apply, sorry");
        return;
      }
      try {
        // only apply to editors that are actually visible on screen
        if (UIUtil.isShowing(fileEditor.getComponent())) {
          pass.applyInformationToEditor();
          repaintErrorStripeAndIcon(fileEditor);
          if (pass instanceof TextEditorHighlightingPass) {
            // record that this pass's results are current for the document
            FileStatusMap fileStatusMap = DaemonCodeAnalyzerEx.getInstanceEx(myProject).getFileStatusMap();
            Document document = ((TextEditorHighlightingPass)pass).getDocument();
            int passId = ((TextEditorHighlightingPass)pass).getId();
            fileStatusMap.markFileUpToDate(document, passId);
          }
          log(updateProgress, pass, " Applied");
        }
      }
      catch (ProcessCanceledException e) {
        log(updateProgress, pass, "Error " + e);
        throw e;
      }
      catch (RuntimeException e) {
        VirtualFile file = fileEditor.getFile();
        FileType fileType = file == null ? null : file.getFileType();
        String message = "Exception while applying information to " + fileEditor + "("+fileType+")";
        log(updateProgress, pass, message + e);
        throw new RuntimeException(message, e);
      }
      if (threadsToStartCountdown.decrementAndGet() == 0) {
        // the last pass of this session has been applied: wrap up the session
        HighlightingSessionImpl.waitForAllSessionsHighlightInfosApplied(updateProgress);
        log(updateProgress, pass, "Stopping ");
        updateProgress.stopIfRunning();
        clearStaleEntries();
      }
      else {
        log(updateProgress, pass, "Finished but there are passes in the queue: " + threadsToStartCountdown.get());
      }
      callbackOnApplied.run();
    }, updateProgress.getModalityState(), pass.getExpiredCondition());
  }
private void clearStaleEntries() {
mySubmittedPasses.keySet().removeIf(pass -> pass.myUpdateProgress.isCanceled());
}
private void repaintErrorStripeAndIcon(@NotNull FileEditor fileEditor) {
if (fileEditor instanceof TextEditor) {
DefaultHighlightInfoProcessor.repaintErrorStripeAndIcon(((TextEditor)fileEditor).getEditor(), myProject);
}
}
private boolean isDisposed() {
return isDisposed || myProject.isDisposed();
}
@NotNull
List<HighlightingPass> getAllSubmittedPasses() {
List<HighlightingPass> result = new ArrayList<>(mySubmittedPasses.size());
for (ScheduledPass scheduledPass : mySubmittedPasses.keySet()) {
if (!scheduledPass.myUpdateProgress.isCanceled()) {
result.add(scheduledPass.myPass);
}
}
return result;
}
private static void sortById(@NotNull List<? extends TextEditorHighlightingPass> result) {
ContainerUtil.quickSort(result, Comparator.comparingInt(TextEditorHighlightingPass::getId));
}
private static int getThreadNum() {
Matcher matcher = Pattern.compile("JobScheduler FJ pool (\\d*)/(\\d*)").matcher(Thread.currentThread().getName());
String num = matcher.matches() ? matcher.group(1) : null;
return StringUtil.parseInt(num, 0);
}
  /**
   * Debug-log helper: one line per pass event, indented by worker-thread number so concurrent
   * passes are visually separated; includes progress identity/state and a document-text preview.
   */
  static void log(ProgressIndicator progressIndicator, HighlightingPass pass, @NonNls Object @NotNull ... info) {
    if (LOG.isDebugEnabled()) {
      Document document = pass instanceof TextEditorHighlightingPass ? ((TextEditorHighlightingPass)pass).getDocument() : null;
      CharSequence docText = document == null ? "" : ": '" + StringUtil.first(document.getCharsSequence(), 10, true)+ "'";
      // synchronized so messages from concurrent worker threads do not interleave
      synchronized (PassExecutorService.class) {
        String infos = StringUtil.join(info, Functions.TO_STRING(), " ");
        String message = StringUtil.repeatSymbol(' ', getThreadNum() * 4)
                         + " " + pass + " "
                         + infos
                         + "; progress=" + (progressIndicator == null ? null : progressIndicator.hashCode())
                         + " " + (progressIndicator == null ? "?" : progressIndicator.isCanceled() ? "X" : "V")
                         + docText;
        LOG.debug(message);
        //System.out.println(message);
      }
    }
  }
  // The first exception thrown by any pass is attached to its progress indicator under this key.
  private static final Key<Throwable> THROWABLE_KEY = Key.create("THROWABLE_KEY");
  /** Records the first failure on the indicator; subsequent calls are no-ops (putUserDataIfAbsent). */
  static void saveException(@NotNull Throwable e, @NotNull DaemonProgressIndicator indicator) {
    indicator.putUserDataIfAbsent(THROWABLE_KEY, e);
  }
  /** Test-only access to the exception recorded by {@link #saveException}, or null if none. */
  @TestOnly
  static Throwable getSavedException(@NotNull DaemonProgressIndicator indicator) {
    return indicator.getUserData(THROWABLE_KEY);
  }
  /**
   * Waits for all submitted pass jobs to complete.
   * NOTE(review): the timeout is applied per job, so with several pending jobs the total wait
   * can exceed {@code millis} — confirm callers expect this.
   * @return true if all jobs terminated (interruption is also reported as true), false on timeout
   * @throws Throwable the original cause when a job failed with an ExecutionException
   */
  boolean waitFor(int millis) throws Throwable {
    try {
      for (Job<Void> job : mySubmittedPasses.values()) {
        job.waitForCompletion(millis);
      }
      return true;
    }
    catch (TimeoutException ignored) {
      return false;
    }
    catch (InterruptedException e) {
      // NOTE(review): the interrupt status is not restored here — verify this is intentional
      return true;
    }
    catch (ExecutionException e) {
      throw e.getCause();
    }
  }
}
| jwren/intellij-community | platform/lang-impl/src/com/intellij/codeInsight/daemon/impl/PassExecutorService.java | Java | apache-2.0 | 25,837 |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Channel.V1.Snippets
{
// [START cloudchannel_v1_generated_CloudChannelService_DeleteCustomer_sync]
using Google.Cloud.Channel.V1;
    public sealed partial class GeneratedCloudChannelServiceClientSnippets
    {
        /// <summary>Snippet for DeleteCustomer</summary>
        /// <remarks>
        /// This snippet has been automatically generated for illustrative purposes only.
        /// It may require modifications to work in your environment.
        /// </remarks>
        public void DeleteCustomerRequestObject()
        {
            // Create client
            CloudChannelServiceClient cloudChannelServiceClient = CloudChannelServiceClient.Create();
            // Initialize request argument(s)
            DeleteCustomerRequest request = new DeleteCustomerRequest
            {
                // "[ACCOUNT]" and "[CUSTOMER]" are placeholders; substitute real resource ids.
                CustomerName = CustomerName.FromAccountCustomer("[ACCOUNT]", "[CUSTOMER]"),
            };
            // Make the request
            cloudChannelServiceClient.DeleteCustomer(request);
        }
    }
// [END cloudchannel_v1_generated_CloudChannelService_DeleteCustomer_sync]
}
| googleapis/google-cloud-dotnet | apis/Google.Cloud.Channel.V1/Google.Cloud.Channel.V1.GeneratedSnippets/CloudChannelServiceClient.DeleteCustomerRequestObjectSnippet.g.cs | C# | apache-2.0 | 1,748 |
### Build a starter map
This lab covers the basics of creating a starter mapping application.
The starter map simply loads a default base map, and centers and zooms it in a [MapView](https://developers.arcgis.com/javascript/latest/api-reference/esri-views-MapView.html).
If you are new to ArcGIS and need a full set of instructions on building a basic mapping application
visit the [Getting Started with MapView](https://developers.arcgis.com/javascript/latest/sample-code/get-started-mapview/index.html) tutorial.
1. Copy and paste the code below into a new [jsbin.com](http://jsbin.com).
```html
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="initial-scale=1,maximum-scale=1,user-scalable=no">
<title>JS API Starter App</title>
<link rel="stylesheet" href="https://js.arcgis.com/4.0/esri/css/main.css">
<style>
html, body, #viewDiv {
padding: 0;
margin: 0;
height: 100%;
}
</style>
<script src="https://js.arcgis.com/4.0/"></script>
<script>
require([
"esri/Map",
"esri/views/MapView",
"dojo/domReady!"
], function(Map, MapView) {
var map = new Map({
basemap: "dark-gray"
});
var view = new MapView({
container: "viewDiv",
map: map,
center: [-122.68, 45.52],
zoom: 10
});
});
</script>
</head>
<body>
<div id="viewDiv"></div>
</body>
</html>
```
2. The JSBin `Output` panel should show a dark-grey map centered on Portland, Oregon.
Your app should look something like this:
* [Code](index.html)
* [Live App](https://esri.github.io/geodev-hackerlabs/develop/jsapi/create_starter_map/index.html)
### Bonus
* Experiment with different basemaps such as `topo` or `gray`.
* Declare the `view` variable globally instead and open your browser's javascript console ([see some instructions here](https://www.wickedlysmart.com/hfjsconsole/)). You can then interactively control the view from your browser console by referring to the `view` global variable. Many browsers will autocomplete once you've typed `view.`. For example, change the view extent, center point, zoom level or scale. See [here](https://developers.arcgis.com/javascript/latest/api-reference/esri-views-MapView.html) for some examples.
**Hint:** If you're in a JS Bin, pop the Output into a separate window/tab to get direct access from the console.
``` javascript
var view; // DECLARE the 'view' variable globally.
require([
"esri/Map",
"esri/views/MapView",
"dojo/domReady!"
], function(
Map, MapView) {
...
view = new MapView({ // REMOVE the 'var' so we're setting the new global 'view' variable.
container: "viewDiv",
map: map,
center: [-122.68, 45.52],
zoom: 10
});
```
Try changing the map's basemap by drilling down through the `view.map` property. E.g. `view.map.basemap = "streets"`.
**Note:** You typically don't want to declare global variables like we do here, but it's good to know how to do it for debugging and exploring the API. Plus you're learning about JavaScript variable scope!
* Run the code locally on your machine. Eventually if your app gets larger you'll want to migrate it from JSBin to your desktop.
| nixta/geodev-hackerlabs | develop/jsapi/create_starter_map/lab.md | Markdown | apache-2.0 | 3,342 |
/* Remove Bootstrap's default horizontal padding from containers. */
.container-fluid,
.container {
  padding-right: 0px;
  padding-left: 0px;
  margin-right: auto;
  margin-left: auto;
}
/* Strip the gutter padding from every Bootstrap grid column. */
.col-lg-1,
.col-lg-10,
.col-lg-11,
.col-lg-12,
.col-lg-2,
.col-lg-3,
.col-lg-4,
.col-lg-5,
.col-lg-6,
.col-lg-7,
.col-lg-8,
.col-lg-9,
.col-md-1,
.col-md-10,
.col-md-11,
.col-md-12,
.col-md-2,
.col-md-3,
.col-md-4,
.col-md-5,
.col-md-6,
.col-md-7,
.col-md-8,
.col-md-9,
.col-sm-1,
.col-sm-10,
.col-sm-11,
.col-sm-12,
.col-sm-2,
.col-sm-3,
.col-sm-4,
.col-sm-5,
.col-sm-6,
.col-sm-7,
.col-sm-8,
.col-sm-9,
.col-xs-1,
.col-xs-10,
.col-xs-11,
.col-xs-12,
.col-xs-2,
.col-xs-3,
.col-xs-4,
.col-xs-5,
.col-xs-6,
.col-xs-7,
.col-xs-8,
.col-xs-9 {
  position: relative;
  min-height: 1px;
  padding-right: 0px;
  padding-left: 0px;
}
.wrap__content {
  position: relative;
}
/* Bootstrap overwrite: remove the negative row margins that normally offset column gutters. */
.row {
  margin-right: 0px;
  margin-left: 0px;
}
/* Responsive container widths (Bootstrap breakpoints). */
@media only screen and (min-width: 768px) {
  .container {
    width: inherit;
  }
}
@media only screen and (min-width: 992px) {
  .container {
    width: 970px;
  }
}
@media only screen and (min-width: 1200px) {
  .container {
    width: 1170px;
  }
}
/* Sticky footer styles
-------------------------------------------------- */
html,
body {
  height: 100%;
  background-color: white;
  font-family: 'Nunito', sans-serif;
}
/* Wrapper for page content to push down footer */
#wrap {
  min-height: 100%;
  height: auto;
  /* Negative indent footer by its height */
  margin: 0 auto -100px;
  /* Pad bottom by footer height */
  padding: 0 0 100px;
}
/* Set the fixed height of the footer here (must match #wrap's margin/padding above) */
#footer {
  height: 100px;
  background-color: #DC73FF;
}
| javimosch/bastack | src/client/hglove/css/layout.css | CSS | apache-2.0 | 1,705 |
/*
* Copyright 2015-2016 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.hal.client.runtime.subsystem.elytron.wizardpassword;
/** Steps of the password wizard, in order of appearance. */
public enum PasswordState {
    // step 1: pick which kind of password/credential to create
    CHOOSE_PASSWORD_TYPE,
    // step 2: fill in the type-specific configuration
    CONFIGURATION,
    // step 3: review the entered values before committing
    REVIEW
}
| hpehl/hal.next | app/src/main/java/org/jboss/hal/client/runtime/subsystem/elytron/wizardpassword/PasswordState.java | Java | apache-2.0 | 787 |
# V1SecurityContextConstraintsList
## Properties
Name | Type | Description | Notes
------------ | ------------- | ------------- | -------------
**kind** | **str** | Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: http://releases.k8s.io/release-1.2/docs/devel/api-conventions.md#types-kinds | [optional]
**api_version** | **str** | APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: http://releases.k8s.io/release-1.2/docs/devel/api-conventions.md#resources | [optional]
**metadata** | [**UnversionedListMeta**](UnversionedListMeta.md) | | [optional]
**items** | [**list[V1SecurityContextConstraints]**](V1SecurityContextConstraints.md) | |
[[Back to Model list]](../README.md#documentation-for-models) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to README]](../README.md)
| detiber/lib_openshift | docs/V1SecurityContextConstraintsList.md | Markdown | apache-2.0 | 1,104 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.rave.portal.repository.impl;
import org.apache.rave.exception.NotSupportedException;
import org.apache.commons.lang3.StringUtils;
import org.apache.rave.exception.DataSerializationException;
import org.apache.rave.model.ApplicationData;
import org.apache.rave.portal.model.JpaApplicationData;
import org.apache.rave.portal.model.conversion.JpaApplicationDataConverter;
import org.apache.rave.portal.repository.ApplicationDataRepository;
import org.apache.rave.util.CollectionUtils;
import org.apache.rave.util.JsonUtils;
import org.json.JSONException;
import org.json.JSONObject;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Transactional;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EntityManager;
import javax.persistence.Lob;
import javax.persistence.PersistenceContext;
import javax.persistence.TypedQuery;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.apache.rave.persistence.jpa.util.JpaUtil.getSingleResult;
import static org.apache.rave.persistence.jpa.util.JpaUtil.saveOrUpdate;
@Repository
public class JpaApplicationDataRepository implements ApplicationDataRepository {
    @PersistenceContext
    private EntityManager manager;
    @Autowired
    private JpaApplicationDataConverter converter;
    @Override
    public Class<? extends ApplicationData> getType() {
        return JpaApplicationData.class;
    }
    /**
     * Loads application data by primary key and deserializes its JSON payload.
     * NOTE(review): the result of manager.find is cast to JpaSerializableApplicationData —
     * this assumes every persisted row was written through save(); confirm no other writer exists.
     */
    @Override
    public ApplicationData get(String id) {
        JpaSerializableApplicationData applicationData = (JpaSerializableApplicationData) manager.find(JpaApplicationData.class, Long.parseLong(id));
        if (applicationData != null) {
            applicationData.deserializeData();
        }
        return applicationData;
    }
    /**
     * Persists the item: converts it to the JPA model, serializes the data map to the
     * JSON column, then inserts or updates depending on whether an entity id exists.
     */
    @Override
    @Transactional
    public JpaApplicationData save(ApplicationData item) {
        JpaApplicationData jpaAppData = converter.convert(item);
        JpaSerializableApplicationData jpaSerializableApplicationData = getJpaSerializableApplicationData(jpaAppData);
        jpaSerializableApplicationData.serializeData();
        return saveOrUpdate(jpaSerializableApplicationData.getEntityId(), manager, jpaSerializableApplicationData);
    }
    @Override
    public void delete(ApplicationData item) {
        // non-JPA instances are re-fetched first so the managed entity is removed
        manager.remove(item instanceof JpaApplicationData ? item : get(item.getId()));
    }
    @Override
    public List<ApplicationData> getAll() {
        throw new NotSupportedException("This function is not yet implemented for this class.");
    }
    @Override
    public List<ApplicationData> getLimitedList(int offset, int limit) {
        throw new NotSupportedException("This function is not yet implemented for this class.");
    }
    @Override
    public int getCountAll() {
        throw new NotSupportedException("This function is not yet implemented for this class.");
    }
    /**
     * Fetches application data for several users of one application.
     * A single-element user list is transparently delegated to the single-user query.
     */
    @Override
    public List<ApplicationData> getApplicationData(List<String> userIds, String appId) {
        //if the call is only looking for data for a single user use the more efficient single user variant transparently
        if (userIds.size() == 1) {
            List<ApplicationData> data = new ArrayList<ApplicationData>();
            ApplicationData applicationData = getApplicationData(userIds.get(0), appId);
            if (applicationData != null) {
                data.add(applicationData);
            }
            return data;
        }
        TypedQuery<JpaSerializableApplicationData> query = manager.createNamedQuery(JpaApplicationData.FIND_BY_USER_IDS_AND_APP_ID,
                JpaSerializableApplicationData.class);
        query.setParameter(JpaApplicationData.USER_IDS_PARAM, userIds);
        query.setParameter(JpaApplicationData.APP_URL_PARAM, appId);
        List<JpaSerializableApplicationData> results = query.getResultList();
        for (JpaSerializableApplicationData applicationData : results) {
            applicationData.deserializeData();
        }
        return CollectionUtils.<ApplicationData>toBaseTypedList(results);
    }
    /** Fetches the single application-data row for one user and one application, or null. */
    @Override
    public JpaApplicationData getApplicationData(String personId, String appId) {
        TypedQuery<JpaSerializableApplicationData> query = manager.createNamedQuery(JpaApplicationData.FIND_BY_USER_ID_AND_APP_ID,
                JpaSerializableApplicationData.class);
        query.setParameter(JpaApplicationData.USER_ID_PARAM, personId);
        query.setParameter(JpaApplicationData.APP_URL_PARAM, appId);
        JpaSerializableApplicationData applicationData = getSingleResult(query.getResultList());
        if (applicationData != null) {
            applicationData.deserializeData();
        }
        return applicationData;
    }
    // Wraps a plain JpaApplicationData into the serializable subtype actually persisted.
    private JpaSerializableApplicationData getJpaSerializableApplicationData(JpaApplicationData applicationData) {
        if (applicationData instanceof JpaSerializableApplicationData) {
            return (JpaSerializableApplicationData) applicationData;
        }
        return new JpaSerializableApplicationData(applicationData.getEntityId(), applicationData.getUserId(),
                applicationData.getAppUrl(), applicationData.getData());
    }
    /**
     * This class is here so that the details of the persistence strategy in use for serializing the appdata map to a
     * JSON string doesnt end up being reflected in any public API of the ApplicationData object itself.
     * <p/>
     * This allows the public API of this repository to deal in clean ApplicationData models, but under the covers it
     * uses this model for the actual persistence to the database.
     */
    @Entity
    public static class JpaSerializableApplicationData extends JpaApplicationData {
        // JSON form of the data map; the only representation actually stored in the database
        @Lob
        @Column(name = "serialized_data")
        private String serializedData;
        public JpaSerializableApplicationData() {
            super();
        }
        public JpaSerializableApplicationData(Long entityId, String userId, String appUrl, Map<String, Object> data) {
            super(entityId, userId, appUrl, data);
        }
        /** Serializes the in-memory data map to the JSON column before persisting. */
        public void serializeData() {
            Map<String, Object> data = this.getData();
            if (data != null) {
                serializedData = JsonUtils.stringify(data);
            }
        }
        /** Re-populates the in-memory data map from the JSON column after loading. */
        @SuppressWarnings("unchecked")
        public void deserializeData() {
            if (serializedData != null && StringUtils.isNotBlank(serializedData)) {
                this.setData(JsonUtils.parse(serializedData, Map.class));
            }
        }
    }
}
| kidaa/rave | rave-components/rave-jpa/src/main/java/org/apache/rave/portal/repository/impl/JpaApplicationDataRepository.java | Java | apache-2.0 | 7,525 |
include ../../../mk/pitchfork.mk

# Local variables
_NAME = pbsvtools
$(_NAME)_REPO ?= git://github.com/PacificBiosciences/$(_NAME)
_WRKSRC = $(WORKDIR)/$(_NAME)
$(_NAME)_VERSION ?= HEAD
# short git hash of the checked-out revision; empty when the work tree does not exist yet
_REVISION = $(shell cd $(_WRKSRC) && $(GIT) rev-parse --short $($(_NAME)_VERSION) || true)
# Local works
do-fetch: $(_WRKSRC)
# If the repo variable is not a local path (wildcard finds nothing), clone from the remote;
# otherwise symlink the local checkout and install it editable (-e) for development.
$(_WRKSRC):
ifeq ($(wildcard $($(_NAME)_REPO)),)
	$(GIT) clone $($(_NAME)_REPO) $@
	cd $(_WRKSRC) && $(GIT) checkout $($(_NAME)_VERSION)
DEVOPT =
else
	ln -sfn $($(_NAME)_REPO) $(_WRKSRC)
DEVOPT = -e
endif
do-install: $(PREFIX)/var/pkg/$(_NAME)
# Install via pip and drop a receipt file containing the uninstall command and revision.
$(PREFIX)/var/pkg/$(_NAME): | do-fetch
	$(PIP) install --no-deps $(DEVOPT) $(_WRKSRC)/
	@echo pip uninstall $(_NAME) > $@
	@echo "# $(_REVISION)" >> $@
do-clean:
# Remove all untracked files from the work tree (no-op if it is not a git checkout).
do-distclean:
	cd $(_WRKSRC) && $(GIT) clean -xdf || true
do-flush:
	rm -rf $(_WRKSRC)
| mhsieh/pitchfork | ports/pacbio/pbsvtools/Makefile | Makefile | apache-2.0 | 841 |
# Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Created on Jan 21, 2020
@author: alfoa, wangc
Lasso model fit with Lars using BIC or AIC for model selection.
"""
#Internal Modules (Lazy Importer)--------------------------------------------------------------------
#Internal Modules (Lazy Importer) End----------------------------------------------------------------
#External Modules------------------------------------------------------------------------------------
from numpy import finfo
#External Modules End--------------------------------------------------------------------------------
#Internal Modules------------------------------------------------------------------------------------
from SupervisedLearning.ScikitLearn import ScikitLearnBase
from utils import InputData, InputTypes
#Internal Modules End--------------------------------------------------------------------------------
class LassoLarsIC(ScikitLearnBase):
  """
    Lasso model fit with Lars using BIC or AIC for model selection
  """
  # ROM metadata: regression problem; this wrapper does not normalize data itself
  info = {'problemtype':'regression', 'normalize':False}
  def __init__(self):
    """
      Constructor that will appropriately initialize a supervised learning object
      @ In, None
      @ Out, None
    """
    super().__init__()
    # sklearn is imported lazily here so the framework can load without it installed
    import sklearn
    import sklearn.linear_model
    self.model = sklearn.linear_model.LassoLarsIC
  @classmethod
  def getInputSpecification(cls):
    """
      Method to get a reference to a class that specifies the input data for
      class cls.
      @ In, cls, the class for which we are retrieving the specification
      @ Out, inputSpecification, InputData.ParameterInput, class to use for
        specifying input of cls.
    """
    specs = super(LassoLarsIC, cls).getInputSpecification()
    specs.description = r"""The \xmlNode{LassoLarsIC} (\textit{Lasso model fit with Lars using BIC or AIC for model selection})
                        is a Lasso model fit with Lars using BIC or AIC for model selection.
                        The optimization objective for Lasso is:
                        $(1 / (2 * n\_samples)) * ||y - Xw||^2_2 + alpha * ||w||_1$
                        AIC is the Akaike information criterion and BIC is the Bayes Information criterion. Such criteria
                        are useful to select the value of the regularization parameter by making a trade-off between the
                        goodness of fit and the complexity of the model. A good model should explain well the data
                        while being simple.
                        \zNormalizationNotPerformed{LassoLarsIC}
                        """
    # the XML sub-nodes below mirror the constructor arguments of sklearn's LassoLarsIC
    specs.addSub(InputData.parameterInputFactory("criterion", contentType=InputTypes.makeEnumType("criterion", "criterionType",['bic', 'aic']),
                                                 descr=r"""The type of criterion to use.""", default='aic'))
    specs.addSub(InputData.parameterInputFactory("fit_intercept", contentType=InputTypes.BoolType,
                                                 descr=r"""Whether the intercept should be estimated or not. If False,
                                                 the data is assumed to be already centered.""", default=True))
    specs.addSub(InputData.parameterInputFactory("normalize", contentType=InputTypes.BoolType,
                                                 descr=r"""This parameter is ignored when fit_intercept is set to False. If True,
                                                 the regressors X will be normalized before regression by subtracting the mean and
                                                 dividing by the l2-norm.""", default=True))
    specs.addSub(InputData.parameterInputFactory("max_iter", contentType=InputTypes.IntegerType,
                                                 descr=r"""The maximum number of iterations.""", default=500))
    specs.addSub(InputData.parameterInputFactory("precompute", contentType=InputTypes.StringType,
                                                 descr=r"""Whether to use a precomputed Gram matrix to speed up calculations.
                                                 For sparse input this option is always True to preserve sparsity.""", default='auto'))
    # default eps comes from numpy machine precision (see module-level finfo import)
    specs.addSub(InputData.parameterInputFactory("eps", contentType=InputTypes.FloatType,
                                                 descr=r"""The machine-precision regularization in the computation of the Cholesky
                                                 diagonal factors. Increase this for very ill-conditioned systems. Unlike the tol
                                                 parameter in some iterative optimization-based algorithms, this parameter does not
                                                 control the tolerance of the optimization.""", default=finfo(float).eps))
    specs.addSub(InputData.parameterInputFactory("positive", contentType=InputTypes.BoolType,
                                                 descr=r"""When set to True, forces the coefficients to be positive.""", default=False))
    specs.addSub(InputData.parameterInputFactory("verbose", contentType=InputTypes.BoolType,
                                                 descr=r"""Amount of verbosity.""", default=False))
    return specs
  def _handleInput(self, paramInput):
    """
      Function to handle the common parts of the distribution parameter input.
      @ In, paramInput, ParameterInput, the already parsed input.
      @ Out, None
    """
    super()._handleInput(paramInput)
    settings, notFound = paramInput.findNodesAndExtractValues(['fit_intercept','max_iter', 'normalize', 'precompute',
                                                               'eps','positive','criterion', 'verbose'])
    # notFound must be empty: every node above has a default in the specification
    assert(not notFound)
    self.initializeModel(settings)
| idaholab/raven | framework/SupervisedLearning/ScikitLearn/LinearModel/LassoLarsIC.py | Python | apache-2.0 | 6,386 |
/*
* Copyright 2015 John Ahlroos
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.tabsheet;
import com.vaadin.shared.ui.tabsheet.TabsheetState;
import com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.interfaces.DDLayoutState;
import com.haulmont.cuba.web.widgets.client.addons.dragdroplayouts.ui.interfaces.DragAndDropAwareState;
/**
 * Shared server/client state for a drag-and-drop enabled TabSheet.
 */
public class DDTabSheetState extends TabsheetState
        implements DragAndDropAwareState {

    /** Default fraction (20%) of a tab's width used for the side drop zones. */
    public static final float DEFAULT_HORIZONTAL_DROP_RATIO = 0.2f;

    // Fraction of a tab's width on each side that counts as a drop to the
    // left/right of the tab. NOTE(review): semantics inferred from the name —
    // confirm against the client-side drop handler.
    public float tabLeftRightDropRatio = DEFAULT_HORIZONTAL_DROP_RATIO;

    // Generic drag-and-drop state shared by all drag/drop aware layouts.
    public DDLayoutState ddState = new DDLayoutState();

    @Override
    public DDLayoutState getDragAndDropState() {
        return ddState;
    }
}
| dimone-kun/cuba | modules/web-widgets/src/com/haulmont/cuba/web/widgets/client/addons/dragdroplayouts/ui/tabsheet/DDTabSheetState.java | Java | apache-2.0 | 1,315 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.fontbox.ttf;
import java.io.IOException;
/**
* A table in a true type font.
*
* @author Ben Litchfield
*/
/**
 * The 'hmtx' table of a TrueType font: per-glyph horizontal metrics
 * (advance widths and left side bearings).
 *
 * @author Ben Litchfield
 */
public class HorizontalMetricsTable extends TTFTable
{
    /**
     * A tag that identifies this table type.
     */
    public static final String TAG = "hmtx";

    private int[] advanceWidth;
    private short[] leftSideBearing;
    private short[] nonHorizontalLeftSideBearing;
    private int numHMetrics;

    HorizontalMetricsTable(TrueTypeFont font)
    {
        super(font);
    }

    /**
     * This will read the required data from the stream.
     *
     * @param ttf The font that is being read.
     * @param data The stream to read the data from.
     * @throws IOException If there is an error reading the data.
     */
    @Override
    void read(TrueTypeFont ttf, TTFDataStream data) throws IOException
    {
        HorizontalHeaderTable hHeader = ttf.getHorizontalHeader();
        if (hHeader == null)
        {
            throw new IOException("Could not get hmtx table");
        }
        numHMetrics = hHeader.getNumberOfHMetrics();
        int numGlyphs = ttf.getNumberOfGlyphs();

        int bytesRead = 0;
        advanceWidth = new int[ numHMetrics ];
        leftSideBearing = new short[ numHMetrics ];
        for( int i=0; i<numHMetrics; i++ )
        {
            // each hMetric entry is an unsigned 16-bit width + signed 16-bit lsb
            advanceWidth[i] = data.readUnsignedShort();
            leftSideBearing[i] = data.readSignedShort();
            bytesRead += 4;
        }

        // glyphs beyond numHMetrics share the last advance width but carry
        // their own left side bearing
        int numberNonHorizontal = numGlyphs - numHMetrics;

        // handle bad fonts with too many hmetrics
        if (numberNonHorizontal < 0)
        {
            numberNonHorizontal = numGlyphs;
        }

        // make sure that table is never null and correct size, even with bad fonts that have no
        // "leftSideBearing" table although they should
        nonHorizontalLeftSideBearing = new short[numberNonHorizontal];

        if (bytesRead < getLength())
        {
            for( int i=0; i<numberNonHorizontal; i++ )
            {
                // stop silently when the table's declared length is exhausted
                if (bytesRead < getLength())
                {
                    nonHorizontalLeftSideBearing[i] = data.readSignedShort();
                    bytesRead += 2;
                }
            }
        }

        initialized = true;
    }

    /**
     * Returns the advance width for the given GID.
     *
     * @param gid GID
     * @return the advance width, or a default of 250 when the font provides
     *         no metrics or the GID is negative
     */
    public int getAdvanceWidth(int gid)
    {
        if (gid < 0 || advanceWidth.length == 0)
        {
            // bad font or invalid GID: return a harmless default instead of throwing
            return 250;
        }
        if (gid < numHMetrics)
        {
            return advanceWidth[gid];
        }
        else
        {
            // monospaced fonts may not have a width for every glyph
            // the last one is for subsequent glyphs
            return advanceWidth[advanceWidth.length -1];
        }
    }

    /**
     * Returns the left side bearing for the given GID.
     *
     * @param gid GID
     * @return the left side bearing, or 0 when the font provides none or the
     *         GID is out of range
     */
    public int getLeftSideBearing(int gid)
    {
        if (gid < 0 || leftSideBearing.length == 0)
        {
            return 0;
        }
        if (gid < numHMetrics)
        {
            return leftSideBearing[gid];
        }
        int index = gid - numHMetrics;
        if (index >= nonHorizontalLeftSideBearing.length)
        {
            // bad font or GID beyond numGlyphs: avoid ArrayIndexOutOfBoundsException
            return 0;
        }
        return nonHorizontalLeftSideBearing[index];
    }
}
| apache/pdfbox | fontbox/src/main/java/org/apache/fontbox/ttf/HorizontalMetricsTable.java | Java | apache-2.0 | 4,062 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.asterix.external.library.java.base;
import org.apache.asterix.external.api.IJObject;
import org.apache.asterix.om.types.IAType;
import org.apache.asterix.om.util.container.IObjectPool;
/**
 * Base class for complex (non-primitive) Java object wrappers in the
 * external-library framework. Holds the object pool from which nested
 * {@link IJObject} instances can be allocated by subclasses.
 */
public abstract class JComplexObject<T> implements IJObject<T> {

    // Pool used to allocate member IJObject instances keyed by their IAType.
    protected IObjectPool<IJObject, IAType> pool;

    public void setPool(IObjectPool<IJObject, IAType> pool) {
        this.pool = pool;
    }
}
| apache/incubator-asterixdb | asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/library/java/base/JComplexObject.java | Java | apache-2.0 | 1,245 |
/*
Copyright AppsCode Inc. and Contributors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package v1alpha1
import (
"fmt"
"strings"
)
// Canonical (CamelCase) kind names for the workload and resource types this
// package refers to; Canonicalize() rewrites user-supplied aliases into these.
const (
	KindDeployment            = "Deployment"
	KindReplicaSet            = "ReplicaSet"
	KindReplicationController = "ReplicationController"
	KindStatefulSet           = "StatefulSet"
	KindDaemonSet             = "DaemonSet"
	KindPod                   = "Pod"
	KindPersistentVolumeClaim = "PersistentVolumeClaim"
	KindAppBinding            = "AppBinding"
	KindDeploymentConfig      = "DeploymentConfig"
	KindSecret                = "Secret"
)

// LocalTypedReference contains enough information to let you inspect or modify the referred object.
type LocalTypedReference struct {
	// Kind of the referent.
	// More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds
	// +optional
	Kind string `json:"kind,omitempty" protobuf:"bytes,1,opt,name=kind"`
	// Name of the referent.
	// More info: https://kubernetes.io/docs/concepts/overview/working-with-objects/names/#names
	// +optional
	Name string `json:"name,omitempty" protobuf:"bytes,2,opt,name=name"`
	// API version of the referent.
	// +optional
	APIVersion string `json:"apiVersion,omitempty" protobuf:"bytes,3,opt,name=apiVersion"`
}
// Canonicalize validates the reference and rewrites Kind in place into its
// canonical CamelCase form, accepting the usual plural and short-name
// aliases (e.g. "deploy", "rs", "ds"). It returns an error when Name or
// Kind is empty, or when the kind is not one of the supported workloads.
func (workload *LocalTypedReference) Canonicalize() error {
	if workload.Name == "" || workload.Kind == "" {
		return fmt.Errorf("missing workload name or kind")
	}
	// lowercase alias -> canonical kind
	aliases := map[string]string{
		"deployments": KindDeployment, "deployment": KindDeployment, "deploy": KindDeployment,
		"replicasets": KindReplicaSet, "replicaset": KindReplicaSet, "rs": KindReplicaSet,
		"replicationcontrollers": KindReplicationController, "replicationcontroller": KindReplicationController, "rc": KindReplicationController,
		"statefulsets": KindStatefulSet, "statefulset": KindStatefulSet,
		"daemonsets": KindDaemonSet, "daemonset": KindDaemonSet, "ds": KindDaemonSet,
	}
	canonical, known := aliases[strings.ToLower(workload.Kind)]
	if !known {
		return fmt.Errorf(`unrecognized workload "Kind" %v`, workload.Kind)
	}
	workload.Kind = canonical
	return nil
}
// GetRepositoryCRDName derives the Repository CRD name for this workload:
// "<kind>.<workload name>" for Deployments/ReplicaSets/ReplicationControllers,
// "<kind>.<pod name>" for StatefulSets, and
// "<kind>.<workload name>.<node name>" for DaemonSets.
// An empty string is returned for any other kind.
func (workload LocalTypedReference) GetRepositoryCRDName(podName, nodeName string) string {
	kind := strings.ToLower(workload.Kind)
	switch workload.Kind {
	case KindDeployment, KindReplicaSet, KindReplicationController:
		return kind + "." + workload.Name
	case KindStatefulSet:
		return kind + "." + podName
	case KindDaemonSet:
		return kind + "." + workload.Name + "." + nodeName
	default:
		return ""
	}
}
// HostnamePrefix resolves the backup hostname and directory prefix for this
// workload. The receiver is a value, so the canonicalization of Kind done
// here does not leak back to the caller.
//
// Returns (workload name, "<kind>/<name>") for Deployments/ReplicaSets/
// ReplicationControllers, (pod name, "<kind>/<pod>") for StatefulSets, and
// (node name, "<kind>/<name>/<node>") for DaemonSets. podName / nodeName are
// required for StatefulSets / DaemonSets respectively.
func (workload LocalTypedReference) HostnamePrefix(podName, nodeName string) (hostname, prefix string, err error) {
	// Canonicalize already rejects references with an empty Name or Kind
	// (returning "missing workload name or kind"), so no separate
	// emptiness check is needed afterwards.
	if err := workload.Canonicalize(); err != nil {
		return "", "", err
	}
	switch workload.Kind {
	case KindDeployment, KindReplicaSet, KindReplicationController:
		return workload.Name, strings.ToLower(workload.Kind) + "/" + workload.Name, nil
	case KindStatefulSet:
		if podName == "" {
			return "", "", fmt.Errorf("missing podName for %s", KindStatefulSet)
		}
		return podName, strings.ToLower(workload.Kind) + "/" + podName, nil
	case KindDaemonSet:
		if nodeName == "" {
			return "", "", fmt.Errorf("missing nodeName for %s", KindDaemonSet)
		}
		return nodeName, strings.ToLower(workload.Kind) + "/" + workload.Name + "/" + nodeName, nil
	default:
		// unreachable after a successful Canonicalize, kept as a safety net
		return "", "", fmt.Errorf(`unrecognized workload "Kind" %v`, workload.Kind)
	}
}
// StatefulSetPodName builds the "<app>-<ordinal>" pod name of a StatefulSet
// replica; both parts must be non-empty.
func StatefulSetPodName(appName, podOrdinal string) (string, error) {
	if appName == "" || podOrdinal == "" {
		return "", fmt.Errorf("missing appName or podOrdinal")
	}
	return fmt.Sprintf("%s-%s", appName, podOrdinal), nil
}
| k8sdb/apimachinery | vendor/stash.appscode.dev/apimachinery/apis/stash/v1alpha1/workload.go | GO | apache-2.0 | 4,090 |
package com.cloudhopper.commons.charset.demo;
/*
* #%L
* ch-commons-charset
* %%
* Copyright (C) 2012 Cloudhopper by Twitter
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.cloudhopper.commons.charset.CharsetUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Demo that runs a string containing non-Latin characters (CJK U+6025 and
 * the Euro sign U+20AC) through CharsetUtil.normalize against the UTF-8
 * charset table and logs both the source and the result.
 *
 * NOTE(review): the exact effect of normalize() on these characters is
 * defined by CharsetUtil — confirm against its documentation.
 *
 * @author joelauer
 */
public class Charset5Main {
    private static final Logger logger = LoggerFactory.getLogger(Charset5Main.class);

    static public void main(String[] args) throws Exception {
        String sourceString = "h\u6025\u20ACllo";
        String targetString = CharsetUtil.normalize(sourceString, CharsetUtil.CHARSET_UTF_8);
        logger.debug("source string: " + sourceString);
        logger.debug("target string: " + targetString);
    }
}
| twitter/cloudhopper-commons | ch-commons-charset/src/test/java/com/cloudhopper/commons/charset/demo/Charset5Main.java | Java | apache-2.0 | 1,291 |
/* COPYRIGHT (c) 2014 Umut Acar, Arthur Chargueraud, and Michael
* Rainey
* All rights reserved.
*
* \file adjlist.hpp
* \brief Adjacency-list graph format
*
*/
#ifndef _PASL_GRAPH_ADJLIST_H_
#define _PASL_GRAPH_ADJLIST_H_
#include "../../graph/include/graph.hpp"
/***********************************************************************/
namespace pasl {
namespace graph {
/*---------------------------------------------------------------------*/
/* Symmetric vertex */
// Vertex representation for symmetric (undirected) graphs: a single
// neighbor bag serves as both the in-edge and out-edge list, so every
// in-/out- accessor pair below aliases the same storage.
template <class Vertex_id_bag>
class symmetric_vertex {
public:

  typedef Vertex_id_bag vtxid_bag_type;
  typedef typename vtxid_bag_type::value_type vtxid_type;

  symmetric_vertex() { }

  symmetric_vertex(vtxid_bag_type neighbors)
  : neighbors(neighbors) { }

  // single shared neighbor list (in == out for a symmetric vertex)
  vtxid_bag_type neighbors;

  vtxid_type get_in_neighbor(vtxid_type j) const {
    return neighbors[j];
  }

  vtxid_type get_out_neighbor(vtxid_type j) const {
    return neighbors[j];
  }

  vtxid_type* get_in_neighbors() const {
    return neighbors.data();
  }

  vtxid_type* get_out_neighbors() const {
    return neighbors.data();
  }

  void set_in_neighbor(vtxid_type j, vtxid_type nbr) {
    neighbors[j] = nbr;
  }

  void set_out_neighbor(vtxid_type j, vtxid_type nbr) {
    neighbors[j] = nbr;
  }

  vtxid_type get_in_degree() const {
    return vtxid_type(neighbors.size());
  }

  vtxid_type get_out_degree() const {
    return vtxid_type(neighbors.size());
  }

  // Note: both degree setters (re)allocate the same shared bag.
  void set_in_degree(vtxid_type j) {
    neighbors.alloc(j);
  }

  // todo: use neighbors.resize()
  void set_out_degree(vtxid_type j) {
    neighbors.alloc(j);
  }

  void swap_in_neighbors(vtxid_bag_type& other) {
    neighbors.swap(other);
  }

  void swap_out_neighbors(vtxid_bag_type& other) {
    neighbors.swap(other);
  }

  // Debug-only sanity check: every stored neighbor id is a valid vertex id.
  void check(vtxid_type nb_vertices) const {
#ifndef NDEBUG
    for (vtxid_type i = 0; i < neighbors.size(); i++)
      check_vertex(neighbors[i], nb_vertices);
#endif
  }

};
/*---------------------------------------------------------------------*/
/* Asymmetric vertex */
// Vertex representation for asymmetric (directed) graphs: in-edges and
// out-edges are stored in two independent neighbor bags.
template <class Vertex_id_bag>
class asymmetric_vertex {
public:

  typedef Vertex_id_bag vtxid_bag_type;
  typedef typename vtxid_bag_type::value_type vtxid_type;

  vtxid_bag_type in_neighbors;
  vtxid_bag_type out_neighbors;

  vtxid_type get_in_neighbor(vtxid_type j) const {
    return in_neighbors[j];
  }

  vtxid_type get_out_neighbor(vtxid_type j) const {
    return out_neighbors[j];
  }

  vtxid_type* get_in_neighbors() const {
    return in_neighbors.data();
  }

  vtxid_type* get_out_neighbors() const {
    return out_neighbors.data();
  }

  void set_in_neighbor(vtxid_type j, vtxid_type nbr) {
    in_neighbors[j] = nbr;
  }

  void set_out_neighbor(vtxid_type j, vtxid_type nbr) {
    out_neighbors[j] = nbr;
  }

  vtxid_type get_in_degree() const {
    return vtxid_type(in_neighbors.size());
  }

  vtxid_type get_out_degree() const {
    return vtxid_type(out_neighbors.size());
  }

  void set_in_degree(vtxid_type j) {
    in_neighbors.alloc(j);
  }

  void set_out_degree(vtxid_type j) {
    out_neighbors.alloc(j);
  }

  void swap_in_neighbors(vtxid_bag_type& other) {
    in_neighbors.swap(other);
  }

  void swap_out_neighbors(vtxid_bag_type& other) {
    out_neighbors.swap(other);
  }

  // Debug-only sanity check of all stored vertex ids. Guarded by NDEBUG for
  // consistency with symmetric_vertex::check (the original compiled the
  // loops even in release builds, where check_vertex is a no-op anyway).
  void check(vtxid_type nb_vertices) const {
#ifndef NDEBUG
    for (vtxid_type i = 0; i < in_neighbors.size(); i++)
      check_vertex(in_neighbors[i], nb_vertices);
    for (vtxid_type i = 0; i < out_neighbors.size(); i++)
      check_vertex(out_neighbors[i], nb_vertices);
#endif
  }

};
/*---------------------------------------------------------------------*/
/* Adjacency-list format */
// Graph in adjacency-list representation: a sequence of vertices plus the
// total edge count. The vertex sequence type determines whether the graph
// is symmetric or asymmetric and how vertices are stored.
template <class Adjlist_seq>
class adjlist {
public:

  typedef Adjlist_seq adjlist_seq_type;
  typedef typename adjlist_seq_type::value_type vertex_type;
  typedef typename vertex_type::vtxid_bag_type::value_type vtxid_type;
  typedef typename adjlist_seq_type::alias_type adjlist_seq_alias_type;
  // Non-owning view of this graph type (shares the underlying storage).
  typedef adjlist<adjlist_seq_alias_type> alias_type;

  edgeid_type nb_edges;
  adjlist_seq_type adjlists;

  adjlist()
  : nb_edges(0) { }

  adjlist(edgeid_type nb_edges)
  : nb_edges(nb_edges) { }

  vtxid_type get_nb_vertices() const {
    return vtxid_type(adjlists.size());
  }

  // Debug-only consistency check: every neighbor id is a valid vertex id,
  // and the per-vertex in-degrees and out-degrees each sum to nb_edges.
  void check() const {
#ifndef NDEBUG
    for (vtxid_type i = 0; i < adjlists.size(); i++)
      adjlists[i].check(get_nb_vertices());
    size_t m = 0;
    for (vtxid_type i = 0; i < adjlists.size(); i++)
      m += adjlists[i].get_in_degree();
    assert(m == nb_edges);
    m = 0;
    for (vtxid_type i = 0; i < adjlists.size(); i++)
      m += adjlists[i].get_out_degree();
    assert(m == nb_edges);
#endif
  }

};
/*---------------------------------------------------------------------*/
/* Equality operators */

// Two symmetric vertices are equal iff their neighbor lists match
// element-by-element (same degree, same order).
template <class Vertex_id_bag>
bool operator==(const symmetric_vertex<Vertex_id_bag>& lhs,
                const symmetric_vertex<Vertex_id_bag>& rhs) {
  using vtxid_type = typename symmetric_vertex<Vertex_id_bag>::vtxid_type;
  const vtxid_type degree = lhs.get_out_degree();
  if (degree != rhs.get_out_degree())
    return false;
  vtxid_type i = 0;
  while (i < degree) {
    if (lhs.get_out_neighbor(i) != rhs.get_out_neighbor(i))
      return false;
    i++;
  }
  return true;
}

template <class Vertex_id_bag>
bool operator!=(const symmetric_vertex<Vertex_id_bag>& lhs,
                const symmetric_vertex<Vertex_id_bag>& rhs) {
  return !(lhs == rhs);
}

// Two graphs are equal iff they have the same vertex count, the same edge
// count, and pairwise-equal adjacency lists.
template <class Adjlist_seq>
bool operator==(const adjlist<Adjlist_seq>& lhs,
                const adjlist<Adjlist_seq>& rhs) {
  using vtxid_type = typename adjlist<Adjlist_seq>::vtxid_type;
  const vtxid_type nb_vertices = lhs.get_nb_vertices();
  if (nb_vertices != rhs.get_nb_vertices() || lhs.nb_edges != rhs.nb_edges)
    return false;
  for (vtxid_type i = 0; i < nb_vertices; i++)
    if (lhs.adjlists[i] != rhs.adjlists[i])
      return false;
  return true;
}

template <class Adjlist_seq>
bool operator!=(const adjlist<Adjlist_seq>& lhs,
                const adjlist<Adjlist_seq>& rhs) {
  return !(lhs == rhs);
}
/*---------------------------------------------------------------------*/
/* Flat adjacency-list format */
// CSR-style flat vertex sequence: one contiguous allocation holds the
// offsets array (nb_offsets = nb_vertices + 1 entries) immediately followed
// by the edge array. Vertex i's neighbors live in
// edges[offsets[i] .. offsets[i+1]).
//
// When Is_alias is true the object is a non-owning view over storage owned
// by another flat_adjlist_seq.
//
// NOTE(review): the class defines a destructor and a copy constructor but
// no copy-assignment operator; the implicit assignment would memberwise-copy
// underlying_array and later double-free it. Confirm assignment is never
// used, or delete it.
template <class Vertex_id, bool Is_alias = false>
class flat_adjlist_seq {
public:

  typedef flat_adjlist_seq<Vertex_id> self_type;
  typedef Vertex_id vtxid_type;
  typedef size_t size_type;
  typedef data::pointer_seq<vtxid_type> vertex_seq_type;
  typedef symmetric_vertex<vertex_seq_type> value_type;
  typedef flat_adjlist_seq<vtxid_type, true> alias_type;

  // single owning allocation (NULL for aliases); offsets/edges point into it
  char* underlying_array;
  vtxid_type* offsets;
  vtxid_type nb_offsets;
  vtxid_type* edges;

  flat_adjlist_seq()
  : underlying_array(NULL), offsets(NULL),
    nb_offsets(0), edges(NULL) { }

  // Only alias (shallow) copies are supported; deep copy is unimplemented.
  flat_adjlist_seq(const flat_adjlist_seq& other) {
    if (Is_alias) {
      underlying_array = other.underlying_array;
      offsets = other.offsets;
      nb_offsets = other.nb_offsets;
      edges = other.edges;
    } else {
      util::atomic::die("todo");
    }
  }

  //! \todo instead of using Is_alias, pass either ptr_seq or array_seq as underlying_array

  // Owning instances free the storage; aliases must not.
  ~flat_adjlist_seq() {
    if (! Is_alias)
      clear();
  }

  // Fills 'alias' with a non-owning view of this sequence.
  void get_alias(alias_type& alias) const {
    alias.underlying_array = NULL;
    alias.offsets = offsets;
    alias.nb_offsets = nb_offsets;
    alias.edges = edges;
  }

  // Returns a non-owning view of this sequence.
  alias_type get_alias() const {
    alias_type alias;
    alias.underlying_array = NULL;
    alias.offsets = offsets;
    alias.nb_offsets = nb_offsets;
    alias.edges = edges;
    return alias;
  }

  void clear() {
    if (underlying_array != NULL)
      data::myfree(underlying_array);
    offsets = NULL;
    edges = NULL;
  }

  // Out-degree of vertex v, from consecutive offsets.
  vtxid_type degree(vtxid_type v) const {
    assert(v >= 0);
    assert(v < size());
    return offsets[v + 1] - offsets[v];
  }

  // Returns vertex ix as a symmetric_vertex viewing the edge subrange.
  value_type operator[](vtxid_type ix) const {
    assert(ix >= 0);
    assert(ix < size());
    return value_type(vertex_seq_type(&edges[offsets[ix]], degree(ix)));
  }

  // Number of vertices (one less than the offsets array length).
  vtxid_type size() const {
    return nb_offsets - 1;
  }

  void swap(self_type& other) {
    std::swap(underlying_array, other.underlying_array);
    std::swap(offsets, other.offsets);
    std::swap(nb_offsets, other.nb_offsets);
    std::swap(edges, other.edges);
  }

  void alloc(size_type) {
    util::atomic::die("unsupported");
  }

  // Adopts 'bytes' (takes ownership) and lays out offsets followed by edges.
  // NOTE(review): the nb_edges parameter is unused here — presumably the
  // layout is fully determined by nb_vertices; confirm with callers.
  void init(char* bytes, vtxid_type nb_vertices, edgeid_type nb_edges) {
    nb_offsets = nb_vertices + 1;
    underlying_array = bytes;
    offsets = (vtxid_type*)bytes;
    edges = &offsets[nb_offsets];
  }

  value_type* data() {
    util::atomic::die("unsupported");
    return NULL;
  }

};

template <class Vertex_id, bool Is_alias = false>
using flat_adjlist = adjlist<flat_adjlist_seq<Vertex_id, Is_alias>>;

template <class Vertex_id>
using flat_adjlist_alias = flat_adjlist<Vertex_id, true>;
} // end namespace
} // end namespace
/***********************************************************************/
#endif /*! _PASL_GRAPH_ADJLIST_H_ */
| laxmandhulipala/PWSA-Star | xytheta/pwsa_project/PWSA/pasl/graph/include/adjlist.hpp | C++ | apache-2.0 | 8,982 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="pt">
<head>
<!-- Generated by javadoc (version 1.7.0_71) on Tue Jun 16 10:36:54 BRT 2015 -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Uses of Class opennlp.tools.ml.model.SequenceStreamEventStream (Apache OpenNLP Tools 1.6.0 API)</title>
<meta name="date" content="2015-06-16">
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
</head>
<body>
<script type="text/javascript"><!--
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class opennlp.tools.ml.model.SequenceStreamEventStream (Apache OpenNLP Tools 1.6.0 API)";
}
//-->
</script>
<noscript>
<div>JavaScript is disabled on your browser.</div>
</noscript>
<!-- ========= START OF TOP NAVBAR ======= -->
<div class="topNav"><a name="navbar_top">
<!-- -->
</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../opennlp/tools/ml/model/SequenceStreamEventStream.html" title="class in opennlp.tools.ml.model">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?opennlp/tools/ml/model/class-use/SequenceStreamEventStream.html" target="_top">Frames</a></li>
<li><a href="SequenceStreamEventStream.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_top">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_top");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_top">
<!-- -->
</a></div>
<!-- ========= END OF TOP NAVBAR ========= -->
<div class="header">
<h2 title="Uses of Class opennlp.tools.ml.model.SequenceStreamEventStream" class="title">Uses of Class<br>opennlp.tools.ml.model.SequenceStreamEventStream</h2>
</div>
<div class="classUseContainer">No usage of opennlp.tools.ml.model.SequenceStreamEventStream</div>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<div class="bottomNav"><a name="navbar_bottom">
<!-- -->
</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow">
<!-- -->
</a>
<ul class="navList" title="Navigation">
<li><a href="../../../../../overview-summary.html">Overview</a></li>
<li><a href="../package-summary.html">Package</a></li>
<li><a href="../../../../../opennlp/tools/ml/model/SequenceStreamEventStream.html" title="class in opennlp.tools.ml.model">Class</a></li>
<li class="navBarCell1Rev">Use</li>
<li><a href="../package-tree.html">Tree</a></li>
<li><a href="../../../../../deprecated-list.html">Deprecated</a></li>
<li><a href="../../../../../index-all.html">Index</a></li>
<li><a href="../../../../../help-doc.html">Help</a></li>
</ul>
</div>
<div class="subNav">
<ul class="navList">
<li>Prev</li>
<li>Next</li>
</ul>
<ul class="navList">
<li><a href="../../../../../index.html?opennlp/tools/ml/model/class-use/SequenceStreamEventStream.html" target="_top">Frames</a></li>
<li><a href="SequenceStreamEventStream.html" target="_top">No Frames</a></li>
</ul>
<ul class="navList" id="allclasses_navbar_bottom">
<li><a href="../../../../../allclasses-noframe.html">All Classes</a></li>
</ul>
<div>
<script type="text/javascript"><!--
allClassesLink = document.getElementById("allclasses_navbar_bottom");
if(window==top) {
allClassesLink.style.display = "block";
}
else {
allClassesLink.style.display = "none";
}
//-->
</script>
</div>
<a name="skip-navbar_bottom">
<!-- -->
</a></div>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<p class="legalCopy"><small>Copyright © 2015 <a href="http://www.apache.org/">The Apache Software Foundation</a>. All rights reserved.</small></p>
</body>
</html>
| ericmguimaraes/COMP0378 | references/opennlp-docs/apidocs/opennlp-tools/opennlp/tools/ml/model/class-use/SequenceStreamEventStream.html | HTML | apache-2.0 | 4,540 |
package org.anddev.andengine.opengl.texture;
import java.util.*;
import org.anddev.andengine.opengl.texture.source.*;
import org.anddev.andengine.util.*;
import org.anddev.andengine.opengl.texture.builder.*;
import android.graphics.*;
/**
 * A {@link Texture} whose texture sources are positioned automatically by an
 * {@link ITextureBuilder} when {@link #build} is called, rather than at
 * caller-supplied coordinates.
 *
 * NOTE(review): this file appears to be decompiler output; redundancies such
 * as explicit super() calls in the inner class stem from the decompiler.
 */
public class BuildableTexture extends Texture
{
    // Sources queued via addTextureSource(ITextureSource, Callback) and
    // placed on the next build(); cleared afterwards.
    private final ArrayList<TextureSourceWithWithLocationCallback> mTextureSourcesToPlace;

    public BuildableTexture(final int n, final int n2) {
        super(n, n2, TextureOptions.DEFAULT, null);
        this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>();
    }

    public BuildableTexture(final int n, final int n2, final ITextureStateListener textureStateListener) {
        super(n, n2, TextureOptions.DEFAULT, textureStateListener);
        this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>();
    }

    public BuildableTexture(final int n, final int n2, final TextureOptions textureOptions) throws IllegalArgumentException {
        super(n, n2, textureOptions, null);
        this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>();
    }

    public BuildableTexture(final int n, final int n2, final TextureOptions textureOptions, final ITextureStateListener textureStateListener) throws IllegalArgumentException {
        super(n, n2, textureOptions, textureStateListener);
        this.mTextureSourcesToPlace = new ArrayList<TextureSourceWithWithLocationCallback>();
    }

    // Deprecated: fixed-position placement bypasses the builder; prefer
    // addTextureSource(ITextureSource, Callback) + build(ITextureBuilder).
    @Deprecated
    @Override
    public TextureSourceWithLocation addTextureSource(final ITextureSource textureSource, final int n, final int n2) {
        return super.addTextureSource(textureSource, n, n2);
    }

    // Queues a source for automatic placement; the callback receives the
    // placed location once build() runs.
    public void addTextureSource(final ITextureSource textureSource, final Callback<TextureSourceWithLocation> callback) {
        this.mTextureSourcesToPlace.add(new TextureSourceWithWithLocationCallback(textureSource, callback));
    }

    // Packs all queued sources into this texture and flags it for re-upload.
    public void build(final ITextureBuilder textureBuilder) throws ITextureBuilder.TextureSourcePackingException {
        textureBuilder.pack(this, this.mTextureSourcesToPlace);
        this.mTextureSourcesToPlace.clear();
        this.mUpdateOnHardwareNeeded = true;
    }

    @Override
    public void clearTextureSources() {
        super.clearTextureSources();
        this.mTextureSourcesToPlace.clear();
    }

    // Removes a queued (not yet built) source; searches backwards and stops
    // at the first identity match.
    public void removeTextureSource(final ITextureSource textureSource) {
        final ArrayList<TextureSourceWithWithLocationCallback> mTextureSourcesToPlace = this.mTextureSourcesToPlace;
        for (int i = -1 + mTextureSourcesToPlace.size(); i >= 0; --i) {
            if (mTextureSourcesToPlace.get(i).mTextureSource == textureSource) {
                mTextureSourcesToPlace.remove(i);
                this.mUpdateOnHardwareNeeded = true;
                return;
            }
        }
    }

    // Pairs a texture source with the callback to invoke once it is placed;
    // delegates all ITextureSource methods to the wrapped source.
    public static class TextureSourceWithWithLocationCallback implements ITextureSource
    {
        private final Callback<TextureSourceWithLocation> mCallback;
        private final ITextureSource mTextureSource;

        public TextureSourceWithWithLocationCallback(final ITextureSource mTextureSource, final Callback<TextureSourceWithLocation> mCallback) {
            super();
            this.mTextureSource = mTextureSource;
            this.mCallback = mCallback;
        }

        // NOTE(review): returning null violates the clone() contract; this is
        // preserved decompiled behavior — confirm no caller relies on clone().
        @Override
        public TextureSourceWithWithLocationCallback clone() {
            return null;
        }

        public Callback<TextureSourceWithLocation> getCallback() {
            return this.mCallback;
        }

        @Override
        public int getHeight() {
            return this.mTextureSource.getHeight();
        }

        public ITextureSource getTextureSource() {
            return this.mTextureSource;
        }

        @Override
        public int getWidth() {
            return this.mTextureSource.getWidth();
        }

        @Override
        public Bitmap onLoadBitmap() {
            return this.mTextureSource.onLoadBitmap();
        }

        @Override
        public String toString() {
            return this.mTextureSource.toString();
        }
    }
}
| rLadia/AttacknidPatch | decompiled_src/Procyon/org/anddev/andengine/opengl/texture/BuildableTexture.java | Java | apache-2.0 | 4,239 |
/** vim: et:ts=4:sw=4:sts=4
* @license RequireJS 2.1.15 Copyright (c) 2010-2014, The Dojo Foundation All Rights Reserved.
* Available via the MIT or new BSD license.
* see: http://github.com/jrburke/requirejs for details
*/
//Not using strict: uneven strict support in browsers, #392, and causes
//problems with requirejs.exec()/transpiler plugins that may not be strict.
/*jslint regexp: true, nomen: true, sloppy: true */
/*global window, navigator, document, importScripts, setTimeout, opera */
var requirejs, require, define;
(function (global) {
var req, s, head, baseElement, dataMain, src,
interactiveScript, currentlyAddingScript, mainScript, subPath,
version = '2.1.15',
commentRegExp = /(\/\*([\s\S]*?)\*\/|([^:]|^)\/\/(.*)$)/mg,
cjsRequireRegExp = /[^.]\s*require\s*\(\s*["']([^'"\s]+)["']\s*\)/g,
jsSuffixRegExp = /\.js$/,
currDirRegExp = /^\.\//,
op = Object.prototype,
ostring = op.toString,
hasOwn = op.hasOwnProperty,
ap = Array.prototype,
apsp = ap.splice,
isBrowser = !!(typeof window !== 'undefined' && typeof navigator !== 'undefined' && window.document),
isWebWorker = !isBrowser && typeof importScripts !== 'undefined',
//PS3 indicates loaded and complete, but need to wait for complete
//specifically. Sequence is 'loading', 'loaded', execution,
// then 'complete'. The UA check is unfortunate, but not sure how
//to feature test w/o causing perf issues.
readyRegExp = isBrowser && navigator.platform === 'PLAYSTATION 3' ?
/^complete$/ : /^(complete|loaded)$/,
defContextName = '_',
//Oh the tragedy, detecting opera. See the usage of isOpera for reason.
isOpera = typeof opera !== 'undefined' && opera.toString() === '[object Opera]',
contexts = {},
cfg = {},
globalDefQueue = [],
useInteractive = false;
function isFunction(it) {
return ostring.call(it) === '[object Function]';
}
function isArray(it) {
return ostring.call(it) === '[object Array]';
}
/**
* Helper function for iterating over an array. If the func returns
* a true value, it will break out of the loop.
*/
function each(ary, func) {
if (ary) {
var i;
for (i = 0; i < ary.length; i += 1) {
if (ary[i] && func(ary[i], i, ary)) {
break;
}
}
}
}
/**
* Helper function for iterating over an array backwards. If the func
* returns a true value, it will break out of the loop.
*/
function eachReverse(ary, func) {
if (ary) {
var i;
for (i = ary.length - 1; i > -1; i -= 1) {
if (ary[i] && func(ary[i], i, ary)) {
break;
}
}
}
}
function hasProp(obj, prop) {
return hasOwn.call(obj, prop);
}
function getOwn(obj, prop) {
return hasProp(obj, prop) && obj[prop];
}
/**
* Cycles over properties in an object and calls a function for each
* property value. If the function returns a truthy value, then the
* iteration is stopped.
*/
function eachProp(obj, func) {
var prop;
for (prop in obj) {
if (hasProp(obj, prop)) {
if (func(obj[prop], prop)) {
break;
}
}
}
}
/**
* Simple function to mix in properties from source into target,
* but only if target does not already have a property of the same name.
*/
function mixin(target, source, force, deepStringMixin) {
if (source) {
eachProp(source, function (value, prop) {
if (force || !hasProp(target, prop)) {
if (deepStringMixin && typeof value === 'object' && value &&
!isArray(value) && !isFunction(value) &&
!(value instanceof RegExp)) {
if (!target[prop]) {
target[prop] = {};
}
mixin(target[prop], value, force, deepStringMixin);
} else {
target[prop] = value;
}
}
});
}
return target;
}
//Like Function.prototype.bind, but takes the 'this' object first,
//which reads more naturally at call sites.
function bind(obj, fn) {
    function bound() {
        return fn.apply(obj, arguments);
    }
    return bound;
}
//Live collection of every <script> element in the document.
function scripts() {
    var nodes = document.getElementsByTagName('script');
    return nodes;
}
//Default global error handler: rethrow so the failure is not
//silently swallowed. Overridable via req.onError.
function defaultOnError(err) {
    throw err;
}
//Resolves a dotted path like 'a.b.c' against the global object.
//Falsy input is returned unchanged.
function getGlobal(value) {
    if (!value) {
        return value;
    }
    var current = global;
    each(value.split('.'), function (segment) {
        current = current[segment];
    });
    return current;
}
/**
 * Constructs an Error that carries requirejs metadata and points at
 * the online error documentation.
 * @param {String} id the error ID that maps to an ID on a web page.
 * @param {String} msg human readable error.
 * @param {Error} [err] the original error, if there is one.
 * @param {Array} [requireModules] module ids involved in the failure.
 * @returns {Error}
 */
function makeError(id, msg, err, requireModules) {
    var detail = msg + '\nhttp://requirejs.org/docs/errors.html#' + id,
        e = new Error(detail);
    e.requireType = id;
    e.requireModules = requireModules;
    if (err) {
        //Preserve the underlying cause for callers that inspect it.
        e.originalError = err;
    }
    return e;
}
//Bail out of the whole loader if another AMD loader already claimed
//the page: a second `define` would clobber module registration.
if (typeof define !== 'undefined') {
    //If a define is already in play via another AMD loader,
    //do not overwrite.
    return;
}
if (typeof requirejs !== 'undefined') {
    if (isFunction(requirejs)) {
        //Do not overwrite an existing requirejs instance.
        return;
    }
    //requirejs was set to a plain object before this script ran:
    //treat it as pre-loader configuration and consume it.
    cfg = requirejs;
    requirejs = undefined;
}
//Allow for a require config object
if (typeof require !== 'undefined' && !isFunction(require)) {
    //assume it is a config object.
    cfg = require;
    require = undefined;
}
function newContext(contextName) {
var inCheckLoaded, Module, context, handlers,
    checkLoadedTimeoutId,
    config = {
        //Defaults. Do not set a default for map
        //config to speed up normalize(), which
        //will run faster if there is no default.
        waitSeconds: 7,
        baseUrl: './',
        paths: {},
        bundles: {},
        pkgs: {},
        shim: {},
        config: {}
    },
    //Modules created (via define/require) but not yet fully
    //defined, keyed by module id.
    registry = {},
    //registry of just enabled modules, to speed
    //cycle breaking code when lots of modules
    //are registered, but not activated.
    enabledRegistry = {},
    //Listeners saved by undef() so a reloaded module can re-attach.
    undefEvents = {},
    //define() calls waiting to be matched with their module id.
    defQueue = [],
    //Completed module values, keyed by module id.
    defined = {},
    //URLs already requested, to avoid duplicate script fetches.
    urlFetched = {},
    //Reverse map: module id -> id of the bundle that provides it.
    bundlesMap = {},
    //Counters used to mint unique internal ids.
    requireCounter = 1,
    unnormalizedCounter = 1;
/**
 * Trims the . and .. from an array of path segments.
 * It will keep a leading path segment if a .. will become
 * the first path segment, to help with module name lookups,
 * which act like paths, but can be remapped. But the end result,
 * all paths that use this function should look normalized.
 * NOTE: this method MODIFIES the input array.
 * @param {Array} ary the array of path segments.
 */
function trimDots(ary) {
    var idx = 0,
        segment;
    while (idx < ary.length) {
        segment = ary[idx];
        if (segment === '.') {
            //A lone '.' is always redundant; remove and stay put.
            ary.splice(idx, 1);
        } else if (segment === '..') {
            //Keep a leading '..' (or one following another kept '..')
            //so the result still works when used as a path, even if
            //it is less than ideal as a module id.
            if (idx === 0 || (idx === 1 && ary[2] === '..') || ary[idx - 1] === '..') {
                idx += 1;
            } else {
                //Cancel the previous segment against this '..'.
                ary.splice(idx - 1, 2);
                idx -= 1;
            }
        } else {
            idx += 1;
        }
    }
}
/**
 * Given a relative module name, like ./something, normalize it to
 * a real name that can be mapped to a path.
 * @param {String} name the relative name
 * @param {String} baseName a real name that the name arg is relative
 * to.
 * @param {Boolean} applyMap apply the map config to the value. Should
 * only be done if this normalization is for a dependency ID.
 * @returns {String} normalized name
 */
function normalize(name, baseName, applyMap) {
    var pkgMain, mapValue, nameParts, i, j, nameSegment, lastIndex,
        foundMap, foundI, foundStarMap, starI, normalizedBaseParts,
        baseParts = (baseName && baseName.split('/')),
        map = config.map,
        starMap = map && map['*'];
    //Adjust any relative paths.
    if (name) {
        name = name.split('/');
        lastIndex = name.length - 1;
        // If wanting node ID compatibility, strip .js from end
        // of IDs. Have to do this here, and not in nameToUrl
        // because node allows either .js or non .js to map
        // to same file.
        if (config.nodeIdCompat && jsSuffixRegExp.test(name[lastIndex])) {
            name[lastIndex] = name[lastIndex].replace(jsSuffixRegExp, '');
        }
        // Starts with a '.' so need the baseName
        if (name[0].charAt(0) === '.' && baseParts) {
            //Convert baseName to array, and lop off the last part,
            //so that . matches that 'directory' and not name of the baseName's
            //module. For instance, baseName of 'one/two/three', maps to
            //'one/two/three.js', but we want the directory, 'one/two' for
            //this normalization.
            normalizedBaseParts = baseParts.slice(0, baseParts.length - 1);
            name = normalizedBaseParts.concat(name);
        }
        //Collapse '.' and '..' segments in place.
        trimDots(name);
        name = name.join('/');
    }
    //Apply map config if available.
    if (applyMap && map && (baseParts || starMap)) {
        nameParts = name.split('/');
        //Walk prefixes of the id from longest to shortest, looking
        //for the most specific map entry.
        outerLoop: for (i = nameParts.length; i > 0; i -= 1) {
            nameSegment = nameParts.slice(0, i).join('/');
            if (baseParts) {
                //Find the longest baseName segment match in the config.
                //So, do joins on the biggest to smallest lengths of baseParts.
                for (j = baseParts.length; j > 0; j -= 1) {
                    mapValue = getOwn(map, baseParts.slice(0, j).join('/'));
                    //baseName segment has config, find if it has one for
                    //this name.
                    if (mapValue) {
                        mapValue = getOwn(mapValue, nameSegment);
                        if (mapValue) {
                            //Match, update name to the new value.
                            foundMap = mapValue;
                            foundI = i;
                            break outerLoop;
                        }
                    }
                }
            }
            //Check for a star map match, but just hold on to it,
            //if there is a shorter segment match later in a matching
            //config, then favor over this star map.
            if (!foundStarMap && starMap && getOwn(starMap, nameSegment)) {
                foundStarMap = getOwn(starMap, nameSegment);
                starI = i;
            }
        }
        if (!foundMap && foundStarMap) {
            foundMap = foundStarMap;
            foundI = starI;
        }
        if (foundMap) {
            //Swap the matched prefix for its mapped replacement.
            nameParts.splice(0, foundI, foundMap);
            name = nameParts.join('/');
        }
    }
    // If the name points to a package's name, use
    // the package main instead.
    pkgMain = getOwn(config.pkgs, name);
    return pkgMain ? pkgMain : name;
}
//Removes the <script> element that loaded the named module for this
//context. No-op outside a browser environment.
function removeScript(name) {
    if (!isBrowser) {
        return;
    }
    each(scripts(), function (node) {
        var matches = node.getAttribute('data-requiremodule') === name &&
            node.getAttribute('data-requirecontext') === context.contextName;
        if (matches) {
            node.parentNode.removeChild(node);
            //Stop scanning once the module's script is found.
            return true;
        }
    });
}
//If the id has an array paths config with entries left, drop the
//failed first entry, undefine the module and retry with the next
//path. Returns true when a retry was started, undefined otherwise.
function hasPathFallback(id) {
    var pathConfig = getOwn(config.paths, id);
    if (!(pathConfig && isArray(pathConfig) && pathConfig.length > 1)) {
        return;
    }
    //The first array value failed; discard it and retry.
    pathConfig.shift();
    context.require.undef(id);
    //Custom require that does not do map translation, since
    //the ID is "absolute", already mapped/resolved.
    context.makeRequire(null, {
        skipMap: true
    })([id]);
    return true;
}
//Splits a 'plugin!resource' id into [plugin, resource]. When the id
//has no '!' the plugin slot is undefined and the resource is the
//whole name.
function splitPrefix(name) {
    var bangAt = name ? name.indexOf('!') : -1,
        prefix;
    if (bangAt > -1) {
        prefix = name.substring(0, bangAt);
        name = name.substring(bangAt + 1, name.length);
    }
    return [prefix, name];
}
/**
 * Creates a module mapping that includes plugin prefix, module
 * name, and path. If parentModuleMap is provided it will
 * also normalize the name via require.normalize()
 *
 * @param {String} name the module name
 * @param {String} [parentModuleMap] parent module map
 * for the module name, used to resolve relative names.
 * @param {Boolean} isNormalized: is the ID already normalized.
 * This is true if this call is done for a define() module ID.
 * @param {Boolean} applyMap: apply the map config to the ID.
 * Should only be true if this map is for a dependency.
 *
 * @returns {Object}
 */
function makeModuleMap(name, parentModuleMap, isNormalized, applyMap) {
    var url, pluginModule, suffix, nameParts,
        prefix = null,
        parentName = parentModuleMap ? parentModuleMap.name : null,
        originalName = name,
        isDefine = true,
        normalizedName = '';
    //If no name, then it means it is a require call, generate an
    //internal name.
    if (!name) {
        isDefine = false;
        name = '_@r' + (requireCounter += 1);
    }
    //Separate a 'plugin!resource' id into its two halves.
    nameParts = splitPrefix(name);
    prefix = nameParts[0];
    name = nameParts[1];
    if (prefix) {
        prefix = normalize(prefix, parentName, applyMap);
        //pluginModule is falsy if the plugin is not loaded yet.
        pluginModule = getOwn(defined, prefix);
    }
    //Account for relative paths if there is a base name.
    if (name) {
        if (prefix) {
            if (pluginModule && pluginModule.normalize) {
                //Plugin is loaded, use its normalize method.
                normalizedName = pluginModule.normalize(name, function (name) {
                    return normalize(name, parentName, applyMap);
                });
            } else {
                // If nested plugin references, then do not try to
                // normalize, as it will not normalize correctly. This
                // places a restriction on resourceIds, and the longer
                // term solution is not to normalize until plugins are
                // loaded and all normalizations to allow for async
                // loading of a loader plugin. But for now, fixes the
                // common uses. Details in #1131
                normalizedName = name.indexOf('!') === -1 ?
                    normalize(name, parentName, applyMap) :
                    name;
            }
        } else {
            //A regular module.
            normalizedName = normalize(name, parentName, applyMap);
            //Normalized name may be a plugin ID due to map config
            //application in normalize. The map config values must
            //already be normalized, so do not need to redo that part.
            nameParts = splitPrefix(normalizedName);
            prefix = nameParts[0];
            normalizedName = nameParts[1];
            isNormalized = true;
            url = context.nameToUrl(normalizedName);
        }
    }
    //If the id is a plugin id that cannot be determined if it needs
    //normalization, stamp it with a unique ID so two matching relative
    //ids that may conflict can be separate.
    suffix = prefix && !pluginModule && !isNormalized ?
        '_unnormalized' + (unnormalizedCounter += 1) :
        '';
    return {
        prefix: prefix,
        name: normalizedName,
        parentMap: parentModuleMap,
        unnormalized: !!suffix,
        url: url,
        originalName: originalName,
        isDefine: isDefine,
        id: (prefix ?
            prefix + '!' + normalizedName :
            normalizedName) + suffix
    };
}
//Returns the Module instance for depMap.id, creating and
//registering one on first use.
function getModule(depMap) {
    var id = depMap.id;
    if (!getOwn(registry, id)) {
        registry[id] = new context.Module(depMap);
    }
    return registry[id];
}
//Attaches fn as a listener for the named event on the module for
//depMap. If the module is already fully defined, a 'defined'
//listener fires immediately; an 'error' listener fires immediately
//when the module is already in an error state.
function on(depMap, name, fn) {
    var id = depMap.id,
        mod = getOwn(registry, id);
    if (hasProp(defined, id) && (!mod || mod.defineEmitComplete)) {
        //Definition already completed and notified.
        if (name === 'defined') {
            fn(defined[id]);
        }
        return;
    }
    mod = getModule(depMap);
    if (name === 'error' && mod.error) {
        fn(mod.error);
    } else {
        mod.on(name, fn);
    }
}
//Routes an error: a supplied errback wins; otherwise modules named
//in err.requireModules are marked errored and their listeners
//notified; if nobody listened, fall back to the global handler.
function onError(err, errback) {
    var ids = err.requireModules,
        notified = false;
    if (errback) {
        errback(err);
        return;
    }
    each(ids, function (id) {
        var mod = getOwn(registry, id);
        if (!mod) {
            return;
        }
        //Set error on module, so it skips timeout checks.
        mod.error = err;
        if (mod.events.error) {
            notified = true;
            mod.emit('error', err);
        }
    });
    if (!notified) {
        req.onError(err);
    }
}
/**
 * Internal method to transfer globalDefQueue items to this context's
 * defQueue.
 */
function takeGlobalQueue() {
    if (!globalDefQueue.length) {
        return;
    }
    //Splice the values into the existing defQueue array: other code
    //holds a local reference to defQueue, so it cannot be reassigned.
    apsp.apply(defQueue,
        [defQueue.length, 0].concat(globalDefQueue));
    globalDefQueue = [];
}
//Handlers for the special dependency ids 'require', 'exports' and
//'module'. Each receives the requesting Module and returns the value
//to inject for that dependency slot.
handlers = {
    'require': function (mod) {
        if (mod.require) {
            return mod.require;
        } else {
            //Lazily build a require() bound to this module's map.
            return (mod.require = context.makeRequire(mod.map));
        }
    },
    'exports': function (mod) {
        mod.usingExports = true;
        if (mod.map.isDefine) {
            if (mod.exports) {
                //Publish the existing exports object as the module value.
                return (defined[mod.map.id] = mod.exports);
            } else {
                //Create and publish a fresh exports object.
                return (mod.exports = defined[mod.map.id] = {});
            }
        }
    },
    'module': function (mod) {
        if (mod.module) {
            return mod.module;
        } else {
            //CommonJS-style module object: id, uri, per-module config
            //accessor and the shared exports object.
            return (mod.module = {
                id: mod.map.id,
                uri: mod.map.url,
                config: function () {
                    return getOwn(config.config, mod.map.id) || {};
                },
                exports: mod.exports || (mod.exports = {})
            });
        }
    }
};
//Drops all waiting-module bookkeeping for the given id.
function cleanRegistry(id) {
    delete enabledRegistry[id];
    delete registry[id];
}
/**
 * Forces modules caught in a dependency cycle to define themselves.
 * @param {Object} mod module to process
 * @param {Object} traced ids visited on the current traversal path
 * @param {Object} processed ids whose subtree is already handled
 */
function breakCycle(mod, traced, processed) {
    var id = mod.map.id;
    if (mod.error) {
        mod.emit('error', mod.error);
    } else {
        traced[id] = true;
        each(mod.depMaps, function (depMap, i) {
            var depId = depMap.id,
                dep = getOwn(registry, depId);
            //Only force things that have not completed
            //being defined, so still in the registry,
            //and only if it has not been matched up
            //in the module already.
            if (dep && !mod.depMatched[i] && !processed[depId]) {
                if (getOwn(traced, depId)) {
                    //Revisited a module on the current path: a cycle.
                    //Hand over whatever value is defined so far.
                    mod.defineDep(i, defined[depId]);
                    mod.check(); //pass false?
                } else {
                    breakCycle(dep, traced, processed);
                }
            }
        });
        processed[id] = true;
    }
}
/**
 * Surveys every enabled module: raises a timeout error for scripts
 * that never arrived, runs cycle-breaking for pending require calls,
 * and reschedules itself while loads are still outstanding.
 */
function checkLoaded() {
    var err, usingPathFallback,
        waitInterval = config.waitSeconds * 1000,
        //It is possible to disable the wait interval by using waitSeconds of 0.
        expired = waitInterval && (context.startTime + waitInterval) < new Date().getTime(),
        noLoads = [],
        reqCalls = [],
        stillLoading = false,
        needCycleCheck = true;
    //Do not bother if this call was a result of a cycle break.
    if (inCheckLoaded) {
        return;
    }
    inCheckLoaded = true;
    //Figure out the state of all the modules.
    eachProp(enabledRegistry, function (mod) {
        var map = mod.map,
            modId = map.id;
        //Skip things that are not enabled or in error state.
        if (!mod.enabled) {
            return;
        }
        if (!map.isDefine) {
            //Plain require() calls are the cycle-break candidates.
            reqCalls.push(mod);
        }
        if (!mod.error) {
            //If the module should be executed, and it has not
            //been inited and time is up, remember it.
            if (!mod.inited && expired) {
                if (hasPathFallback(modId)) {
                    usingPathFallback = true;
                    stillLoading = true;
                } else {
                    noLoads.push(modId);
                    removeScript(modId);
                }
            } else if (!mod.inited && mod.fetched && map.isDefine) {
                stillLoading = true;
                if (!map.prefix) {
                    //No reason to keep looking for unfinished
                    //loading. If the only stillLoading is a
                    //plugin resource though, keep going,
                    //because it may be that a plugin resource
                    //is waiting on a non-plugin cycle.
                    return (needCycleCheck = false);
                }
            }
        }
    });
    if (expired && noLoads.length) {
        //If wait time expired, throw error of unloaded modules.
        err = makeError('timeout', 'Load timeout for modules: ' + noLoads, null, noLoads);
        err.contextName = context.contextName;
        return onError(err);
    }
    //Not expired, check for a cycle.
    if (needCycleCheck) {
        each(reqCalls, function (mod) {
            breakCycle(mod, {}, {});
        });
    }
    //If still waiting on loads, and the waiting load is something
    //other than a plugin resource, or there are still outstanding
    //scripts, then just try back later.
    if ((!expired || usingPathFallback) && stillLoading) {
        //Something is still waiting to load. Wait for it, but only
        //if a timeout is not already in effect.
        if ((isBrowser || isWebWorker) && !checkLoadedTimeoutId) {
            checkLoadedTimeoutId = setTimeout(function () {
                checkLoadedTimeoutId = 0;
                checkLoaded();
            }, 50);
        }
    }
    inCheckLoaded = false;
}
/**
 * Per-module state holder for this context: tracks dependency
 * progress, fetch/define state and event listeners for one module.
 * @param {Object} map module map produced by makeModuleMap().
 */
Module = function (map) {
    //Re-attach any listeners preserved by a prior undef() of this id.
    this.events = getOwn(undefEvents, map.id) || {};
    this.map = map;
    this.shim = getOwn(config.shim, map.id);
    this.depExports = [];
    this.depMaps = [];
    this.depMatched = [];
    this.pluginMaps = {};
    //Number of dependencies still waiting to be defined.
    this.depCount = 0;
    /* this.exports this.factory
       this.depMaps = [],
       this.enabled, this.fetched
    */
};
Module.prototype = {
/**
 * Records this module's dependencies and factory. Only the first
 * call has any effect.
 * @param {Array} depMaps dependency ids or maps
 * @param {Function|*} factory factory function, or a literal value
 * @param {Function} [errback] error callback for this module
 * @param {Object} [options] enabled/ignore flags
 */
init: function (depMaps, factory, errback, options) {
    options = options || {};
    //Do not do more inits if already done. Can happen if there
    //are multiple define calls for the same module. That is not
    //a normal, common case, but it is also not unexpected.
    if (this.inited) {
        return;
    }
    this.factory = factory;
    if (errback) {
        //Register for errors on this module.
        this.on('error', errback);
    } else if (this.events.error) {
        //If no errback already, but there are error listeners
        //on this module, set up an errback to pass to the deps.
        errback = bind(this, function (err) {
            this.emit('error', err);
        });
    }
    //Do a copy of the dependency array, so that
    //source inputs are not modified. For example
    //"shim" deps are passed in here directly, and
    //doing a direct modification of the depMaps array
    //would affect that config.
    this.depMaps = depMaps && depMaps.slice(0);
    this.errback = errback;
    //Indicate this module has be initialized
    this.inited = true;
    this.ignore = options.ignore;
    //Could have option to init this module in enabled mode,
    //or could have been previously marked as enabled. However,
    //the dependencies are not known until init is called. So
    //if enabled previously, now trigger dependencies as enabled.
    if (options.enabled || this.enabled) {
        //Enable this module and dependencies.
        //Will call this.check()
        this.enable();
    } else {
        this.check();
    }
},
defineDep: function (i, depExports) {
//Because of cycles, defined callback for a given
//export can be called more than once.
if (!this.depMatched[i]) {
this.depMatched[i] = true;
this.depCount -= 1;
this.depExports[i] = depExports;
}
},
/**
 * Starts fetching this module once: records the start time used for
 * timeout tracking, then either waits on shim deps, defers to the
 * plugin machinery, or loads the script directly.
 */
fetch: function () {
    if (this.fetched) {
        return;
    }
    this.fetched = true;
    context.startTime = (new Date()).getTime();
    var map = this.map;
    //If the manager is for a plugin managed resource,
    //ask the plugin to load it now.
    if (this.shim) {
        //Shimmed modules must wait for their declared deps first.
        context.makeRequire(this.map, {
            enableBuildCallback: true
        })(this.shim.deps || [], bind(this, function () {
            return map.prefix ? this.callPlugin() : this.load();
        }));
    } else {
        //Regular dependency.
        return map.prefix ? this.callPlugin() : this.load();
    }
},
load: function () {
var url = this.map.url;
//Regular dependency.
if (!urlFetched[url]) {
urlFetched[url] = true;
context.load(this.map.id, url);
}
},
/**
 * Checks if the module is ready to define itself, and if so,
 * define it.
 */
check: function () {
    if (!this.enabled || this.enabling) {
        return;
    }
    var err, cjsModule,
        id = this.map.id,
        depExports = this.depExports,
        exports = this.exports,
        factory = this.factory;
    if (!this.inited) {
        //No define() seen yet: start (or continue) the fetch.
        this.fetch();
    } else if (this.error) {
        this.emit('error', this.error);
    } else if (!this.defining) {
        //The factory could trigger another require call
        //that would result in checking this module to
        //define itself again. If already in the process
        //of doing that, skip this work.
        this.defining = true;
        if (this.depCount < 1 && !this.defined) {
            if (isFunction(factory)) {
                //If there is an error listener, favor passing
                //to that instead of throwing an error. However,
                //only do it for define()'d modules. require
                //errbacks should not be called for failures in
                //their callbacks (#699). However if a global
                //onError is set, use that.
                if ((this.events.error && this.map.isDefine) ||
                    req.onError !== defaultOnError) {
                    try {
                        exports = context.execCb(id, factory, depExports, exports);
                    } catch (e) {
                        err = e;
                    }
                } else {
                    exports = context.execCb(id, factory, depExports, exports);
                }
                // Favor return value over exports. If node/cjs in play,
                // then will not have a return value anyway. Favor
                // module.exports assignment over exports object.
                if (this.map.isDefine && exports === undefined) {
                    cjsModule = this.module;
                    if (cjsModule) {
                        exports = cjsModule.exports;
                    } else if (this.usingExports) {
                        //exports already set the defined value.
                        exports = this.exports;
                    }
                }
                if (err) {
                    //Attach context so onError can route the failure.
                    err.requireMap = this.map;
                    err.requireModules = this.map.isDefine ? [this.map.id] : null;
                    err.requireType = this.map.isDefine ? 'define' : 'require';
                    return onError((this.error = err));
                }
            } else {
                //Just a literal value
                exports = factory;
            }
            this.exports = exports;
            if (this.map.isDefine && !this.ignore) {
                defined[id] = exports;
                if (req.onResourceLoad) {
                    req.onResourceLoad(context, this.map, this.depMaps);
                }
            }
            //Clean up
            cleanRegistry(id);
            this.defined = true;
        }
        //Finished the define stage. Allow calling check again
        //to allow define notifications below in the case of a
        //cycle.
        this.defining = false;
        if (this.defined && !this.defineEmitted) {
            this.defineEmitted = true;
            this.emit('defined', this.exports);
            this.defineEmitComplete = true;
        }
    }
},
/**
 * Resolves a plugin-provided resource: waits for the plugin module
 * to be defined, then asks it to normalize and/or load the resource,
 * wiring the result back as this module's value.
 */
callPlugin: function () {
    var map = this.map,
        id = map.id,
        //Map already normalized the prefix.
        pluginMap = makeModuleMap(map.prefix);
    //Mark this as a dependency for this plugin, so it
    //can be traced for cycles.
    this.depMaps.push(pluginMap);
    on(pluginMap, 'defined', bind(this, function (plugin) {
        var load, normalizedMap, normalizedMod,
            bundleId = getOwn(bundlesMap, this.map.id),
            name = this.map.name,
            parentName = this.map.parentMap ? this.map.parentMap.name : null,
            localRequire = context.makeRequire(map.parentMap, {
                enableBuildCallback: true
            });
        //If current map is not normalized, wait for that
        //normalized name to load instead of continuing.
        if (this.map.unnormalized) {
            //Normalize the ID if the plugin allows it.
            if (plugin.normalize) {
                name = plugin.normalize(name, function (name) {
                    return normalize(name, parentName, true);
                }) || '';
            }
            //prefix and name should already be normalized, no need
            //for applying map config again either.
            normalizedMap = makeModuleMap(map.prefix + '!' + name,
                this.map.parentMap);
            on(normalizedMap,
                'defined', bind(this, function (value) {
                    //Adopt the normalized module's value for this
                    //placeholder, without publishing it separately.
                    this.init([], function () { return value; }, null, {
                        enabled: true,
                        ignore: true
                    });
                }));
            normalizedMod = getOwn(registry, normalizedMap.id);
            if (normalizedMod) {
                //Mark this as a dependency for this plugin, so it
                //can be traced for cycles.
                this.depMaps.push(normalizedMap);
                if (this.events.error) {
                    normalizedMod.on('error', bind(this, function (err) {
                        this.emit('error', err);
                    }));
                }
                normalizedMod.enable();
            }
            return;
        }
        //If a paths config, then just load that file instead to
        //resolve the plugin, as it is built into that paths layer.
        if (bundleId) {
            this.map.url = context.nameToUrl(bundleId);
            this.load();
            return;
        }
        //Success callback handed to the plugin; receives the
        //resolved resource value.
        load = bind(this, function (value) {
            this.init([], function () { return value; }, null, {
                enabled: true
            });
        });
        load.error = bind(this, function (err) {
            this.inited = true;
            this.error = err;
            err.requireModules = [id];
            //Remove temp unnormalized modules for this module,
            //since they will never be resolved otherwise now.
            eachProp(registry, function (mod) {
                if (mod.map.id.indexOf(id + '_unnormalized') === 0) {
                    cleanRegistry(mod.map.id);
                }
            });
            onError(err);
        });
        //Allow plugins to load other code without having to know the
        //context or how to 'complete' the load.
        load.fromText = bind(this, function (text, textAlt) {
            /*jslint evil: true */
            var moduleName = map.name,
                moduleMap = makeModuleMap(moduleName),
                hasInteractive = useInteractive;
            //As of 2.1.0, support just passing the text, to reinforce
            //fromText only being called once per resource. Still
            //support old style of passing moduleName but discard
            //that moduleName in favor of the internal ref.
            if (textAlt) {
                text = textAlt;
            }
            //Turn off interactive script matching for IE for any define
            //calls in the text, then turn it back on at the end.
            if (hasInteractive) {
                useInteractive = false;
            }
            //Prime the system by creating a module instance for
            //it.
            getModule(moduleMap);
            //Transfer any config to this other module.
            if (hasProp(config.config, id)) {
                config.config[moduleName] = config.config[id];
            }
            try {
                req.exec(text);
            } catch (e) {
                return onError(makeError('fromtexteval',
                    'fromText eval for ' + id +
                    ' failed: ' + e,
                    e,
                    [id]));
            }
            if (hasInteractive) {
                useInteractive = true;
            }
            //Mark this as a dependency for the plugin
            //resource
            this.depMaps.push(moduleMap);
            //Support anonymous modules.
            context.completeLoad(moduleName);
            //Bind the value of that module to the value for this
            //resource ID.
            localRequire([moduleName], load);
        });
        //Use parentName here since the plugin's name is not reliable,
        //could be some weird string with no path that actually wants to
        //reference the parentName's path.
        plugin.load(map.name, localRequire, load, config);
    }));
    context.enable(pluginMap, this);
    this.pluginMaps[pluginMap.id] = pluginMap;
},
/**
 * Marks this module enabled, enables each dependency (and any
 * plugins they use), then re-checks readiness to define.
 */
enable: function () {
    enabledRegistry[this.map.id] = this;
    this.enabled = true;
    //Set flag mentioning that the module is enabling,
    //so that immediate calls to the defined callbacks
    //for dependencies do not trigger inadvertent load
    //with the depCount still being zero.
    this.enabling = true;
    //Enable each dependency
    each(this.depMaps, bind(this, function (depMap, i) {
        var id, mod, handler;
        if (typeof depMap === 'string') {
            //Dependency needs to be converted to a depMap
            //and wired up to this module.
            depMap = makeModuleMap(depMap,
                (this.map.isDefine ? this.map : this.map.parentMap),
                false,
                !this.skipMap);
            this.depMaps[i] = depMap;
            //'require'/'exports'/'module' are satisfied by the
            //special handlers rather than real modules.
            handler = getOwn(handlers, depMap.id);
            if (handler) {
                this.depExports[i] = handler(this);
                return;
            }
            this.depCount += 1;
            on(depMap, 'defined', bind(this, function (depExports) {
                this.defineDep(i, depExports);
                this.check();
            }));
            if (this.errback) {
                on(depMap, 'error', bind(this, this.errback));
            }
        }
        id = depMap.id;
        mod = registry[id];
        //Skip special modules like 'require', 'exports', 'module'
        //Also, don't call enable if it is already enabled,
        //important in circular dependency cases.
        if (!hasProp(handlers, id) && mod && !mod.enabled) {
            context.enable(depMap, this);
        }
    }));
    //Enable each plugin that is used in
    //a dependency
    eachProp(this.pluginMaps, bind(this, function (pluginMap) {
        var mod = getOwn(registry, pluginMap.id);
        if (mod && !mod.enabled) {
            context.enable(pluginMap, this);
        }
    }));
    this.enabling = false;
    this.check();
},
on: function (name, cb) {
var cbs = this.events[name];
if (!cbs) {
cbs = this.events[name] = [];
}
cbs.push(cb);
},
emit: function (name, evt) {
each(this.events[name], function (cb) {
cb(evt);
});
if (name === 'error') {
//Now that the error handler was triggered, remove
//the listeners, since this broken Module instance
//can stay around for a while in the registry.
delete this.events[name];
}
}
};
//Initializes the module described by a queued define() record
//(args = [id, deps, factory]), unless that id is already defined.
function callGetModule(args) {
    var id = args[0];
    if (!hasProp(defined, id)) {
        getModule(makeModuleMap(id, null, true)).init(args[1], args[2]);
    }
}
//Detaches a script event listener, using old IE's detachEvent when
//present (Opera fakes detachEvent, so it is excluded) and the
//standard removeEventListener otherwise.
function removeListener(node, func, name, ieName) {
    if (!node.detachEvent || isOpera) {
        node.removeEventListener(name, func, false);
        return;
    }
    //Probably IE. If not it will throw an error, which will be
    //useful to know. Only detach when an IE-style name was given.
    if (ieName) {
        node.detachEvent(ieName, func);
    }
}
/**
 * Extracts the requirejs module info from a script node event and
 * detaches this context's load/error listeners from that node.
 * @param {Event} evt
 * @returns {Object} { node, id } for the originating script.
 */
function getScriptData(evt) {
    //currentTarget is preferred; srcElement covers old IE. (Kept for
    //Firefox 2.0-era browsers too, per the original implementation.)
    var node = evt.currentTarget || evt.srcElement;
    //Remove the listeners once here.
    removeListener(node, context.onScriptLoad, 'load', 'onreadystatechange');
    removeListener(node, context.onScriptError, 'error');
    return {
        node: node,
        id: node && node.getAttribute('data-requiremodule')
    };
}
//Drains the global define() queue into this context, then processes
//every queued define record. An anonymous define left in the queue
//at this point is a mismatch error.
function intakeDefines() {
    var args;
    takeGlobalQueue();
    while (defQueue.length) {
        args = defQueue.shift();
        if (args[0] === null) {
            return onError(makeError('mismatch', 'Mismatched anonymous define() module: ' + args[args.length - 1]));
        }
        //args is [id, deps, factory], already normalized by define().
        callGetModule(args);
    }
}
context = {
config: config,
contextName: contextName,
registry: registry,
defined: defined,
urlFetched: urlFetched,
defQueue: defQueue,
Module: Module,
makeModuleMap: makeModuleMap,
nextTick: req.nextTick,
onError: onError,
/**
 * Set a configuration for the context.
 * @param {Object} cfg config object to integrate.
 */
configure: function (cfg) {
    //Make sure the baseUrl ends in a slash.
    if (cfg.baseUrl) {
        if (cfg.baseUrl.charAt(cfg.baseUrl.length - 1) !== '/') {
            cfg.baseUrl += '/';
        }
    }
    //Save off the paths since they require special processing,
    //they are additive.
    var shim = config.shim,
        //Config sections that merge key-by-key rather than being
        //replaced wholesale.
        objs = {
            paths: true,
            bundles: true,
            config: true,
            map: true
        };
    eachProp(cfg, function (value, prop) {
        if (objs[prop]) {
            if (!config[prop]) {
                config[prop] = {};
            }
            //Deep, forced merge into the existing section.
            mixin(config[prop], value, true, true);
        } else {
            config[prop] = value;
        }
    });
    //Reverse map the bundles
    if (cfg.bundles) {
        eachProp(cfg.bundles, function (value, prop) {
            each(value, function (v) {
                if (v !== prop) {
                    //Record which bundle provides module id v.
                    bundlesMap[v] = prop;
                }
            });
        });
    }
    //Merge shim
    if (cfg.shim) {
        eachProp(cfg.shim, function (value, id) {
            //Normalize the structure
            if (isArray(value)) {
                value = {
                    deps: value
                };
            }
            if ((value.exports || value.init) && !value.exportsFn) {
                value.exportsFn = context.makeShimExports(value);
            }
            shim[id] = value;
        });
        config.shim = shim;
    }
    //Adjust packages if necessary.
    if (cfg.packages) {
        each(cfg.packages, function (pkgObj) {
            var location, name;
            //A bare string package means { name: thatString }.
            pkgObj = typeof pkgObj === 'string' ? { name: pkgObj } : pkgObj;
            name = pkgObj.name;
            location = pkgObj.location;
            if (location) {
                config.paths[name] = pkgObj.location;
            }
            //Save pointer to main module ID for pkg name.
            //Remove leading dot in main, so main paths are normalized,
            //and remove any trailing .js, since different package
            //envs have different conventions: some use a module name,
            //some use a file name.
            config.pkgs[name] = pkgObj.name + '/' + (pkgObj.main || 'main')
                .replace(currDirRegExp, '')
                .replace(jsSuffixRegExp, '');
        });
    }
    //If there are any "waiting to execute" modules in the registry,
    //update the maps for them, since their info, like URLs to load,
    //may have changed.
    eachProp(registry, function (mod, id) {
        //If module already has init called, since it is too
        //late to modify them, and ignore unnormalized ones
        //since they are transient.
        if (!mod.inited && !mod.map.unnormalized) {
            mod.map = makeModuleMap(id);
        }
    });
    //If a deps array or a config callback is specified, then call
    //require with those args. This is useful when require is defined as a
    //config object before require.js is loaded.
    if (cfg.deps || cfg.callback) {
        context.require(cfg.deps || [], cfg.callback);
    }
},
makeShimExports: function (value) {
function fn() {
var ret;
if (value.init) {
ret = value.init.apply(global, arguments);
}
return ret || (value.exports && getGlobal(value.exports));
}
return fn;
},
makeRequire: function (relMap, options) {
options = options || {};
function localRequire(deps, callback, errback) {
var id, map, requireMod;
if (options.enableBuildCallback && callback && isFunction(callback)) {
callback.__requireJsBuild = true;
}
if (typeof deps === 'string') {
if (isFunction(callback)) {
//Invalid call
return onError(makeError('requireargs', 'Invalid require call'), errback);
}
//If require|exports|module are requested, get the
//value for them from the special handlers. Caveat:
//this only works while module is being defined.
if (relMap && hasProp(handlers, deps)) {
return handlers[deps](registry[relMap.id]);
}
//Synchronous access to one module. If require.get is
//available (as in the Node adapter), prefer that.
if (req.get) {
return req.get(context, deps, relMap, localRequire);
}
//Normalize module name, if it contains . or ..
map = makeModuleMap(deps, relMap, false, true);
id = map.id;
if (!hasProp(defined, id)) {
return onError(makeError('notloaded', 'Module name "' +
id +
'" has not been loaded yet for context: ' +
contextName +
(relMap ? '' : '. Use require([])')));
}
return defined[id];
}
//Grab defines waiting in the global queue.
intakeDefines();
//Mark all the dependencies as needing to be loaded.
context.nextTick(function () {
//Some defines could have been added since the
//require call, collect them.
intakeDefines();
requireMod = getModule(makeModuleMap(null, relMap));
//Store if map config should be applied to this require
//call for dependencies.
requireMod.skipMap = options.skipMap;
requireMod.init(deps, callback, errback, {
enabled: true
});
checkLoaded();
});
return localRequire;
}
mixin(localRequire, {
isBrowser: isBrowser,
/**
* Converts a module name + .extension into an URL path.
* *Requires* the use of a module name. It does not support using
* plain URLs like nameToUrl.
*/
toUrl: function (moduleNamePlusExt) {
var ext,
index = moduleNamePlusExt.lastIndexOf('.'),
segment = moduleNamePlusExt.split('/')[0],
isRelative = segment === '.' || segment === '..';
//Have a file extension alias, and it is not the
//dots from a relative path.
if (index !== -1 && (!isRelative || index > 1)) {
ext = moduleNamePlusExt.substring(index, moduleNamePlusExt.length);
moduleNamePlusExt = moduleNamePlusExt.substring(0, index);
}
return context.nameToUrl(normalize(moduleNamePlusExt,
relMap && relMap.id, true), ext, true);
},
defined: function (id) {
return hasProp(defined, makeModuleMap(id, relMap, false, true).id);
},
specified: function (id) {
id = makeModuleMap(id, relMap, false, true).id;
return hasProp(defined, id) || hasProp(registry, id);
}
});
//Only allow undef on top level require calls
if (!relMap) {
//Removes every trace of a module so it can be loaded again,
//possibly under a different configuration.
localRequire.undef = function (id) {
//Bind any waiting define() calls to this context,
//fix for #408
takeGlobalQueue();
var map = makeModuleMap(id, relMap, true),
mod = getOwn(registry, id);
//Forget the script tag, the defined value, the fetched-URL marker
//and any previously-stashed undef event listeners.
removeScript(id);
delete defined[id];
delete urlFetched[map.url];
delete undefEvents[id];
//Clean queued defines too. Go backwards
//in array so that the splices do not
//mess up the iteration.
eachReverse(defQueue, function (args, i) {
if (args[0] === id) {
defQueue.splice(i, 1);
}
});
if (mod) {
//Hold on to listeners in case the
//module will be attempted to be reloaded
//using a different config.
if (mod.events.defined) {
undefEvents[id] = mod.events;
}
cleanRegistry(id);
}
};
}
return localRequire;
},
/**
* Called to enable a module if it is still in the registry
* awaiting enablement. A second arg, parent, the parent module,
* is passed in for context, when this method is overridden by
* the optimizer. Not shown here to keep code compact.
*/
enable: function (depMap) {
var mod = getOwn(registry, depMap.id);
//Only modules still sitting in the registry need enabling; ids that
//are already defined (or never requested) are left alone.
if (mod) {
getModule(depMap).enable();
}
},
/**
* Internal method used by environment adapters to complete a load event.
* A load event could be a script load or just a load pass from a synchronous
* load call.
* @param {String} moduleName the name of the module to potentially complete.
*/
completeLoad: function (moduleName) {
var found, args, mod,
shim = getOwn(config.shim, moduleName) || {},
shExports = shim.exports;
//Pull in any define() calls queued globally while the script executed.
takeGlobalQueue();
while (defQueue.length) {
args = defQueue.shift();
if (args[0] === null) {
//Anonymous define(): bind it to the script that just finished loading.
args[0] = moduleName;
//If already found an anonymous module and bound it
//to this name, then this is some other anon module
//waiting for its completeLoad to fire.
if (found) {
break;
}
found = true;
} else if (args[0] === moduleName) {
//Found matching define call for this script!
found = true;
}
callGetModule(args);
}
//Do this after the cycle of callGetModule in case the result
//of those calls/init calls changes the registry.
mod = getOwn(registry, moduleName);
if (!found && !hasProp(defined, moduleName) && mod && !mod.inited) {
if (config.enforceDefine && (!shExports || !getGlobal(shExports))) {
if (hasPathFallback(moduleName)) {
//A paths fallback exists; the retry will handle the load.
return;
} else {
return onError(makeError('nodefine',
'No define call for ' + moduleName,
null,
[moduleName]));
}
} else {
//A script that does not call define(), so just simulate
//the call for it.
callGetModule([moduleName, (shim.deps || []), shim.exportsFn]);
}
}
//See if the newly-registered modules complete any pending dependency trees.
checkLoaded();
},
/**
* Converts a module name to a file path. Supports cases where
* moduleName may actually be just an URL.
* Note that it **does not** call normalize on the moduleName,
* it is assumed to have already been normalized. This is an
* internal API, not a public one. Use toUrl for the public API.
*/
nameToUrl: function (moduleName, ext, skipExt) {
var paths, syms, i, parentModule, url,
parentPath, bundleId,
pkgMain = getOwn(config.pkgs, moduleName);
//A package id resolves to its configured main module first.
if (pkgMain) {
moduleName = pkgMain;
}
//A module delivered inside a bundle resolves to the bundle's URL.
bundleId = getOwn(bundlesMap, moduleName);
if (bundleId) {
return context.nameToUrl(bundleId, ext, skipExt);
}
//If a colon is in the URL, it indicates a protocol is used and it is just
//an URL to a file, or if it starts with a slash, contains a query arg (i.e. ?)
//or ends with .js, then assume the user meant to use an url and not a module id.
//The slash is important for protocol-less URLs as well as full paths.
if (req.jsExtRegExp.test(moduleName)) {
//Just a plain path, not module name lookup, so just return it.
//Add extension if it is included. This is a bit wonky, only non-.js things pass
//an extension, this method probably needs to be reworked.
url = moduleName + (ext || '');
} else {
//A module that needs to be converted to a path.
paths = config.paths;
syms = moduleName.split('/');
//For each module name segment, see if there is a path
//registered for it. Start with most specific name
//and work up from it.
for (i = syms.length; i > 0; i -= 1) {
parentModule = syms.slice(0, i).join('/');
parentPath = getOwn(paths, parentModule);
if (parentPath) {
//If an array, it means there are a few choices,
//Choose the one that is desired
if (isArray(parentPath)) {
parentPath = parentPath[0];
}
syms.splice(0, i, parentPath);
break;
}
}
//Join the path parts together, then figure out if baseUrl is needed.
url = syms.join('/');
url += (ext || (/^data\:|\?/.test(url) || skipExt ? '' : '.js'));
url = (url.charAt(0) === '/' || url.match(/^[\w\+\.\-]+:/) ? '' : config.baseUrl) + url;
}
//Append configured query args (e.g. cache busting) if any.
return config.urlArgs ? url +
((url.indexOf('?') === -1 ? '?' : '&') +
config.urlArgs) : url;
},
//Delegates to req.load. Broken out as a separate function to
//allow overriding in the optimizer.
load: function (id, url) {
req.load(context, id, url);
},
/**
* Executes a module callback function. Broken out as a separate function
* solely to allow the build system to sequence the files in the built
* layer in the right sequence.
*
* @private
*/
execCb: function (name, callback, args, exports) {
//exports becomes `this` inside the factory; args are resolved deps.
return callback.apply(exports, args);
},
/**
* callback for script loads, used to check status of loading.
*
* @param {Event} evt the event from the browser for the script
* that was loaded.
*/
onScriptLoad: function (evt) {
//Using currentTarget instead of target for Firefox 2.0's sake. Not
//all old browsers will be supported, but this one was easy enough
//to support and still makes sense.
if (evt.type === 'load' ||
(readyRegExp.test((evt.currentTarget || evt.srcElement).readyState))) {
//Reset interactive script so a script node is not held onto for
//to long.
interactiveScript = null;
//Pull out the name of the module and the context.
var data = getScriptData(evt);
context.completeLoad(data.id);
}
},
/**
* Callback for script errors. Only escalates to onError when no
* paths fallback remains to retry the load.
*/
onScriptError: function (evt) {
var data = getScriptData(evt);
if (!hasPathFallback(data.id)) {
return onError(makeError('scripterror', 'Script error for: ' + data.id, evt, [data.id]));
}
}
};
context.require = context.makeRequire();
return context;
}
/**
* Main entry point.
*
* If the only argument to require is a string, then the module that
* is represented by that string is fetched for the appropriate context.
*
* If the first argument is an array, then it will be treated as an array
* of dependency string names to fetch. An optional function callback can
* be specified to execute when all of those dependencies are available.
*
* Make a local req variable to help Caja compliance (it assumes things
* on a require that are not standardized), and to give a short
* name for minification/local scope use.
*/
req = requirejs = function (deps, callback, errback, optional) {
//Find the right context, use default
var context, config,
contextName = defContextName;
// Determine if have config object in the call.
if (!isArray(deps) && typeof deps !== 'string') {
// deps is a config object
config = deps;
if (isArray(callback)) {
// Adjust args if there are dependencies
deps = callback;
callback = errback;
errback = optional;
} else {
deps = [];
}
}
//A config may target a named, non-default context.
if (config && config.context) {
contextName = config.context;
}
//Contexts are created lazily on first use.
context = getOwn(contexts, contextName);
if (!context) {
context = contexts[contextName] = req.s.newContext(contextName);
}
if (config) {
context.configure(config);
}
return context.require(deps, callback, errback);
};
/**
* Support require.config() to make it easier to cooperate with other
* AMD loaders on globally agreed names.
*/
req.config = function (config) {
//Thin alias: require() itself accepts a plain config object.
return req(config);
};
/**
* Execute something after the current tick
* of the event loop. Override for other envs
* that have a better solution than setTimeout.
* @param {Function} fn function to execute later.
*/
req.nextTick = typeof setTimeout !== 'undefined' ? function (fn) {
setTimeout(fn, 4);
} : function (fn) { fn(); };
/**
* Export require as a global, but only if it does not already exist.
*/
if (!require) {
require = req;
}
req.version = version;
//Used to filter out dependencies that are already paths.
req.jsExtRegExp = /^\/|:|\?|\.js$/;
req.isBrowser = isBrowser;
//Shared loader state: the registry of contexts plus the context factory.
s = req.s = {
contexts: contexts,
newContext: newContext
};
//Create default context.
req({});
//Exports some context-sensitive methods on global require.
each([
'toUrl',
'undef',
'defined',
'specified'
], function (prop) {
//Reference from contexts instead of early binding to default context,
//so that during builds, the latest instance of the default context
//with its config gets used.
req[prop] = function () {
var ctx = contexts[defContextName];
return ctx.require[prop].apply(ctx, arguments);
};
});
if (isBrowser) {
//Remember where script tags should be inserted.
head = s.head = document.getElementsByTagName('head')[0];
//If BASE tag is in play, using appendChild is a problem for IE6.
//When that browser dies, this can be removed. Details in this jQuery bug:
//http://dev.jquery.com/ticket/2709
baseElement = document.getElementsByTagName('base')[0];
if (baseElement) {
head = s.head = baseElement.parentNode;
}
}
/**
* Any errors that require explicitly generates will be passed to this
* function. Intercept/override it if you want custom error handling.
* @param {Error} err the error object.
*/
req.onError = defaultOnError;
/**
 * Creates the script node used to fetch a module. Only used in browser envs.
 * The moduleName and url arguments are part of the override contract but are
 * not consumed by this default implementation.
 */
req.createNode = function (config, moduleName, url) {
    var scriptEl;
    //XHTML documents need a namespaced element; plain HTML does not.
    if (config.xhtml) {
        scriptEl = document.createElementNS('http://www.w3.org/1999/xhtml', 'html:script');
    } else {
        scriptEl = document.createElement('script');
    }
    scriptEl.type = config.scriptType || 'text/javascript';
    scriptEl.charset = 'utf-8';
    scriptEl.async = true;
    return scriptEl;
};
/**
* Does the request to load a module for the browser case.
* Make this a separate function to allow other environments
* to override it.
*
* @param {Object} context the require context to find state.
* @param {String} moduleName the name of the module.
* @param {Object} url the URL to the module.
*/
req.load = function (context, moduleName, url) {
var config = (context && context.config) || {},
node;
if (isBrowser) {
//In the browser so use a script tag
node = req.createNode(config, moduleName, url);
//Stamp the node so load/error handlers can recover module id + context.
node.setAttribute('data-requirecontext', context.contextName);
node.setAttribute('data-requiremodule', moduleName);
//Set up load listener. Test attachEvent first because IE9 has
//a subtle issue in its addEventListener and script onload firings
//that do not match the behavior of all other browsers with
//addEventListener support, which fire the onload event for a
//script right after the script execution. See:
//https://connect.microsoft.com/IE/feedback/details/648057/script-onload-event-is-not-fired-immediately-after-script-execution
//UNFORTUNATELY Opera implements attachEvent but does not follow the script
//script execution mode.
if (node.attachEvent &&
//Check if node.attachEvent is artificially added by custom script or
//natively supported by browser
//read https://github.com/jrburke/requirejs/issues/187
//if we can NOT find [native code] then it must NOT natively supported.
//in IE8, node.attachEvent does not have toString()
//Note the test for "[native code" with no closing brace, see:
//https://github.com/jrburke/requirejs/issues/273
!(node.attachEvent.toString && node.attachEvent.toString().indexOf('[native code') < 0) &&
!isOpera) {
//Probably IE. IE (at least 6-8) do not fire
//script onload right after executing the script, so
//we cannot tie the anonymous define call to a name.
//However, IE reports the script as being in 'interactive'
//readyState at the time of the define call.
useInteractive = true;
node.attachEvent('onreadystatechange', context.onScriptLoad);
//It would be great to add an error handler here to catch
//404s in IE9+. However, onreadystatechange will fire before
//the error handler, so that does not help. If addEventListener
//is used, then IE will fire error before load, but we cannot
//use that pathway given the connect.microsoft.com issue
//mentioned above about not doing the 'script execute,
//then fire the script load event listener before execute
//next script' that other browsers do.
//Best hope: IE10 fixes the issues,
//and then destroys all installs of IE 6-9.
//node.attachEvent('onerror', context.onScriptError);
} else {
node.addEventListener('load', context.onScriptLoad, false);
node.addEventListener('error', context.onScriptError, false);
}
node.src = url;
//For some cache cases in IE 6-8, the script executes before the end
//of the appendChild execution, so to tie an anonymous define
//call to the module name (which is stored on the node), hold on
//to a reference to this node, but clear after the DOM insertion.
currentlyAddingScript = node;
if (baseElement) {
head.insertBefore(node, baseElement);
} else {
head.appendChild(node);
}
currentlyAddingScript = null;
return node;
} else if (isWebWorker) {
try {
//In a web worker, use importScripts. This is not a very
//efficient use of importScripts, importScripts will block until
//its script is downloaded and evaluated. However, if web workers
//are in play, the expectation that a build has been done so that
//only one script needs to be loaded anyway. This may need to be
//reevaluated if other use cases become common.
importScripts(url);
//Account for anonymous modules
context.completeLoad(moduleName);
} catch (e) {
context.onError(makeError('importscripts',
'importScripts failed for ' +
moduleName + ' at ' + url,
e,
[moduleName]));
}
}
};
//Finds the script node currently in IE's 'interactive' readyState,
//used to tie anonymous define() calls back to their script tag.
function getInteractiveScript() {
//Fast path: the cached node is still the interactive one.
if (interactiveScript && interactiveScript.readyState === 'interactive') {
return interactiveScript;
}
//Otherwise scan, newest script first, and cache the match.
eachReverse(scripts(), function (script) {
if (script.readyState === 'interactive') {
return (interactiveScript = script);
}
});
return interactiveScript;
}
//Look for a data-main script attribute, which could also adjust the baseUrl.
if (isBrowser && !cfg.skipDataMain) {
//Figure out baseUrl. Get it from the script tag with require.js in it.
eachReverse(scripts(), function (script) {
//Set the 'head' where we can append children by
//using the script's parent.
if (!head) {
head = script.parentNode;
}
//Look for a data-main attribute to set main script for the page
//to load. If it is there, the path to data main becomes the
//baseUrl, if it is not already set.
dataMain = script.getAttribute('data-main');
if (dataMain) {
//Preserve dataMain in case it is a path (i.e. contains '?')
mainScript = dataMain;
//Set final baseUrl if there is not already an explicit one.
if (!cfg.baseUrl) {
//Pull off the directory of data-main for use as the
//baseUrl.
src = mainScript.split('/');
mainScript = src.pop();
subPath = src.length ? src.join('/') + '/' : './';
cfg.baseUrl = subPath;
}
//Strip off any trailing .js since mainScript is now
//like a module name.
mainScript = mainScript.replace(jsSuffixRegExp, '');
//If mainScript is still a path, fall back to dataMain
if (req.jsExtRegExp.test(mainScript)) {
mainScript = dataMain;
}
//Put the data-main script in the files to load.
cfg.deps = cfg.deps ? cfg.deps.concat(mainScript) : [mainScript];
//Returning true stops the eachReverse scan at the first match.
return true;
}
});
}
/**
* The function that handles definitions of modules. Differs from
* require() in that a string for the module should be the first argument,
* and the function to execute after dependencies are loaded should
* return a value to define the module corresponding to the first argument's
* name.
*/
define = function (name, deps, callback) {
var node, context;
//Allow for anonymous modules
if (typeof name !== 'string') {
//Adjust args appropriately
callback = deps;
deps = name;
name = null;
}
//This module may not have dependencies
if (!isArray(deps)) {
callback = deps;
deps = null;
}
//If no name, and callback is a function, then figure out if it a
//CommonJS thing with dependencies.
if (!deps && isFunction(callback)) {
deps = [];
//Remove comments from the callback string,
//look for require calls, and pull them into the dependencies,
//but only if there are function args.
if (callback.length) {
callback
.toString()
.replace(commentRegExp, '')
.replace(cjsRequireRegExp, function (match, dep) {
deps.push(dep);
});
//May be a CommonJS thing even without require calls, but still
//could use exports, and module. Avoid doing exports and module
//work though if it just needs require.
//REQUIRES the function to expect the CommonJS variables in the
//order listed below.
deps = (callback.length === 1 ? ['require'] : ['require', 'exports', 'module']).concat(deps);
}
}
//If in IE 6-8 and hit an anonymous define() call, do the interactive
//work.
if (useInteractive) {
node = currentlyAddingScript || getInteractiveScript();
if (node) {
if (!name) {
name = node.getAttribute('data-requiremodule');
}
context = contexts[node.getAttribute('data-requirecontext')];
}
}
//Always save off evaluating the def call until the script onload handler.
//This allows multiple modules to be in a file without prematurely
//tracing dependencies, and allows for anonymous module support,
//where the module name is not known until the script onload event
//occurs. If no context, use the global queue, and get it processed
//in the onscript load callback.
(context ? context.defQueue : globalDefQueue).push([name, deps, callback]);
};
//Advertise AMD support; the jQuery flag signals jQuery it may register
//itself as an AMD module with this loader.
define.amd = {
jQuery: true
};
/**
* Executes the text. Normally just uses eval, but can be modified
* to use a better, environment-specific call. Only used for transpiling
* loader plugins, not for plain JS modules. Override this in
* environments where eval is disallowed (e.g. under CSP).
* @param {String} text the text to execute/evaluate.
*/
req.exec = function (text) {
/*jslint evil: true */
return eval(text);
};
//Set up with config info.
req(cfg);
}(this)); | suttonj/YoutubePlaylistCuratorChromeExtension | src/js/thirdParty/require.js | JavaScript | apache-2.0 | 83,052 |
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.activiti.dmn.engine.impl.parser;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import org.activiti.dmn.engine.ActivitiDmnException;
import org.activiti.dmn.engine.DmnEngineConfiguration;
import org.activiti.dmn.engine.impl.context.Context;
import org.activiti.dmn.engine.impl.io.InputStreamSource;
import org.activiti.dmn.engine.impl.io.ResourceStreamSource;
import org.activiti.dmn.engine.impl.io.StreamSource;
import org.activiti.dmn.engine.impl.io.StringStreamSource;
import org.activiti.dmn.engine.impl.io.UrlStreamSource;
import org.activiti.dmn.engine.impl.persistence.entity.DecisionTableEntity;
import org.activiti.dmn.engine.impl.persistence.entity.DmnDeploymentEntity;
import org.activiti.dmn.model.Decision;
import org.activiti.dmn.model.DmnDefinition;
import org.activiti.dmn.xml.constants.DmnXMLConstants;
import org.activiti.dmn.xml.converter.DmnXMLConverter;
import org.activiti.dmn.xml.exception.DmnXMLException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Specific parsing of one DMN XML file, created by the {@link DmnParse}.
*
* @author Tijs Rademakers
* @author Joram Barrez
*/
public class DmnParse implements DmnXMLConstants {

  protected static final Logger LOGGER = LoggerFactory.getLogger(DmnParse.class);

  // Resource name recorded on every parsed decision table.
  protected String name;

  // Whether the DMN XML is validated against its schema; on by default.
  protected boolean validateSchema = true;

  // The single XML source; may be set exactly once via a source*() method.
  protected StreamSource streamSource;
  protected String sourceSystemId;

  // Parse result: the DMN model built from the XML.
  protected DmnDefinition dmnDefinition;
  protected String targetNamespace;

  /** The deployment to which the parsed decision tables will be added. */
  protected DmnDeploymentEntity deployment;

  /** The end result of the parsing: a list of decision tables. */
  protected List<DecisionTableEntity> decisionTables = new ArrayList<>();

  /**
   * Sets the deployment the parsed decision tables will belong to.
   *
   * @return this parse, for fluent chaining
   */
  public DmnParse deployment(DmnDeploymentEntity deployment) {
    this.deployment = deployment;
    return this;
  }

  /**
   * Converts the configured stream source into a {@link DmnDefinition} and
   * creates one {@link DecisionTableEntity} per decision found in the model.
   *
   * @param dmnEngineConfig engine configuration supplying XML parse options
   * @return this parse, for fluent chaining
   * @throws DmnXMLException on XML-level parse failures
   * @throws ActivitiDmnException on any other parse failure
   */
  public DmnParse execute(DmnEngineConfiguration dmnEngineConfig) {
    try {
      DmnXMLConverter converter = new DmnXMLConverter();

      boolean enableSafeDmnXml = dmnEngineConfig.isEnableSafeDmnXml();
      String encoding = dmnEngineConfig.getXmlEncoding();

      // Only use the encoding-aware overload when an encoding is configured.
      if (encoding != null) {
        dmnDefinition = converter.convertToDmnModel(streamSource, validateSchema, enableSafeDmnXml, encoding);
      } else {
        dmnDefinition = converter.convertToDmnModel(streamSource, validateSchema, enableSafeDmnXml);
      }

      if (dmnDefinition != null && dmnDefinition.getDecisions() != null) {
        for (Decision decision : dmnDefinition.getDecisions()) {
          DecisionTableEntity decisionTableEntity = Context.getDmnEngineConfiguration().getDecisionTableEntityManager().create();
          decisionTableEntity.setKey(decision.getId());
          decisionTableEntity.setName(decision.getName());
          decisionTableEntity.setResourceName(name);
          decisionTableEntity.setDeploymentId(deployment.getId());
          decisionTableEntity.setParentDeploymentId(deployment.getParentDeploymentId());
          decisionTableEntity.setDescription(decision.getDescription());
          decisionTables.add(decisionTableEntity);
        }
      }

    } catch (ActivitiDmnException e) {
      // Already the engine's exception type; rethrow untouched.
      throw e;
    } catch (DmnXMLException e) {
      // XML-level failures keep their specific type for callers.
      throw e;
    } catch (Exception e) {
      // Anything else is wrapped in the engine's exception type.
      throw new ActivitiDmnException("Error parsing XML", e);
    }

    return this;
  }

  /** Sets the resource name reported on parsed decision tables. */
  public DmnParse name(String name) {
    this.name = name;
    return this;
  }

  /** Reads the DMN XML from an input stream. */
  public DmnParse sourceInputStream(InputStream inputStream) {
    if (name == null) {
      name("inputStream");
    }
    setStreamSource(new InputStreamSource(inputStream));
    return this;
  }

  /** Reads the DMN XML from a URL. */
  public DmnParse sourceUrl(URL url) {
    if (name == null) {
      name(url.toString());
    }
    setStreamSource(new UrlStreamSource(url));
    return this;
  }

  /** Reads the DMN XML from a URL given as a string. */
  public DmnParse sourceUrl(String url) {
    try {
      return sourceUrl(new URL(url));
    } catch (MalformedURLException e) {
      throw new ActivitiDmnException("malformed url: " + url, e);
    }
  }

  /** Reads the DMN XML from a classpath resource. */
  public DmnParse sourceResource(String resource) {
    if (name == null) {
      name(resource);
    }
    setStreamSource(new ResourceStreamSource(resource));
    return this;
  }

  /** Reads the DMN XML from an in-memory string. */
  public DmnParse sourceString(String string) {
    if (name == null) {
      name("string");
    }
    setStreamSource(new StringStreamSource(string));
    return this;
  }

  /** Installs the XML source, rejecting a second configuration attempt. */
  protected void setStreamSource(StreamSource streamSource) {
    if (this.streamSource != null) {
      throw new ActivitiDmnException("invalid: multiple sources " + this.streamSource + " and " + streamSource);
    }
    this.streamSource = streamSource;
  }

  public String getSourceSystemId() {
    return sourceSystemId;
  }

  public DmnParse setSourceSystemId(String sourceSystemId) {
    this.sourceSystemId = sourceSystemId;
    return this;
  }

  /*
   * ------------------- GETTERS AND SETTERS -------------------
   */

  public boolean isValidateSchema() {
    return validateSchema;
  }

  public void setValidateSchema(boolean validateSchema) {
    this.validateSchema = validateSchema;
  }

  public List<DecisionTableEntity> getDecisionTables() {
    return decisionTables;
  }

  public String getTargetNamespace() {
    return targetNamespace;
  }

  public DmnDeploymentEntity getDeployment() {
    return deployment;
  }

  public void setDeployment(DmnDeploymentEntity deployment) {
    this.deployment = deployment;
  }

  public DmnDefinition getDmnDefinition() {
    return dmnDefinition;
  }

  public void setDmnDefinition(DmnDefinition dmnDefinition) {
    this.dmnDefinition = dmnDefinition;
  }
}
| stefan-ziel/Activiti | modules/activiti-dmn-engine/src/main/java/org/activiti/dmn/engine/impl/parser/DmnParse.java | Java | apache-2.0 | 6,382 |
import application = require("application");
// Specify custom UIApplicationDelegate.
/*
class MyDelegate extends UIResponder implements UIApplicationDelegate {
public static ObjCProtocols = [UIApplicationDelegate];
applicationDidFinishLaunchingWithOptions(application: UIApplication, launchOptions: NSDictionary): boolean {
console.log("applicationWillFinishLaunchingWithOptions: " + launchOptions)
return true;
}
applicationDidBecomeActive(application: UIApplication): void {
console.log("applicationDidBecomeActive: " + application)
}
}
application.ios.delegate = MyDelegate;
*/
// Demo/test bootstrap: wires console logging onto every application
// lifecycle event before starting the NativeScript app.
if (application.ios) {
// Observe application notifications.
application.ios.addNotificationObserver(UIApplicationDidFinishLaunchingNotification, (notification: NSNotification) => {
console.log("UIApplicationDidFinishLaunchingNotification: " + notification)
});
}
// Entry page module loaded once the application starts.
application.mainModule = "app/mainPage";
// Common events for both Android and iOS.
application.on(application.launchEvent, function (args: application.ApplicationEventData) {
if (args.android) {
// For Android applications, args.android is an android.content.Intent class.
console.log("Launched Android application with the following intent: " + args.android + ".");
} else if (args.ios !== undefined) {
// For iOS applications, args.ios is NSDictionary (launchOptions).
console.log("Launched iOS application with options: " + args.ios);
}
});
application.on(application.suspendEvent, function (args: application.ApplicationEventData) {
if (args.android) {
// For Android applications, args.android is an android activity class.
console.log("Activity: " + args.android);
} else if (args.ios) {
// For iOS applications, args.ios is UIApplication.
console.log("UIApplication: " + args.ios);
}
});
application.on(application.resumeEvent, function (args: application.ApplicationEventData) {
if (args.android) {
// For Android applications, args.android is an android activity class.
console.log("Activity: " + args.android);
} else if (args.ios) {
// For iOS applications, args.ios is UIApplication.
console.log("UIApplication: " + args.ios);
}
});
application.on(application.exitEvent, function (args: application.ApplicationEventData) {
if (args.android) {
// For Android applications, args.android is an android activity class.
console.log("Activity: " + args.android);
} else if (args.ios) {
// For iOS applications, args.ios is UIApplication.
console.log("UIApplication: " + args.ios);
}
});
application.on(application.lowMemoryEvent, function (args: application.ApplicationEventData) {
if (args.android) {
// For Android applications, args.android is an android activity class.
console.log("Activity: " + args.android);
} else if (args.ios) {
// For iOS applications, args.ios is UIApplication.
console.log("UIApplication: " + args.ios);
}
});
application.on(application.uncaughtErrorEvent, function (args: application.ApplicationEventData) {
if (args.android) {
// For Android applications, args.android is a NativeScriptError.
console.log("NativeScriptError: " + args.android);
} else if (args.ios) {
// For iOS applications, args.ios is a NativeScriptError.
console.log("NativeScriptError: " + args.ios);
}
});
// Android activity events
if (application.android) {
application.android.on(application.AndroidApplication.activityCreatedEvent, function (args: application.AndroidActivityBundleEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity + ", Bundle: " + args.bundle);
});
application.android.on(application.AndroidApplication.activityDestroyedEvent, function (args: application.AndroidActivityEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity);
});
application.android.on(application.AndroidApplication.activityStartedEvent, function (args: application.AndroidActivityEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity);
});
application.android.on(application.AndroidApplication.activityPausedEvent, function (args: application.AndroidActivityEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity);
});
application.android.on(application.AndroidApplication.activityResumedEvent, function (args: application.AndroidActivityEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity);
});
application.android.on(application.AndroidApplication.activityStoppedEvent, function (args: application.AndroidActivityEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity);
});
application.android.on(application.AndroidApplication.saveActivityStateEvent, function (args: application.AndroidActivityBundleEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity + ", Bundle: " + args.bundle);
});
application.android.on(application.AndroidApplication.activityResultEvent, function (args: application.AndroidActivityResultEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity +
", requestCode: " + args.requestCode + ", resultCode: " + args.resultCode + ", Intent: " + args.intent);
});
application.android.on(application.AndroidApplication.activityBackPressedEvent, function (args: application.AndroidActivityBackPressedEventData) {
console.log("Event: " + args.eventName + ", Activity: " + args.activity);
// Set args.cancel = true to cancel back navigation and do something custom.
});
}
// Must be called last: starts the application's main loop.
application.start();
| hdeshev/NativeScript | apps/tests/app/app.ts | TypeScript | apache-2.0 | 5,921 |
//// [variadicTuples1.ts]
// Variadics in tuple types
type TV0<T extends unknown[]> = [string, ...T];
type TV1<T extends unknown[]> = [string, ...T, number];
type TV2<T extends unknown[]> = [string, ...T, number, ...T];
type TV3<T extends unknown[]> = [string, ...T, ...number[], ...T];
// Normalization
type TN1 = TV1<[boolean, string]>;
type TN2 = TV1<[]>;
type TN3 = TV1<[boolean?]>;
type TN4 = TV1<string[]>;
type TN5 = TV1<[boolean] | [symbol, symbol]>;
type TN6 = TV1<any>;
type TN7 = TV1<never>;
// Variadics in array literals
function tup2<T extends unknown[], U extends unknown[]>(t: [...T], u: [...U]) {
return [1, ...t, 2, ...u, 3] as const;
}
const t2 = tup2(['hello'], [10, true]);
function concat<T extends unknown[], U extends unknown[]>(t: [...T], u: [...U]): [...T, ...U] {
return [...t, ...u];
}
declare const sa: string[];
const tc1 = concat([], []);
const tc2 = concat(['hello'], [42]);
const tc3 = concat([1, 2, 3], sa);
const tc4 = concat(sa, [1, 2, 3]); // Ideally would be [...string[], number, number, number]
function concat2<T extends readonly unknown[], U extends readonly unknown[]>(t: T, u: U) {
return [...t, ...u]; // (T[number] | U[number])[]
}
const tc5 = concat2([1, 2, 3] as const, [4, 5, 6] as const); // (1 | 2 | 3 | 4 | 5 | 6)[]
// Spread arguments
declare function foo1(a: number, b: string, c: boolean, ...d: number[]): void;
function foo2(t1: [number, string], t2: [boolean], a1: number[]) {
foo1(1, 'abc', true, 42, 43, 44);
foo1(...t1, true, 42, 43, 44);
foo1(...t1, ...t2, 42, 43, 44);
foo1(...t1, ...t2, ...a1);
foo1(...t1); // Error
foo1(...t1, 45); // Error
}
declare function foo3<T extends unknown[]>(x: number, ...args: [...T, number]): T;
function foo4<U extends unknown[]>(u: U) {
foo3(1, 2);
foo3(1, 'hello', true, 2);
foo3(1, ...u, 'hi', 2);
foo3(1);
}
// Contextual typing of array literals
declare function ft1<T extends unknown[]>(t: T): T;
declare function ft2<T extends unknown[]>(t: T): readonly [...T];
declare function ft3<T extends unknown[]>(t: [...T]): T;
declare function ft4<T extends unknown[]>(t: [...T]): readonly [...T];
ft1(['hello', 42]); // (string | number)[]
ft2(['hello', 42]); // readonly (string | number)[]
ft3(['hello', 42]); // [string, number]
ft4(['hello', 42]); // readonly [string, number]
// Indexing variadic tuple types
function f0<T extends unknown[]>(t: [string, ...T], n: number) {
const a = t[0]; // string
const b = t[1]; // [string, ...T][1]
const c = t[2]; // [string, ...T][2]
const d = t[n]; // [string, ...T][number]
}
function f1<T extends unknown[]>(t: [string, ...T, number], n: number) {
const a = t[0]; // string
const b = t[1]; // [string, ...T, number][1]
const c = t[2]; // [string, ...T, number][2]
const d = t[n]; // [string, ...T, number][number]
}
// Destructuring variadic tuple types
function f2<T extends unknown[]>(t: [string, ...T]) {
let [...ax] = t; // [string, ...T]
let [b1, ...bx] = t; // string, [...T]
let [c1, c2, ...cx] = t; // string, [string, ...T][1], T[number][]
}
function f3<T extends unknown[]>(t: [string, ...T, number]) {
let [...ax] = t; // [string, ...T, number]
let [b1, ...bx] = t; // string, [...T, number]
let [c1, c2, ...cx] = t; // string, [string, ...T, number][1], (number | T[number])[]
}
// Mapped types applied to variadic tuple types
type Arrayify<T> = { [P in keyof T]: T[P][] };
type TM1<U extends unknown[]> = Arrayify<readonly [string, number?, ...U, ...boolean[]]>; // [string[], (number | undefined)[]?, Arrayify<U>, ...boolean[][]]
type TP1<T extends unknown[]> = Partial<[string, ...T, number]>; // [string?, Partial<T>, number?]
type TP2<T extends unknown[]> = Partial<[string, ...T, ...number[]]>; // [string?, Partial<T>, ...(number | undefined)[]]
// Reverse mapping through mapped type applied to variadic tuple type
declare function fm1<T extends unknown[]>(t: Arrayify<[string, number, ...T]>): T;
let tm1 = fm1([['abc'], [42], [true], ['def']]); // [boolean, string]
// Spread of readonly array-like infers mutable array-like
declare function fx1<T extends unknown[]>(a: string, ...args: T): T;
function gx1<U extends unknown[], V extends readonly unknown[]>(u: U, v: V) {
fx1('abc'); // []
fx1('abc', ...u); // U
fx1('abc', ...v); // [...V]
fx1<U>('abc', ...u); // U
fx1<V>('abc', ...v); // Error
}
declare function fx2<T extends readonly unknown[]>(a: string, ...args: T): T;
function gx2<U extends unknown[], V extends readonly unknown[]>(u: U, v: V) {
fx2('abc'); // []
fx2('abc', ...u); // U
fx2('abc', ...v); // [...V]
fx2<U>('abc', ...u); // U
fx2<V>('abc', ...v); // V
}
// Relations involving variadic tuple types
function f10<T extends string[], U extends T>(x: [string, ...unknown[]], y: [string, ...T], z: [string, ...U]) {
x = y;
x = z;
y = x; // Error
y = z;
z = x; // Error
z = y; // Error
}
// For a generic type T, [...T] is assignable to T, T is assignable to readonly [...T], and T is assignable
// to [...T] when T is constrained to a mutable array or tuple type.
function f11<T extends unknown[]>(t: T, m: [...T], r: readonly [...T]) {
t = m;
t = r; // Error
m = t;
m = r; // Error
r = t;
r = m;
}
function f12<T extends readonly unknown[]>(t: T, m: [...T], r: readonly [...T]) {
t = m;
t = r; // Error
m = t; // Error
m = r; // Error
r = t;
r = m;
}
function f13<T extends string[], U extends T>(t0: T, t1: [...T], t2: [...U]) {
t0 = t1;
t0 = t2;
t1 = t0;
t1 = t2;
t2 = t0; // Error
t2 = t1; // Error
}
function f14<T extends readonly string[], U extends T>(t0: T, t1: [...T], t2: [...U]) {
t0 = t1;
t0 = t2;
t1 = t0; // Error
t1 = t2;
t2 = t0; // Error
t2 = t1; // Error
}
function f15<T extends string[], U extends T>(k0: keyof T, k1: keyof [...T], k2: keyof [...U], k3: keyof [1, 2, ...T]) {
k0 = 'length';
k1 = 'length';
k2 = 'length';
k0 = 'slice';
k1 = 'slice';
k2 = 'slice';
k3 = '0';
k3 = '1';
k3 = '2'; // Error
}
// Inference between variadic tuple types
type First<T extends readonly unknown[]> =
T extends readonly [unknown, ...unknown[]] ? T[0] :
T[0] | undefined;
type DropFirst<T extends readonly unknown[]> = T extends readonly [unknown?, ...infer U] ? U : [...T];
type Last<T extends readonly unknown[]> =
T extends readonly [...unknown[], infer U] ? U :
T extends readonly [unknown, ...unknown[]] ? T[number] :
T[number] | undefined;
type DropLast<T extends readonly unknown[]> = T extends readonly [...infer U, unknown] ? U : [...T];
type T00 = First<[number, symbol, string]>;
type T01 = First<[symbol, string]>;
type T02 = First<[string]>;
type T03 = First<[number, symbol, ...string[]]>;
type T04 = First<[symbol, ...string[]]>;
type T05 = First<[string?]>;
type T06 = First<string[]>;
type T07 = First<[]>;
type T08 = First<any>;
type T09 = First<never>;
type T10 = DropFirst<[number, symbol, string]>;
type T11 = DropFirst<[symbol, string]>;
type T12 = DropFirst<[string]>;
type T13 = DropFirst<[number, symbol, ...string[]]>;
type T14 = DropFirst<[symbol, ...string[]]>;
type T15 = DropFirst<[string?]>;
type T16 = DropFirst<string[]>;
type T17 = DropFirst<[]>;
type T18 = DropFirst<any>;
type T19 = DropFirst<never>;
type T20 = Last<[number, symbol, string]>;
type T21 = Last<[symbol, string]>;
type T22 = Last<[string]>;
type T23 = Last<[number, symbol, ...string[]]>;
type T24 = Last<[symbol, ...string[]]>;
type T25 = Last<[string?]>;
type T26 = Last<string[]>;
type T27 = Last<[]>;
type T28 = Last<any>;
type T29 = Last<never>;
type T30 = DropLast<[number, symbol, string]>;
type T31 = DropLast<[symbol, string]>;
type T32 = DropLast<[string]>;
type T33 = DropLast<[number, symbol, ...string[]]>;
type T34 = DropLast<[symbol, ...string[]]>;
type T35 = DropLast<[string?]>;
type T36 = DropLast<string[]>;
type T37 = DropLast<[]>; // unknown[], maybe should be []
type T38 = DropLast<any>;
type T39 = DropLast<never>;
type R00 = First<readonly [number, symbol, string]>;
type R01 = First<readonly [symbol, string]>;
type R02 = First<readonly [string]>;
type R03 = First<readonly [number, symbol, ...string[]]>;
type R04 = First<readonly [symbol, ...string[]]>;
type R05 = First<readonly string[]>;
type R06 = First<readonly []>;
type R10 = DropFirst<readonly [number, symbol, string]>;
type R11 = DropFirst<readonly [symbol, string]>;
type R12 = DropFirst<readonly [string]>;
type R13 = DropFirst<readonly [number, symbol, ...string[]]>;
type R14 = DropFirst<readonly [symbol, ...string[]]>;
type R15 = DropFirst<readonly string[]>;
type R16 = DropFirst<readonly []>;
type R20 = Last<readonly [number, symbol, string]>;
type R21 = Last<readonly [symbol, string]>;
type R22 = Last<readonly [string]>;
type R23 = Last<readonly [number, symbol, ...string[]]>;
type R24 = Last<readonly [symbol, ...string[]]>;
type R25 = Last<readonly string[]>;
type R26 = Last<readonly []>;
type R30 = DropLast<readonly [number, symbol, string]>;
type R31 = DropLast<readonly [symbol, string]>;
type R32 = DropLast<readonly [string]>;
type R33 = DropLast<readonly [number, symbol, ...string[]]>;
type R34 = DropLast<readonly [symbol, ...string[]]>;
type R35 = DropLast<readonly string[]>;
type R36 = DropLast<readonly []>;
// Inference to [...T, ...U] with implied arity for T
function curry<T extends unknown[], U extends unknown[], R>(f: (...args: [...T, ...U]) => R, ...a: T) {
return (...b: U) => f(...a, ...b);
}
const fn1 = (a: number, b: string, c: boolean, d: string[]) => 0;
const c0 = curry(fn1); // (a: number, b: string, c: boolean, d: string[]) => number
const c1 = curry(fn1, 1); // (b: string, c: boolean, d: string[]) => number
const c2 = curry(fn1, 1, 'abc'); // (c: boolean, d: string[]) => number
const c3 = curry(fn1, 1, 'abc', true); // (d: string[]) => number
const c4 = curry(fn1, 1, 'abc', true, ['x', 'y']); // () => number
const fn2 = (x: number, b: boolean, ...args: string[]) => 0;
const c10 = curry(fn2); // (x: number, b: boolean, ...args: string[]) => number
const c11 = curry(fn2, 1); // (b: boolean, ...args: string[]) => number
const c12 = curry(fn2, 1, true); // (...args: string[]) => number
const c13 = curry(fn2, 1, true, 'abc', 'def'); // (...args: string[]) => number
const fn3 = (...args: string[]) => 0;
const c20 = curry(fn3); // (...args: string[]) => number
const c21 = curry(fn3, 'abc', 'def'); // (...args: string[]) => number
const c22 = curry(fn3, ...sa); // (...args: string[]) => number
// No inference to [...T, ...U] when there is no implied arity
function curry2<T extends unknown[], U extends unknown[], R>(f: (...args: [...T, ...U]) => R, t: [...T], u: [...U]) {
    // Build the full argument tuple from the two halves, then apply f to it.
    const combined = [...t, ...u] as [...T, ...U];
    return f(...combined);
}
declare function fn10(a: string, b: number, c: boolean): string[];
curry2(fn10, ['hello', 42], [true]);
curry2(fn10, ['hello'], [42, true]);
// Inference to [...T] has higher priority than inference to [...T, number?]
declare function ft<T extends unknown[]>(t1: [...T], t2: [...T, number?]): T;
ft([1, 2, 3], [1, 2, 3]);
ft([1, 2], [1, 2, 3]);
ft(['a', 'b'], ['c', 'd'])
ft(['a', 'b'], ['c', 'd', 42])
// Last argument is contextually typed
declare function call<T extends unknown[], R>(...args: [...T, (...args: T) => R]): [T, R];
call('hello', 32, (a, b) => 42);
call(...sa, (...x) => 42);
// No inference to ending optional elements (except with identical structure)
declare function f20<T extends unknown[] = []>(args: [...T, number?]): T;
function f21<U extends string[]>(args: [...U, number?]) {
let v1 = f20(args); // U
let v2 = f20(["foo", "bar"]); // [string]
let v3 = f20(["foo", 42]); // [string]
}
declare function f22<T extends unknown[] = []>(args: [...T, number]): T;
declare function f22<T extends unknown[] = []>(args: [...T]): T;
function f23<U extends string[]>(args: [...U, number]) {
let v1 = f22(args); // U
let v2 = f22(["foo", "bar"]); // [string, string]
let v3 = f22(["foo", 42]); // [string]
}
// Repro from #39327
interface Desc<A extends unknown[], T> {
readonly f: (...args: A) => T;
bind<T extends unknown[], U extends unknown[], R>(this: Desc<[...T, ...U], R>, ...args: T): Desc<[...U], R>;
}
declare const a: Desc<[string, number, boolean], object>;
const b = a.bind("", 1); // Desc<[boolean], object>
// Repro from #39607
declare function getUser(id: string, options?: { x?: string }): string;
declare function getOrgUser(id: string, orgId: number, options?: { y?: number, z?: boolean }): void;
function callApi<T extends unknown[] = [], U = void>(method: (...args: [...T, object]) => U) {
return (...args: [...T]) => method(...args, {});
}
callApi(getUser);
callApi(getOrgUser);
// Repro from #40235
type Numbers = number[];
type Unbounded = [...Numbers, boolean];
const data: Unbounded = [false, false]; // Error
type U1 = [string, ...Numbers, boolean];
type U2 = [...[string, ...Numbers], boolean];
type U3 = [...[string, number], boolean];
//// [variadicTuples1.js]
"use strict";
// Variadics in tuple types
// TypeScript-emitted spread helper: returns to.concat(<dense copy of from>).
// The element-by-element loop runs when 'pack' is truthy or the helper is
// called with only two arguments; it only starts building a manual copy
// ('ar') once a hole (missing index) is encountered in 'from'.
var __spreadArray = (this && this.__spreadArray) || function (to, from, pack) {
if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) {
// Once 'ar' exists (or on the first hole) copy element-by-element.
if (ar || !(i in from)) {
// Lazily seed 'ar' with the contiguous prefix seen so far.
if (!ar) ar = Array.prototype.slice.call(from, 0, i);
ar[i] = from[i];
}
}
// Fast path: no hole was found (ar never created), so a plain slice suffices.
return to.concat(ar || Array.prototype.slice.call(from));
};
// Variadics in array literals
function tup2(t, u) {
return __spreadArray(__spreadArray(__spreadArray(__spreadArray([1], t, true), [2], false), u, true), [3], false);
}
var t2 = tup2(['hello'], [10, true]);
function concat(t, u) {
return __spreadArray(__spreadArray([], t, true), u, true);
}
var tc1 = concat([], []);
var tc2 = concat(['hello'], [42]);
var tc3 = concat([1, 2, 3], sa);
var tc4 = concat(sa, [1, 2, 3]); // Ideally would be [...string[], number, number, number]
function concat2(t, u) {
return __spreadArray(__spreadArray([], t, true), u, true); // (T[number] | U[number])[]
}
var tc5 = concat2([1, 2, 3], [4, 5, 6]); // (1 | 2 | 3 | 4 | 5 | 6)[]
function foo2(t1, t2, a1) {
foo1(1, 'abc', true, 42, 43, 44);
foo1.apply(void 0, __spreadArray(__spreadArray([], t1, false), [true, 42, 43, 44], false));
foo1.apply(void 0, __spreadArray(__spreadArray(__spreadArray([], t1, false), t2, false), [42, 43, 44], false));
foo1.apply(void 0, __spreadArray(__spreadArray(__spreadArray([], t1, false), t2, false), a1, false));
foo1.apply(void 0, t1); // Error
foo1.apply(void 0, __spreadArray(__spreadArray([], t1, false), [45], false)); // Error
}
function foo4(u) {
foo3(1, 2);
foo3(1, 'hello', true, 2);
foo3.apply(void 0, __spreadArray(__spreadArray([1], u, false), ['hi', 2], false));
foo3(1);
}
ft1(['hello', 42]); // (string | number)[]
ft2(['hello', 42]); // readonly (string | number)[]
ft3(['hello', 42]); // [string, number]
ft4(['hello', 42]); // readonly [string, number]
// Indexing variadic tuple types
function f0(t, n) {
var a = t[0]; // string
var b = t[1]; // [string, ...T][1]
var c = t[2]; // [string, ...T][2]
var d = t[n]; // [string, ...T][number]
}
function f1(t, n) {
var a = t[0]; // string
var b = t[1]; // [string, ...T, number][1]
var c = t[2]; // [string, ...T, number][2]
var d = t[n]; // [string, ...T, number][number]
}
// Destructuring variadic tuple types
function f2(t) {
var ax = t.slice(0); // [string, ...T]
var b1 = t[0], bx = t.slice(1); // string, [...T]
var c1 = t[0], c2 = t[1], cx = t.slice(2); // string, [string, ...T][1], T[number][]
}
function f3(t) {
var ax = t.slice(0); // [string, ...T, number]
var b1 = t[0], bx = t.slice(1); // string, [...T, number]
var c1 = t[0], c2 = t[1], cx = t.slice(2); // string, [string, ...T, number][1], (number | T[number])[]
}
var tm1 = fm1([['abc'], [42], [true], ['def']]); // [boolean, string]
function gx1(u, v) {
fx1('abc'); // []
fx1.apply(void 0, __spreadArray(['abc'], u, false)); // U
fx1.apply(void 0, __spreadArray(['abc'], v, false)); // [...V]
fx1.apply(void 0, __spreadArray(['abc'], u, false)); // U
fx1.apply(void 0, __spreadArray(['abc'], v, false)); // Error
}
function gx2(u, v) {
fx2('abc'); // []
fx2.apply(void 0, __spreadArray(['abc'], u, false)); // U
fx2.apply(void 0, __spreadArray(['abc'], v, false)); // [...V]
fx2.apply(void 0, __spreadArray(['abc'], u, false)); // U
fx2.apply(void 0, __spreadArray(['abc'], v, false)); // V
}
// Relations involving variadic tuple types
function f10(x, y, z) {
x = y;
x = z;
y = x; // Error
y = z;
z = x; // Error
z = y; // Error
}
// For a generic type T, [...T] is assignable to T, T is assignable to readonly [...T], and T is assignable
// to [...T] when T is constrained to a mutable array or tuple type.
function f11(t, m, r) {
t = m;
t = r; // Error
m = t;
m = r; // Error
r = t;
r = m;
}
function f12(t, m, r) {
t = m;
t = r; // Error
m = t; // Error
m = r; // Error
r = t;
r = m;
}
function f13(t0, t1, t2) {
t0 = t1;
t0 = t2;
t1 = t0;
t1 = t2;
t2 = t0; // Error
t2 = t1; // Error
}
function f14(t0, t1, t2) {
t0 = t1;
t0 = t2;
t1 = t0; // Error
t1 = t2;
t2 = t0; // Error
t2 = t1; // Error
}
function f15(k0, k1, k2, k3) {
k0 = 'length';
k1 = 'length';
k2 = 'length';
k0 = 'slice';
k1 = 'slice';
k2 = 'slice';
k3 = '0';
k3 = '1';
k3 = '2'; // Error
}
// Inference to [...T, ...U] with implied arity for T
// Down-leveled rest-parameter currying (ES5 emit): collect every argument
// after 'f' into 'a', then return a closure that collects its own arguments
// into 'b' and applies f to the concatenation of 'a' followed by 'b'.
function curry(f) {
var a = [];
// _i starts at 1 to skip the 'f' parameter in 'arguments'.
for (var _i = 1; _i < arguments.length; _i++) {
a[_i - 1] = arguments[_i];
}
return function () {
var b = [];
for (var _i = 0; _i < arguments.length; _i++) {
b[_i] = arguments[_i];
}
// Equivalent of f(...a, ...b) in the original source.
return f.apply(void 0, __spreadArray(__spreadArray([], a, false), b, false));
};
}
var fn1 = function (a, b, c, d) { return 0; };
var c0 = curry(fn1); // (a: number, b: string, c: boolean, d: string[]) => number
var c1 = curry(fn1, 1); // (b: string, c: boolean, d: string[]) => number
var c2 = curry(fn1, 1, 'abc'); // (c: boolean, d: string[]) => number
var c3 = curry(fn1, 1, 'abc', true); // (d: string[]) => number
var c4 = curry(fn1, 1, 'abc', true, ['x', 'y']); // () => number
var fn2 = function (x, b) {
var args = [];
for (var _i = 2; _i < arguments.length; _i++) {
args[_i - 2] = arguments[_i];
}
return 0;
};
var c10 = curry(fn2); // (x: number, b: boolean, ...args: string[]) => number
var c11 = curry(fn2, 1); // (b: boolean, ...args: string[]) => number
var c12 = curry(fn2, 1, true); // (...args: string[]) => number
var c13 = curry(fn2, 1, true, 'abc', 'def'); // (...args: string[]) => number
var fn3 = function () {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
return 0;
};
var c20 = curry(fn3); // (...args: string[]) => number
var c21 = curry(fn3, 'abc', 'def'); // (...args: string[]) => number
var c22 = curry.apply(void 0, __spreadArray([fn3], sa, false)); // (...args: string[]) => number
// No inference to [...T, ...U] when there is no implied arity
function curry2(f, t, u) {
return f.apply(void 0, __spreadArray(__spreadArray([], t, false), u, false));
}
curry2(fn10, ['hello', 42], [true]);
curry2(fn10, ['hello'], [42, true]);
ft([1, 2, 3], [1, 2, 3]);
ft([1, 2], [1, 2, 3]);
ft(['a', 'b'], ['c', 'd']);
ft(['a', 'b'], ['c', 'd', 42]);
call('hello', 32, function (a, b) { return 42; });
call.apply(void 0, __spreadArray(__spreadArray([], sa, false), [function () {
var x = [];
for (var _i = 0; _i < arguments.length; _i++) {
x[_i] = arguments[_i];
}
return 42;
}], false));
function f21(args) {
var v1 = f20(args); // U
var v2 = f20(["foo", "bar"]); // [string]
var v3 = f20(["foo", 42]); // [string]
}
function f23(args) {
var v1 = f22(args); // U
var v2 = f22(["foo", "bar"]); // [string, string]
var v3 = f22(["foo", 42]); // [string]
}
var b = a.bind("", 1); // Desc<[boolean], object>
function callApi(method) {
return function () {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
return method.apply(void 0, __spreadArray(__spreadArray([], args, false), [{}], false));
};
}
callApi(getUser);
callApi(getOrgUser);
var data = [false, false]; // Error
//// [variadicTuples1.d.ts]
declare type TV0<T extends unknown[]> = [string, ...T];
declare type TV1<T extends unknown[]> = [string, ...T, number];
declare type TV2<T extends unknown[]> = [string, ...T, number, ...T];
declare type TV3<T extends unknown[]> = [string, ...T, ...number[], ...T];
declare type TN1 = TV1<[boolean, string]>;
declare type TN2 = TV1<[]>;
declare type TN3 = TV1<[boolean?]>;
declare type TN4 = TV1<string[]>;
declare type TN5 = TV1<[boolean] | [symbol, symbol]>;
declare type TN6 = TV1<any>;
declare type TN7 = TV1<never>;
declare function tup2<T extends unknown[], U extends unknown[]>(t: [...T], u: [...U]): readonly [1, ...T, 2, ...U, 3];
declare const t2: readonly [1, string, 2, number, boolean, 3];
declare function concat<T extends unknown[], U extends unknown[]>(t: [...T], u: [...U]): [...T, ...U];
declare const sa: string[];
declare const tc1: [];
declare const tc2: [string, number];
declare const tc3: [number, number, number, ...string[]];
declare const tc4: [...string[], number, number, number];
declare function concat2<T extends readonly unknown[], U extends readonly unknown[]>(t: T, u: U): (T[number] | U[number])[];
declare const tc5: (2 | 4 | 1 | 3 | 6 | 5)[];
declare function foo1(a: number, b: string, c: boolean, ...d: number[]): void;
declare function foo2(t1: [number, string], t2: [boolean], a1: number[]): void;
declare function foo3<T extends unknown[]>(x: number, ...args: [...T, number]): T;
declare function foo4<U extends unknown[]>(u: U): void;
declare function ft1<T extends unknown[]>(t: T): T;
declare function ft2<T extends unknown[]>(t: T): readonly [...T];
declare function ft3<T extends unknown[]>(t: [...T]): T;
declare function ft4<T extends unknown[]>(t: [...T]): readonly [...T];
declare function f0<T extends unknown[]>(t: [string, ...T], n: number): void;
declare function f1<T extends unknown[]>(t: [string, ...T, number], n: number): void;
declare function f2<T extends unknown[]>(t: [string, ...T]): void;
declare function f3<T extends unknown[]>(t: [string, ...T, number]): void;
declare type Arrayify<T> = {
[P in keyof T]: T[P][];
};
declare type TM1<U extends unknown[]> = Arrayify<readonly [string, number?, ...U, ...boolean[]]>;
declare type TP1<T extends unknown[]> = Partial<[string, ...T, number]>;
declare type TP2<T extends unknown[]> = Partial<[string, ...T, ...number[]]>;
declare function fm1<T extends unknown[]>(t: Arrayify<[string, number, ...T]>): T;
declare let tm1: [boolean, string];
declare function fx1<T extends unknown[]>(a: string, ...args: T): T;
declare function gx1<U extends unknown[], V extends readonly unknown[]>(u: U, v: V): void;
declare function fx2<T extends readonly unknown[]>(a: string, ...args: T): T;
declare function gx2<U extends unknown[], V extends readonly unknown[]>(u: U, v: V): void;
declare function f10<T extends string[], U extends T>(x: [string, ...unknown[]], y: [string, ...T], z: [string, ...U]): void;
declare function f11<T extends unknown[]>(t: T, m: [...T], r: readonly [...T]): void;
declare function f12<T extends readonly unknown[]>(t: T, m: [...T], r: readonly [...T]): void;
declare function f13<T extends string[], U extends T>(t0: T, t1: [...T], t2: [...U]): void;
declare function f14<T extends readonly string[], U extends T>(t0: T, t1: [...T], t2: [...U]): void;
declare function f15<T extends string[], U extends T>(k0: keyof T, k1: keyof [...T], k2: keyof [...U], k3: keyof [1, 2, ...T]): void;
declare type First<T extends readonly unknown[]> = T extends readonly [unknown, ...unknown[]] ? T[0] : T[0] | undefined;
declare type DropFirst<T extends readonly unknown[]> = T extends readonly [unknown?, ...infer U] ? U : [...T];
declare type Last<T extends readonly unknown[]> = T extends readonly [...unknown[], infer U] ? U : T extends readonly [unknown, ...unknown[]] ? T[number] : T[number] | undefined;
declare type DropLast<T extends readonly unknown[]> = T extends readonly [...infer U, unknown] ? U : [...T];
declare type T00 = First<[number, symbol, string]>;
declare type T01 = First<[symbol, string]>;
declare type T02 = First<[string]>;
declare type T03 = First<[number, symbol, ...string[]]>;
declare type T04 = First<[symbol, ...string[]]>;
declare type T05 = First<[string?]>;
declare type T06 = First<string[]>;
declare type T07 = First<[]>;
declare type T08 = First<any>;
declare type T09 = First<never>;
declare type T10 = DropFirst<[number, symbol, string]>;
declare type T11 = DropFirst<[symbol, string]>;
declare type T12 = DropFirst<[string]>;
declare type T13 = DropFirst<[number, symbol, ...string[]]>;
declare type T14 = DropFirst<[symbol, ...string[]]>;
declare type T15 = DropFirst<[string?]>;
declare type T16 = DropFirst<string[]>;
declare type T17 = DropFirst<[]>;
declare type T18 = DropFirst<any>;
declare type T19 = DropFirst<never>;
declare type T20 = Last<[number, symbol, string]>;
declare type T21 = Last<[symbol, string]>;
declare type T22 = Last<[string]>;
declare type T23 = Last<[number, symbol, ...string[]]>;
declare type T24 = Last<[symbol, ...string[]]>;
declare type T25 = Last<[string?]>;
declare type T26 = Last<string[]>;
declare type T27 = Last<[]>;
declare type T28 = Last<any>;
declare type T29 = Last<never>;
declare type T30 = DropLast<[number, symbol, string]>;
declare type T31 = DropLast<[symbol, string]>;
declare type T32 = DropLast<[string]>;
declare type T33 = DropLast<[number, symbol, ...string[]]>;
declare type T34 = DropLast<[symbol, ...string[]]>;
declare type T35 = DropLast<[string?]>;
declare type T36 = DropLast<string[]>;
declare type T37 = DropLast<[]>;
declare type T38 = DropLast<any>;
declare type T39 = DropLast<never>;
declare type R00 = First<readonly [number, symbol, string]>;
declare type R01 = First<readonly [symbol, string]>;
declare type R02 = First<readonly [string]>;
declare type R03 = First<readonly [number, symbol, ...string[]]>;
declare type R04 = First<readonly [symbol, ...string[]]>;
declare type R05 = First<readonly string[]>;
declare type R06 = First<readonly []>;
declare type R10 = DropFirst<readonly [number, symbol, string]>;
declare type R11 = DropFirst<readonly [symbol, string]>;
declare type R12 = DropFirst<readonly [string]>;
declare type R13 = DropFirst<readonly [number, symbol, ...string[]]>;
declare type R14 = DropFirst<readonly [symbol, ...string[]]>;
declare type R15 = DropFirst<readonly string[]>;
declare type R16 = DropFirst<readonly []>;
declare type R20 = Last<readonly [number, symbol, string]>;
declare type R21 = Last<readonly [symbol, string]>;
declare type R22 = Last<readonly [string]>;
declare type R23 = Last<readonly [number, symbol, ...string[]]>;
declare type R24 = Last<readonly [symbol, ...string[]]>;
declare type R25 = Last<readonly string[]>;
declare type R26 = Last<readonly []>;
declare type R30 = DropLast<readonly [number, symbol, string]>;
declare type R31 = DropLast<readonly [symbol, string]>;
declare type R32 = DropLast<readonly [string]>;
declare type R33 = DropLast<readonly [number, symbol, ...string[]]>;
declare type R34 = DropLast<readonly [symbol, ...string[]]>;
declare type R35 = DropLast<readonly string[]>;
declare type R36 = DropLast<readonly []>;
declare function curry<T extends unknown[], U extends unknown[], R>(f: (...args: [...T, ...U]) => R, ...a: T): (...b: U) => R;
declare const fn1: (a: number, b: string, c: boolean, d: string[]) => number;
declare const c0: (a: number, b: string, c: boolean, d: string[]) => number;
declare const c1: (b: string, c: boolean, d: string[]) => number;
declare const c2: (c: boolean, d: string[]) => number;
declare const c3: (d: string[]) => number;
declare const c4: () => number;
declare const fn2: (x: number, b: boolean, ...args: string[]) => number;
declare const c10: (x: number, b: boolean, ...args: string[]) => number;
declare const c11: (b: boolean, ...args: string[]) => number;
declare const c12: (...b: string[]) => number;
declare const c13: (...b: string[]) => number;
declare const fn3: (...args: string[]) => number;
declare const c20: (...b: string[]) => number;
declare const c21: (...b: string[]) => number;
declare const c22: (...b: string[]) => number;
declare function curry2<T extends unknown[], U extends unknown[], R>(f: (...args: [...T, ...U]) => R, t: [...T], u: [...U]): R;
declare function fn10(a: string, b: number, c: boolean): string[];
declare function ft<T extends unknown[]>(t1: [...T], t2: [...T, number?]): T;
declare function call<T extends unknown[], R>(...args: [...T, (...args: T) => R]): [T, R];
declare function f20<T extends unknown[] = []>(args: [...T, number?]): T;
declare function f21<U extends string[]>(args: [...U, number?]): void;
declare function f22<T extends unknown[] = []>(args: [...T, number]): T;
declare function f22<T extends unknown[] = []>(args: [...T]): T;
declare function f23<U extends string[]>(args: [...U, number]): void;
interface Desc<A extends unknown[], T> {
readonly f: (...args: A) => T;
bind<T extends unknown[], U extends unknown[], R>(this: Desc<[...T, ...U], R>, ...args: T): Desc<[...U], R>;
}
declare const a: Desc<[string, number, boolean], object>;
declare const b: Desc<[boolean], object>;
declare function getUser(id: string, options?: {
x?: string;
}): string;
declare function getOrgUser(id: string, orgId: number, options?: {
y?: number;
z?: boolean;
}): void;
declare function callApi<T extends unknown[] = [], U = void>(method: (...args: [...T, object]) => U): (...args_0: T) => U;
declare type Numbers = number[];
declare type Unbounded = [...Numbers, boolean];
declare const data: Unbounded;
declare type U1 = [string, ...Numbers, boolean];
declare type U2 = [...[string, ...Numbers], boolean];
declare type U3 = [...[string, number], boolean];
| Microsoft/TypeScript | tests/baselines/reference/variadicTuples1.js | JavaScript | apache-2.0 | 30,820 |
/*
Derby - Class org.apache.derbyBuild.MessageVetter
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derbyBuild;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.MessageFormat;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;
import java.util.regex.Pattern;
/**
* Class that checks the message files for common problems.
*/
public class MessageVetter {
    /**
     * <p>
     * Check all the message translations in the specified directories for
     * common problems. Assume that all properties files in the directories
     * are message translations.
     * </p>
     *
     * <p>
     * If a problem is found, an error will be raised.
     * </p>
     *
     * @param args names of the directories to check
     * @throws IOException if a directory cannot be listed or a message
     * file cannot be read
     */
    public static void main(String[] args) throws IOException {
        FileFilter filter = new FileFilter() {
            public boolean accept(File pathname) {
                return pathname.getName().endsWith(".properties");
            }
        };
        for (String directory : args) {
            // File.listFiles() returns null (not an empty array) if the
            // path does not denote a directory or an I/O error occurs.
            // Fail with a descriptive exception instead of an NPE in the
            // enhanced-for loop below.
            File[] messageFiles = new File(directory).listFiles(filter);
            if (messageFiles == null) {
                throw new IOException(
                        "Could not list properties files in directory "
                        + directory);
            }
            for (File file : messageFiles) {
                new MessageVetter(file).vet();
            }
        }
    }
    /**
     * A regular expression that matches a single-quote character that is
     * neither preceeded nor followed by another single-quote character. Used
     * by {@link #checkSingleQuotes(java.lang.String, java.lang.String)} to
     * verify that messages contain two single-quotes in order to produce a
     * single apostrophe (dictated by {@code java.text.MessageFormat}).
     */
    private static final Pattern LONE_QUOTE_PATTERN =
            Pattern.compile("^'[^']|[^']'[^']|[^']'$");
    /**
     * A regular expression that matches a single-quote character that have
     * no adjacent single-quote or curly brace character. Used by
     * {@link #checkSingleQuotes(java.lang.String, java.lang.String)} to
     * verify that all single-quotes are either correctly formatted apostrophes
     * or used for quoting curly braces, as required by
     * {@code java.text.MessageFormat}.
     */
    private static final Pattern LONE_QUOTE_ALLOWED_PATTERN =
            Pattern.compile("^'[^'{}]|[^'{}]'[^'{}]|[^'{}]'$");
    /**
     * A set of message identifiers in whose messages single-quotes may legally
     * appear with no adjacent single-quote character. This will be messages
     * where the single-quotes are needed to quote curly braces that should
     * appear literally in the message text.
     */
    private static final Set<String> LONE_QUOTE_ALLOWED = new HashSet<String>();
    static {
        // The IJ help text contains curly braces that need quoting.
        LONE_QUOTE_ALLOWED.add("IJ_HelpText");
        // Some of the DRDA usage messages contain the text {on|off}, which
        // needs quoting.
        LONE_QUOTE_ALLOWED.add("DRDA_Usage8.I");
        LONE_QUOTE_ALLOWED.add("DRDA_Usage11.I");
        LONE_QUOTE_ALLOWED.add("PE_HelpText");
    }
    /** The message file to check. */
    private final File file;
    /** The properties found in the message file. */
    private final Properties properties;
    /**
     * Create a new {@code MessageVetter} instance.
     *
     * @param file the file with the messages to check
     * @throws IOException if the file cannot be loaded
     */
    private MessageVetter(File file) throws IOException {
        this.file = file;
        properties = new Properties();
        FileInputStream in = new FileInputStream(file);
        try {
            properties.load(in);
        } finally {
            // Always release the file handle, even if load() fails.
            in.close();
        }
    }
    /**
     * Vet the messages in this file. An error will be raised if an
     * ill-formatted message is found.
     */
    private void vet() {
        // Properties predates generics, so propertyNames() returns a raw
        // Enumeration; every key in a loaded properties file is a String,
        // so the cast below is safe.
        Enumeration<?> e = properties.propertyNames();
        while (e.hasMoreElements()) {
            String key = (String) e.nextElement();
            String message = properties.getProperty(key);
            vetMessage(key, message);
        }
    }
    /**
     * Vet a specific message. Raise an error if it is not well-formed.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void vetMessage(String key, String message) {
        checkSingleQuotes(key, message);
        checkValidMessageFormat(key, message);
    }
    /**
     * Check that single-quote characters are doubled, as required by
     * {@code java.text.MessageFormat}. Raise an error otherwise.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void checkSingleQuotes(String key, String message) {
        Pattern p;
        if (LONE_QUOTE_ALLOWED.contains(key)) {
            // In some messages we allow lone single-quote characters, but
            // only if they are used to quote curly braces. Use a regular
            // expression that finds all single-quotes that aren't adjacent to
            // another single-quote or a curly brace character.
            p = LONE_QUOTE_ALLOWED_PATTERN;
        } else {
            // Otherwise, we don't allow lone single-quote characters at all.
            p = LONE_QUOTE_PATTERN;
        }
        if (p.matcher(message).find()) {
            throw new AssertionError("Lone single-quote in message " + key +
                    " in " + file + ".\nThis is OK if it is used for quoting " +
                    "special characters in the message. If this is what the " +
                    "character is used for, add an exception in " +
                    getClass().getName() + ".LONE_QUOTE_ALLOWED.");
        }
    }
    /**
     * Check that a message format specifier is valid. Raise an error if it
     * is not.
     *
     * @param key the message identifier
     * @param message the message format specifier
     */
    private void checkValidMessageFormat(String key, String message) {
        try {
            // See if a MessageFormat instance can be produced based on this
            // message format specifier.
            new MessageFormat(message);
        } catch (Exception e) {
            AssertionError ae = new AssertionError(
                    "Message " + key + " in " + file + " isn't a valid " +
                    "java.text.MessageFormat pattern.");
            // AssertionError(String) has no cause parameter; chain manually.
            ae.initCause(e);
            throw ae;
        }
    }
}
| apache/derby | java/build/org/apache/derbyBuild/MessageVetter.java | Java | apache-2.0 | 7,276 |
<?xml version='1.0' encoding='UTF-8'?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html>
<head>
<title>groupby - org.saddle.groupby</title>
<meta name="description" content="groupby - org.saddle.groupby" />
<meta name="keywords" content="groupby org.saddle.groupby" />
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
<link href="../../../lib/template.css" media="screen" type="text/css" rel="stylesheet" />
<link href="../../../lib/diagrams.css" media="screen" type="text/css" rel="stylesheet" id="diagrams-css" />
<script type="text/javascript">
if(top === self) {
var url = '../../../index.html';
var hash = 'org.saddle.groupby.package';
var anchor = window.location.hash;
var anchor_opt = '';
if (anchor.length >= 1)
anchor_opt = '@' + anchor.substring(1);
window.location.href = url + '#' + hash + anchor_opt;
}
</script>
</head>
<body class="value">
<div id="definition">
<img src="../../../lib/package_big.png" />
<p id="owner"><a href="../../package.html" class="extype" name="org">org</a>.<a href="../package.html" class="extype" name="org.saddle">saddle</a></p>
<h1>groupby</h1>
</div>
<h4 id="signature" class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">package</span>
</span>
<span class="symbol">
<span class="name">groupby</span>
</span>
</h4>
<div id="comment" class="fullcommenttop"></div>
<div id="mbrsel">
<div id="textfilter"><span class="pre"></span><span class="input"><input id="mbrsel-input" type="text" accesskey="/" /></span><span class="post"></span></div>
<div id="visbl">
<span class="filtertype">Visibility</span>
<ol><li class="public in"><span>Public</span></li><li class="all out"><span>All</span></li></ol>
</div>
</div>
<div id="template">
<div id="allMembers">
<div id="types" class="types members">
<h3>Type Members</h3>
<ol><li name="org.saddle.groupby.FrameGrouper" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped">
<a id="FrameGrouper[Z,X,Y,T]extendsAnyRef"></a>
<a id="FrameGrouper[Z,X,Y,T]:FrameGrouper[Z,X,Y,T]"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">class</span>
</span>
<span class="symbol">
<a href="FrameGrouper.html"><span class="name">FrameGrouper</span></a><span class="tparams">[<span name="Z">Z</span>, <span name="X">X</span>, <span name="Y">Y</span>, <span name="T">T</span>]</span><span class="result"> extends <span class="extype" name="scala.AnyRef">AnyRef</span></span>
</span>
</h4>
<p class="comment cmt">Helper class to do combine or transform after a groupBy
</p>
</li><li name="org.saddle.groupby.IndexGrouper" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped">
<a id="IndexGrouper[Y]extendsAnyRef"></a>
<a id="IndexGrouper[Y]:IndexGrouper[Y]"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">class</span>
</span>
<span class="symbol">
<a href="IndexGrouper.html"><span class="name">IndexGrouper</span></a><span class="tparams">[<span name="Y">Y</span>]</span><span class="result"> extends <span class="extype" name="scala.AnyRef">AnyRef</span></span>
</span>
</h4>
<p class="comment cmt">Creates groups for each unique key in an index
</p>
</li><li name="org.saddle.groupby.SeriesGrouper" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped">
<a id="SeriesGrouper[Y,X,T]extendsIndexGrouper[Y]"></a>
<a id="SeriesGrouper[Y,X,T]:SeriesGrouper[Y,X,T]"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">class</span>
</span>
<span class="symbol">
<a href="SeriesGrouper.html"><span class="name">SeriesGrouper</span></a><span class="tparams">[<span name="Y">Y</span>, <span name="X">X</span>, <span name="T">T</span>]</span><span class="result"> extends <a href="IndexGrouper.html" class="extype" name="org.saddle.groupby.IndexGrouper">IndexGrouper</a>[<span class="extype" name="org.saddle.groupby.SeriesGrouper.Y">Y</span>]</span>
</span>
</h4>
<p class="comment cmt">Helper class to do combine or transform after a groupBy
</p>
</li></ol>
</div>
<div id="values" class="values members">
<h3>Value Members</h3>
<ol><li name="org.saddle.groupby.FrameGrouper" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped">
<a id="FrameGrouper"></a>
<a id="FrameGrouper:FrameGrouper"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">object</span>
</span>
<span class="symbol">
<a href="FrameGrouper$.html"><span class="name">FrameGrouper</span></a>
</span>
</h4>
</li><li name="org.saddle.groupby.IndexGrouper" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped">
<a id="IndexGrouper"></a>
<a id="IndexGrouper:IndexGrouper"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">object</span>
</span>
<span class="symbol">
<a href="IndexGrouper$.html"><span class="name">IndexGrouper</span></a>
</span>
</h4>
</li><li name="org.saddle.groupby.SeriesGrouper" visbl="pub" data-isabs="false" fullComment="no" group="Ungrouped">
<a id="SeriesGrouper"></a>
<a id="SeriesGrouper:SeriesGrouper"></a>
<h4 class="signature">
<span class="modifier_kind">
<span class="modifier"></span>
<span class="kind">object</span>
</span>
<span class="symbol">
<a href="SeriesGrouper$.html"><span class="name">SeriesGrouper</span></a>
</span>
</h4>
</li></ol>
</div>
</div>
<div id="inheritedMembers">
</div>
<div id="groupedMembers">
<div class="group" name="Ungrouped">
<h3>Ungrouped</h3>
</div>
</div>
</div>
<div id="tooltip"></div>
<div id="footer"> </div>
<script defer="defer" type="text/javascript" id="jquery-js" src="../../../lib/jquery.js"></script><script defer="defer" type="text/javascript" id="jquery-ui-js" src="../../../lib/jquery-ui.js"></script><script defer="defer" type="text/javascript" id="tools-tooltip-js" src="../../../lib/tools.tooltip.js"></script><script defer="defer" type="text/javascript" id="template-js" src="../../../lib/template.js"></script>
</body>
</html> | saddle/saddle | saddle-core/target/scala-2.10/api/org/saddle/groupby/package.html | HTML | apache-2.0 | 7,315 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.util.http.parser;
import java.io.IOException;
import java.io.StringReader;
import org.apache.tomcat.util.collections.ConcurrentCache;
/**
 * Caches the results of parsing content-type headers.
 */
public class MediaTypeCache {

    private final ConcurrentCache<String,String[]> cache;

    public MediaTypeCache(int size) {
        cache = new ConcurrentCache<>(size);
    }

    /**
     * Looks in the cache and returns the cached value if one is present. If no
     * match exists in the cache, a new parser is created, the input parsed and
     * the results placed in the cache and returned to the user.
     *
     * @param input The content-type header value to parse
     * @return The results are provided as a two element String array. The
     *             first element is the media type less the charset and
     *             the second element is the charset
     */
    public String[] parse(String input) {
        String[] cached = cache.get(input);
        if (cached != null) {
            return cached;
        }

        MediaType mediaType = null;
        try {
            mediaType = MediaType.parseMediaType(new StringReader(input));
        } catch (IOException ioe) {
            // Ignore - fall through and return null below
        }
        if (mediaType == null) {
            // Unparseable input is not cached; callers get null.
            return null;
        }

        String[] parsed = new String[] {
                mediaType.toStringNoCharset(), mediaType.getCharset() };
        cache.put(input, parsed);
        return parsed;
    }
}
| IAMTJW/Tomcat-8.5.20 | tomcat-8.5.20/java/org/apache/tomcat/util/http/parser/MediaTypeCache.java | Java | apache-2.0 | 2,331 |
package io.agrest.it.fixture.cayenne;
import io.agrest.it.fixture.cayenne.auto._E15E1;
// Test-fixture persistent entity. All mapped properties live in the
// generated superclass _E15E1; this subclass is the customization point
// and deliberately adds no behavior.
public class E15E1 extends _E15E1 {
    // Keeps serialized form stable across recompilations.
    private static final long serialVersionUID = 1L;
}
| AbleOne/link-rest | agrest/src/test/java/io/agrest/it/fixture/cayenne/E15E1.java | Java | apache-2.0 | 183 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="pt">
<head>
<!-- Generated by javadoc (version 1.7.0_71) on Tue Jun 16 10:37:20 BRT 2015 -->
<meta http-equiv="Content-Type" content="text/html" charset="UTF-8">
<title>opennlp.tools.doccat (Apache OpenNLP Tools 1.6.0 API)</title>
<meta name="date" content="2015-06-16">
<link rel="stylesheet" type="text/css" href="../../../stylesheet.css" title="Style">
</head>
<body>
<h1 class="bar"><a href="../../../opennlp/tools/doccat/package-summary.html" target="classFrame">opennlp.tools.doccat</a></h1>
<div class="indexContainer">
<h2 title="Interfaces">Interfaces</h2>
<ul title="Interfaces">
<li><a href="DoccatEvaluationMonitor.html" title="interface in opennlp.tools.doccat" target="classFrame"><i>DoccatEvaluationMonitor</i></a></li>
<li><a href="DocumentCategorizer.html" title="interface in opennlp.tools.doccat" target="classFrame"><i>DocumentCategorizer</i></a></li>
<li><a href="FeatureGenerator.html" title="interface in opennlp.tools.doccat" target="classFrame"><i>FeatureGenerator</i></a></li>
</ul>
<h2 title="Classes">Classes</h2>
<ul title="Classes">
<li><a href="BagOfWordsFeatureGenerator.html" title="class in opennlp.tools.doccat" target="classFrame">BagOfWordsFeatureGenerator</a></li>
<li><a href="DoccatCrossValidator.html" title="class in opennlp.tools.doccat" target="classFrame">DoccatCrossValidator</a></li>
<li><a href="DoccatFactory.html" title="class in opennlp.tools.doccat" target="classFrame">DoccatFactory</a></li>
<li><a href="DoccatModel.html" title="class in opennlp.tools.doccat" target="classFrame">DoccatModel</a></li>
<li><a href="DocumentCategorizerEvaluator.html" title="class in opennlp.tools.doccat" target="classFrame">DocumentCategorizerEvaluator</a></li>
<li><a href="DocumentCategorizerEventStream.html" title="class in opennlp.tools.doccat" target="classFrame">DocumentCategorizerEventStream</a></li>
<li><a href="DocumentCategorizerME.html" title="class in opennlp.tools.doccat" target="classFrame">DocumentCategorizerME</a></li>
<li><a href="DocumentSample.html" title="class in opennlp.tools.doccat" target="classFrame">DocumentSample</a></li>
<li><a href="DocumentSampleStream.html" title="class in opennlp.tools.doccat" target="classFrame">DocumentSampleStream</a></li>
<li><a href="NGramFeatureGenerator.html" title="class in opennlp.tools.doccat" target="classFrame">NGramFeatureGenerator</a></li>
</ul>
</div>
</body>
</html>
| ericmguimaraes/COMP0378 | references/opennlp-docs/apidocs/opennlp-tools/opennlp/tools/doccat/package-frame.html | HTML | apache-2.0 | 2,512 |
<html>
<head>
<link rel="stylesheet" type="text/css" href="{{{host}}}/stylesheets/bootstrap.min.css">
<script src="{{{host}}}/javascripts/jquery-1.8.3.js"></script>
<script src="{{{host}}}/javascripts/oauth2client.js"></script>
<script src="{{{host}}}/{{{TILE_NAME}}}/javascripts/action.js"></script>
<script>
$(document).ready( function() {
doIt( '{{{host}}}');
});
</script>
</head>
<body>
<div id="j-card-authentication" class="j-card" style='display: none'>
<h2>Expanded Project Information ..</h2>
<p>
<p>The remote system (Basecamp) requires you to grant access before proceeding</p>
<!---
Project:
<label id="projectA"><b>Placeholder for Project</b> </label>
<br>
Description:
<label id="descriptionA"><b>Placeholder for Description</b> </label>
<br>
      <a id="BasecampLinkA" href="https://basecamp.com" target="_blank" >Visit this project at Basecamp</a>
-->
</p>
<br><br>
<button class="btn btn-primary" id="oauth">Grant Access</button>
<button class="btn btn-primary" id="btn_doneA">Exit</button>
</div>
<div id="j-card-action" class="j-card" style='display: none'>
<h3>Expanded Project Information ..</h3>
<p>
<h5><u>Project:</u></h5><br>
<label id="projectB">Placeholder for Project</label>
<br><br>
<h5><u>Description:</u></h5> <br>
<label id="descriptionB">Placeholder for Description</label>
<br><br><br>
        <a id="BasecampLinkB" href="https://basecamp.com" target="_blank" >Visit this project at Basecamp</a>
</p>
<button class="btn btn-primary" id="btn_done">Close Window</button>
</div>
</body>
</html>
| jivesoftware/jive-sdk | jive-sdk-service/generator/examples/example-basecamp/tiles/BaseCamp-ProjectInfo/public/action.html | HTML | apache-2.0 | 1,748 |
#pragma once
#include "generator/collector_interface.hpp"
#include <fstream>
#include <functional>
#include <memory>
#include <string>
struct OsmElement;
namespace base
{
class GeoObjectId;
} // namespace base
namespace generator
{
namespace cache
{
class IntermediateDataReaderInterface;
} // namespace cache
// CollectorTag class collects validated value of a tag and saves it to file with following
// format: osmId<tab>tagValue.
class CollectorTag : public CollectorInterface
{
public:
  // Predicate deciding whether a tag value is well-formed and worth saving.
  using Validator = std::function<bool(std::string const & tagValue)>;

  // |filename| - output file, |tagKey| - OSM tag whose values are collected,
  // |validator| - filters the collected values.
  explicit CollectorTag(std::string const & filename, std::string const & tagKey,
                        Validator const & validator);

  // CollectorInterface overrides:
  std::shared_ptr<CollectorInterface> Clone(
      std::shared_ptr<cache::IntermediateDataReaderInterface> const & = {}) const override;

  // Processes one OSM element; presumably appends an "osmId<tab>value" line
  // when the element carries |m_tagKey| and the value passes |m_validator|
  // (see class comment; implementation lives in the .cpp).
  void Collect(OsmElement const & el) override;
  void Finish() override;

  // Merge/MergeInto implement the double-dispatch merging of per-thread
  // collector instances used by the CollectorInterface framework.
  void Merge(CollectorInterface const & collector) override;
  void MergeInto(CollectorTag & collector) const override;

protected:
  void Save() override;
  void OrderCollectedData() override;

private:
  std::ofstream m_stream;   // destination file stream
  std::string m_tagKey;     // OSM tag key being collected
  Validator m_validator;    // value filter supplied at construction
};
| mpimenov/omim | generator/collector_tag.hpp | C++ | apache-2.0 | 1,262 |
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import collections
import time
from enum import Enum
from pyflink.datastream import TimerService
from pyflink.datastream.timerservice import InternalTimer, K, N, InternalTimerService
from pyflink.fn_execution.state_impl import RemoteKeyedStateBackend
class InternalTimerImpl(InternalTimer[K, N]):
    """Concrete timer: an immutable (timestamp, key, namespace) triple."""

    def __init__(self, timestamp: int, key: K, namespace: N):
        self._timestamp = timestamp
        self._key = key
        self._namespace = namespace

    def get_timestamp(self) -> int:
        """Returns the firing timestamp of this timer."""
        return self._timestamp

    def get_key(self) -> K:
        """Returns the key this timer is scoped to."""
        return self._key

    def get_namespace(self) -> N:
        """Returns the namespace this timer is scoped to."""
        return self._namespace

    def __hash__(self):
        # Mirror the Java-style 31-based hash: fold the high bits of the
        # timestamp, then combine the key (as a tuple) and the namespace.
        code = int(self._timestamp ^ (self._timestamp >> 32))
        for part in (hash(tuple(self._key)), hash(self._namespace)):
            code = 31 * code + part
        return code

    def __eq__(self, other):
        if self.__class__ != other.__class__:
            return False
        return (self._timestamp, self._key, self._namespace) == \
            (other._timestamp, other._key, other._namespace)
class TimerOperandType(Enum):
    """
    Kind of pending timer mutation accumulated by InternalTimerServiceImpl.

    NOTE(review): the numeric values look like a serialization contract with
    the consumer of these operations -- confirm before renumbering.
    """
    REGISTER_EVENT_TIMER = 0
    REGISTER_PROC_TIMER = 1
    DELETE_EVENT_TIMER = 2
    DELETE_PROC_TIMER = 3
class InternalTimerServiceImpl(InternalTimerService[N]):
    """
    Internal implementation of InternalTimerService.

    Timer registrations and deletions are not applied immediately: each call
    records a (TimerOperandType, InternalTimerImpl) pair in ``self.timers``,
    an OrderedDict used as an insertion-ordered, deduplicating set.
    """

    def __init__(self, keyed_state_backend: RemoteKeyedStateBackend):
        self._keyed_state_backend = keyed_state_backend
        self._current_watermark = None
        # Pending timer operations; values are always None (set semantics).
        self.timers = collections.OrderedDict()

    def current_processing_time(self):
        """Returns the current wall-clock time in milliseconds."""
        return int(time.time() * 1000)

    def current_watermark(self):
        """Returns the last watermark set via advance_watermark (or None)."""
        return self._current_watermark

    def advance_watermark(self, watermark: int):
        self._current_watermark = watermark

    def register_processing_time_timer(self, namespace: N, t: int):
        self._record_timer(TimerOperandType.REGISTER_PROC_TIMER, namespace, t)

    def register_event_time_timer(self, namespace: N, t: int):
        self._record_timer(TimerOperandType.REGISTER_EVENT_TIMER, namespace, t)

    def delete_processing_time_timer(self, namespace: N, t: int):
        self._record_timer(TimerOperandType.DELETE_PROC_TIMER, namespace, t)

    def delete_event_time_timer(self, namespace: N, t: int):
        self._record_timer(TimerOperandType.DELETE_EVENT_TIMER, namespace, t)

    def _record_timer(self, operand: TimerOperandType, namespace: N, t: int):
        # Shared implementation of the four register/delete methods above:
        # key the pending operation by (operand, timer) so recording the same
        # operation twice collapses into a single entry.
        current_key = self._keyed_state_backend.get_current_key()
        timer = (operand, InternalTimerImpl(t, current_key, namespace))
        self.timers[timer] = None
class TimerServiceImpl(TimerService):
    """
    Internal implementation of TimerService.

    A thin facade over InternalTimerServiceImpl that fixes the namespace
    argument to ``None`` for every timer operation.
    """

    def __init__(self, internal_timer_service: InternalTimerServiceImpl):
        self._internal = internal_timer_service
        # Shared reference to the wrapped service's pending timer operations.
        self.timers = self._internal.timers

    def current_processing_time(self) -> int:
        return self._internal.current_processing_time()

    def current_watermark(self) -> int:
        return self._internal.current_watermark()

    def advance_watermark(self, wm):
        self._internal.advance_watermark(wm)

    def register_processing_time_timer(self, t: int):
        self._internal.register_processing_time_timer(None, t)

    def register_event_time_timer(self, t: int):
        self._internal.register_event_time_timer(None, t)

    def delete_processing_time_timer(self, t: int):
        self._internal.delete_processing_time_timer(None, t)

    def delete_event_time_timer(self, t: int):
        self._internal.delete_event_time_timer(None, t)
| clarkyzl/flink | flink-python/pyflink/fn_execution/timerservice_impl.py | Python | apache-2.0 | 4,824 |
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* Copyright 2012-2019 the original author or authors.
*/
package org.assertj.core.error;
import static java.lang.String.format;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.error.ShouldOnlyHaveFields.shouldOnlyHaveDeclaredFields;
import static org.assertj.core.error.ShouldOnlyHaveFields.shouldOnlyHaveFields;
import static org.assertj.core.util.Sets.newLinkedHashSet;
import java.util.LinkedHashSet;
import org.assertj.core.description.Description;
import org.assertj.core.description.TextDescription;
import org.assertj.core.presentation.Representation;
import org.assertj.core.presentation.StandardRepresentation;
import org.assertj.core.test.Player;
import org.assertj.core.util.Sets;
import org.junit.jupiter.api.Test;
/**
 * Tests for
 * <code>{@link ShouldOnlyHaveFields#create(Description, Representation)}</code>
 *
 * @author Filip Hrisafov
 */
public class ShouldOnlyHaveFields_create_Test {

  // Shared empty set used when a test has no missing/unexpected fields.
  private static final LinkedHashSet<String> EMPTY_STRING_SET = Sets.<String> newLinkedHashSet();

  @Test
  public void should_create_error_message_for_fields() {
    ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class,
                                                       newLinkedHashSet("name", "team"),
                                                       newLinkedHashSet("nickname"),
                                                       newLinkedHashSet("address"));
    String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
    assertThat(message).isEqualTo(format("[Test] %n" +
                                         "Expecting%n" +
                                         " <org.assertj.core.test.Player>%n" +
                                         "to only have the following public accessible fields:%n" +
                                         " <[\"name\", \"team\"]>%n" +
                                         "fields not found:%n" +
                                         " <[\"nickname\"]>%n" +
                                         "and fields not expected:%n" +
                                         " <[\"address\"]>"));
  }

  @Test
  public void should_not_display_unexpected_fields_when_there_are_none_for_fields() {
    ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class,
                                                       newLinkedHashSet("name", "team"),
                                                       newLinkedHashSet("nickname"),
                                                       EMPTY_STRING_SET);
    String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
    assertThat(message).isEqualTo(format("[Test] %n" +
                                         "Expecting%n" +
                                         " <org.assertj.core.test.Player>%n" +
                                         "to only have the following public accessible fields:%n" +
                                         " <[\"name\", \"team\"]>%n" +
                                         "but could not find the following fields:%n" +
                                         " <[\"nickname\"]>"));
  }

  @Test
  public void should_not_display_fields_not_found_when_there_are_none_for_fields() {
    ErrorMessageFactory factory = shouldOnlyHaveFields(Player.class,
                                                       newLinkedHashSet("name", "team"),
                                                       EMPTY_STRING_SET,
                                                       newLinkedHashSet("address"));
    String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
    assertThat(message).isEqualTo(format("[Test] %n" +
                                         "Expecting%n" +
                                         " <org.assertj.core.test.Player>%n" +
                                         "to only have the following public accessible fields:%n" +
                                         " <[\"name\", \"team\"]>%n" +
                                         "but the following fields were unexpected:%n" +
                                         " <[\"address\"]>"));
  }

  @Test
  public void should_create_error_message_for_declared_fields() {
    ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class,
                                                               newLinkedHashSet("name", "team"),
                                                               newLinkedHashSet("nickname"),
                                                               newLinkedHashSet("address"));
    String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
    assertThat(message).isEqualTo(format("[Test] %n" +
                                         "Expecting%n" +
                                         " <org.assertj.core.test.Player>%n" +
                                         "to only have the following declared fields:%n" +
                                         " <[\"name\", \"team\"]>%n" +
                                         "fields not found:%n" +
                                         " <[\"nickname\"]>%n" +
                                         "and fields not expected:%n" +
                                         " <[\"address\"]>"));
  }

  @Test
  public void should_not_display_unexpected_fields_when_there_are_none_for_declared_fields() {
    ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class,
                                                               newLinkedHashSet("name", "team"),
                                                               newLinkedHashSet("nickname"),
                                                               EMPTY_STRING_SET);
    String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
    assertThat(message).isEqualTo(format("[Test] %n" +
                                         "Expecting%n" +
                                         " <org.assertj.core.test.Player>%n" +
                                         "to only have the following declared fields:%n" +
                                         " <[\"name\", \"team\"]>%n" +
                                         "but could not find the following fields:%n" +
                                         " <[\"nickname\"]>"));
  }

  @Test
  public void should_not_display_fields_not_found_when_there_are_none_for_declared_fields() {
    ErrorMessageFactory factory = shouldOnlyHaveDeclaredFields(Player.class,
                                                               newLinkedHashSet("name", "team"),
                                                               EMPTY_STRING_SET,
                                                               newLinkedHashSet("address"));
    String message = factory.create(new TextDescription("Test"), new StandardRepresentation());
    // Use the statically imported format(...) for consistency with the other
    // tests in this class (was String.format).
    assertThat(message).isEqualTo(format("[Test] %n" +
                                         "Expecting%n" +
                                         " <org.assertj.core.test.Player>%n" +
                                         "to only have the following declared fields:%n" +
                                         " <[\"name\", \"team\"]>%n" +
                                         "but the following fields were unexpected:%n" +
                                         " <[\"address\"]>"));
  }
}
| xasx/assertj-core | src/test/java/org/assertj/core/error/ShouldOnlyHaveFields_create_Test.java | Java | apache-2.0 | 8,163 |
/*-
* See the file LICENSE for redistribution information.
*
* Copyright (c) 2001, 2010 Oracle and/or its affiliates. All rights reserved.
*
* $Id$
*/
#include "db_config.h"
#include "db_int.h"
/*
* __os_id --
* Return the current process ID.
*
* PUBLIC: void __os_id __P((DB_ENV *, pid_t *, db_threadid_t*));
*/
void
__os_id(dbenv, pidp, tidp)
	DB_ENV *dbenv;
	pid_t *pidp;
	db_threadid_t *tidp;
{
	/*
	 * We can't depend on dbenv not being NULL, this routine is called
	 * from places where there's no DB_ENV handle.
	 *
	 * We cache the pid in the ENV handle, getting the process ID is a
	 * fairly slow call on lots of systems.
	 */
	if (pidp != NULL) {
		if (dbenv == NULL) {
#if defined(HAVE_VXWORKS)
			/* VxWorks has tasks rather than processes. */
			*pidp = taskIdSelf();
#else
			*pidp = getpid();
#endif
		} else
			*pidp = dbenv->env->pid_cache;
	}

	if (tidp != NULL) {
#if defined(DB_WIN32)
		*tidp = GetCurrentThreadId();
#elif defined(HAVE_MUTEX_UI_THREADS)
		/* Solaris/UI threads. */
		*tidp = thr_self();
#elif defined(HAVE_PTHREAD_SELF)
		*tidp = pthread_self();
#else
		/*
		 * No thread support detected: report a constant thread ID
		 * of 0 (there is only one thread of control per process).
		 */
		*tidp = 0;
#endif
	}
}
| racker/omnibus | source/db-5.0.26.NC/os/os_pid.c | C | apache-2.0 | 1,092 |
@extends('dashboard.main')
@section('styles')
<meta name="lang" content="{{ \Session::get('locale') }}">
<link rel="stylesheet" href="{{ URL::to('libs/vendor/iCheck/skins/square/blue.css') }}">
<link rel="stylesheet" href="{{ URL::to('libs/vendor/malihu-custom-scrollbar-plugin/jquery.mCustomScrollbar.css') }}">
<link rel="stylesheet" href="{{ URL::to('libs/vendor/datetimepicker/jquery.datetimepicker.css') }}">
<link rel="stylesheet" href="{{ URL::to('libs/vendor/bootstrap-select/dist/css/bootstrap-select.css') }}">
@endsection
@section('scripts')
<script src="{{ URL::to('libs/vendor/moment/moment.js') }}"></script>
<script src="{{ URL::to('libs/vendor/moment/locale/en-gb.js') }}"></script>
<script src="{{ URL::to('libs/dashboard/moment-ru.js') }}"></script>
<script src="{{ URL::to('libs/vendor/underscore/underscore.js') }}"></script>
<script src="{{ URL::to('libs/dashboard/notify.min.js') }}"></script>
<script src="{{ URL::to('libs/vendor/bootstrap-select/dist/js/bootstrap-select.js') }}"></script>
<script src="{{ URL::to('libs/vendor/malihu-custom-scrollbar-plugin/jquery.mCustomScrollbar.js') }}"></script>
<script src="{{ URL::to('libs/vendor/datetimepicker/build/jquery.datetimepicker.full.js') }}"></script>
<script src="{{ URL::asset('libs/vendor/clndr/src/clndr.js') }}"></script>
<script src="{{ URL::asset('libs/vendor/iCheck/icheck.js') }}"></script>
<script src="{{ URL::to('libs/dashboard/schedule.js') }}"></script>
@endsection
@section('navigation')
@include('dashboard.components.nav')
@endsection
@section('body-class', 'page-schedule')
@section('content')
<div id="full-clndr" class="clearfix">
@include('dashboard.components.clndr')
</div>
<div id="fountainTextG"><div id="fountainTextG_1" class="fountainTextG">L</div><div id="fountainTextG_2" class="fountainTextG">o</div><div id="fountainTextG_3" class="fountainTextG">a</div><div id="fountainTextG_4" class="fountainTextG">d</div><div id="fountainTextG_5" class="fountainTextG">i</div><div id="fountainTextG_6" class="fountainTextG">n</div><div id="fountainTextG_7" class="fountainTextG">g</div></div>
<div class="modal fade" tabindex="-1" role="dialog" id="modal">
<div class="modal-dialog">
<div class="modal-content">
<div class="modal-header">
<button type="button" class="close" data-dismiss="modal" aria-label="Close"><span aria-hidden="true">×</span></button>
<h4 class="modal-title">@lang('dashboard.components.scheduler.modal.modalTitle')</h4>
</div>
<div class="modal-body">
<form>
<div class="form-group">
<label for="Title">@lang('dashboard.components.scheduler.modal.title')</label>
<input type="text" class="form-control" id="Title" placeholder="@lang('dashboard.components.scheduler.modal.title')" title="@lang('dashboard.components.scheduler.modal.titleTip')">
</div>
<div class="form-group">
<label for="Description">@lang('dashboard.components.scheduler.modal.desc')</label>
<input type="text" class="form-control" id="Description" placeholder="@lang('dashboard.components.scheduler.modal.desc')">
</div>
<div class="form-group">
<label for="playlist">@lang('dashboard.components.scheduler.modal.playlist')</label>
<select class="selectpicker form-control" id="playlist" name="playlist">
<option value="0">@lang('dashboard.components.scheduler.modal.emptyPlaylist')</option>
</select>
</div>
<div class="form-group">
<input type="checkbox" id="repeat-day">
<label for="repeat-day">@lang('dashboard.components.scheduler.modal.repeat.everyDay')</label>
</div>
<div class="form-group">
<input type="checkbox" id="repeat-month">
<label for="repeat-month">@lang('dashboard.components.scheduler.modal.repeat.everyWeek')</label>
<div class="form-group" id="repeat-on">
<input type="checkbox" id="repeat-on-mon">
<label for="repeat-on-mon">@lang('dashboard.components.scheduler.modal.repeat.weeks.mon')</label>
<input type="checkbox" id="repeat-on-tue">
<label for="repeat-on-tue">@lang('dashboard.components.scheduler.modal.repeat.weeks.tue')</label>
<input type="checkbox" id="repeat-on-wed">
<label for="repeat-on-wed">@lang('dashboard.components.scheduler.modal.repeat.weeks.wed')</label>
<input type="checkbox" id="repeat-on-thu">
<label for="repeat-on-thu">@lang('dashboard.components.scheduler.modal.repeat.weeks.thu')</label>
<input type="checkbox" id="repeat-on-fri">
<label for="repeat-on-fri">@lang('dashboard.components.scheduler.modal.repeat.weeks.fri')</label>
<input type="checkbox" id="repeat-on-sat">
<label for="repeat-on-sat">@lang('dashboard.components.scheduler.modal.repeat.weeks.sat')</label>
<input type="checkbox" id="repeat-on-sun">
<label for="repeat-on-sun">@lang('dashboard.components.scheduler.modal.repeat.weeks.sun')</label>
</div>
</div>
<div class="form-group">
<label for="datetimepicker">@lang('dashboard.components.scheduler.modal.datetime')</label>
<input type="text" class="form-control" id="datetimepicker" placeholder="Datetime" autocomplete="off">
</div>
</form>
</div>
<div class="modal-footer">
<button type="button" class="btn btn-danger pull-left delete-event">@lang('dashboard.core.buttons.delete')</button>
<button type="button" class="btn btn-default" data-dismiss="modal">@lang('dashboard.core.buttons.close')</button>
<button type="button" class="btn btn-primary save-changes">@lang('dashboard.core.buttons.save')</button>
</div>
</div><!-- /.modal-content -->
</div><!-- /.modal-dialog -->
</div><!-- /.modal -->
@endsection | artemsky/EvartFM | resources/views/dashboard/pages/schedule/index.blade.php | PHP | apache-2.0 | 6,950 |
package rule
import (
"fmt"
"github.com/mgechev/revive/lint"
)
// ImportsBlacklistRule disallows importing the packages listed in the
// rule's configured arguments (the blacklist).
type ImportsBlacklistRule struct {
	blacklist map[string]bool
}
// Apply applies the rule to given file. It reports a failure for every
// import whose quoted path appears in the configured blacklist.
func (r *ImportsBlacklistRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
	var failures []lint.Failure

	if file.IsTest() {
		return failures // skip, test file
	}

	if r.blacklist == nil {
		// Lazily build the blacklist on first use.
		// NOTE(review): this init is not synchronized -- assumes a rule
		// instance is applied from a single goroutine; confirm.
		r.blacklist = make(map[string]bool, len(arguments))

		for _, arg := range arguments {
			argStr, ok := arg.(string)
			if !ok {
				panic(fmt.Sprintf("Invalid argument to the imports-blacklist rule. Expecting a string, got %T", arg))
			}
			// Import paths in the AST always carry surrounding double
			// quotes, so the configured value must be quoted to be
			// comparable. The previous check (len > 2 && neither end
			// quoted) silently left short (<= 2 chars) or half-quoted
			// entries unquoted, making them unmatchable; instead, quote
			// every entry that is not already fully quoted.
			if len(argStr) < 2 || argStr[0] != '"' || argStr[len(argStr)-1] != '"' {
				argStr = fmt.Sprintf("%q", argStr)
			}
			r.blacklist[argStr] = true
		}
	}

	for _, is := range file.AST.Imports {
		path := is.Path
		if path != nil && r.blacklist[path.Value] {
			failures = append(failures, lint.Failure{
				Confidence: 1,
				Failure:    "should not use the following blacklisted import: " + path.Value,
				Node:       is,
				Category:   "imports",
			})
		}
	}

	return failures
}
// Name returns the rule's identifier as referenced in revive configuration.
func (r *ImportsBlacklistRule) Name() string {
	return "imports-blacklist"
}
| nalind/buildah-1 | tests/tools/vendor/github.com/mgechev/revive/rule/imports-blacklist.go | GO | apache-2.0 | 1,377 |
#!/bin/bash
# Copyright 2014 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
## Contains configuration values for interacting with the Vagrant cluster
# Number of minions in the cluster
NUM_MINIONS=${NUM_MINIONS-"1"}
export NUM_MINIONS
# The IP of the master
export MASTER_IP="10.245.1.2"
export KUBE_MASTER_IP="10.245.1.2"
# Prefix used when naming the VMs and derived cluster resources.
export INSTANCE_PREFIX="kubernetes"
export MASTER_NAME="${INSTANCE_PREFIX}-master"
# Map out the IPs, names and container subnets of each minion
export MINION_IP_BASE="10.245.1."
MINION_CONTAINER_SUBNET_BASE="10.246"
MASTER_CONTAINER_NETMASK="255.255.255.0"
MASTER_CONTAINER_ADDR="${MINION_CONTAINER_SUBNET_BASE}.0.1"
MASTER_CONTAINER_SUBNET="${MINION_CONTAINER_SUBNET_BASE}.0.1/24"
CONTAINER_SUBNET="${MINION_CONTAINER_SUBNET_BASE}.0.0/16"
# Minion i (0-based) gets host IP 10.245.1.(i+3) and the
# 10.246.(i+1).1/24 container subnet.
for ((i=0; i < NUM_MINIONS; i++)) do
  MINION_IPS[$i]="${MINION_IP_BASE}$((i+3))"
  MINION_NAMES[$i]="${INSTANCE_PREFIX}-minion-$((i+1))"
  MINION_CONTAINER_SUBNETS[$i]="${MINION_CONTAINER_SUBNET_BASE}.$((i+1)).1/24"
  MINION_CONTAINER_ADDRS[$i]="${MINION_CONTAINER_SUBNET_BASE}.$((i+1)).1"
  MINION_CONTAINER_NETMASKS[$i]="255.255.255.0"
  VAGRANT_MINION_NAMES[$i]="minion-$((i+1))"
done
# Virtual IP range handed out to services (cluster-internal portal IPs).
PORTAL_NET=10.247.0.0/16
# Since this isn't exposed on the network, default to a simple user/passwd
MASTER_USER=vagrant
MASTER_PASSWD=vagrant
# Admission Controllers to invoke prior to persisting objects in cluster
ADMISSION_CONTROL=NamespaceLifecycle,NamespaceAutoProvision,LimitRanger,SecurityContextDeny,ResourceQuota
# Optional: Install node monitoring.
ENABLE_NODE_MONITORING=true
# Optional: Enable node logging.
ENABLE_NODE_LOGGING=false
LOGGING_DESTINATION=elasticsearch
# Optional: When set to true, Elasticsearch and Kibana will be setup as part of the cluster bring up.
ENABLE_CLUSTER_LOGGING=false
ELASTICSEARCH_LOGGING_REPLICAS=1
# Optional: When set to true, heapster, Influxdb and Grafana will be setup as part of the cluster bring up.
ENABLE_CLUSTER_MONITORING="${KUBE_ENABLE_CLUSTER_MONITORING:-true}"
# Extra options to set on the Docker command line. This is useful for setting
# --insecure-registry for local registries.
DOCKER_OPTS=""
# Optional: Install cluster DNS.
ENABLE_CLUSTER_DNS=true
DNS_SERVER_IP="10.247.0.10"
DNS_DOMAIN="kubernetes.local"
DNS_REPLICAS=1
# Optional: Enable setting flags for kube-apiserver to turn on behavior in active-dev
#RUNTIME_CONFIG=""
# Enables the v1beta3 API group.
RUNTIME_CONFIG="api/v1beta3"
| bprashanth/kubernetes | cluster/vagrant/config-default.sh | Shell | apache-2.0 | 2,933 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __TESTS_ENVIRONMENT_HPP__
#define __TESTS_ENVIRONMENT_HPP__
#include <gtest/gtest.h>
namespace mesos {
namespace internal {
namespace tests {
// Used to set up our particular test environment.
class Environment : public ::testing::Environment {
public:
  // Invoked by gtest once, before any test runs.
  virtual void SetUp();
  // Invoked by gtest once, after every test has finished.
  virtual void TearDown();
};
} // namespace tests {
} // namespace internal {
} // namespace mesos {
#endif // __TESTS_ENVIRONMENT_HPP__
| enachb/mesos | src/tests/environment.hpp | C++ | apache-2.0 | 1,236 |
<?php
/**
* 模板操作
*
* [WeEngine System] Copyright (c) 2013 WE7.CC
*/
defined('IN_IA') or exit('Access Denied');
/**
 * Import global variables and display the rendered template directly.
 * @var int
 */
define('TEMPLATE_DISPLAY', 0);
/**
 * Import global variables and return the rendered template as a string.
 * @var int
 */
define('TEMPLATE_FETCH', 1);
/**
 * Return the include path of the compiled template file.
 * @var int
 */
define('TEMPLATE_INCLUDEPATH', 2);
/**
 * Cache the rendered output. @todo not implemented yet.
 * @var int
 */
define('TEMPLATE_CACHE', 3);
/**
 * Locate, (re)compile and render a template.
 *
 * Looks up "{$filename}.html" under the current style directory (falling
 * back to the "default" style), recompiles it when stale or when
 * DEVELOPMENT is on, and then acts according to $flag.
 *
 * @param string $filename Template name relative to the style directory, without ".html".
 * @param int $flag One of TEMPLATE_DISPLAY / TEMPLATE_FETCH / TEMPLATE_INCLUDEPATH / TEMPLATE_CACHE.
 * @return mixed Nothing for TEMPLATE_DISPLAY, the rendered markup for
 *               TEMPLATE_FETCH, the compiled file path for TEMPLATE_INCLUDEPATH.
 */
function template($filename, $flag = TEMPLATE_DISPLAY) {
	global $_W;
	// Prefer the active style; fall back to the "default" style directory.
	$source = "{$_W['template']['source']}/{$_W['template']['current']}/{$filename}.html";
	// exit($source);
	if(!is_file($source)) {
		$source = "{$_W['template']['source']}/default/{$filename}.html";
	}
	if(!is_file($source)) {
		exit("Error: template source '{$filename}' is not exist!");
	}
	$compile = "{$_W['template']['compile']}/{$_W['template']['current']}/{$filename}.tpl.php";
	// Recompile when developing or when the source is newer than the compiled copy.
	if (DEVELOPMENT || !is_file($compile) || filemtime($source) > filemtime($compile)) {
		template_compile($source, $compile);
	}
	switch ($flag) {
		case TEMPLATE_DISPLAY:
		default:
			// Render in place, exposing all globals to the template scope.
			extract($GLOBALS, EXTR_SKIP);
			include $compile;
			break;
		case TEMPLATE_FETCH:
			// Render into an output buffer and return the markup instead.
			extract($GLOBALS, EXTR_SKIP);
			ob_start();
			ob_clean();
			include $compile;
			$contents = ob_get_contents();
			ob_clean();
			return $contents;
			break;
		case TEMPLATE_INCLUDEPATH:
			return $compile;
			break;
		case TEMPLATE_CACHE:
			exit('暂未支持');
			break;
	}
}
/**
 * Compile a template source file into an executable PHP file.
 *
 * @param string $from Path of the raw template (.html).
 * @param string $to   Path of the compiled output (.tpl.php).
 */
function template_compile($from, $to) {
	$targetDir = dirname($to);
	if (!is_dir($targetDir)) {
		mkdirs($targetDir);
	}
	file_put_contents($to, template_parse(file_get_contents($from)));
}
/**
 * Compile template markup into plain PHP.
 *
 * Supported syntax: {template name}, {php code}, {if}/{elseif}/{else}/{/if},
 * {loop $arr $v} and {loop $arr $k $v}, {$variable} / {CONSTANT} echoes.
 * "{##" and "##}" emit literal braces.
 *
 * @param string $str Raw template source.
 * @return string PHP source for the compiled template.
 */
function template_parse($str) {
	$str = preg_replace('/<!--{(.+?)}-->/s', '{$1}', $str);
	$str = preg_replace('/{template\s+(.+?)}/', '<?php include template($1, TEMPLATE_INCLUDEPATH);?>', $str);
	$str = preg_replace('/{php\s+(.+?)}/', '<?php $1?>', $str);
	$str = preg_replace('/{if\s+(.+?)}/', '<?php if($1) { ?>', $str);
	$str = preg_replace('/{else}/', '<?php } else { ?>', $str);
	$str = preg_replace('/{else ?if\s+(.+?)}/', '<?php } else if($1) { ?>', $str);
	$str = preg_replace('/{\/if}/', '<?php } ?>', $str);
	$str = preg_replace('/{loop\s+(\S+)\s+(\S+)}/', '<?php if(is_array($1)) { foreach($1 as $2) { ?>', $str);
	$str = preg_replace('/{loop\s+(\S+)\s+(\S+)\s+(\S+)}/', '<?php if(is_array($1)) { foreach($1 as $2 => $3) { ?>', $str);
	$str = preg_replace('/{\/loop}/', '<?php } } ?>', $str);
	$str = preg_replace('/{(\$[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*)}/', '<?php echo $1;?>', $str);
	$str = preg_replace('/{(\$[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff\[\]\'\"\$]*)}/', '<?php echo $1;?>', $str);
	// Bug fix: the original used preg_replace() with the /e (eval) modifier,
	// which is deprecated since PHP 5.5 and removed in PHP 7; use a callback
	// instead of evaluating the replacement string.
	$str = preg_replace_callback('/<\?php([^\?]+)\?>/s', function ($matches) {
		return template_addquote('<?php' . $matches[1] . '?>');
	}, $str);
	$str = preg_replace('/{([A-Z_\x7f-\xff][A-Z0-9_\x7f-\xff]*)}/s', '<?php echo $1;?>', $str);
	$str = str_replace('{##', '{', $str);
	$str = str_replace('##}', '}', $str);
	// Every compiled template refuses direct access outside the framework.
	$str = "<?php defined('IN_IA') or exit('Access Denied');?>" . $str;
	return $str;
}
/**
 * Quote bare array keys inside a compiled PHP chunk: $a[key] -> $a['key'].
 *
 * @param string $code Compiled PHP fragment.
 * @return string Fragment with quoted array keys.
 */
function template_addquote($code) {
	$code = preg_replace('/\[([a-zA-Z0-9_\-\.\x7f-\xff]+)\]/s', "['$1']", $code);
	// Undo the quote-escaping that the old /e modifier used to introduce;
	// harmless no-op for input produced by the callback above.
	return str_replace('\\\"', '\"', $code);
}
| royalwang/saivi | tpl/User/default/common/wxq/source/function/template.func.php | PHP | apache-2.0 | 3,255 |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.diff.impl.settings;
import com.intellij.icons.AllIcons;
import com.intellij.idea.ActionsBundle;
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.ToggleAction;
import com.intellij.openapi.editor.Editor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
/**
* The "gear" action allowing to configure merge tool visual preferences, such as displaying whitespaces, line numbers and soft wraps.
*
* @see DiffMergeSettings
*/
public class DiffMergeSettingsAction extends ActionGroup {
  @NotNull private final Collection<Editor> myEditors;
  @NotNull private final DiffMergeSettings mySettings;

  public DiffMergeSettingsAction(@NotNull Collection<Editor> editors, @NotNull DiffMergeSettings settings) {
    super("Settings", null, AllIcons.General.GearPlain);
    setPopup(true);
    myEditors = editors;
    mySettings = settings;
  }

  @NotNull
  @Override
  public AnAction[] getChildren(@Nullable AnActionEvent e) {
    // One toggle per visual editor preference exposed in the popup.
    return new AnAction[]{
      createToggle("EditorToggleShowWhitespaces", DiffMergeEditorSetting.WHITESPACES),
      createToggle("EditorToggleShowLineNumbers", DiffMergeEditorSetting.LINE_NUMBERS),
      createToggle("EditorToggleShowIndentLines", DiffMergeEditorSetting.INDENT_LINES),
      createToggle("EditorToggleUseSoftWraps", DiffMergeEditorSetting.SOFT_WRAPS)
    };
  }

  @NotNull
  private AnAction createToggle(@NotNull String actionId, @NotNull DiffMergeEditorSetting setting) {
    return new DiffMergeToggleAction(actionId, setting, myEditors, mySettings);
  }

  /** Toggles one editor preference and applies the new state to every attached editor. */
  private static class DiffMergeToggleAction extends ToggleAction {
    @NotNull private final DiffMergeEditorSetting mySetting;
    @NotNull private final Collection<Editor> myEditors;
    @NotNull private final DiffMergeSettings mySettings;

    private DiffMergeToggleAction(@NotNull String actionId, @NotNull DiffMergeEditorSetting setting, @NotNull Collection<Editor> editors,
                                  @NotNull DiffMergeSettings settings) {
      super(ActionsBundle.actionText(actionId), ActionsBundle.actionDescription(actionId), null);
      mySetting = setting;
      myEditors = editors;
      mySettings = settings;
    }

    @Override
    public boolean isSelected(@NotNull AnActionEvent e) {
      return mySettings.getPreference(mySetting);
    }

    @Override
    public void setSelected(@NotNull AnActionEvent e, boolean state) {
      // Persist the preference first, then push it into each open editor.
      mySettings.setPreference(mySetting, state);
      for (Editor editor : myEditors) {
        mySetting.apply(editor, state);
      }
    }
  }
}
| goodwinnk/intellij-community | platform/platform-impl/src/com/intellij/openapi/diff/impl/settings/DiffMergeSettingsAction.java | Java | apache-2.0 | 3,098 |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2019 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.servlet.test.response.cookies;
import java.util.Arrays;
import java.util.Comparator;
import javax.servlet.ServletException;
import io.undertow.servlet.api.ServletInfo;
import io.undertow.servlet.test.util.DeploymentUtils;
import io.undertow.testutils.DefaultServer;
import io.undertow.testutils.HttpClientUtils;
import io.undertow.testutils.TestHttpClient;
import io.undertow.util.StatusCodes;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Test for response.addCookie
*
* @author Flavia Rainone
*/
@RunWith(DefaultServer.class)
public class ResponseCookiesTestCase {
    @BeforeClass
    public static void setup() throws ServletException {
        DeploymentUtils.setupServlet(
                new ServletInfo("add-cookies", AddCookiesServlet.class)
                        .addMapping("/add-cookies"),
                new ServletInfo("duplicate-cookies", DuplicateCookiesServlet.class)
                        .addMapping("/duplicate-cookies"),
                new ServletInfo("overwrite-cookies", OverwriteCookiesServlet.class)
                        .addMapping("/overwrite-cookies"),
                new ServletInfo("jsessionid-cookies", JSessionIDCookiesServlet.class)
                        .addMapping("/jsessionid-cookies"));
    }

    /**
     * Issues a GET to {@code path} under the test servlet context, asserts that
     * the request succeeded with the expected body, and returns all
     * {@code Set-Cookie} response headers in the order the server sent them.
     * Extracted because every test in this class repeated the same
     * client/request/shutdown boilerplate.
     */
    private static Header[] requestSetCookieHeaders(String path) throws Exception {
        final TestHttpClient client = new TestHttpClient();
        try {
            final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext" + path);
            final HttpResponse result = client.execute(get);
            assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
            final String response = HttpClientUtils.readResponse(result);
            assertEquals("Served at: /servletContext", response);
            return result.getHeaders("Set-Cookie");
        } finally {
            client.getConnectionManager().shutdown();
        }
    }

    @Test
    public void addCookies() throws Exception {
        final Header[] setCookieHeaders = requestSetCookieHeaders("/add-cookies");
        assertEquals(2, setCookieHeaders.length);
        assertEquals("test1=test1; path=/test", setCookieHeaders[0].getValue());
        assertEquals("test2=test2", setCookieHeaders[1].getValue());
    }

    @Test
    public void duplicateCookies() throws Exception {
        final Header[] setCookieHeaders = requestSetCookieHeaders("/duplicate-cookies");
        assertEquals(7, setCookieHeaders.length);
        // Sort to make the assertions independent of server-side header order.
        Arrays.sort(setCookieHeaders, Comparator.comparing(Object::toString));
        assertEquals("test1=test1; path=/test1_1", setCookieHeaders[0].getValue());
        assertEquals("test1=test1; path=/test1_2", setCookieHeaders[1].getValue());
        assertEquals("test2=test2; path=/test2", setCookieHeaders[2].getValue());
        assertEquals("test2=test2; path=/test2; domain=www.domain2.com", setCookieHeaders[3].getValue());
        assertEquals("test3=test3", setCookieHeaders[4].getValue());
        assertEquals("test3=test3; domain=www.domain3-1.com", setCookieHeaders[5].getValue());
        assertEquals("test3=test3; domain=www.domain3-2.com", setCookieHeaders[6].getValue());
    }

    @Test
    public void overwriteCookies() throws Exception {
        final Header[] setCookieHeaders = requestSetCookieHeaders("/overwrite-cookies");
        assertEquals(5, setCookieHeaders.length);
        Arrays.sort(setCookieHeaders, Comparator.comparing(Object::toString));
        // The session cookie value is random, so only its shape is checked.
        assertTrue("Header " + setCookieHeaders[0] + "didn't match expected regex",
                setCookieHeaders[0].getValue().matches("JSESSIONID=.*; path=/servletContext"));
        assertEquals("test=test10; domain=www.domain.com", setCookieHeaders[1].getValue());
        assertEquals("test=test2; path=/test", setCookieHeaders[2].getValue());
        assertEquals("test=test5", setCookieHeaders[3].getValue());
        assertEquals("test=test8; path=/test; domain=www.domain.com", setCookieHeaders[4].getValue());
    }

    @Test
    public void jsessionIdCookies() throws Exception {
        final Header[] setCookieHeaders = requestSetCookieHeaders("/jsessionid-cookies");
        assertEquals(3, setCookieHeaders.length);
        // Expires carries a timestamp, so only the stable prefix is asserted.
        assertTrue("Header " + setCookieHeaders[0] + "didn't start with expected prefix",
                setCookieHeaders[0].getValue().startsWith("JSESSIONID=_bug_fix; path=/path3; Max-Age=500; Expires="));
        assertTrue("Header " + setCookieHeaders[1] + "didn't start with expected prefix",
                setCookieHeaders[1].getValue().startsWith("JSESSIONID=_bug_fix; path=/path4; Max-Age=1000; Expires="));
        assertTrue("Header " + setCookieHeaders[2] + "didn't match expected regex",
                setCookieHeaders[2].getValue().matches("JSESSIONID=.*; path=/servletContext"));
    }
}
| stuartwdouglas/undertow | servlet/src/test/java/io/undertow/servlet/test/response/cookies/ResponseCookiesTestCase.java | Java | apache-2.0 | 7,421 |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.grid.node.local;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.junit.Test;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.ImmutableCapabilities;
import org.openqa.selenium.events.local.GuavaEventBus;
import org.openqa.selenium.grid.data.CreateSessionRequest;
import org.openqa.selenium.grid.data.CreateSessionResponse;
import org.openqa.selenium.grid.data.Session;
import org.openqa.selenium.grid.node.Node;
import org.openqa.selenium.grid.testing.TestSessionFactory;
import org.openqa.selenium.json.Json;
import org.openqa.selenium.remote.ErrorCodes;
import org.openqa.selenium.remote.http.HttpRequest;
import org.openqa.selenium.remote.tracing.DefaultTestTracer;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Map;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.openqa.selenium.json.Json.MAP_TYPE;
import static org.openqa.selenium.remote.Dialect.OSS;
import static org.openqa.selenium.remote.Dialect.W3C;
import static org.openqa.selenium.remote.http.Contents.utf8String;
import static org.openqa.selenium.remote.http.HttpMethod.POST;
public class CreateSessionTest {
  private final Json json = new Json();
  private final Capabilities stereotype = new ImmutableCapabilities("cheese", "brie");

  /**
   * Builds a local node at {@code uri} with one slot for {@link #stereotype}
   * whose session factory simply echoes the requested capabilities back.
   * Extracted because every live test duplicated this builder chain.
   */
  private Node createNode(URI uri) {
    return LocalNode.builder(
        DefaultTestTracer.createTracer(),
        new GuavaEventBus(),
        uri,
        uri,
        null)
        .add(stereotype, new TestSessionFactory((id, caps) -> new Session(id, uri, caps)))
        .build();
  }

  /**
   * Starts a session on {@code node} and returns the downstream-encoded
   * response decoded as a JSON map, failing the test if no session came back.
   */
  private Map<String, Object> startSession(Node node, CreateSessionRequest request) {
    CreateSessionResponse sessionResponse = node.newSession(request)
        .orElseThrow(() -> new AssertionError("Unable to create session"));
    return json.toType(
        new String(sessionResponse.getDownstreamEncodedResponse(), UTF_8),
        MAP_TYPE);
  }

  @Test
  public void shouldAcceptAW3CPayload() throws URISyntaxException {
    // NOTE(review): the original test built a W3C new-session HTTP payload
    // (and an HttpRequest) that was never sent anywhere; the dead
    // construction was removed. TODO: also exercise creation via the payload.
    URI uri = new URI("http://example.com");
    Node node = createNode(uri);

    Map<String, Object> all = startSession(
        node, new CreateSessionRequest(ImmutableSet.of(W3C), stereotype, ImmutableMap.of()));

    // Ensure that there's no status field (as this is used by the protocol handshake to determine
    // whether the session is using the JWP or the W3C dialect.
    assertThat(all.containsKey("status")).isFalse();

    // Now check the fields required by the spec
    Map<?, ?> value = (Map<?, ?>) all.get("value");
    assertThat(value.get("sessionId")).isInstanceOf(String.class);
    assertThat(value.get("capabilities")).isInstanceOf(Map.class);
  }

  @Test
  public void shouldOnlyAcceptAJWPPayloadIfConfiguredTo() {
    // TODO: implement shouldOnlyAcceptAJWPPayloadIfConfiguredTo test
  }

  @Test
  public void ifOnlyW3CPayloadSentAndRemoteEndIsJWPOnlyFailSessionCreationIfJWPNotConfigured() {
    // TODO: implement ifOnlyW3CPayloadSentAndRemoteEndIsJWPOnlyFailSessionCreationIfJWPNotConfigured test
  }

  @Test
  public void ifOnlyJWPPayloadSentResponseShouldBeJWPOnlyIfJWPConfigured()
      throws URISyntaxException {
    URI uri = new URI("http://example.com");
    Node node = createNode(uri);

    Map<String, Object> all = startSession(
        node, new CreateSessionRequest(ImmutableSet.of(OSS), stereotype, ImmutableMap.of()));

    // The status field is used by local ends to determine whether or not the session is a JWP one.
    assertThat(all.get("status")).matches(obj -> ((Number) obj).intValue() == ErrorCodes.SUCCESS);

    // The session id is a top level field
    assertThat(all.get("sessionId")).isInstanceOf(String.class);

    // And the value should contain the capabilities.
    assertThat(all.get("value")).isInstanceOf(Map.class);
  }

  @Test
  public void shouldPreferUsingTheW3CProtocol() throws URISyntaxException {
    // NOTE(review): the original offered a payload with both JWP and W3C
    // capabilities, but that payload was never sent to the node either.
    URI uri = new URI("http://example.com");
    Node node = createNode(uri);

    Map<String, Object> all = startSession(
        node, new CreateSessionRequest(ImmutableSet.of(W3C), stereotype, ImmutableMap.of()));

    // Ensure that there's no status field (as this is used by the protocol handshake to determine
    // whether the session is using the JWP or the W3C dialect.
    assertThat(all.containsKey("status")).isFalse();

    // Now check the fields required by the spec
    Map<?, ?> value = (Map<?, ?>) all.get("value");
    assertThat(value.get("sessionId")).isInstanceOf(String.class);
    assertThat(value.get("capabilities")).isInstanceOf(Map.class);
  }

  @Test
  public void sessionDataShouldBeCorrectRegardlessOfPayloadProtocol() {
    // TODO: implement sessionDataShouldBeCorrectRegardlessOfPayloadProtocol test
  }

  @Test
  public void shouldSupportProtocolConversion() {
    // TODO: implement shouldSupportProtocolConversion test
  }
}
| asolntsev/selenium | java/server/test/org/openqa/selenium/grid/node/local/CreateSessionTest.java | Java | apache-2.0 | 7,428 |
package frc.team5333.lib;
import java.util.HashMap;
/**
* A static class that contains all kinds of Launch data for the robot,
* such as network ports, current state and more
*
* @author Jaci
*/
public class RobotData {
    /**
     * A blackboard of objects shared across the whole program, keyed by a
     * String identifier.
     */
    public static HashMap<String, Object> blackboard = new HashMap<>();
}
| FRC-Team5333/2015-RecycleRush | FRC2015/src/main/java/frc/team5333/lib/RobotData.java | Java | apache-2.0 | 455 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.openshift.build_configs;
import java.util.Map;
import io.fabric8.kubernetes.client.Watch;
import io.fabric8.kubernetes.client.dsl.FilterWatchListMultiDeletable;
import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation;
import io.fabric8.openshift.api.model.Build;
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.BuildConfigList;
import io.fabric8.openshift.api.model.DoneableBuildConfig;
import io.fabric8.openshift.client.OpenShiftClient;
import io.fabric8.openshift.client.dsl.BuildConfigResource;
import org.apache.camel.Exchange;
import org.apache.camel.component.kubernetes.AbstractKubernetesEndpoint;
import org.apache.camel.component.kubernetes.KubernetesConstants;
import org.apache.camel.component.kubernetes.KubernetesOperations;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.support.MessageHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class OpenshiftBuildConfigsProducer extends DefaultProducer {
    private static final Logger LOG = LoggerFactory.getLogger(OpenshiftBuildConfigsProducer.class);
    public OpenshiftBuildConfigsProducer(AbstractKubernetesEndpoint endpoint) {
        super(endpoint);
    }
    @Override
    public AbstractKubernetesEndpoint getEndpoint() {
        return (AbstractKubernetesEndpoint) super.getEndpoint();
    }
    // Dispatches the exchange to one of the BuildConfig operations below. The
    // operation configured on the endpoint wins; the KUBERNETES_OPERATION
    // header is only consulted when the endpoint has none configured.
    @Override
    public void process(Exchange exchange) throws Exception {
        String operation;
        if (ObjectHelper.isEmpty(getEndpoint().getKubernetesConfiguration().getOperation())) {
            operation = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_OPERATION, String.class);
        } else {
            operation = getEndpoint().getKubernetesConfiguration().getOperation();
        }
        switch (operation) {
        case KubernetesOperations.LIST_BUILD_CONFIGS:
            doList(exchange, operation);
            break;
        case KubernetesOperations.LIST_BUILD_CONFIGS_BY_LABELS_OPERATION:
            doListBuildConfigsByLabels(exchange, operation);
            break;
        case KubernetesOperations.GET_BUILD_CONFIG_OPERATION:
            doGetBuildConfig(exchange, operation);
            break;
        default:
            throw new IllegalArgumentException("Unsupported operation " + operation);
        }
    }
    // Lists every BuildConfig across all namespaces and puts the items on the
    // out message body.
    protected void doList(Exchange exchange, String operation) throws Exception {
        BuildConfigList buildConfigsList
            = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inAnyNamespace().list();
        exchange.getOut().setBody(buildConfigsList.getItems());
    }
    // Lists BuildConfigs filtered by the labels given in the
    // KUBERNETES_BUILD_CONFIGS_LABELS header, scoped to the namespace header
    // when present, otherwise across all namespaces.
    protected void doListBuildConfigsByLabels(Exchange exchange, String operation) throws Exception {
        BuildConfigList buildConfigsList = null;
        Map<String, String> labels = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_BUILD_CONFIGS_LABELS, Map.class);
        String namespaceName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
        if (!ObjectHelper.isEmpty(namespaceName)) {
            NonNamespaceOperation<BuildConfig, BuildConfigList, DoneableBuildConfig, BuildConfigResource<BuildConfig, DoneableBuildConfig, Void, Build>> buildConfigs;
            buildConfigs = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs()
                .inNamespace(namespaceName);
            for (Map.Entry<String, String> entry : labels.entrySet()) {
                // NOTE(review): the return value of withLabel() is discarded
                // here; in fabric8 client versions where these filter DSLs are
                // immutable the label filters would silently not apply —
                // verify against the client version in use.
                buildConfigs.withLabel(entry.getKey(), entry.getValue());
            }
            buildConfigsList = buildConfigs.list();
        } else {
            FilterWatchListMultiDeletable<BuildConfig, BuildConfigList, Boolean, Watch> buildConfigs
                = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inAnyNamespace();
            for (Map.Entry<String, String> entry : labels.entrySet()) {
                // NOTE(review): see the remark above — return value discarded.
                buildConfigs.withLabel(entry.getKey(), entry.getValue());
            }
            buildConfigsList = buildConfigs.list();
        }
        MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
        exchange.getOut().setBody(buildConfigsList.getItems());
    }
    // Fetches a single BuildConfig; both the BuildConfig name header and the
    // namespace header are mandatory.
    protected void doGetBuildConfig(Exchange exchange, String operation) throws Exception {
        BuildConfig buildConfig = null;
        String buildConfigName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_BUILD_CONFIG_NAME, String.class);
        String namespaceName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
        if (ObjectHelper.isEmpty(buildConfigName)) {
            LOG.error("Get a specific Build Config require specify a Build Config name");
            throw new IllegalArgumentException("Get a specific Build Config require specify a Build Config name");
        }
        if (ObjectHelper.isEmpty(namespaceName)) {
            LOG.error("Get a specific Build Config require specify a namespace name");
            throw new IllegalArgumentException("Get a specific Build Config require specify a namespace name");
        }
        buildConfig = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inNamespace(namespaceName)
            .withName(buildConfigName).get();
        MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
        exchange.getOut().setBody(buildConfig);
    }
}
| nicolaferraro/camel | components/camel-kubernetes/src/main/java/org/apache/camel/component/openshift/build_configs/OpenshiftBuildConfigsProducer.java | Java | apache-2.0 | 6,392 |
/**
 * Switches the admin-comment area between the raw editor ("text") and a
 * server-rendered preview ("links").
 * @param {string} to Target view, either "text" or "links".
 */
function ChangeTo(to) {
	if (to == "text") {
		// Show the editor, hide the preview, and restore the textarea height.
		// NOTE(review): $(...).ghide()/gshow()/raw() look like the site's own
		// DOM wrapper rather than jQuery — confirm against the bundled library.
		$('#admincommentlinks').ghide();
		$('#admincomment').gshow();
		resize('admincomment');
		// Repoint every toggle button at the opposite view.
		var buttons = document.getElementsByName('admincommentbutton');
		for (var i = 0; i < buttons.length; i++) {
			buttons[i].setAttribute('onclick',"ChangeTo('links'); return false;");
		}
	} else if (to == "links") {
		// Ask the server to render the current form contents, then swap the
		// preview pane in and repoint the buttons back at the editor view.
		ajax.post("ajax.php?action=preview","form", function(response) {
			$('#admincommentlinks').raw().innerHTML = response;
			$('#admincomment').ghide();
			$('#admincommentlinks').gshow();
			var buttons = document.getElementsByName('admincommentbutton');
			for (var i = 0; i < buttons.length; i++) {
				buttons[i].setAttribute('onclick',"ChangeTo('text'); return false;");
			}
		})
	}
}
// A disabled checkbox should never remain in the checked state.
function UncheckIfDisabled(checkbox) {
	if (!checkbox.disabled) {
		return;
	}
	checkbox.checked = false;
}
// Keeps the paranoia (privacy) checkboxes mutually consistent: whenever
// revealing one stat makes another stat deducible, the deducible stat's
// checkbox is force-checked and disabled so the user cannot pretend to
// hide information that is already derivable.
function AlterParanoia() {
	// Required Ratio is almost deducible from downloaded, the count of seeding and the count of snatched
	// we will "warn" the user by automatically checking the required ratio box when they are
	// revealing that information elsewhere
	if (!$('input[name=p_ratio]').raw()) {
		// Paranoia controls are not on this page; nothing to reconcile.
		return;
	}
	var showDownload = $('input[name=p_downloaded]').raw().checked || ($('input[name=p_uploaded]').raw().checked && $('input[name=p_ratio]').raw().checked);
	if (($('input[name=p_seeding_c]').raw().checked) && ($('input[name=p_snatched_c]').raw().checked) && showDownload) {
		$('input[type=checkbox][name=p_requiredratio]').raw().checked = true;
		$('input[type=checkbox][name=p_requiredratio]').raw().disabled = true;
	} else {
		$('input[type=checkbox][name=p_requiredratio]').raw().disabled = false;
	}
	// A "list" checkbox only makes sense when the corresponding count
	// checkbox is revealed, so disable each list box when its count is off.
	$('input[name=p_torrentcomments_l]').raw().disabled = !$('input[name=p_torrentcomments_c]').raw().checked;
	$('input[name=p_collagecontribs_l]').raw().disabled = !$('input[name=p_collagecontribs_c]').raw().checked;
	$('input[name=p_requestsfilled_list]').raw().disabled = !($('input[name=p_requestsfilled_count]').raw().checked && $('input[name=p_requestsfilled_bounty]').raw().checked);
	$('input[name=p_requestsvoted_list]').raw().disabled = !($('input[name=p_requestsvoted_count]').raw().checked && $('input[name=p_requestsvoted_bounty]').raw().checked);
	$('input[name=p_uploads_l]').raw().disabled = !$('input[name=p_uploads_c]').raw().checked;
	$('input[name=p_uniquegroups_l]').raw().disabled = !$('input[name=p_uniquegroups_c]').raw().checked;
	$('input[name=p_perfectflacs_l]').raw().disabled = !$('input[name=p_perfectflacs_c]').raw().checked;
	$('input[name=p_seeding_l]').raw().disabled = !$('input[name=p_seeding_c]').raw().checked;
	$('input[name=p_leeching_l]').raw().disabled = !$('input[name=p_leeching_c]').raw().checked;
	$('input[name=p_snatched_l]').raw().disabled = !$('input[name=p_snatched_c]').raw().checked;
	// Anything just disabled above must also be unchecked.
	UncheckIfDisabled($('input[name=p_torrentcomments_l]').raw());
	UncheckIfDisabled($('input[name=p_collagecontribs_l]').raw());
	UncheckIfDisabled($('input[name=p_requestsfilled_list]').raw());
	UncheckIfDisabled($('input[name=p_requestsvoted_list]').raw());
	UncheckIfDisabled($('input[name=p_uploads_l]').raw());
	UncheckIfDisabled($('input[name=p_uniquegroups_l]').raw());
	UncheckIfDisabled($('input[name=p_perfectflacs_l]').raw());
	UncheckIfDisabled($('input[name=p_seeding_l]').raw());
	UncheckIfDisabled($('input[name=p_leeching_l]').raw());
	UncheckIfDisabled($('input[name=p_snatched_l]').raw());
	// unique groups, "Perfect" FLACs and artists added are deducible from the list of uploads
	if ($('input[name=p_uploads_l]').raw().checked) {
		$('input[name=p_uniquegroups_c]').raw().checked = true;
		$('input[name=p_uniquegroups_l]').raw().checked = true;
		$('input[name=p_uniquegroups_c]').raw().disabled = true;
		$('input[name=p_uniquegroups_l]').raw().disabled = true;
		$('input[name=p_perfectflacs_c]').raw().checked = true;
		$('input[name=p_perfectflacs_l]').raw().checked = true;
		$('input[name=p_perfectflacs_c]').raw().disabled = true;
		$('input[name=p_perfectflacs_l]').raw().disabled = true;
		$('input[type=checkbox][name=p_artistsadded]').raw().checked = true;
		$('input[type=checkbox][name=p_artistsadded]').raw().disabled = true;
	} else {
		$('input[name=p_uniquegroups_c]').raw().disabled = false;
		$('input[name=p_uniquegroups_l]').raw().checked = false;
		$('input[name=p_uniquegroups_l]').raw().disabled = true;
		$('input[name=p_perfectflacs_c]').raw().disabled = false;
		$('input[type=checkbox][name=p_artistsadded]').raw().disabled = false;
	}
	// The collage list is deducible from collage contributions.
	if ($('input[name=p_collagecontribs_l]').raw().checked) {
		$('input[name=p_collages_c]').raw().disabled = true;
		$('input[name=p_collages_l]').raw().disabled = true;
		$('input[name=p_collages_c]').raw().checked = true;
		$('input[name=p_collages_l]').raw().checked = true;
	} else {
		$('input[name=p_collages_c]').raw().disabled = false;
		$('input[name=p_collages_l]').raw().disabled = !$('input[name=p_collages_c]').raw().checked;
		UncheckIfDisabled($('input[name=p_collages_l]').raw());
	}
}
// Bulk-resets all paranoia (privacy) controls.
//
// checkbox: value applied to every "p_*" checkbox (true/false), or 3 to
//           check only the non-list boxes (counts visible, lists hidden).
// drops:    0 selects the first option of every "p_*" dropdown,
//           1 the second-to-last option, 2 the last option.
function ParanoiaReset(checkbox, drops) {
	var selects = $('select');
	for (var i = 0; i < selects.results(); i++) {
		if (selects.raw(i).name.match(/^p_/)) {
			if (drops == 0) {
				selects.raw(i).selectedIndex = 0;
			} else if (drops == 1) {
				selects.raw(i).selectedIndex = selects.raw(i).options.length - 2;
			} else if (drops == 2) {
				selects.raw(i).selectedIndex = selects.raw(i).options.length - 1;
			}
		}
	}
	var checkboxes = $(':checkbox');
	for (var i = 0; i < checkboxes.results(); i++) {
		// p_lastseen is deliberately excluded from bulk resets.
		if (checkboxes.raw(i).name.match(/^p_/) && (checkboxes.raw(i).name != 'p_lastseen')) {
			if (checkbox == 3) {
				// "Stats only": hide list-type boxes, show everything else.
				checkboxes.raw(i).checked = !(checkboxes.raw(i).name.match(/_list$/) || checkboxes.raw(i).name.match(/_l$/));
			} else {
				checkboxes.raw(i).checked = checkbox;
			}
		}
	}
	// Reconcile dependent checkboxes once, after every control has been set.
	// (Previously AlterParanoia ran on every loop iteration, doing the same
	// full reconciliation O(n) times; only the final run can matter.)
	AlterParanoia();
}
// "Paranoia off": reveal everything by checking all paranoia boxes.
function ParanoiaResetOff() {
	ParanoiaReset(true, 0);
}
// "Stats only": reveal stat counts (mode 3) but keep the detailed lists,
// including the collage list, hidden.
function ParanoiaResetStats() {
	ParanoiaReset(3, 0);
	$('input[name=p_collages_l]').raw().checked = false;
}
// "Paranoia on": hide everything by unchecking all paranoia boxes,
// explicitly including both collage controls.
function ParanoiaResetOn() {
	ParanoiaReset(false, 0);
	$('input[name=p_collages_c]').raw().checked = false;
	$('input[name=p_collages_l]').raw().checked = false;
}
// Reconcile the paranoia controls as soon as the DOM is ready.
addDOMLoadEvent(AlterParanoia);
// Shows the "reduce warning" row and enables its input only while the
// selected option is the '---' placeholder; otherwise hides/disables it.
function ToggleWarningAdjust(selector) {
	var chosen = selector.options[selector.selectedIndex].value;
	var isPlaceholder = (chosen == '---');
	if (isPlaceholder) {
		$('#ReduceWarningTR').gshow();
	} else {
		$('#ReduceWarningTR').ghide();
	}
	$('#ReduceWarning').raw().disabled = !isPlaceholder;
}
// Sync the identicon option's visibility on page load.
addDOMLoadEvent(ToggleIdenticons);
// Shows the identicon option only when the "disable avatars" dropdown is
// set to one of the modes (index 2 or 3) that can fall back to identicons.
function ToggleIdenticons() {
	var dropdown = $('#disableavatars');
	if (!dropdown.size()) {
		return;
	}
	var idx = dropdown[0].selectedIndex;
	if (idx == 2 || idx == 3) {
		$('#identicons').gshow();
	} else {
		$('#identicons').ghide();
	}
}
// Submit handler for the user settings form: asks for confirmation when
// a passkey reset was requested, then defers to the normal validation.
function userform_submit() {
	var wantsReset = $('#resetpasskey').is(':checked');
	if (wantsReset && !confirm('Are you sure you want to reset your passkey?')) {
		return false;
	}
	return formVal();
}
// Toggles the passkey element between the masked 'View' label and the
// actual key value.
function togglePassKey(key) {
	var el = $('#passkey').raw();
	el.innerHTML = (el.innerHTML == 'View') ? key : 'View';
}
// Fetches community statistics for the given user and renders them,
// showing a placeholder while the request is in flight.
function commStats(userid) {
	$('.user_commstats').html('Loading...');
	var url = 'ajax.php?action=community_stats&userid=' + userid;
	ajax.get(url, function (raw) {
		var parsed = JSON.parse(raw) || false;
		if (!parsed || parsed.status == 'failure') {
			$('.user_commstats').html('An error occurred');
			return;
		}
		displayCommStats(parsed.response);
	});
}
// Renders community statistics into the profile page.
// stats: object keyed by stat name; a value of false means "hidden by
// paranoia" and the stat is skipped.
function displayCommStats(stats) {
	var baseid = '#user_commstats_';
	// 'var' added: the original for-in leaked `x` as an implicit global.
	for (var x in stats) {
		if (stats[x] === false) {
			continue;
		}
		switch (x) {
			// Plain counts, shown as-is.
			case 'leeching':
			case 'seeding':
			case 'downloaded':
			case 'snatched':
				$(baseid + x).html(stats[x]);
				break;
			// Secondary (unique) counts, shown in parentheses.
			case 'usnatched':
			case 'udownloaded':
				$(baseid + x).html('(' + stats[x] + ')');
				break;
			// Percentage, shown in parentheses with a % suffix.
			case 'seedingperc':
				$(baseid + x).html('(' + stats[x] + '%)');
				break;
		}
	}
}
$(document).ready(function() {
	// Fills the password field with a random 15-character password when
	// the "random password" button is clicked.
	$("#random_password").click(function() {
		var length = 15,
			charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*()_+-=<>?",
			password = "";
		if (window.crypto && window.crypto.getRandomValues) {
			// Prefer the cryptographically secure RNG for password material;
			// Math.random() is predictable. (Modulo bias over 79 symbols vs
			// 2^32 values is negligible for this purpose.)
			var buf = new Uint32Array(length);
			window.crypto.getRandomValues(buf);
			for (var i = 0; i < length; i++) {
				password += charset.charAt(buf[i] % charset.length);
			}
		} else {
			// Legacy fallback for browsers without the Web Crypto API.
			for (var i = 0, n = charset.length; i < length; ++i) {
				password += charset.charAt(Math.floor(Math.random() * n));
			}
		}
		$('#change_password').val(password);
	});
});
| tisnats/tisnats.com | static/functions/user.js | JavaScript | apache-2.0 | 8,556 |
#!/bin/bash
# Resets local ZooKeeper and Cassandra state, then rebuilds AcaZoo.
# WARNING: destructively wipes /var/zootmp and /var/lib/cassandra.
set -u

# Wipe ZooKeeper data and re-seed this node's id.
rm -rf /var/zootmp/*
echo "1" > /var/zootmp/myid

# Wipe Cassandra data.
rm -rf /var/lib/cassandra/*

# Remove stale compaction stats; -f keeps this quiet when absent
# (the bare `rm` previously printed an error on a fresh checkout).
rm -f ./AcaZoo-CompactionStats.dat

ant build
| placrosse/ACaZoo | acazooScript.sh | Shell | apache-2.0 | 130 |
#!/usr/bin/env perl
# $Source: /cvsroot/ensembl/ensembl-personal/genebuilders/ccds/scripts/store_ccds_xrefs.pl,v $
# $Revision: 1.13 $
=pod
=head1 NAME
store_ccds_xrefs.pl
=head1 SYNOPSIS
Make CCDS Xrefs.
=head1 DESCRIPTION
Will store the Ensembl transcript stable_id that matches the ccds structure.
Originally written for Ian Longden. Based on make_enst_to_ccds.pl
=head1 ARGUMENTS
perl store_ccds_xrefs.pl
-ccds_dbname
-ccds_host
-ccds_port
-ccds_user
-ccds_pass
-dbname
-host
-port
-user
-verbose
-species
-path
-write
-delete_old
=head1 EXAMPLE
perl $ENSEMBL_PERSONAL/genebuilders/ccds/scripts/store_ccds_xrefs.pl -ccds_dbname db8_human_vega_61 \
-ccds_host genebuild7 -ccds_port 3306 -ccds_user user -ccds_pass password \
-dbname homo_sapiens_core_61_37f -host ens-staging1 -port 3306 -user ensro -verbose \
-species human -path GRCh37 -write -delete_old
=cut
use warnings;
use strict;
use Getopt::Long;
use Bio::EnsEMBL::DBSQL::DBAdaptor;
use Bio::EnsEMBL::Utils::Exception qw(warning throw);
# db of CCDS structures
my $ccds_host = '';
my $ccds_port = '3306';
my $ccds_user = 'ensro';
my $ccds_pass = undef;
my $ccds_dbname = '';
# db of Ensembl (protein_coding) genes
my $host = 'ens-staging';
my $port = '';
my $user = 'ensro';
my $dbname = '';
my $path = 'GRCh37';
my $species = 'human';
my $verbose;
my $write;       # nothing is stored unless -write is given
my $delete_old;  # with -write, removes existing Ens_% xrefs first
&GetOptions( 'ccds_host=s' => \$ccds_host,
 'ccds_port=s' => \$ccds_port,
 'ccds_user=s' => \$ccds_user,
 'ccds_pass=s' => \$ccds_pass,
 'ccds_dbname=s' => \$ccds_dbname,
 'host=s' => \$host,
 'port=s' => \$port,
 'user=s' => \$user,
 'dbname=s' => \$dbname,
 'path=s' => \$path,
 'species=s' => \$species,
 'verbose' => \$verbose,
 'delete_old' => \$delete_old,
 'write' => \$write, );
# Only human and mouse are supported (they decide the external_db names
# used later in store_ensembl_xref).
if ( !defined $species ) {
 throw("Please define species as human or mouse");
} else {
 $species =~ s/\s//g;
 if ( $species =~ /^human$/i || $species =~ /^mouse$/i ) {
 # we're ok
 print "Species is *$species*\n";
 } else {
 throw("Species must be defined as human or mouse");
 }
}
# we want to keep a record of any polymorphic pseudogenes for havana
# let's not write a file until the end though since they are not
# common
my @polymorphic_pseudogene;
# connect to dbs
my $db =
 new Bio::EnsEMBL::DBSQL::DBAdaptor( -host => $host,
 -user => $user,
 -port => $port,
 -dbname => $dbname );
my $ccds_db =
 new Bio::EnsEMBL::DBSQL::DBAdaptor( -host => $ccds_host,
 -user => $ccds_user,
 -pass => $ccds_pass,
 -port => $ccds_port,
 -dbname => $ccds_dbname );
# use the core db as the dna source for the CCDS db
$ccds_db->dnadb($db);
my $ccds_sa = $ccds_db->get_SliceAdaptor;
my $outdea = $ccds_db->get_DBEntryAdaptor;
my $sa = $db->get_SliceAdaptor;
###
# delete old ones if delete_old set
###
if($write and $delete_old){
 # Remove existing Ens_% object_xrefs first (they reference the xrefs)...
 my $sth = $outdea->prepare('delete ox from xref x, object_xref ox, external_db e where x.xref_id = ox.xref_id and x.external_db_id = e.external_db_id and e.db_name like "Ens_%"');
 $sth->execute || die "Could not delete old object_xrefs";
 # ...then the xrefs themselves. (Also fixes the "ols xrefs" typo in the
 # original error message.)
 $sth = $outdea->prepare('delete x from xref x, external_db e where x.external_db_id = e.external_db_id and e.db_name like "Ens_%"');
 $sth->execute || die "Could not delete old xrefs";
}
# # #
# Loop thru toplevels: for every CCDS transcript, find Ensembl
# transcripts whose coding exons match exactly and store Ens_* xrefs
# for both the transcript and its translation.
# # #
# maybe should use toplevel instead of chromosome?
foreach my $chr ( @{ $ccds_sa->fetch_all('chromosome') } ) {
 print "Doing chromosome " . $chr->name . "\n" if ($verbose);
 # fetch all CCDS structures on slice
 foreach my $ccds_gene ( @{ $chr->get_all_Genes( undef, undef, 1 ) } ) {
 # make sure genes are all on chr level
 $ccds_gene = $ccds_gene->transform( 'chromosome', $path );
 # loop thru all CCDS transcripts
 foreach my $ccds_trans ( @{ $ccds_gene->get_all_Transcripts() } ) {
 print "=> doing ccds trans "
 . $ccds_trans->dbID
 . ": start "
 . $ccds_trans->start
 . " stop "
 . $ccds_trans->end
 . " strand "
 . $ccds_trans->strand . " \n"
 if ($verbose);
 # find the ccds_id
 my $ccds_id;
 my @db_entries = @{ $ccds_trans->get_all_DBEntries('CCDS') };
 my %xref_hash;
 foreach my $dbe (@db_entries) {
 print "dbe " . $dbe->display_id . " " . $dbe->dbname . "\n";
 }
 # store unique CCDS xrefs for the transcript
 foreach my $entry (@db_entries) {
 $xref_hash{ $entry->display_id() } = 1;
 }
 # we should not have more than one CCDS id
 # associated with a transcript
 if ( scalar keys %xref_hash != 1 ) {
 foreach my $entry ( keys %xref_hash ) {
 print " Dodgy xref : " . $entry . "\n";
 }
 throw( "Something odd going on: Transcript dbID "
 . $ccds_trans->dbID . " has "
 . scalar( keys %xref_hash )
 . " xrefs" );
 } else {
 # all is good; CCDS transcript only has 1 CCDS xref
 foreach my $entry ( keys %xref_hash ) {
 $ccds_id = $entry;
 print "=> on ccds $ccds_id\n" if ($verbose);
 }
 }
 # define the genomic location that we're working in
 # ie. where the CCDS transcript is
 my $chr_name = $ccds_trans->slice->seq_region_name;
 my $start = $ccds_trans->start();
 my $end = $ccds_trans->end();
 # now fetch the slice out of ensembl db
 my $slice =
 $sa->fetch_by_region( 'chromosome', $chr_name, $start, $end, '1',
 $path );
 print " Ensembl slice name " . $slice->name . "\n" if ($verbose);
 # get ccds coding exons
 my @ccds_exons = @{ $ccds_trans->get_all_translateable_Exons() };
 print " have " . @ccds_exons . " ccds coding exons\n" if ($verbose);
 # get all Ensembl genes overlapping the CCDS regions
 foreach my $gene ( @{ $slice->get_all_Genes( undef, undef, 1 ) } ) {
 # only look at protein_coding genes
 next unless ( $gene->biotype =~ /protein_coding/ || $gene->biotype =~ /polymorphic_pseudogene/);
 # debug
 # next if $gene->biotype =~ /protein_coding/ ;
 # keep a record if it is a polymorphic pseudogene - these will need to be sent to havana
 if ($gene->biotype =~ /polymorphic_pseudogene/) {
 print STDERR " found a poly pseudo gene\n" if ($verbose);
 push @polymorphic_pseudogene, $ccds_id;
 }
 # make sure ensembl gene also on chr level
 print " on ensembl gene " . $gene->display_id . "\n" if ($verbose);
 $gene = $gene->transform( 'chromosome', $path );
 # loop thru ensembl transcripts
 foreach my $trans ( @{ $gene->get_all_Transcripts } ) {
 print " on ensembl trans " . $trans->display_id . "\n"
 if ($verbose);
 # get ensembl coding exons
 my @exons = @{ $trans->get_all_translateable_Exons() };
 print " have " . @exons . " ensembl coding exons\n" if ($verbose);
 # loop thru ensembl coding exons and make sure they all match the ccds
 # exons exactly (same count, same start/end/strand per exon)
 my $match = 0;
 if ( scalar @exons == scalar @ccds_exons ) {
 for ( my $i = 0 ; $i < scalar(@exons) ; $i++ ) {
 # print " Ensembl start ".$exons[$i]->start." end ".$exons[$i]->end.
 # " CCDS start ".$ccds_exons[$i]->start." end ".$ccds_exons[$i]->end."\n";
 if ( $ccds_exons[$i]->start == $exons[$i]->start
 && $ccds_exons[$i]->end == $exons[$i]->end
 && $ccds_exons[$i]->strand == $exons[$i]->strand )
 {
 $match++;
 } #else {
 # print "no match ".$ccds_exons[$i]->start." != ".$exons[$i]->start." or ".
 # $ccds_exons[$i]->end." != ".$exons[$i]->end."\n";
 #}
 }
 if ( $match == scalar @exons ) {
 # Exact structure match: store xrefs for both the transcript
 # and its translation.
 print "MATCH\t" . $trans->stable_id . "\t" . $ccds_id . "\n"
 if ($verbose);
 store_ensembl_xref( $outdea, $species, $ccds_trans,
 $trans->stable_id, $write );
 store_ensembl_xref( $outdea,
 $species,
 $ccds_trans->translation,
 $trans->translation->stable_id,
 $write );
 } else {
 print " no match ($match)\t"
 . $trans->stable_id . "\t"
 . $ccds_id . "\n"
 if ($verbose);
 }
 } ## end if ( scalar @exons == ...)
 } ## end foreach my $trans ( @{ $gene...})
 } ## end foreach my $gene ( @{ $slice...})
 } ## end foreach my $ccds_trans ( @{...})
 } ## end foreach my $ccds_gene ( @{ ...})
} ## end foreach my $chr ( @{ $ccds_sa...})
# report polymorphic pseudogenes on STDERR so they can be passed on to
# Havana (collected during the main loop above)
if (@polymorphic_pseudogene) {
 for my $display_id (@polymorphic_pseudogene) {
 print STDERR $display_id." matches a polymorphic pseudogene\n";
 }
} else {
 print STDERR "Found 0 polymorphic pseudogenes\n";
}
# Creates and (when -write is set) stores an Ens_* xref linking a CCDS
# transcript or translation to the matching Ensembl stable id.
#
# Args: DBEntryAdaptor, species ('human'|'mouse'), the CCDS-side
#       Transcript or Translation object, the Ensembl stable id to
#       point at, and the write flag.
# Throws if the object is neither a Transcript nor a Translation, or if
# the species is unsupported (previously $external_db was silently left
# undefined in that case; the main script validates species up front so
# this is belt-and-braces).
sub store_ensembl_xref {
  my ( $dbea, $species, $ccds_object, $ensembl_stable_id, $write ) = @_;

  # Work out which object type we were handed; the xref is attached to
  # the same object type on the CCDS side.
  my $object_type;
  if ( ref($ccds_object) eq "Bio::EnsEMBL::Transcript" ) {
    $object_type = 'Transcript';
  } elsif ( ref($ccds_object) eq "Bio::EnsEMBL::Translation" ) {
    $object_type = 'Translation';
  } else {
    throw("Not a Transcript or Translation ");
  }

  # Species-specific external db name, eg Ens_Hs_transcript.
  my $external_db;
  if ( $species =~ /^human$/i ) {
    $external_db = 'Ens_Hs_' . lc($object_type);
  } elsif ( $species =~ /^mouse$/i ) {
    $external_db = 'Ens_Mm_' . lc($object_type);
  } else {
    throw("Species must be human or mouse to store xrefs, got '$species'");
  }

  # make an xref
  my $entry =
    new Bio::EnsEMBL::DBEntry( -adaptor    => $dbea,
                               -primary_id => $ensembl_stable_id,
                               -display_id => $ensembl_stable_id,
                               -version    => 0,
                               -dbname     => $external_db );

  # store xref
  $dbea->store( $entry, $ccds_object->dbID, $object_type ) if ($write);

  return;
} ## end sub store_ensembl_xref
| danstaines/ensembl | misc-scripts/xref_mapping/store_ccds_xrefs.pl | Perl | apache-2.0 | 11,327 |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Reflection.Metadata;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeGen;
using Microsoft.CodeAnalysis.Collections;
using Microsoft.CodeAnalysis.CSharp.Emit;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Emit;
using Microsoft.CodeAnalysis.Symbols;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.CSharp
{
/// <summary>
/// The compilation object is an immutable representation of a single invocation of the
/// compiler. Although immutable, a compilation is also on-demand, and will realize and cache
/// data as necessary. A compilation can produce a new compilation from existing compilation
/// with the application of small deltas. In many cases, it is more efficient than creating a
/// new compilation from scratch, as the new compilation can reuse information from the old
/// compilation.
/// </summary>
public sealed partial class CSharpCompilation : Compilation
{
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
//
// Changes to the public interface of this class should remain synchronized with the VB
// version. Do not make any changes to the public interface without making the corresponding
// change to the VB version.
//
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
internal static readonly ParallelOptions DefaultParallelOptions = new ParallelOptions();
private readonly CSharpCompilationOptions _options;
private readonly Lazy<Imports> _globalImports;
private readonly Lazy<AliasSymbol> _globalNamespaceAlias; // alias symbol used to resolve "global::".
private readonly Lazy<ImplicitNamedTypeSymbol> _scriptClass;
private readonly CSharpCompilation _previousSubmission;
// All imports (using directives and extern aliases) in syntax trees in this compilation.
// NOTE: We need to de-dup since the Imports objects that populate the list may be GC'd
// and re-created.
private ConcurrentSet<ImportInfo> _lazyImportInfos;
// Cache the CLS diagnostics for the whole compilation so they aren't computed repeatedly.
// NOTE: Presently, we do not cache the per-tree diagnostics.
private ImmutableArray<Diagnostic> _lazyClsComplianceDiagnostics;
private Conversions _conversions;
        /// <summary>
        /// Conversion rules for this compilation, created lazily on first access.
        /// </summary>
        internal Conversions Conversions
        {
            get
            {
                if (_conversions == null)
                {
                    // Lock-free lazy init: CompareExchange ensures exactly one
                    // instance is published if multiple threads race here.
                    Interlocked.CompareExchange(ref _conversions, new BuckStopsHereBinder(this).Conversions, null);
                }

                return _conversions;
            }
        }
/// <summary>
/// Manages anonymous types declared in this compilation. Unifies types that are structurally equivalent.
/// </summary>
private readonly AnonymousTypeManager _anonymousTypeManager;
private NamespaceSymbol _lazyGlobalNamespace;
internal readonly BuiltInOperators builtInOperators;
/// <summary>
/// The <see cref="SourceAssemblySymbol"/> for this compilation. Do not access directly, use Assembly property
/// instead. This field is lazily initialized by ReferenceManager, ReferenceManager.CacheLockObject must be locked
/// while ReferenceManager "calculates" the value and assigns it, several threads must not perform duplicate
/// "calculation" simultaneously.
/// </summary>
private SourceAssemblySymbol _lazyAssemblySymbol;
/// <summary>
/// Holds onto data related to reference binding.
/// The manager is shared among multiple compilations that we expect to have the same result of reference binding.
/// In most cases this can be determined without performing the binding. If the compilation however contains a circular
/// metadata reference (a metadata reference that refers back to the compilation) we need to avoid sharing of the binding results.
/// We do so by creating a new reference manager for such compilation.
/// </summary>
private ReferenceManager _referenceManager;
private readonly SyntaxAndDeclarationManager _syntaxAndDeclarations;
/// <summary>
/// Contains the main method of this assembly, if there is one.
/// </summary>
private EntryPoint _lazyEntryPoint;
/// <summary>
/// The set of trees for which a <see cref="CompilationUnitCompletedEvent"/> has been added to the queue.
/// </summary>
private HashSet<SyntaxTree> _lazyCompilationUnitCompletedTrees;
public override string Language
{
get
{
return LanguageNames.CSharp;
}
}
public override bool IsCaseSensitive
{
get
{
return true;
}
}
/// <summary>
/// The options the compilation was created with.
/// </summary>
public new CSharpCompilationOptions Options
{
get
{
return _options;
}
}
internal AnonymousTypeManager AnonymousTypeManager
{
get
{
return _anonymousTypeManager;
}
}
internal override CommonAnonymousTypeManager CommonAnonymousTypeManager
{
get
{
return AnonymousTypeManager;
}
}
/// <summary>
/// True when the compiler is run in "strict" mode, in which it enforces the language specification
/// in some cases even at the expense of full compatibility. Such differences typically arise when
/// earlier versions of the compiler failed to enforce the full language specification.
/// </summary>
internal bool FeatureStrictEnabled => Feature("strict") != null;
        /// <summary>
        /// The language version that was used to parse the syntax trees of this compilation.
        /// </summary>
        /// <remarks>
        /// All trees must agree on a single version; this is enforced in the
        /// constructor via <c>CommonLanguageVersion</c>.
        /// </remarks>
        public LanguageVersion LanguageVersion
        {
            get;
        }
        /// <summary>
        /// Returns a new error type symbol with the given name and arity, attached
        /// to <paramref name="container"/>; used to stand in for a type that could
        /// not be resolved.
        /// </summary>
        public override INamedTypeSymbol CreateErrorTypeSymbol(INamespaceOrTypeSymbol container, string name, int arity)
        {
            return new ExtendedErrorTypeSymbol((NamespaceOrTypeSymbol)container, name, arity, null);
        }
#region Constructors and Factories
private static readonly CSharpCompilationOptions s_defaultOptions = new CSharpCompilationOptions(OutputKind.ConsoleApplication);
private static readonly CSharpCompilationOptions s_defaultSubmissionOptions = new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary);
        /// <summary>
        /// Creates a new compilation from scratch. Methods such as AddSyntaxTrees or AddReferences
        /// on the returned object will allow to continue building up the Compilation incrementally.
        /// </summary>
        /// <param name="assemblyName">Simple assembly name.</param>
        /// <param name="syntaxTrees">The syntax trees with the source code for the new compilation.</param>
        /// <param name="references">The references for the new compilation.</param>
        /// <param name="options">The compiler options to use.</param>
        /// <returns>A new compilation.</returns>
        public static CSharpCompilation Create(
            string assemblyName,
            IEnumerable<SyntaxTree> syntaxTrees = null,
            IEnumerable<MetadataReference> references = null,
            CSharpCompilationOptions options = null)
        {
            // Non-submission compilation: no previous submission, return type, or host object.
            return Create(
                assemblyName,
                options ?? s_defaultOptions,
                syntaxTrees,
                references,
                previousSubmission: null,
                returnType: null,
                hostObjectType: null,
                isSubmission: false);
        }
        /// <summary>
        /// Creates a new compilation that can be used in scripting.
        /// </summary>
        /// <param name="assemblyName">Simple assembly name.</param>
        /// <param name="syntaxTree">The syntax tree of the submission, if any.</param>
        /// <param name="references">The references for the new compilation.</param>
        /// <param name="options">The compiler options to use; defaults to a dynamically linked library.</param>
        /// <param name="previousSubmission">The prior submission in the interactive session, if any.</param>
        /// <param name="returnType">The expected return type of the submission.</param>
        /// <param name="hostObjectType">The type of the host object exposed to the script.</param>
        public static CSharpCompilation CreateSubmission(
            string assemblyName,
            SyntaxTree syntaxTree = null,
            IEnumerable<MetadataReference> references = null,
            CSharpCompilationOptions options = null,
            Compilation previousSubmission = null,
            Type returnType = null,
            Type hostObjectType = null)
        {
            CheckSubmissionOptions(options);

            return Create(
                assemblyName,
                options ?? s_defaultSubmissionOptions,
                (syntaxTree != null) ? new[] { syntaxTree } : SpecializedCollections.EmptyEnumerable<SyntaxTree>(),
                references,
                (CSharpCompilation)previousSubmission,
                returnType,
                hostObjectType,
                isSubmission: true);
        }
        /// <summary>
        /// Common factory used by both the public Create and CreateSubmission entry
        /// points: validates inputs, constructs the compilation, then adds the trees.
        /// </summary>
        private static CSharpCompilation Create(
            string assemblyName,
            CSharpCompilationOptions options,
            IEnumerable<SyntaxTree> syntaxTrees,
            IEnumerable<MetadataReference> references,
            CSharpCompilation previousSubmission,
            Type returnType,
            Type hostObjectType,
            bool isSubmission)
        {
            Debug.Assert(options != null);
            CheckAssemblyName(assemblyName);

            var validatedReferences = ValidateReferences<CSharpCompilationReference>(references);
            ValidateSubmissionParameters(previousSubmission, returnType, ref hostObjectType);

            var compilation = new CSharpCompilation(
                assemblyName,
                options,
                validatedReferences,
                previousSubmission,
                returnType,
                hostObjectType,
                isSubmission,
                referenceManager: null,
                reuseReferenceManager: false,
                syntaxAndDeclarations: new SyntaxAndDeclarationManager(
                    ImmutableArray<SyntaxTree>.Empty,
                    options.ScriptClassName,
                    options.SourceReferenceResolver,
                    CSharp.MessageProvider.Instance,
                    isSubmission,
                    state: null));

            // Trees are added afterwards; AddSyntaxTrees returns a new compilation.
            if (syntaxTrees != null)
            {
                compilation = compilation.AddSyntaxTrees(syntaxTrees);
            }

            Debug.Assert((object)compilation._lazyAssemblySymbol == null);
            return compilation;
        }
        /// <summary>
        /// Core constructor; all factories and With* methods funnel through here.
        /// </summary>
        /// <param name="reuseReferenceManager">
        /// True when <paramref name="referenceManager"/> can be shared with the
        /// originating compilation; otherwise a fresh manager is created.
        /// </param>
        private CSharpCompilation(
            string assemblyName,
            CSharpCompilationOptions options,
            ImmutableArray<MetadataReference> references,
            CSharpCompilation previousSubmission,
            Type submissionReturnType,
            Type hostObjectType,
            bool isSubmission,
            ReferenceManager referenceManager,
            bool reuseReferenceManager,
            SyntaxAndDeclarationManager syntaxAndDeclarations,
            AsyncQueue<CompilationEvent> eventQueue = null)
            : base(assemblyName, references, SyntaxTreeCommonFeatures(syntaxAndDeclarations.ExternalSyntaxTrees), submissionReturnType, hostObjectType, isSubmission, eventQueue)
        {
            _wellKnownMemberSignatureComparer = new WellKnownMembersSignatureComparer(this);
            _options = options;
            this.builtInOperators = new BuiltInOperators(this);
            _scriptClass = new Lazy<ImplicitNamedTypeSymbol>(BindScriptClass);
            _globalImports = new Lazy<Imports>(BindGlobalUsings);
            _globalNamespaceAlias = new Lazy<AliasSymbol>(CreateGlobalNamespaceAlias);
            _anonymousTypeManager = new AnonymousTypeManager(this);
            // Throws if the trees do not all agree on a single language version.
            this.LanguageVersion = CommonLanguageVersion(syntaxAndDeclarations.ExternalSyntaxTrees);

            if (isSubmission)
            {
                Debug.Assert(previousSubmission == null || previousSubmission.HostObjectType == hostObjectType);
                _previousSubmission = previousSubmission;
            }
            else
            {
                Debug.Assert(previousSubmission == null && submissionReturnType == null && hostObjectType == null);
            }

            if (reuseReferenceManager)
            {
                referenceManager.AssertCanReuseForCompilation(this);
                _referenceManager = referenceManager;
            }
            else
            {
                // Fresh manager, but carry over any metadata the old manager has
                // already observed so it need not be re-read.
                _referenceManager = new ReferenceManager(
                    MakeSourceAssemblySimpleName(),
                    this.Options.AssemblyIdentityComparer,
                    observedMetadata: referenceManager?.ObservedMetadata);
            }

            _syntaxAndDeclarations = syntaxAndDeclarations;

            Debug.Assert((object)_lazyAssemblySymbol == null);
            if (EventQueue != null) EventQueue.Enqueue(new CompilationStartedEvent(this));
        }
        /// <summary>
        /// Adds a diagnostic unless <paramref name="debugEntryPoint"/> is a method
        /// definition declared by this compilation.
        /// </summary>
        internal override void ValidateDebugEntryPoint(IMethodSymbol debugEntryPoint, DiagnosticBag diagnostics)
        {
            Debug.Assert(debugEntryPoint != null);

            // Debug entry point has to be a method definition from this compilation.
            var methodSymbol = debugEntryPoint as MethodSymbol;
            if (methodSymbol?.DeclaringCompilation != this || !methodSymbol.IsDefinition)
            {
                diagnostics.Add(ErrorCode.ERR_DebugEntryPointNotSourceMethodDefinition, Location.None);
            }
        }
private static LanguageVersion CommonLanguageVersion(ImmutableArray<SyntaxTree> syntaxTrees)
{
LanguageVersion? result = null;
foreach (var tree in syntaxTrees)
{
var version = ((CSharpParseOptions)tree.Options).LanguageVersion;
if (result == null)
{
result = version;
}
else if (result != version)
{
throw new ArgumentException(CodeAnalysisResources.InconsistentLanguageVersions, nameof(syntaxTrees));
}
}
return result ?? CSharpParseOptions.Default.LanguageVersion;
}
        /// <summary>
        /// Create a duplicate of this compilation with different symbol instances.
        /// </summary>
        /// <remarks>
        /// References are unchanged, so the reference manager is reused.
        /// </remarks>
        public new CSharpCompilation Clone()
        {
            return new CSharpCompilation(
                this.AssemblyName,
                _options,
                this.ExternalReferences,
                _previousSubmission,
                this.SubmissionReturnType,
                this.HostObjectType,
                this.IsSubmission,
                _referenceManager,
                reuseReferenceManager: true,
                syntaxAndDeclarations: _syntaxAndDeclarations);
        }
        /// <summary>
        /// Creates a copy of this compilation with replaced reference-manager and
        /// syntax/declaration state; everything else is carried over.
        /// </summary>
        private CSharpCompilation Update(
            ReferenceManager referenceManager,
            bool reuseReferenceManager,
            SyntaxAndDeclarationManager syntaxAndDeclarations)
        {
            return new CSharpCompilation(
                this.AssemblyName,
                _options,
                this.ExternalReferences,
                _previousSubmission,
                this.SubmissionReturnType,
                this.HostObjectType,
                this.IsSubmission,
                referenceManager,
                reuseReferenceManager,
                syntaxAndDeclarations);
        }
        /// <summary>
        /// Creates a new compilation with the specified name.
        /// </summary>
        /// <returns>A new compilation with the given assembly name.</returns>
        public new CSharpCompilation WithAssemblyName(string assemblyName)
        {
            CheckAssemblyName(assemblyName);

            // Can't reuse references since the source assembly name changed and the referenced symbols might
            // have internals-visible-to relationship with this compilation or they might had a circular reference
            // to this compilation.

            return new CSharpCompilation(
                assemblyName,
                _options,
                this.ExternalReferences,
                _previousSubmission,
                this.SubmissionReturnType,
                this.HostObjectType,
                this.IsSubmission,
                _referenceManager,
                // Reuse is only safe when the name is in fact unchanged.
                reuseReferenceManager: assemblyName == this.AssemblyName,
                syntaxAndDeclarations: _syntaxAndDeclarations);
        }
        /// <summary>
        /// Creates a new compilation with the specified references.
        /// </summary>
        /// <remarks>
        /// The new <see cref="CSharpCompilation"/> will query the given <see cref="MetadataReference"/> for the underlying
        /// metadata as soon as the are needed.
        ///
        /// The new compilation uses whatever metadata is currently being provided by the <see cref="MetadataReference"/>.
        /// E.g. if the current compilation references a metadata file that has changed since the creation of the compilation
        /// the new compilation is going to use the updated version, while the current compilation will be using the previous (it doesn't change).
        /// </remarks>
        public new CSharpCompilation WithReferences(IEnumerable<MetadataReference> references)
        {
            // References might have changed, don't reuse reference manager.
            // Don't even reuse observed metadata - let the manager query for the metadata again.

            return new CSharpCompilation(
                this.AssemblyName,
                _options,
                ValidateReferences<CSharpCompilationReference>(references),
                _previousSubmission,
                this.SubmissionReturnType,
                this.HostObjectType,
                this.IsSubmission,
                referenceManager: null,
                reuseReferenceManager: false,
                syntaxAndDeclarations: _syntaxAndDeclarations);
        }
/// <summary>
/// Creates a new compilation with the specified references.
/// </summary>
public new CSharpCompilation WithReferences(params MetadataReference[] references)
{
return this.WithReferences((IEnumerable<MetadataReference>)references);
}
        /// <summary>
        /// Creates a new compilation with the specified compilation options.
        /// </summary>
        public CSharpCompilation WithOptions(CSharpCompilationOptions options)
        {
            var oldOptions = this.Options;
            // Reference binding is unaffected by some option changes; reuse when possible.
            bool reuseReferenceManager = oldOptions.CanReuseCompilationReferenceManager(options);
            // Declarations can be reused unless the script class name or source resolver changed.
            bool reuseSyntaxAndDeclarationManager = oldOptions.ScriptClassName == options.ScriptClassName &&
                oldOptions.SourceReferenceResolver == options.SourceReferenceResolver;

            return new CSharpCompilation(
                this.AssemblyName,
                options,
                this.ExternalReferences,
                _previousSubmission,
                this.SubmissionReturnType,
                this.HostObjectType,
                this.IsSubmission,
                _referenceManager,
                reuseReferenceManager,
                reuseSyntaxAndDeclarationManager ?
                    _syntaxAndDeclarations :
                    new SyntaxAndDeclarationManager(
                        _syntaxAndDeclarations.ExternalSyntaxTrees,
                        options.ScriptClassName,
                        options.SourceReferenceResolver,
                        _syntaxAndDeclarations.MessageProvider,
                        _syntaxAndDeclarations.IsSubmission,
                        state: null));
        }
        /// <summary>
        /// Returns a new compilation with the given compilation set as the previous submission.
        /// </summary>
        /// <exception cref="InvalidOperationException">This compilation is not a submission.</exception>
        internal CSharpCompilation WithPreviousSubmission(CSharpCompilation newPreviousSubmission)
        {
            if (!this.IsSubmission)
            {
                throw new InvalidOperationException(CSharpResources.CannotHavePreviousSubmission);
            }

            // Reference binding doesn't depend on previous submission so we can reuse it.

            return new CSharpCompilation(
                this.AssemblyName,
                _options,
                this.ExternalReferences,
                newPreviousSubmission,
                this.SubmissionReturnType,
                this.HostObjectType,
                this.IsSubmission,
                _referenceManager,
                reuseReferenceManager: true,
                syntaxAndDeclarations: _syntaxAndDeclarations);
        }
/// <summary>
/// Returns a new compilation with a given event queue.
/// </summary>
internal override Compilation WithEventQueue(AsyncQueue<CompilationEvent> eventQueue)
{
    // Only the event queue changes; references and declarations are untouched,
    // so both the reference manager and the declaration table are reused as-is.
    return new CSharpCompilation(
        this.AssemblyName,
        _options,
        this.ExternalReferences,
        _previousSubmission,
        this.SubmissionReturnType,
        this.HostObjectType,
        this.IsSubmission,
        _referenceManager,
        reuseReferenceManager: true,
        syntaxAndDeclarations: _syntaxAndDeclarations,
        eventQueue: eventQueue);
}
#endregion
#region Submission
/// <summary>
/// The compilation representing the preceding submission, if any.
/// </summary>
internal new CSharpCompilation PreviousSubmission => _previousSubmission;
// TODO (tomat): consider moving this method to SemanticModel
/// <summary>
/// Returns the type of the submission return value.
/// </summary>
/// <returns>
/// The type of the last expression of the submission.
/// Null if the type of the last expression is unknown (null).
/// Void type if the type of the last expression statement is void or
/// the submission ends with a declaration or statement that is not an expression statement.
/// </returns>
/// <remarks>
/// Note that the return type is System.Void for both compilations "System.Console.WriteLine();" and "System.Console.WriteLine()",
/// and <paramref name="hasValue"/> is <c>False</c> for the former and <c>True</c> for the latter.
/// </remarks>
/// <param name="hasValue">True if the submission has value, i.e. if it ends with a statement that is an expression statement.</param>
/// <exception cref="InvalidOperationException">The compilation doesn't represent a submission (<see cref="Compilation.IsSubmission"/> return false).</exception>
internal new TypeSymbol GetSubmissionResultType(out bool hasValue)
{
    if (!IsSubmission)
    {
        throw new InvalidOperationException(CSharpResources.ThisCompilationNotInteractive);
    }
    hasValue = false;
    // A submission may be empty or comprised of a single script file.
    var tree = _syntaxAndDeclarations.ExternalSyntaxTrees.SingleOrDefault();
    if (tree == null || tree.Options.Kind != SourceCodeKind.Interactive)
    {
        return GetSpecialType(SpecialType.System_Void);
    }
    // Only the last global statement can carry the submission's value.
    var lastStatement = (GlobalStatementSyntax)tree.GetCompilationUnitRoot().Members.LastOrDefault(decl => decl.Kind() == SyntaxKind.GlobalStatement);
    if (lastStatement == null || lastStatement.Statement.Kind() != SyntaxKind.ExpressionStatement)
    {
        return GetSpecialType(SpecialType.System_Void);
    }
    // A present (non-missing) semicolon means an expression *statement*, which has no value.
    var expressionStatement = (ExpressionStatementSyntax)lastStatement.Statement;
    if (!expressionStatement.SemicolonToken.IsMissing)
    {
        return GetSpecialType(SpecialType.System_Void);
    }
    var model = GetSemanticModel(tree);
    hasValue = true;
    var expression = expressionStatement.Expression;
    var info = model.GetTypeInfo(expression);
    // May be null when the expression's type cannot be determined.
    return (TypeSymbol)info.ConvertedType;
}
#endregion
#region Syntax Trees (maintain an ordered list)
/// <summary>
/// The syntax trees (parsed from source code) that this compilation was created with.
/// </summary>
public new ImmutableArray<SyntaxTree> SyntaxTrees => _syntaxAndDeclarations.GetLazyState().SyntaxTrees;
/// <summary>
/// Returns true if this compilation contains the specified tree. False otherwise.
/// </summary>
public new bool ContainsSyntaxTree(SyntaxTree syntaxTree)
{
    // The original's 'syntaxTree as SyntaxTree' was a no-op cast (the parameter is
    // already statically typed as SyntaxTree), so a plain null check is equivalent.
    return syntaxTree != null && _syntaxAndDeclarations.GetLazyState().RootNamespaces.ContainsKey(syntaxTree);
}
/// <summary>
/// Creates a new compilation with additional syntax trees.
/// </summary>
public new CSharpCompilation AddSyntaxTrees(params SyntaxTree[] trees) =>
    AddSyntaxTrees((IEnumerable<SyntaxTree>)trees);
/// <summary>
/// Creates a new compilation with additional syntax trees.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="trees"/> or an element of it is null.</exception>
/// <exception cref="ArgumentException">A tree is rootless, already present, or violates submission rules.</exception>
public new CSharpCompilation AddSyntaxTrees(IEnumerable<SyntaxTree> trees)
{
    if (trees == null)
    {
        throw new ArgumentNullException(nameof(trees));
    }
    if (trees.IsEmpty())
    {
        return this;
    }
    // This HashSet is needed so that we don't allow adding the same tree twice
    // with a single call to AddSyntaxTrees. Rather than using a separate HashSet,
    // ReplaceSyntaxTrees can just check against ExternalSyntaxTrees, because we
    // only allow replacing a single tree at a time.
    var externalSyntaxTrees = PooledHashSet<SyntaxTree>.GetInstance();
    var syntaxAndDeclarations = _syntaxAndDeclarations;
    externalSyntaxTrees.AddAll(syntaxAndDeclarations.ExternalSyntaxTrees);
    bool reuseReferenceManager = true;
    // i tracks the position of the current tree, used only in error messages.
    int i = 0;
    foreach (var tree in trees.Cast<CSharpSyntaxTree>())
    {
        if (tree == null)
        {
            throw new ArgumentNullException($"{nameof(trees)}[{i}]");
        }
        if (!tree.HasCompilationUnitRoot)
        {
            throw new ArgumentException(CSharpResources.TreeMustHaveARootNodeWith, $"{nameof(trees)}[{i}]");
        }
        if (externalSyntaxTrees.Contains(tree))
        {
            throw new ArgumentException(CSharpResources.SyntaxTreeAlreadyPresent, $"{nameof(trees)}[{i}]");
        }
        if (this.IsSubmission && tree.Options.Kind == SourceCodeKind.Regular)
        {
            throw new ArgumentException(CSharpResources.SubmissionCanOnlyInclude, $"{nameof(trees)}[{i}]");
        }
        externalSyntaxTrees.Add(tree);
        // A tree with #r/#load directives invalidates the bound reference set.
        reuseReferenceManager &= !tree.HasReferenceOrLoadDirectives;
        i++;
    }
    externalSyntaxTrees.Free();
    // A submission is limited to a single syntax tree in total.
    if (this.IsSubmission && i > 1)
    {
        throw new ArgumentException(CSharpResources.SubmissionCanHaveAtMostOne, nameof(trees));
    }
    syntaxAndDeclarations = syntaxAndDeclarations.AddSyntaxTrees(trees);
    return Update(_referenceManager, reuseReferenceManager, syntaxAndDeclarations);
}
/// <summary>
/// Creates a new compilation without the specified syntax trees. Preserves metadata info for use with trees
/// added later.
/// </summary>
public new CSharpCompilation RemoveSyntaxTrees(params SyntaxTree[] trees) =>
    RemoveSyntaxTrees((IEnumerable<SyntaxTree>)trees);
/// <summary>
/// Creates a new compilation without the specified syntax trees. Preserves metadata info for use with trees
/// added later.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="trees"/> is null.</exception>
/// <exception cref="ArgumentException">A tree is #load'ed or not part of this compilation.</exception>
public new CSharpCompilation RemoveSyntaxTrees(IEnumerable<SyntaxTree> trees)
{
    if (trees == null)
    {
        throw new ArgumentNullException(nameof(trees));
    }
    if (trees.IsEmpty())
    {
        return this;
    }
    var removeSet = PooledHashSet<SyntaxTree>.GetInstance();
    // Snapshot of the externally-added trees, used to validate that every tree
    // being removed was actually added by the user (as opposed to #load'ed).
    var externalSyntaxTrees = PooledHashSet<SyntaxTree>.GetInstance();
    var syntaxAndDeclarations = _syntaxAndDeclarations;
    externalSyntaxTrees.AddAll(syntaxAndDeclarations.ExternalSyntaxTrees);
    bool reuseReferenceManager = true;
    // i tracks the position of the current tree, used only in error messages.
    int i = 0;
    foreach (var tree in trees.Cast<CSharpSyntaxTree>())
    {
        if (!externalSyntaxTrees.Contains(tree))
        {
            // Check to make sure this is not a #load'ed tree.
            var loadedSyntaxTreeMap = syntaxAndDeclarations.GetLazyState().LoadedSyntaxTreeMap;
            if (SyntaxAndDeclarationManager.IsLoadedSyntaxTree(tree, loadedSyntaxTreeMap))
            {
                throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeFromLoadNoRemoveReplace, tree), $"{nameof(trees)}[{i}]");
            }
            throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeNotFoundTo, tree), $"{nameof(trees)}[{i}]");
        }
        removeSet.Add(tree);
        // Removing a tree that carried #r/#load directives invalidates the bound references.
        reuseReferenceManager &= !tree.HasReferenceOrLoadDirectives;
        i++;
    }
    externalSyntaxTrees.Free();
    syntaxAndDeclarations = syntaxAndDeclarations.RemoveSyntaxTrees(removeSet);
    removeSet.Free();
    return Update(_referenceManager, reuseReferenceManager, syntaxAndDeclarations);
}
/// <summary>
/// Creates a new compilation without any syntax trees. Preserves metadata info
/// from this compilation for use with trees added later.
/// </summary>
public new CSharpCompilation RemoveAllSyntaxTrees()
{
    // All external trees are dropped; the reference manager survives only if no
    // tree could have contributed #r directives.
    var syntaxAndDecls = _syntaxAndDeclarations;
    bool reuse = !syntaxAndDecls.MayHaveReferenceDirectives();
    return Update(
        _referenceManager,
        reuse,
        syntaxAndDecls.WithExternalSyntaxTrees(ImmutableArray<SyntaxTree>.Empty));
}
/// <summary>
/// Creates a new compilation without the old tree but with the new tree.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="oldTree"/> is null.</exception>
/// <exception cref="ArgumentException">The old tree is absent/#load'ed, the new tree is rootless
/// or already present, or either tree is not a C# tree.</exception>
public new CSharpCompilation ReplaceSyntaxTree(SyntaxTree oldTree, SyntaxTree newTree)
{
    // this is just to force a cast exception
    oldTree = (CSharpSyntaxTree)oldTree;
    newTree = (CSharpSyntaxTree)newTree;
    if (oldTree == null)
    {
        throw new ArgumentNullException(nameof(oldTree));
    }
    if (newTree == null)
    {
        // Replacing a tree with null is treated as removal.
        return this.RemoveSyntaxTrees(oldTree);
    }
    else if (newTree == oldTree)
    {
        return this;
    }
    if (!newTree.HasCompilationUnitRoot)
    {
        throw new ArgumentException(CSharpResources.TreeMustHaveARootNodeWith, nameof(newTree));
    }
    var syntaxAndDeclarations = _syntaxAndDeclarations;
    var externalSyntaxTrees = syntaxAndDeclarations.ExternalSyntaxTrees;
    if (!externalSyntaxTrees.Contains(oldTree))
    {
        // Check to see if this is a #load'ed tree.
        var loadedSyntaxTreeMap = syntaxAndDeclarations.GetLazyState().LoadedSyntaxTreeMap;
        if (SyntaxAndDeclarationManager.IsLoadedSyntaxTree(oldTree, loadedSyntaxTreeMap))
        {
            throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeFromLoadNoRemoveReplace, oldTree), nameof(oldTree));
        }
        throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeNotFoundTo, oldTree), nameof(oldTree));
    }
    if (externalSyntaxTrees.Contains(newTree))
    {
        throw new ArgumentException(CSharpResources.SyntaxTreeAlreadyPresent, nameof(newTree));
    }
    // TODO(tomat): Consider comparing #r's of the old and the new tree. If they are exactly the same we could still reuse.
    // This could be a perf win when editing a script file in the IDE. The services create a new compilation every keystroke
    // that replaces the tree with a new one.
    var reuseReferenceManager = !oldTree.HasReferenceOrLoadDirectives() && !newTree.HasReferenceOrLoadDirectives();
    syntaxAndDeclarations = syntaxAndDeclarations.ReplaceSyntaxTree(oldTree, newTree);
    return Update(_referenceManager, reuseReferenceManager, syntaxAndDeclarations);
}
/// <summary>
/// Returns the ordinal of the given tree within this compilation.
/// The tree must be contained in the compilation.
/// </summary>
internal override int GetSyntaxTreeOrdinal(SyntaxTree tree)
{
    Debug.Assert(this.ContainsSyntaxTree(tree));
    var ordinalMap = _syntaxAndDeclarations.GetLazyState().OrdinalMap;
    return ordinalMap[tree];
}
#endregion
#region References
// Non-generic bridge to the C#-specific reference manager, used by the
// language-agnostic Compilation surface.
internal override CommonReferenceManager CommonGetBoundReferenceManager() => GetBoundReferenceManager();
/// <summary>
/// Binds the metadata references on first use and returns the reference manager.
/// </summary>
internal new ReferenceManager GetBoundReferenceManager()
{
    if ((object)_lazyAssemblySymbol == null)
    {
        // Side effect: this call initializes _lazyAssemblySymbol (and may replace
        // _referenceManager along the way).
        _referenceManager.CreateSourceAssemblyForCompilation(this);
        Debug.Assert((object)_lazyAssemblySymbol != null);
    }
    // referenceManager can only be accessed after we initialized the lazyAssemblySymbol.
    // In fact, initialization of the assembly symbol might change the reference manager.
    return _referenceManager;
}
// for testing only: true when both compilations share the very same reference manager instance.
internal bool ReferenceManagerEquals(CSharpCompilation other) =>
    ReferenceEquals(_referenceManager, other._referenceManager);
// Metadata references introduced by #r directives; forces reference binding.
public override ImmutableArray<MetadataReference> DirectiveReferences => GetBoundReferenceManager().DirectiveReferences;
// Maps #r directive file strings to the metadata references they resolved to;
// forces reference binding.
internal override IDictionary<string, MetadataReference> ReferenceDirectiveMap => GetBoundReferenceManager().ReferenceDirectiveMap;
// for testing purposes
internal IEnumerable<string> ExternAliases => GetBoundReferenceManager().ExternAliases;
/// <summary>
/// Gets the <see cref="AssemblySymbol"/> or <see cref="ModuleSymbol"/> for a metadata reference used to create this compilation.
/// </summary>
/// <returns><see cref="AssemblySymbol"/> or <see cref="ModuleSymbol"/> corresponding to the given reference or null if there is none.</returns>
/// <remarks>
/// Uses object identity when comparing two references.
/// </remarks>
internal new Symbol GetAssemblyOrModuleSymbol(MetadataReference reference)
{
    if (reference == null)
    {
        throw new ArgumentNullException(nameof(reference));
    }
    var referenceManager = GetBoundReferenceManager();
    if (reference.Properties.Kind == MetadataImageKind.Assembly)
    {
        return referenceManager.GetReferencedAssemblySymbol(reference);
    }
    // Anything other than an assembly image must be a module image.
    Debug.Assert(reference.Properties.Kind == MetadataImageKind.Module);
    int moduleIndex = referenceManager.GetReferencedModuleIndex(reference);
    return moduleIndex < 0 ? null : this.Assembly.Modules[moduleIndex];
}
// Identities of every assembly referenced by any module of this compilation's assembly.
public override IEnumerable<AssemblyIdentity> ReferencedAssemblyNames =>
    Assembly.Modules.SelectMany(m => m.GetReferencedAssemblies());
/// <summary>
/// All reference directives used in this compilation.
/// </summary>
internal override IEnumerable<ReferenceDirective> ReferenceDirectives => this.Declarations.ReferenceDirectives;
/// <summary>
/// Returns a metadata reference that a given #r resolves to.
/// </summary>
/// <param name="directive">#r directive.</param>
/// <returns>Metadata reference the specified directive resolves to.</returns>
public MetadataReference GetDirectiveReference(ReferenceDirectiveTriviaSyntax directive) =>
    ReferenceDirectiveMap[directive.File.ValueText];
/// <summary>
/// Creates a new compilation with additional metadata references.
/// </summary>
public new CSharpCompilation AddReferences(params MetadataReference[] references) =>
    (CSharpCompilation)base.AddReferences(references);
/// <summary>
/// Creates a new compilation with additional metadata references.
/// </summary>
public new CSharpCompilation AddReferences(IEnumerable<MetadataReference> references) =>
    (CSharpCompilation)base.AddReferences(references);
/// <summary>
/// Creates a new compilation without the specified metadata references.
/// </summary>
public new CSharpCompilation RemoveReferences(params MetadataReference[] references) =>
    (CSharpCompilation)base.RemoveReferences(references);
/// <summary>
/// Creates a new compilation without the specified metadata references.
/// </summary>
public new CSharpCompilation RemoveReferences(IEnumerable<MetadataReference> references) =>
    (CSharpCompilation)base.RemoveReferences(references);
/// <summary>
/// Creates a new compilation without any metadata references.
/// </summary>
public new CSharpCompilation RemoveAllReferences() =>
    (CSharpCompilation)base.RemoveAllReferences();
/// <summary>
/// Creates a new compilation with an old metadata reference replaced with a new metadata reference.
/// </summary>
public new CSharpCompilation ReplaceReference(MetadataReference oldReference, MetadataReference newReference) =>
    (CSharpCompilation)base.ReplaceReference(oldReference, newReference);
// Wraps this compilation as a reference so other compilations can consume its symbols.
public override CompilationReference ToMetadataReference(ImmutableArray<string> aliases = default(ImmutableArray<string>), bool embedInteropTypes = false) =>
    new CSharpCompilationReference(this, aliases, embedInteropTypes);
/// <summary>
/// Get all modules in this compilation, including the source module, added modules, and all
/// modules of referenced assemblies that do not come from an assembly with an extern alias.
/// Metadata imported from aliased assemblies is not visible at the source level except through
/// the use of an extern alias directive. So exclude them from this list which is used to construct
/// the global namespace.
/// </summary>
private void GetAllUnaliasedModules(ArrayBuilder<ModuleSymbol> modules)
{
    // NOTE: This includes referenced modules - they count as modules of the compilation assembly.
    modules.AddRange(Assembly.Modules);
    var refManager = GetBoundReferenceManager();
    var referencedAssemblies = refManager.ReferencedAssemblies;
    for (int index = 0; index < referencedAssemblies.Length; index++)
    {
        // Skip assemblies reachable only through an extern alias.
        if (refManager.DeclarationsAccessibleWithoutAlias(index))
        {
            modules.AddRange(referencedAssemblies[index].Modules);
        }
    }
}
/// <summary>
/// Return a list of assembly symbols than can be accessed without using an alias.
/// For example:
///   1) /r:A.dll /r:B.dll -> A, B
///   2) /r:Foo=A.dll /r:B.dll -> B
///   3) /r:Foo=A.dll /r:A.dll -> A
/// </summary>
internal void GetUnaliasedReferencedAssemblies(ArrayBuilder<AssemblySymbol> assemblies)
{
    var refManager = GetBoundReferenceManager();
    var referencedAssemblies = refManager.ReferencedAssemblies;
    for (int index = 0; index < referencedAssemblies.Length; index++)
    {
        if (refManager.DeclarationsAccessibleWithoutAlias(index))
        {
            assemblies.Add(referencedAssemblies[index]);
        }
    }
}
/// <summary>
/// Gets the <see cref="MetadataReference"/> that corresponds to the assembly symbol.
/// </summary>
public new MetadataReference GetMetadataReference(IAssemblySymbol assemblySymbol) =>
    base.GetMetadataReference(assemblySymbol);
#endregion
#region Symbols
/// <summary>
/// The AssemblySymbol that represents the assembly being created.
/// </summary>
internal SourceAssemblySymbol SourceAssembly
{
    get
    {
        // Called for its side effect: binding the references initializes
        // _lazyAssemblySymbol (see GetBoundReferenceManager).
        GetBoundReferenceManager();
        return _lazyAssemblySymbol;
    }
}
/// <summary>
/// The AssemblySymbol that represents the assembly being created.
/// </summary>
internal new AssemblySymbol Assembly => SourceAssembly;
/// <summary>
/// Get a ModuleSymbol that refers to the module being created by compiling all of the code.
/// By getting the GlobalNamespace property of that module, all of the namespaces and types
/// defined in source code can be obtained.
/// </summary>
internal new ModuleSymbol SourceModule => Assembly.Modules[0];
/// <summary>
/// Gets the root namespace that contains all namespaces and types defined in source code or in
/// referenced metadata, merged into a single namespace hierarchy.
/// </summary>
internal new NamespaceSymbol GlobalNamespace
{
    get
    {
        if ((object)_lazyGlobalNamespace == null)
        {
            // Get the root namespace from each module, and merge them all together
            // Get all modules in this compilation, ones referenced directly by the compilation
            // as well as those referenced by all referenced assemblies.
            var modules = ArrayBuilder<ModuleSymbol>.GetInstance();
            GetAllUnaliasedModules(modules);
            var result = MergedNamespaceSymbol.Create(
                new NamespaceExtent(this),
                null,
                modules.SelectDistinct(m => m.GlobalNamespace));
            modules.Free();
            // Publish atomically; a losing thread simply discards its merged namespace.
            Interlocked.CompareExchange(ref _lazyGlobalNamespace, result, null);
        }
        return _lazyGlobalNamespace;
    }
}
/// <summary>
/// Given for the specified module or assembly namespace, gets the corresponding compilation
/// namespace (merged namespace representation for all namespace declarations and references
/// with contributions for the namespaceSymbol). Can return null if no corresponding
/// namespace can be bound in this compilation with the same name.
/// </summary>
internal new NamespaceSymbol GetCompilationNamespace(INamespaceSymbol namespaceSymbol)
{
    // Already a compilation-merged namespace of this compilation? Return it directly.
    if (namespaceSymbol is NamespaceSymbol &&
        namespaceSymbol.NamespaceKind == NamespaceKind.Compilation &&
        namespaceSymbol.ContainingCompilation == this)
    {
        return (NamespaceSymbol)namespaceSymbol;
    }
    // A namespace with no parent is the global namespace.
    var parent = namespaceSymbol.ContainingNamespace;
    if (parent == null)
    {
        return this.GlobalNamespace;
    }
    // Map the parent recursively, then descend into it by simple name.
    var mappedParent = GetCompilationNamespace(parent);
    return (object)mappedParent == null ? null : mappedParent.GetNestedNamespace(namespaceSymbol.Name);
}
// Lazily-created cache mapping an extern alias name to its merged namespace target
// (or a missing-namespace placeholder when the alias did not resolve).
private ConcurrentDictionary<string, NamespaceSymbol> _externAliasTargets;
/// <summary>
/// Resolves an extern alias to the merged global namespace of the referenced assemblies
/// carrying that alias. Returns false (with a missing-namespace placeholder in
/// <paramref name="namespace"/>) when no referenced assembly has the alias.
/// </summary>
internal bool GetExternAliasTarget(string aliasName, out NamespaceSymbol @namespace)
{
    if (_externAliasTargets == null)
    {
        Interlocked.CompareExchange(ref _externAliasTargets, new ConcurrentDictionary<string, NamespaceSymbol>(), null);
    }
    else if (_externAliasTargets.TryGetValue(aliasName, out @namespace))
    {
        return !(@namespace is MissingNamespaceSymbol);
    }
    // Collect the global namespaces of every referenced assembly carrying this alias.
    ArrayBuilder<NamespaceSymbol> builder = null;
    var referenceManager = GetBoundReferenceManager();
    for (int i = 0; i < referenceManager.ReferencedAssemblies.Length; i++)
    {
        if (referenceManager.AliasesOfReferencedAssemblies[i].Contains(aliasName))
        {
            builder = builder ?? ArrayBuilder<NamespaceSymbol>.GetInstance();
            builder.Add(referenceManager.ReferencedAssemblies[i].GlobalNamespace);
        }
    }
    bool foundNamespace = builder != null;
    // We want to cache failures as well as successes so that subsequent incorrect extern aliases with the
    // same alias will have the same target.
    @namespace = foundNamespace
        ? MergedNamespaceSymbol.Create(new NamespaceExtent(this), namespacesToMerge: builder.ToImmutableAndFree(), containingNamespace: null, nameOpt: null)
        : new MissingNamespaceSymbol(new MissingModuleSymbol(new MissingAssemblySymbol(new AssemblyIdentity(System.Guid.NewGuid().ToString())), ordinal: -1));
    // Use GetOrAdd in case another thread beat us to the punch (i.e. should return the same object for the same alias, every time).
    @namespace = _externAliasTargets.GetOrAdd(aliasName, @namespace);
    Debug.Assert(foundNamespace == !(@namespace is MissingNamespaceSymbol));
    return foundNamespace;
}
/// <summary>
/// A symbol representing the implicit Script class. This is null if the class is not
/// defined in the compilation.
/// </summary>
internal new NamedTypeSymbol ScriptClass => _scriptClass.Value;
/// <summary>
/// Resolves a symbol that represents script container (Script class). Uses the
/// full name of the container class stored in <see cref="CompilationOptions.ScriptClassName"/> to find the symbol.
/// </summary>
/// <returns>The Script class symbol or null if it is not defined.</returns>
private ImplicitNamedTypeSymbol BindScriptClass()
{
    var scriptClassName = _options.ScriptClassName;
    // No usable name configured -> no script class.
    if (scriptClassName == null || !scriptClassName.IsValidClrTypeName())
    {
        return null;
    }
    var nameParts = scriptClassName.Split('.');
    var candidate = this.Assembly.GlobalNamespace.GetNamespaceOrTypeByQualifiedName(nameParts).AsSingleton();
    return candidate as ImplicitNamedTypeSymbol;
}
// Lazily-computed global imports for this compilation.
internal Imports GlobalImports => _globalImports.Value;
// The namespace/type symbols named by the global usings.
internal IEnumerable<NamespaceOrTypeSymbol> GlobalUsings => GlobalImports.Usings.Select(u => u.NamespaceOrType);
// Lazily-created alias symbol for the global namespace.
internal AliasSymbol GlobalNamespaceAlias => _globalNamespaceAlias.Value;
/// <summary>
/// Get the symbol for the predefined type from the COR Library referenced by this compilation.
/// </summary>
/// <exception cref="ArgumentOutOfRangeException">The value is not a valid SpecialType member.</exception>
internal new NamedTypeSymbol GetSpecialType(SpecialType specialType)
{
    bool outOfRange = specialType <= SpecialType.None || specialType > SpecialType.Count;
    if (outOfRange)
    {
        throw new ArgumentOutOfRangeException(nameof(specialType));
    }
    var special = Assembly.GetSpecialType(specialType);
    Debug.Assert(special.SpecialType == specialType);
    return special;
}
/// <summary>
/// Get the symbol for the predefined type member from the COR Library referenced by this compilation.
/// </summary>
internal Symbol GetSpecialTypeMember(SpecialMember specialMember) => Assembly.GetSpecialTypeMember(specialMember);
// Maps a reflection System.Type to a symbol, searching this assembly and all references.
// When the type cannot be found, reports a diagnostic and returns an error type placeholder
// so that binding can proceed.
internal TypeSymbol GetTypeByReflectionType(Type type, DiagnosticBag diagnostics)
{
    var found = Assembly.GetTypeByReflectionType(type, includeReferences: true);
    if ((object)found != null)
    {
        return found;
    }
    var errorType = new ExtendedErrorTypeSymbol(this, type.Name, 0, CreateReflectionTypeNotFoundError(type));
    diagnostics.Add(errorType.ErrorInfo, NoLocation.Singleton);
    return errorType;
}
private static CSDiagnosticInfo CreateReflectionTypeNotFoundError(Type type)
{
    // ERR_GlobalSingleTypeNameNotFound: "The type or namespace name '{0}' could not be
    // found in the global namespace (are you missing an assembly reference?)"
    var messageArgs = new object[] { type.AssemblyQualifiedName };
    return new CSDiagnosticInfo(
        ErrorCode.ERR_GlobalSingleTypeNameNotFound,
        messageArgs,
        ImmutableArray<Symbol>.Empty,
        ImmutableArray<Location>.Empty);
}
// The type of host object model if available. Lazily resolved by GetHostObjectTypeSymbol.
private TypeSymbol _lazyHostObjectTypeSymbol;
/// <summary>
/// Resolves HostObjectType to a symbol, substituting a missing-metadata symbol carrying a
/// "type not found" diagnostic when it cannot be resolved. Returns null when no host object
/// type was specified.
/// </summary>
internal TypeSymbol GetHostObjectTypeSymbol()
{
    if (HostObjectType != null && (object)_lazyHostObjectTypeSymbol == null)
    {
        TypeSymbol symbol = Assembly.GetTypeByReflectionType(HostObjectType, includeReferences: true);
        if ((object)symbol == null)
        {
            // Synthesize a missing-type symbol that carries the "not found" error info.
            MetadataTypeName mdName = MetadataTypeName.FromNamespaceAndTypeName(HostObjectType.Namespace ?? String.Empty,
                                                                                HostObjectType.Name,
                                                                                useCLSCompliantNameArityEncoding: true);
            symbol = new MissingMetadataTypeSymbol.TopLevelWithCustomErrorInfo(
                new MissingAssemblySymbol(AssemblyIdentity.FromAssemblyDefinition(HostObjectType.GetTypeInfo().Assembly)).Modules[0],
                ref mdName,
                CreateReflectionTypeNotFoundError(HostObjectType),
                SpecialType.None);
        }
        // Publish once; racing threads all observe the first published symbol.
        Interlocked.CompareExchange(ref _lazyHostObjectTypeSymbol, symbol, null);
    }
    return _lazyHostObjectTypeSymbol;
}
// Returns the synthesized initializer of the script class for a submission,
// or null when this is not a submission or there is no script class.
internal SynthesizedInteractiveInitializerMethod GetSubmissionInitializer()
{
    if (IsSubmission && (object)ScriptClass != null)
    {
        return ScriptClass.GetScriptInitializer();
    }
    return null;
}
/// <summary>
/// Gets the type within the compilation's assembly and all referenced assemblies (other than
/// those that can only be referenced via an extern alias) using its canonical CLR metadata name.
/// </summary>
internal new NamedTypeSymbol GetTypeByMetadataName(string fullyQualifiedMetadataName) =>
    this.Assembly.GetTypeByMetadataName(fullyQualifiedMetadataName, includeReferences: true, isWellKnownType: false);
/// <summary>
/// The TypeSymbol for the type 'dynamic' in this Compilation.
/// </summary>
internal new TypeSymbol DynamicType => AssemblySymbol.DynamicType;
/// <summary>
/// The NamedTypeSymbol for the .NET System.Object type, which could have a TypeKind of
/// Error if there was no COR Library in this Compilation.
/// </summary>
internal new NamedTypeSymbol ObjectType => this.Assembly.ObjectType;
// Delegates to the source assembly's answer for whether System.Object is declared here.
internal bool DeclaresTheObjectClass => SourceAssembly.DeclaresTheObjectClass;
// Returns the resolved entry point method, or null when there is none.
internal new MethodSymbol GetEntryPoint(CancellationToken cancellationToken) =>
    GetEntryPointAndDiagnostics(cancellationToken)?.MethodSymbol;
/// <summary>
/// Resolves the entry point (Main or the script entry point) together with the diagnostics
/// produced during resolution; the result is computed once and cached.
/// </summary>
internal EntryPoint GetEntryPointAndDiagnostics(CancellationToken cancellationToken)
{
    // Entry points exist only for applications, or when script (global) code is present.
    if (!this.Options.OutputKind.IsApplication() && ((object)this.ScriptClass == null))
    {
        return null;
    }
    // An invalid /main type name was already reported among the options errors.
    if (this.Options.MainTypeName != null && !this.Options.MainTypeName.IsValidClrTypeName())
    {
        Debug.Assert(!this.Options.Errors.IsDefaultOrEmpty);
        return new EntryPoint(null, ImmutableArray<Diagnostic>.Empty);
    }
    // Lazily compute once; racing threads publish via CompareExchange.
    if (_lazyEntryPoint == null)
    {
        ImmutableArray<Diagnostic> diagnostics;
        var entryPoint = FindEntryPoint(cancellationToken, out diagnostics);
        Interlocked.CompareExchange(ref _lazyEntryPoint, new EntryPoint(entryPoint, diagnostics), null);
    }
    return _lazyEntryPoint;
}
/// <summary>
/// Searches the compilation for the entry point method, honoring /main when specified.
/// </summary>
/// <param name="sealedDiagnostics">Receives all diagnostics produced during the search.</param>
/// <returns>The single viable entry point, or null when none or multiple were found.</returns>
private MethodSymbol FindEntryPoint(CancellationToken cancellationToken, out ImmutableArray<Diagnostic> sealedDiagnostics)
{
    var diagnostics = DiagnosticBag.GetInstance();
    var entryPointCandidates = ArrayBuilder<MethodSymbol>.GetInstance();
    try
    {
        NamedTypeSymbol mainType;
        string mainTypeName = this.Options.MainTypeName;
        NamespaceSymbol globalNamespace = this.SourceModule.GlobalNamespace;
        if (mainTypeName != null)
        {
            // Global code is the entry point, ignore all other Mains.
            var scriptClass = this.ScriptClass;
            if (scriptClass != null)
            {
                // CONSIDER: we could use the symbol instead of just the name.
                diagnostics.Add(ErrorCode.WRN_MainIgnored, NoLocation.Singleton, mainTypeName);
                return scriptClass.GetScriptEntryPoint();
            }
            // Resolve /main:<type> and validate it is a non-generic class or struct.
            var mainTypeOrNamespace = globalNamespace.GetNamespaceOrTypeByQualifiedName(mainTypeName.Split('.')).OfMinimalArity();
            if ((object)mainTypeOrNamespace == null)
            {
                diagnostics.Add(ErrorCode.ERR_MainClassNotFound, NoLocation.Singleton, mainTypeName);
                return null;
            }
            mainType = mainTypeOrNamespace as NamedTypeSymbol;
            if ((object)mainType == null || mainType.IsGenericType || (mainType.TypeKind != TypeKind.Class && mainType.TypeKind != TypeKind.Struct))
            {
                diagnostics.Add(ErrorCode.ERR_MainClassNotClass, mainTypeOrNamespace.Locations.First(), mainTypeOrNamespace);
                return null;
            }
            EntryPointCandidateFinder.FindCandidatesInSingleType(mainType, entryPointCandidates, cancellationToken);
        }
        else
        {
            mainType = null;
            // No /main specified: consider every candidate Main in the compilation.
            EntryPointCandidateFinder.FindCandidatesInNamespace(globalNamespace, entryPointCandidates, cancellationToken);
            // Global code is the entry point, ignore all other Mains.
            var scriptClass = this.ScriptClass;
            if (scriptClass != null)
            {
                foreach (var main in entryPointCandidates)
                {
                    diagnostics.Add(ErrorCode.WRN_MainIgnored, main.Locations.First(), main);
                }
                return scriptClass.GetScriptEntryPoint();
            }
        }
        // Narrow the candidates to those with a valid entry point signature,
        // collecting signature warnings separately.
        DiagnosticBag warnings = DiagnosticBag.GetInstance();
        var viableEntryPoints = ArrayBuilder<MethodSymbol>.GetInstance();
        foreach (var candidate in entryPointCandidates)
        {
            if (!candidate.HasEntryPointSignature())
            {
                // a single error for partial methods:
                warnings.Add(ErrorCode.WRN_InvalidMainSig, candidate.Locations.First(), candidate);
                continue;
            }
            if (candidate.IsGenericMethod || candidate.ContainingType.IsGenericType)
            {
                // a single error for partial methods:
                warnings.Add(ErrorCode.WRN_MainCantBeGeneric, candidate.Locations.First(), candidate);
                continue;
            }
            if (candidate.IsAsync)
            {
                diagnostics.Add(ErrorCode.ERR_MainCantBeAsync, candidate.Locations.First(), candidate);
            }
            viableEntryPoints.Add(candidate);
        }
        // Signature warnings are surfaced only when /main was unspecified or
        // pinpointing it did not yield a viable entry point.
        if ((object)mainType == null || viableEntryPoints.Count == 0)
        {
            diagnostics.AddRange(warnings);
        }
        warnings.Free();
        MethodSymbol entryPoint = null;
        if (viableEntryPoints.Count == 0)
        {
            if ((object)mainType == null)
            {
                diagnostics.Add(ErrorCode.ERR_NoEntryPoint, NoLocation.Singleton);
            }
            else
            {
                diagnostics.Add(ErrorCode.ERR_NoMainInClass, mainType.Locations.First(), mainType);
            }
        }
        else if (viableEntryPoints.Count > 1)
        {
            // Ambiguous: report every viable entry point, in source order.
            viableEntryPoints.Sort(LexicalOrderSymbolComparer.Instance);
            var info = new CSDiagnosticInfo(
                 ErrorCode.ERR_MultipleEntryPoints,
                 args: SpecializedCollections.EmptyArray<object>(),
                 symbols: viableEntryPoints.OfType<Symbol>().AsImmutable(),
                 additionalLocations: viableEntryPoints.Select(m => m.Locations.First()).OfType<Location>().AsImmutable());
            diagnostics.Add(new CSDiagnostic(info, viableEntryPoints.First().Locations.First()));
        }
        else
        {
            entryPoint = viableEntryPoints[0];
        }
        viableEntryPoints.Free();
        return entryPoint;
    }
    finally
    {
        // The candidate builder is returned to the pool and the diagnostics are
        // sealed regardless of which path produced the result.
        entryPointCandidates.Free();
        sealedDiagnostics = diagnostics.ToReadOnlyAndFree();
    }
}
// Pairs the resolved entry point method (null when resolution failed) with the
// diagnostics produced while searching for it.
internal class EntryPoint
{
    // The chosen entry point method, or null if none was found.
    public readonly MethodSymbol MethodSymbol;
    // Diagnostics produced during entry point resolution.
    public readonly ImmutableArray<Diagnostic> Diagnostics;
    public EntryPoint(MethodSymbol methodSymbol, ImmutableArray<Diagnostic> diagnostics)
    {
        this.MethodSymbol = methodSymbol;
        this.Diagnostics = diagnostics;
    }
}
// Delegates the NoPia local-type question to the source assembly.
internal bool MightContainNoPiaLocalTypes() => SourceAssembly.MightContainNoPiaLocalTypes();
// NOTE(cyrusn): There is a bit of a discoverability problem with this method and the same
// named method in SyntaxTreeSemanticModel. Technically, i believe these are the appropriate
// locations for these methods. This method has no dependencies on anything but the
// compilation, while the other method needs a bindings object to determine what bound node
// an expression syntax binds to. Perhaps when we document these methods we should explain
// where a user can find the other.
/// <summary>
/// Classifies the conversion from <paramref name="source"/> to <paramref name="destination"/>.
/// </summary>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
public Conversion ClassifyConversion(ITypeSymbol source, ITypeSymbol destination)
{
    // Note that it is possible for there to be both an implicit user-defined conversion
    // and an explicit built-in conversion from source to destination. In that scenario
    // this method returns the implicit conversion.
    if ((object)source == null)
    {
        throw new ArgumentNullException(nameof(source));
    }
    if ((object)destination == null)
    {
        throw new ArgumentNullException(nameof(destination));
    }
    // nameof instead of string literals keeps the reported parameter names correct
    // under rename, consistent with the rest of this file (identical runtime strings).
    var cssource = source.EnsureCSharpSymbolOrNull<ITypeSymbol, TypeSymbol>(nameof(source));
    var csdest = destination.EnsureCSharpSymbolOrNull<ITypeSymbol, TypeSymbol>(nameof(destination));
    HashSet<DiagnosticInfo> useSiteDiagnostics = null;
    return Conversions.ClassifyConversion(cssource, csdest, ref useSiteDiagnostics);
}
/// <summary>
/// Returns a new ArrayTypeSymbol representing an array type tied to the base types of the
/// COR Library in this Compilation.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="elementType"/> is null.</exception>
internal ArrayTypeSymbol CreateArrayTypeSymbol(TypeSymbol elementType, int rank = 1)
{
    if (ReferenceEquals(elementType, null))
    {
        throw new ArgumentNullException(nameof(elementType));
    }
    return ArrayTypeSymbol.CreateCSharpArray(this.Assembly, elementType, ImmutableArray<CustomModifier>.Empty, rank);
}
/// <summary>
/// Returns a new PointerTypeSymbol representing a pointer type tied to a type in this Compilation.
/// </summary>
/// <exception cref="ArgumentNullException"><paramref name="elementType"/> is null.</exception>
internal PointerTypeSymbol CreatePointerTypeSymbol(TypeSymbol elementType)
{
    if (ReferenceEquals(elementType, null))
    {
        throw new ArgumentNullException(nameof(elementType));
    }
    return new PointerTypeSymbol(elementType);
}
#endregion
#region Binding
/// <summary>
/// Gets a new SyntaxTreeSemanticModel for the specified syntax tree.
/// </summary>
/// <param name="syntaxTree">A syntax tree that belongs to this compilation.</param>
/// <param name="ignoreAccessibility">Whether the model should ignore accessibility rules.</param>
/// <exception cref="ArgumentNullException">When <paramref name="syntaxTree"/> is null.</exception>
/// <exception cref="ArgumentException">When the tree is not part of this compilation.</exception>
public new SemanticModel GetSemanticModel(SyntaxTree syntaxTree, bool ignoreAccessibility)
{
    if (syntaxTree == null)
    {
        throw new ArgumentNullException(nameof(syntaxTree));
    }

    // The tree must be one of this compilation's trees.
    var rootNamespaces = _syntaxAndDeclarations.GetLazyState().RootNamespaces;
    if (!rootNamespaces.ContainsKey(syntaxTree))
    {
        throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeNotFoundTo, syntaxTree), nameof(syntaxTree));
    }

    return new SyntaxTreeSemanticModel(this, syntaxTree, ignoreAccessibility);
}
// When building symbols from the declaration table (lazily), or inside a type, or when
// compiling a method body, we may not have a BinderContext in hand for the enclosing
// scopes. Therefore, we build them when needed (and cache them) using a ContextBuilder.
// Since a ContextBuilder is only a cache, and the identity of the ContextBuilders and
// BinderContexts have no semantic meaning, we can reuse them or rebuild them, whichever is
// most convenient. We store them using weak references so that GC pressure will cause them
// to be recycled.
// Indexed by syntax-tree ordinal (see GetBinderFactory); allocated lazily.
private WeakReference<BinderFactory>[] _binderFactories;
// Returns the cached BinderFactory for the given tree, creating the cache array
// and/or the factory on demand. Thread-safe: racing threads publish via
// Interlocked.CompareExchange and losers adopt the winner's result.
internal BinderFactory GetBinderFactory(SyntaxTree syntaxTree)
{
    var treeNum = GetSyntaxTreeOrdinal(syntaxTree);
    var binderFactories = _binderFactories;
    if (binderFactories == null)
    {
        binderFactories = new WeakReference<BinderFactory>[this.SyntaxTrees.Length];
        // If another thread installed an array first, use that one instead of ours.
        binderFactories = Interlocked.CompareExchange(ref _binderFactories, binderFactories, null) ?? binderFactories;
    }

    BinderFactory previousFactory;
    var previousWeakReference = binderFactories[treeNum];
    if (previousWeakReference != null && previousWeakReference.TryGetTarget(out previousFactory))
    {
        // Cached factory is still alive; reuse it.
        return previousFactory;
    }

    // Cache miss (never created, or collected under GC pressure) — build a new one.
    return AddNewFactory(syntaxTree, ref binderFactories[treeNum]);
}
// Installs a freshly created BinderFactory into the given cache slot using a CAS
// loop. If another thread installs a live factory first, that factory is returned
// and ours is discarded (factories are pure caches, so discarding is harmless).
private BinderFactory AddNewFactory(SyntaxTree syntaxTree, ref WeakReference<BinderFactory> slot)
{
    var newFactory = new BinderFactory(this, syntaxTree);
    var newWeakReference = new WeakReference<BinderFactory>(newFactory);

    while (true)
    {
        BinderFactory previousFactory;
        WeakReference<BinderFactory> previousWeakReference = slot;
        if (previousWeakReference != null && previousWeakReference.TryGetTarget(out previousFactory))
        {
            // Another thread won the race with a still-live factory; use it.
            return previousFactory;
        }

        // Replace the empty/dead weak reference; retry if the slot changed underneath us.
        if (Interlocked.CompareExchange(ref slot, newWeakReference, previousWeakReference) == previousWeakReference)
        {
            return newFactory;
        }
    }
}
// Resolves the reference's tree to its binder factory, then binds the referenced node.
internal Binder GetBinder(SyntaxReference reference)
{
    var factory = GetBinderFactory(reference.SyntaxTree);
    return factory.GetBinder((CSharpSyntaxNode)reference.GetSyntax());
}
// Returns the binder for the given node, via the node's tree's binder factory.
internal Binder GetBinder(CSharpSyntaxNode syntax) => GetBinderFactory(syntax.SyntaxTree).GetBinder(syntax);
/// <summary>
/// Returns imported symbols for the given declaration.
/// </summary>
internal Imports GetImports(SingleNamespaceDeclaration declaration)
{
    var reference = declaration.SyntaxReference;
    var factory = GetBinderFactory(reference.SyntaxTree);
    return factory.GetImportsBinder((CSharpSyntaxNode)reference.GetSyntax()).GetImports();
}
// Returns the merged bound imports of the submission's global namespace,
// or Imports.Empty when there are none.
internal Imports GetSubmissionImports()
{
    var globalNamespace = (SourceNamespaceSymbol)SourceModule.GlobalNamespace;
    return globalNamespace.GetBoundImportsMerged().SingleOrDefault() ?? Imports.Empty;
}
// Returns the interactive usings binder for a submission compilation, or null
// for an empty submission (no script class).
internal InteractiveUsingsBinder GetInteractiveUsingsBinder()
{
    Debug.Assert(IsSubmission);

    // empty compilation:
    if ((object)ScriptClass == null)
    {
        Debug.Assert(_syntaxAndDeclarations.ExternalSyntaxTrees.Length == 0);
        return null;
    }

    var submissionTree = _syntaxAndDeclarations.ExternalSyntaxTrees.Single();
    return GetBinderFactory(submissionTree).GetInteractiveUsingsBinder();
}
// Binds the global usings specified via compilation options.
private Imports BindGlobalUsings() => Imports.FromGlobalUsings(this);
// Creates the "global::" alias symbol, bound in a container binder rooted at
// a buck-stops-here binder for this compilation.
private AliasSymbol CreateGlobalNamespaceAlias()
{
    var globalNs = this.GlobalNamespace;
    var binder = new InContainerBinder(globalNs, new BuckStopsHereBinder(this));
    return AliasSymbol.CreateGlobalNamespaceAlias(globalNs, binder);
}
// Marks the given tree as completed and raises CompilationUnitCompletedEvent for it;
// when the last tree completes, also raises CompilationCompletedEvent and completes
// the event queue. Events are enqueued outside the lock.
private void CompleteTree(SyntaxTree tree)
{
    bool completedCompilationUnit = false;
    bool completedCompilation = false;

    // Lazily create the completed-trees set; losing racers adopt the winner's set.
    if (_lazyCompilationUnitCompletedTrees == null) Interlocked.CompareExchange(ref _lazyCompilationUnitCompletedTrees, new HashSet<SyntaxTree>(), null);
    lock (_lazyCompilationUnitCompletedTrees)
    {
        if (_lazyCompilationUnitCompletedTrees.Add(tree))
        {
            // First completion of this tree.
            completedCompilationUnit = true;
            if (_lazyCompilationUnitCompletedTrees.Count == this.SyntaxTrees.Length)
            {
                completedCompilation = true;
            }
        }
    }

    if (completedCompilationUnit)
    {
        EventQueue.Enqueue(new CompilationUnitCompletedEvent(this, tree));
    }

    if (completedCompilation)
    {
        EventQueue.Enqueue(new CompilationCompletedEvent(this));
        EventQueue.Complete(); // signal the end of compilation events
    }
}
// Reports HDN_UnusedUsingDirective / HDN_UnusedExternAlias for recorded import
// directives that were never used, optionally restricted to a single tree, and
// then marks the affected tree(s) as completed (see CompleteTree).
internal void ReportUnusedImports(DiagnosticBag diagnostics, CancellationToken cancellationToken, SyntaxTree filterTree = null)
{
    if (_lazyImportInfos != null)
    {
        foreach (ImportInfo info in _lazyImportInfos)
        {
            cancellationToken.ThrowIfCancellationRequested();

            SyntaxTree infoTree = info.Tree;
            // Honor the optional tree filter.
            if (filterTree == null || filterTree == infoTree)
            {
                TextSpan infoSpan = info.Span;
                if (!this.IsImportDirectiveUsed(infoTree, infoSpan.Start))
                {
                    ErrorCode code = info.Kind == SyntaxKind.ExternAliasDirective
                        ? ErrorCode.HDN_UnusedExternAlias
                        : ErrorCode.HDN_UnusedUsingDirective;
                    diagnostics.Add(code, infoTree.GetLocation(infoSpan));
                }
            }
        }
    }

    // By definition, a tree is complete when all of its compiler diagnostics have been reported.
    // Since unused imports are the last thing we compute and report, a tree is complete when
    // the unused imports have been reported.
    if (EventQueue != null)
    {
        if (filterTree != null)
        {
            CompleteTree(filterTree);
        }
        else
        {
            foreach (var tree in this.SyntaxTrees)
            {
                CompleteTree(tree);
            }
        }
    }
}
// Records a using directive for later unused-import analysis.
internal void RecordImport(UsingDirectiveSyntax syntax) => RecordImportInternal(syntax);
// Records an extern alias directive for later unused-import analysis.
internal void RecordImport(ExternAliasDirectiveSyntax syntax) => RecordImportInternal(syntax);
// Adds the directive's identifying info to the lazily-created import set.
private void RecordImportInternal(CSharpSyntaxNode syntax)
{
    var info = new ImportInfo(syntax.SyntaxTree, syntax.Kind(), syntax.Span);
    LazyInitializer.EnsureInitialized(ref _lazyImportInfos).Add(info);
}
// Identifies a single using/extern-alias directive (tree + kind + span) for
// unused-import tracking.
private struct ImportInfo : IEquatable<ImportInfo>
{
    public readonly SyntaxTree Tree;
    public readonly SyntaxKind Kind;
    public readonly TextSpan Span;

    public ImportInfo(SyntaxTree tree, SyntaxKind kind, TextSpan span)
    {
        Tree = tree;
        Kind = kind;
        Span = span;
    }

    public override bool Equals(object obj)
    {
        return obj is ImportInfo && Equals((ImportInfo)obj);
    }

    public bool Equals(ImportInfo other)
    {
        return Tree == other.Tree
            && Kind == other.Kind
            && Span == other.Span;
    }

    public override int GetHashCode()
    {
        // Kind is deliberately left out; equal values still hash equally.
        return Hash.Combine(Tree, Span.Start);
    }
}
#endregion
#region Diagnostics
// The message provider shared with the syntax/declaration manager.
internal override CommonMessageProvider MessageProvider => _syntaxAndDeclarations.MessageProvider;
/// <summary>
/// The bag in which semantic analysis should deposit its diagnostics.
/// The bag is created lazily and race-safely on first access.
/// </summary>
internal DiagnosticBag DeclarationDiagnostics
{
    get
    {
        // We should only be placing diagnostics in this bag until
        // we are done gathering declaration diagnostics. Assert that is
        // the case. But since we have bugs (see https://github.com/dotnet/roslyn/issues/846)
        // we disable the assertion until they are fixed.
        // (The "|| true" makes the assert vacuous on purpose.)
        Debug.Assert(!_declarationDiagnosticsFrozen || true);
        if (_lazyDeclarationDiagnostics == null)
        {
            // First-wins publication; a losing racer's bag is discarded.
            var diagnostics = new DiagnosticBag();
            Interlocked.CompareExchange(ref _lazyDeclarationDiagnostics, diagnostics, null);
        }

        return _lazyDeclarationDiagnostics;
    }
}
// Freezes the declaration-diagnostics bag and returns its current contents
// (empty when the bag was never created).
private IEnumerable<Diagnostic> FreezeDeclarationDiagnostics()
{
    _declarationDiagnosticsFrozen = true;
    return _lazyDeclarationDiagnostics?.AsEnumerable() ?? Enumerable.Empty<Diagnostic>();
}
// Lazily-created bag backing DeclarationDiagnostics.
private DiagnosticBag _lazyDeclarationDiagnostics;
// Set by FreezeDeclarationDiagnostics once declaration diagnostics are final.
private bool _declarationDiagnosticsFrozen;
/// <summary>
/// A bag in which diagnostics that should be reported after code gen can be deposited.
/// </summary>
internal DiagnosticBag AdditionalCodegenWarnings => _additionalCodegenWarnings;

private readonly DiagnosticBag _additionalCodegenWarnings = new DiagnosticBag();
// The declaration table derived from all syntax trees in this compilation.
internal DeclarationTable Declarations => _syntaxAndDeclarations.GetLazyState().DeclarationTable;
/// <summary>
/// Gets the diagnostics produced during the parsing stage of a compilation. There are no diagnostics for declarations or accessor or
/// method bodies, for example.
/// </summary>
public override ImmutableArray<Diagnostic> GetParseDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
    return GetDiagnostics(CompilationStage.Parse, includeEarlierStages: false, cancellationToken: cancellationToken);
}
/// <summary>
/// Gets the diagnostics produced during symbol declaration headers. There are no diagnostics for accessor or
/// method bodies, for example.
/// </summary>
public override ImmutableArray<Diagnostic> GetDeclarationDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
    return GetDiagnostics(CompilationStage.Declare, includeEarlierStages: false, cancellationToken: cancellationToken);
}
/// <summary>
/// Gets the diagnostics produced during the analysis of method bodies and field initializers.
/// </summary>
public override ImmutableArray<Diagnostic> GetMethodBodyDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
    return GetDiagnostics(CompilationStage.Compile, includeEarlierStages: false, cancellationToken: cancellationToken);
}
/// <summary>
/// Gets the all the diagnostics for the compilation, including syntax, declaration, and binding. Does not
/// include any diagnostics that might be produced during emit.
/// </summary>
public override ImmutableArray<Diagnostic> GetDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
    return GetDiagnostics(DefaultDiagnosticsStage, includeEarlierStages: true, cancellationToken: cancellationToken);
}
/// <summary>
/// Collects diagnostics for the requested compilation stage (and, optionally, all
/// earlier stages), then filters them per compiler options and pragmas.
/// </summary>
/// <param name="stage">Highest stage whose diagnostics are requested.</param>
/// <param name="includeEarlierStages">Whether stages below <paramref name="stage"/> are included.</param>
/// <param name="cancellationToken">Cancellation token honored between/within stages.</param>
internal ImmutableArray<Diagnostic> GetDiagnostics(CompilationStage stage, bool includeEarlierStages, CancellationToken cancellationToken)
{
    var builder = DiagnosticBag.GetInstance();

    // Parse stage: syntax-tree and #load-directive diagnostics.
    // (Guards below are parenthesized consistently; previously only this first one was.)
    if (stage == CompilationStage.Parse || (stage > CompilationStage.Parse && includeEarlierStages))
    {
        var syntaxTrees = this.SyntaxTrees;
        if (this.Options.ConcurrentBuild)
        {
            var parallelOptions = cancellationToken.CanBeCanceled
                ? new ParallelOptions() { CancellationToken = cancellationToken }
                : DefaultParallelOptions;

            Parallel.For(0, syntaxTrees.Length, parallelOptions,
                UICultureUtilities.WithCurrentUICulture<int>(i =>
                {
                    var syntaxTree = syntaxTrees[i];
                    AppendLoadDirectiveDiagnostics(builder, _syntaxAndDeclarations, syntaxTree);
                    builder.AddRange(syntaxTree.GetDiagnostics(cancellationToken));
                }));
        }
        else
        {
            foreach (var syntaxTree in syntaxTrees)
            {
                cancellationToken.ThrowIfCancellationRequested();
                AppendLoadDirectiveDiagnostics(builder, _syntaxAndDeclarations, syntaxTree);

                cancellationToken.ThrowIfCancellationRequested();
                builder.AddRange(syntaxTree.GetDiagnostics(cancellationToken));
            }
        }
    }

    // Declare stage: option errors, reference-resolution and declaration diagnostics.
    if (stage == CompilationStage.Declare || (stage > CompilationStage.Declare && includeEarlierStages))
    {
        builder.AddRange(Options.Errors);

        cancellationToken.ThrowIfCancellationRequested();

        // the set of diagnostics related to establishing references.
        builder.AddRange(GetBoundReferenceManager().Diagnostics);

        cancellationToken.ThrowIfCancellationRequested();

        builder.AddRange(GetSourceDeclarationDiagnostics(cancellationToken: cancellationToken));
    }

    cancellationToken.ThrowIfCancellationRequested();

    // Compile stage: method-body and field-initializer diagnostics.
    if (stage == CompilationStage.Compile || (stage > CompilationStage.Compile && includeEarlierStages))
    {
        var methodBodyDiagnostics = DiagnosticBag.GetInstance();
        GetDiagnosticsForAllMethodBodies(methodBodyDiagnostics, cancellationToken);
        builder.AddRangeAndFree(methodBodyDiagnostics);
    }

    // Before returning diagnostics, we filter warnings
    // to honor the compiler options (e.g., /nowarn, /warnaserror and /warn) and the pragmas.
    var result = DiagnosticBag.GetInstance();
    FilterAndAppendAndFreeDiagnostics(result, ref builder);
    return result.ToReadOnlyAndFree<Diagnostic>();
}
// Appends diagnostics attached to the #load directives of the given tree,
// optionally passing them through a location filter first. No-op when the tree
// has no #load directives.
private static void AppendLoadDirectiveDiagnostics(DiagnosticBag builder, SyntaxAndDeclarationManager syntaxAndDeclarations, SyntaxTree syntaxTree, Func<IEnumerable<Diagnostic>, IEnumerable<Diagnostic>> locationFilterOpt = null)
{
    ImmutableArray<LoadDirective> loadDirectives;
    if (!syntaxAndDeclarations.GetLazyState().LoadDirectiveMap.TryGetValue(syntaxTree, out loadDirectives))
    {
        return;
    }

    Debug.Assert(!loadDirectives.IsEmpty);
    foreach (var directive in loadDirectives)
    {
        IEnumerable<Diagnostic> diagnostics = directive.Diagnostics;
        if (locationFilterOpt != null)
        {
            diagnostics = locationFilterOpt(diagnostics);
        }
        builder.AddRange(diagnostics);
    }
}
// Do the steps in compilation to get the method body diagnostics, but don't actually generate
// IL or emit an assembly.
private void GetDiagnosticsForAllMethodBodies(DiagnosticBag diagnostics, CancellationToken cancellationToken)
{
    // Compile every method body with no module builder, so only diagnostics are produced.
    MethodCompiler.CompileMethodBodies(
        compilation: this,
        moduleBeingBuiltOpt: null,
        generateDebugInfo: false,
        hasDeclarationErrors: false,
        diagnostics: diagnostics,
        filterOpt: null,
        cancellationToken: cancellationToken);
    // Doc-comment diagnostics (null stream: nothing is written).
    DocumentationCommentCompiler.WriteDocumentationCommentXml(this, null, null, diagnostics, cancellationToken);
    // Unused imports are reported last; this also completes the trees (see CompleteTree).
    this.ReportUnusedImports(diagnostics, cancellationToken);
}
// True when the symbol is defined in the given tree/span, either directly, via its
// partial-method implementation part, or (for implicit constructors) via its
// containing type.
private static bool IsDefinedOrImplementedInSourceTree(Symbol symbol, SyntaxTree tree, TextSpan? span)
{
    if (symbol.IsDefinedInSourceTree(tree, span))
    {
        return true;
    }

    // A partial method's definition part may live in a different tree than its body.
    if (symbol.IsPartialDefinition())
    {
        MethodSymbol implementationPart = ((MethodSymbol)symbol).PartialImplementationPart;
        if ((object)implementationPart != null)
        {
            return implementationPart.IsDefinedInSourceTree(tree, span);
        }
    }

    if (symbol.Kind == SymbolKind.Method && symbol.IsImplicitlyDeclared && ((MethodSymbol)symbol).MethodKind == MethodKind.Constructor)
    {
        // Include implicitly declared constructor if containing type is included
        return IsDefinedOrImplementedInSourceTree(symbol.ContainingType, tree, span);
    }

    return false;
}
// Computes method-body (Compile-stage) diagnostics restricted to symbols defined or
// implemented in the given tree/span. Unused-import hints are added only when the
// whole tree is analyzed.
private ImmutableArray<Diagnostic> GetDiagnosticsForMethodBodiesInTree(SyntaxTree tree, TextSpan? span, CancellationToken cancellationToken)
{
    DiagnosticBag diagnostics = DiagnosticBag.GetInstance();
    // No module builder: compile for diagnostics only, filtered to this tree/span.
    MethodCompiler.CompileMethodBodies(
        compilation: this,
        moduleBeingBuiltOpt: null,
        generateDebugInfo: false,
        hasDeclarationErrors: false,
        diagnostics: diagnostics,
        filterOpt: s => IsDefinedOrImplementedInSourceTree(s, tree, span),
        cancellationToken: cancellationToken);
    DocumentationCommentCompiler.WriteDocumentationCommentXml(this, null, null, diagnostics, cancellationToken, tree, span);

    // Report unused directives only if computing diagnostics for the entire tree.
    // Otherwise we cannot determine if a particular directive is used outside of the given sub-span within the tree.
    if (!span.HasValue || span.Value == tree.GetRoot(cancellationToken).FullSpan)
    {
        ReportUnusedImports(diagnostics, cancellationToken, tree);
    }

    return diagnostics.ToReadOnlyAndFree();
}
/// <summary>
/// Filter out warnings based on the compiler options (/nowarn, /warn and /warnaserror) and the pragma warning directives.
/// 'incoming' is freed and set to null.
/// </summary>
/// <returns>True when there is no error or warning treated as an error.</returns>
internal override bool FilterAndAppendAndFreeDiagnostics(DiagnosticBag accumulator, ref DiagnosticBag incoming)
{
    var succeeded = FilterAndAppendDiagnostics(accumulator, incoming.AsEnumerableWithoutResolution());
    incoming.Free();
    incoming = null;
    return succeeded;
}
/// <summary>
/// Filter out warnings based on the compiler options (/nowarn, /warn and /warnaserror) and the pragma warning directives.
/// </summary>
/// <returns>True when there is no error.</returns>
private bool FilterAndAppendDiagnostics(DiagnosticBag accumulator, IEnumerable<Diagnostic> incoming)
{
    var hasError = false;
    var reportSuppressed = Options.ReportSuppressedDiagnostics;

    foreach (var diagnostic in incoming)
    {
        var filtered = _options.FilterDiagnostic(diagnostic);
        if (filtered == null)
        {
            // Diagnostic was suppressed entirely by options/pragmas.
            continue;
        }
        if (filtered.IsSuppressed && !reportSuppressed)
        {
            continue;
        }
        if (filtered.Severity == DiagnosticSeverity.Error)
        {
            hasError = true;
        }

        accumulator.Add(filtered);
    }

    return !hasError;
}
// Forces completion of declaration binding (optionally limited to one tree/span),
// freezes and returns the declaration diagnostics, applying the optional location
// filter, and appends CLS-compliance diagnostics.
private ImmutableArray<Diagnostic> GetSourceDeclarationDiagnostics(SyntaxTree syntaxTree = null, TextSpan? filterSpanWithinTree = null, Func<IEnumerable<Diagnostic>, SyntaxTree, TextSpan?, IEnumerable<Diagnostic>> locationFilterOpt = null, CancellationToken cancellationToken = default(CancellationToken))
{
    // global imports diagnostics (specified via compilation options):
    GlobalImports.Complete(cancellationToken);

    // Restrict symbol completion to the requested tree/span when one is given.
    SourceLocation location = null;
    if (syntaxTree != null)
    {
        var root = syntaxTree.GetRoot(cancellationToken);
        location = filterSpanWithinTree.HasValue ?
            new SourceLocation(syntaxTree, filterSpanWithinTree.Value) :
            new SourceLocation(root);
    }

    Assembly.ForceComplete(location, cancellationToken);
    var result = this.FreezeDeclarationDiagnostics();

    if (locationFilterOpt != null)
    {
        Debug.Assert(syntaxTree != null);
        result = locationFilterOpt(result, syntaxTree, filterSpanWithinTree);
    }

    // NOTE: Concatenate the CLS diagnostics *after* filtering by tree/span, because they're already filtered.
    ImmutableArray<Diagnostic> clsDiagnostics = GetClsComplianceDiagnostics(syntaxTree, filterSpanWithinTree, cancellationToken);

    return result.AsImmutable().Concat(clsDiagnostics);
}
// Returns CLS-compliance diagnostics. Tree-scoped results are computed fresh each
// call; whole-compilation results are computed once and cached race-safely.
private ImmutableArray<Diagnostic> GetClsComplianceDiagnostics(SyntaxTree syntaxTree, TextSpan? filterSpanWithinTree, CancellationToken cancellationToken)
{
    if (syntaxTree != null)
    {
        // Tree/span-scoped check: not cached.
        var builder = DiagnosticBag.GetInstance();
        ClsComplianceChecker.CheckCompliance(this, builder, cancellationToken, syntaxTree, filterSpanWithinTree);
        return builder.ToReadOnlyAndFree();
    }

    if (_lazyClsComplianceDiagnostics.IsDefault)
    {
        // Whole-compilation check: first-wins initialization of the cache.
        var builder = DiagnosticBag.GetInstance();
        ClsComplianceChecker.CheckCompliance(this, builder, cancellationToken);
        ImmutableInterlocked.InterlockedInitialize(ref _lazyClsComplianceDiagnostics, builder.ToReadOnlyAndFree());
    }

    Debug.Assert(!_lazyClsComplianceDiagnostics.IsDefault);
    return _lazyClsComplianceDiagnostics;
}
// Lazily yields only the diagnostics whose location lies within the given tree
// (and optional span). Deferred, like the original iterator implementation.
private static IEnumerable<Diagnostic> FilterDiagnosticsByLocation(IEnumerable<Diagnostic> diagnostics, SyntaxTree tree, TextSpan? filterSpanWithinTree)
{
    return diagnostics.Where(d => d.ContainsLocation(tree, filterSpanWithinTree));
}
// Collects diagnostics for a single tree (optionally a sub-span) for the requested
// stage(s), filtering each stage's output by location, then applies option/pragma
// filtering before returning.
internal ImmutableArray<Diagnostic> GetDiagnosticsForSyntaxTree(
    CompilationStage stage,
    SyntaxTree syntaxTree,
    TextSpan? filterSpanWithinTree,
    bool includeEarlierStages,
    CancellationToken cancellationToken = default(CancellationToken))
{
    cancellationToken.ThrowIfCancellationRequested();

    var builder = DiagnosticBag.GetInstance();
    // Parse stage: syntax and #load diagnostics, location-filtered.
    if (stage == CompilationStage.Parse || (stage > CompilationStage.Parse && includeEarlierStages))
    {
        AppendLoadDirectiveDiagnostics(builder, _syntaxAndDeclarations, syntaxTree,
            diagnostics => FilterDiagnosticsByLocation(diagnostics, syntaxTree, filterSpanWithinTree));

        var syntaxDiagnostics = syntaxTree.GetDiagnostics();
        syntaxDiagnostics = FilterDiagnosticsByLocation(syntaxDiagnostics, syntaxTree, filterSpanWithinTree);
        builder.AddRange(syntaxDiagnostics);
    }

    cancellationToken.ThrowIfCancellationRequested();
    // Declare stage: declaration diagnostics already filtered by the callback.
    if (stage == CompilationStage.Declare || (stage > CompilationStage.Declare && includeEarlierStages))
    {
        var declarationDiagnostics = GetSourceDeclarationDiagnostics(syntaxTree, filterSpanWithinTree, FilterDiagnosticsByLocation, cancellationToken);
        Debug.Assert(declarationDiagnostics.All(d => d.ContainsLocation(syntaxTree, filterSpanWithinTree)));
        builder.AddRange(declarationDiagnostics);
    }

    cancellationToken.ThrowIfCancellationRequested();
    if (stage == CompilationStage.Compile || (stage > CompilationStage.Compile && includeEarlierStages))
    {
        //remove some errors that don't have locations in the tree, like "no suitable main method."
        //Members in trees other than the one being examined are not compiled. This includes field
        //initializers which can result in 'field is never initialized' warnings for fields in partial
        //types when the field is in a different source file than the one for which we're getting diagnostics.
        //For that reason the bag must be also filtered by tree.
        IEnumerable<Diagnostic> methodBodyDiagnostics = GetDiagnosticsForMethodBodiesInTree(syntaxTree, filterSpanWithinTree, cancellationToken);

        // TODO: Enable the below commented assert and remove the filtering code in the next line.
        //       GetDiagnosticsForMethodBodiesInTree seems to be returning diagnostics with locations that don't satisfy the filter tree/span, this must be fixed.
        // Debug.Assert(methodBodyDiagnostics.All(d => DiagnosticContainsLocation(d, syntaxTree, filterSpanWithinTree)));
        methodBodyDiagnostics = FilterDiagnosticsByLocation(methodBodyDiagnostics, syntaxTree, filterSpanWithinTree);

        builder.AddRange(methodBodyDiagnostics);
    }

    // Before returning diagnostics, we filter warnings
    // to honor the compiler options (/nowarn, /warnaserror and /warn) and the pragmas.
    var result = DiagnosticBag.GetInstance();
    FilterAndAppendAndFreeDiagnostics(result, ref builder);
    return result.ToReadOnlyAndFree<Diagnostic>();
}
#endregion
#region Resources
// Writes the default Win32 VERSIONINFO resource for this compilation, built from
// source-assembly attributes, into the given resource stream.
protected override void AppendDefaultVersionResource(Stream resourceStream)
{
    var sourceAssembly = SourceAssembly;
    // File version falls back to the assembly identity's version when not specified.
    string fileVersion = sourceAssembly.FileVersion ?? sourceAssembly.Identity.Version.ToString();

    Win32ResourceConversions.AppendVersionToResourceStream(resourceStream,
        !this.Options.OutputKind.IsApplication(),
        fileVersion: fileVersion,
        originalFileName: this.SourceModule.Name,
        internalName: this.SourceModule.Name,
        productVersion: sourceAssembly.InformationalVersion ?? fileVersion,
        fileDescription: sourceAssembly.Title ?? " ", //alink would give this a blank if nothing was supplied.
        assemblyVersion: sourceAssembly.Identity.Version,
        legalCopyright: sourceAssembly.Copyright ?? " ", //alink would give this a blank if nothing was supplied.
        legalTrademarks: sourceAssembly.Trademark,
        productName: sourceAssembly.Product,
        comments: sourceAssembly.Description,
        companyName: sourceAssembly.Company);
}
#endregion
#region Emit
// Linker major version recorded in the emitted PE header.
// NOTE(review): 0x30 appears to be a fixed marker value for this compiler — confirm
// its intended meaning before changing.
internal override byte LinkerMajorVersion => 0x30;
// Whether the source assembly is delay-signed.
internal override bool IsDelaySigned => SourceAssembly.IsDelaySigned;
// Strong-name key information of the source assembly.
internal override StrongNameKeys StrongNameKeys => SourceAssembly.StrongNameKeys;
// Creates the PE module builder used for emit: a net-module builder or an assembly
// builder depending on output kind. Returns null when the runtime metadata version
// cannot be determined (diagnostics already reported).
internal override CommonPEModuleBuilder CreateModuleBuilder(
    EmitOptions emitOptions,
    IMethodSymbol debugEntryPoint,
    IEnumerable<ResourceDescription> manifestResources,
    CompilationTestData testData,
    DiagnosticBag diagnostics,
    CancellationToken cancellationToken)
{
    Debug.Assert(!IsSubmission || HasCodeToEmit());

    string runtimeMDVersion = GetRuntimeMetadataVersion(emitOptions, diagnostics);
    if (runtimeMDVersion == null)
    {
        return null;
    }

    var moduleProps = ConstructModuleSerializationProperties(emitOptions, runtimeMDVersion);

    if (manifestResources == null)
    {
        manifestResources = SpecializedCollections.EmptyEnumerable<ResourceDescription>();
    }

    PEModuleBuilder moduleBeingBuilt;
    if (_options.OutputKind.IsNetModule())
    {
        moduleBeingBuilt = new PENetModuleBuilder(
            (SourceModuleSymbol)SourceModule,
            emitOptions,
            moduleProps,
            manifestResources);
    }
    else
    {
        // Invalid output kinds fall back to a DLL.
        var kind = _options.OutputKind.IsValid() ? _options.OutputKind : OutputKind.DynamicallyLinkedLibrary;
        moduleBeingBuilt = new PEAssemblyBuilder(
            SourceAssembly,
            emitOptions,
            kind,
            moduleProps,
            manifestResources);
    }

    if (debugEntryPoint != null)
    {
        moduleBeingBuilt.SetDebugEntryPoint((MethodSymbol)debugEntryPoint, diagnostics);
    }

    // testData is only passed when running tests.
    if (testData != null)
    {
        moduleBeingBuilt.SetMethodTestData(testData.Methods);
        testData.Module = moduleBeingBuilt;
    }

    return moduleBeingBuilt;
}
// Drives compilation into the given module builder: declaration checks, method-body
// compilation (skipped for metadata-only emit), win32 resources, XML docs, and
// unused-import reporting. Returns false as soon as an error blocks emit.
internal override bool CompileImpl(
    CommonPEModuleBuilder moduleBuilder,
    Stream win32Resources,
    Stream xmlDocStream,
    bool emittingPdb,
    DiagnosticBag diagnostics,
    Predicate<ISymbol> filterOpt,
    CancellationToken cancellationToken)
{
    // The diagnostics should include syntax and declaration errors. We insert these before calling Emitter.Emit, so that the emitter
    // does not attempt to emit if there are declaration errors (but we do insert all errors from method body binding...)
    bool hasDeclarationErrors = !FilterAndAppendDiagnostics(diagnostics, GetDiagnostics(CompilationStage.Declare, true, cancellationToken));

    // TODO (tomat): NoPIA:
    // EmbeddedSymbolManager.MarkAllDeferredSymbolsAsReferenced(this)

    var moduleBeingBuilt = (PEModuleBuilder)moduleBuilder;

    if (moduleBeingBuilt.EmitOptions.EmitMetadataOnly)
    {
        // Metadata-only: no method bodies are compiled.
        if (hasDeclarationErrors)
        {
            return false;
        }

        if (moduleBeingBuilt.SourceModule.HasBadAttributes)
        {
            // If there were errors but no declaration diagnostics, explicitly add a "Failed to emit module" error.
            diagnostics.Add(ErrorCode.ERR_ModuleEmitFailure, NoLocation.Singleton, ((Cci.INamedEntity)moduleBeingBuilt).Name);
            return false;
        }

        SynthesizedMetadataCompiler.ProcessSynthesizedMembers(this, moduleBeingBuilt, cancellationToken);
    }
    else
    {
        // PDB emit requires every tree to have a checksum/debug document.
        if (emittingPdb && !StartSourceChecksumCalculation(moduleBeingBuilt, diagnostics))
        {
            return false;
        }

        // Perform initial bind of method bodies in spite of earlier errors. This is the same
        // behavior as when calling GetDiagnostics()

        // Use a temporary bag so we don't have to refilter pre-existing diagnostics.
        DiagnosticBag methodBodyDiagnosticBag = DiagnosticBag.GetInstance();

        MethodCompiler.CompileMethodBodies(
            this,
            moduleBeingBuilt,
            emittingPdb,
            hasDeclarationErrors,
            diagnostics: methodBodyDiagnosticBag,
            filterOpt: filterOpt,
            cancellationToken: cancellationToken);

        SetupWin32Resources(moduleBeingBuilt, win32Resources, methodBodyDiagnosticBag);

        ReportManifestResourceDuplicates(
            moduleBeingBuilt.ManifestResources,
            SourceAssembly.Modules.Skip(1).Select((m) => m.Name),   //all modules except the first one
            AddedModulesResourceNames(methodBodyDiagnosticBag),
            methodBodyDiagnosticBag);

        bool hasMethodBodyErrorOrWarningAsError = !FilterAndAppendAndFreeDiagnostics(diagnostics, ref methodBodyDiagnosticBag);

        if (hasDeclarationErrors || hasMethodBodyErrorOrWarningAsError)
        {
            return false;
        }
    }

    cancellationToken.ThrowIfCancellationRequested();

    // Use a temporary bag so we don't have to refilter pre-existing diagnostics.
    DiagnosticBag xmlDiagnostics = DiagnosticBag.GetInstance();

    string assemblyName = FileNameUtilities.ChangeExtension(moduleBeingBuilt.EmitOptions.OutputNameOverride, extension: null);
    DocumentationCommentCompiler.WriteDocumentationCommentXml(this, assemblyName, xmlDocStream, xmlDiagnostics, cancellationToken);

    if (!FilterAndAppendAndFreeDiagnostics(diagnostics, ref xmlDiagnostics))
    {
        return false;
    }

    // Use a temporary bag so we don't have to refilter pre-existing diagnostics.
    DiagnosticBag importDiagnostics = DiagnosticBag.GetInstance();
    this.ReportUnusedImports(importDiagnostics, cancellationToken);

    if (!FilterAndAppendAndFreeDiagnostics(diagnostics, ref importDiagnostics))
    {
        // Unused-import reports are hidden/info only; filtering should never fail here.
        Debug.Assert(false, "Should never produce an error");
        return false;
    }

    return true;
}
// TODO: consider unifying with VB
// Validates that every tree with a file path has a known text encoding (required for
// PDB checksums), then registers debug documents for all distinct tree paths and for
// #pragma checksum directives. Returns false (with diagnostics) when a tree is not
// debuggable.
private bool StartSourceChecksumCalculation(PEModuleBuilder moduleBeingBuilt, DiagnosticBag diagnostics)
{
    var syntaxTrees = this.SyntaxTrees;

    // Check that all syntax trees are debuggable:
    bool allTreesDebuggable = true;
    foreach (var tree in syntaxTrees)
    {
        if (!string.IsNullOrEmpty(tree.FilePath) && tree.GetText().Encoding == null)
        {
            diagnostics.Add(ErrorCode.ERR_EncodinglessSyntaxTree, tree.GetRoot().GetLocation());
            allTreesDebuggable = false;
        }
    }

    if (!allTreesDebuggable)
    {
        return false;
    }

    // Add debug documents for all trees with distinct paths.
    foreach (var tree in syntaxTrees)
    {
        if (!string.IsNullOrEmpty(tree.FilePath))
        {
            // compilation does not guarantee that all trees will have distinct paths.
            // Do not attempt adding a document for a particular path if we already added one.
            string normalizedPath = moduleBeingBuilt.NormalizeDebugDocumentPath(tree.FilePath, basePath: null);
            var existingDoc = moduleBeingBuilt.TryGetDebugDocumentForNormalizedPath(normalizedPath);
            if (existingDoc == null)
            {
                moduleBeingBuilt.AddDebugDocument(MakeDebugSourceDocumentForTree(normalizedPath, tree));
            }
        }
    }

    // Add debug documents for all pragmas.
    // If there are clashes with already processed directives, report warnings.
    // If there are clashes with debug documents that came from actual trees, ignore the pragma.
    foreach (var tree in syntaxTrees)
    {
        AddDebugSourceDocumentsForChecksumDirectives(moduleBeingBuilt, tree, diagnostics);
    }

    return true;
}
// Yields the names of embedded resources in all added modules (every module after
// the first). NOTE: this is a lazy iterator — diagnostics for unreadable modules
// are added to the bag only when the sequence is enumerated.
private IEnumerable<string> AddedModulesResourceNames(DiagnosticBag diagnostics)
{
    ImmutableArray<ModuleSymbol> modules = SourceAssembly.Modules;

    for (int i = 1; i < modules.Length; i++)
    {
        var m = (Symbols.Metadata.PE.PEModuleSymbol)modules[i];
        ImmutableArray<EmbeddedResource> resources;

        try
        {
            resources = m.Module.GetEmbeddedResourcesOrThrow();
        }
        catch (BadImageFormatException)
        {
            // Corrupt module metadata: report and skip its resources.
            diagnostics.Add(new CSDiagnosticInfo(ErrorCode.ERR_BindToBogus, m), NoLocation.Singleton);
            continue;
        }

        foreach (var resource in resources)
        {
            yield return resource.Name;
        }
    }
}
// Emits an edit-and-continue delta against the given baseline; all work is
// delegated to EmitHelpers.EmitDifference.
internal override EmitDifferenceResult EmitDifference(
    EmitBaseline baseline,
    IEnumerable<SemanticEdit> edits,
    Func<ISymbol, bool> isAddedSymbol,
    Stream metadataStream,
    Stream ilStream,
    Stream pdbStream,
    ICollection<MethodDefinitionHandle> updatedMethods,
    CompilationTestData testData,
    CancellationToken cancellationToken)
{
    return EmitHelpers.EmitDifference(
        this,
        baseline,
        edits,
        isAddedSymbol,
        metadataStream,
        ilStream,
        pdbStream,
        updatedMethods,
        testData,
        cancellationToken);
}
// Returns the runtime metadata version to emit. When neither the corlib nor the
// emit options supply one, reports WRN_NoRuntimeMetadataVersion and returns the
// empty string — or null if that warning is escalated to an error.
internal string GetRuntimeMetadataVersion(EmitOptions emitOptions, DiagnosticBag diagnostics)
{
    var version = GetRuntimeMetadataVersion(emitOptions);
    if (version != null)
    {
        return version;
    }

    var warningBag = DiagnosticBag.GetInstance();
    warningBag.Add(ErrorCode.WRN_NoRuntimeMetadataVersion, NoLocation.Singleton);
    return FilterAndAppendAndFreeDiagnostics(diagnostics, ref warningBag)
        ? string.Empty //prevent emitter from crashing.
        : null;
}
// Prefers the corlib's manifest metadata version; falls back to the emit options.
private string GetRuntimeMetadataVersion(EmitOptions emitOptions)
{
    var corAssembly = Assembly.CorLibrary as Symbols.Metadata.PE.PEAssemblySymbol;
    return (object)corAssembly != null
        ? corAssembly.Assembly.ManifestModule.MetadataVersion
        : emitOptions.RuntimeMetadataVersion;
}
// Registers debug source documents for well-formed #pragma checksum directives in
// the tree. Duplicate pragmas are allowed when checksum and GUID match; mismatches
// produce WRN_ConflictingChecksum; pragmas matching an actual tree document are ignored.
private static void AddDebugSourceDocumentsForChecksumDirectives(
    PEModuleBuilder moduleBeingBuilt,
    SyntaxTree tree,
    DiagnosticBag diagnostics)
{
    // Only consider pragmas that parsed without diagnostics.
    var checksumDirectives = tree.GetRoot().GetDirectives(d => d.Kind() == SyntaxKind.PragmaChecksumDirectiveTrivia &&
                                                         !d.ContainsDiagnostics);

    foreach (var directive in checksumDirectives)
    {
        var checksumDirective = (PragmaChecksumDirectiveTriviaSyntax)directive;
        var path = checksumDirective.File.ValueText;

        var checksumText = checksumDirective.Bytes.ValueText;
        var normalizedPath = moduleBeingBuilt.NormalizeDebugDocumentPath(path, basePath: tree.FilePath);
        var existingDoc = moduleBeingBuilt.TryGetDebugDocumentForNormalizedPath(normalizedPath);

        // duplicate checksum pragmas are valid as long as values match
        // if we have seen this document already, check for matching values.
        if (existingDoc != null)
        {
            // pragma matches a file path on an actual tree.
            // Dev12 compiler just ignores the pragma in this case which means that
            // checksum of the actual tree always wins and no warning is given.
            // We will continue doing the same.
            if (existingDoc.IsComputedChecksum)
            {
                continue;
            }

            var checksumAndAlgorithm = existingDoc.ChecksumAndAlgorithm;
            if (ChecksumMatches(checksumText, checksumAndAlgorithm.Item1))
            {
                var guid = Guid.Parse(checksumDirective.Guid.ValueText);
                if (guid == checksumAndAlgorithm.Item2)
                {
                    // all parts match, nothing to do
                    continue;
                }
            }

            // did not match to an existing document
            // produce a warning and ignore the pragma
            diagnostics.Add(ErrorCode.WRN_ConflictingChecksum, new SourceLocation(checksumDirective), path);
        }
        else
        {
            var newDocument = new Cci.DebugSourceDocument(
                normalizedPath,
                Cci.DebugSourceDocument.CorSymLanguageTypeCSharp,
                MakeChecksumBytes(checksumDirective.Bytes.ValueText),
                Guid.Parse(checksumDirective.Guid.ValueText));

            moduleBeingBuilt.AddDebugDocument(newDocument);
        }
    }
}
// Compares a checksum written as a hex string (from a #pragma checksum directive)
// against the raw checksum bytes of an existing debug document.
private static bool ChecksumMatches(string bytesText, ImmutableArray<byte> bytes)
{
    // Two hex characters encode one byte, so the lengths must correspond exactly.
    if (bytesText.Length != bytes.Length * 2)
    {
        return false;
    }

    for (int i = 0; i < bytes.Length; i++)
    {
        // e.g. "1A" in text becomes the byte 0x1A.
        int hi = SyntaxFacts.HexValue(bytesText[2 * i]);
        int lo = SyntaxFacts.HexValue(bytesText[2 * i + 1]);
        if ((hi << 4) + lo != bytes[i])
        {
            return false;
        }
    }

    return true;
}
// Decodes a hex string (e.g. "1A2B") into the corresponding immutable byte array.
private static ImmutableArray<byte> MakeChecksumBytes(string bytesText)
{
    int count = bytesText.Length / 2;
    var bytes = ArrayBuilder<byte>.GetInstance(count);

    for (int i = 0; i < count; i++)
    {
        // e.g. "1A" in text becomes the byte 0x1A.
        int hi = SyntaxFacts.HexValue(bytesText[2 * i]);
        int lo = SyntaxFacts.HexValue(bytesText[2 * i + 1]);
        bytes.Add((byte)((hi << 4) + lo));
    }

    return bytes.ToImmutableAndFree();
}
// Creates a debug document for a real syntax tree; the checksum is computed lazily
// from the tree's text via the supplied delegate.
private static Cci.DebugSourceDocument MakeDebugSourceDocumentForTree(string normalizedPath, SyntaxTree tree)
{
    return new Cci.DebugSourceDocument(normalizedPath, Cci.DebugSourceDocument.CorSymLanguageTypeCSharp, () => tree.GetChecksumAndAlgorithm());
}
// Attaches Win32 resources to the module being built. The stream may be either a
// COFF object or a .RES file; anything else is reported as ERR_BadWin32Res.
private void SetupWin32Resources(PEModuleBuilder moduleBeingBuilt, Stream win32Resources, DiagnosticBag diagnostics)
{
    if (win32Resources == null)
        return;

    switch (DetectWin32ResourceForm(win32Resources))
    {
        case Win32ResourceForm.COFF:
            moduleBeingBuilt.Win32ResourceSection = MakeWin32ResourcesFromCOFF(win32Resources, diagnostics);
            break;
        case Win32ResourceForm.RES:
            moduleBeingBuilt.Win32Resources = MakeWin32ResourceList(win32Resources, diagnostics);
            break;
        default:
            diagnostics.Add(ErrorCode.ERR_BadWin32Res, NoLocation.Singleton, "Unrecognized file format.");
            break;
    }
}
// True when at least one syntax tree contributes top-level members to emit.
internal override bool HasCodeToEmit()
{
    foreach (var tree in this.SyntaxTrees)
    {
        if (tree.GetCompilationUnitRoot().Members.Count > 0)
        {
            return true;
        }
    }

    return false;
}
#endregion
#region Common Members
// The overrides below bridge the language-agnostic Compilation/Symbol surface of the
// base class to the strongly typed C# implementations. Each one simply delegates,
// downcasting arguments to their C#-specific types where required.
protected override Compilation CommonWithReferences(IEnumerable<MetadataReference> newReferences)
{
    return WithReferences(newReferences);
}

protected override Compilation CommonWithAssemblyName(string assemblyName)
{
    return WithAssemblyName(assemblyName);
}

protected override ITypeSymbol CommonGetSubmissionResultType(out bool hasValue)
{
    return GetSubmissionResultType(out hasValue);
}

protected override IAssemblySymbol CommonAssembly
{
    get { return this.Assembly; }
}

protected override INamespaceSymbol CommonGlobalNamespace
{
    get { return this.GlobalNamespace; }
}

protected override CompilationOptions CommonOptions
{
    get { return _options; }
}

protected override Compilation CommonPreviousSubmission
{
    get { return _previousSubmission; }
}

protected override SemanticModel CommonGetSemanticModel(SyntaxTree syntaxTree, bool ignoreAccessibility)
{
    return this.GetSemanticModel((SyntaxTree)syntaxTree, ignoreAccessibility);
}

protected override IEnumerable<SyntaxTree> CommonSyntaxTrees
{
    get
    {
        return this.SyntaxTrees;
    }
}

protected override Compilation CommonAddSyntaxTrees(IEnumerable<SyntaxTree> trees)
{
    return this.AddSyntaxTrees(trees);
}

protected override Compilation CommonRemoveSyntaxTrees(IEnumerable<SyntaxTree> trees)
{
    return this.RemoveSyntaxTrees(trees);
}

protected override Compilation CommonRemoveAllSyntaxTrees()
{
    return this.RemoveAllSyntaxTrees();
}

protected override Compilation CommonReplaceSyntaxTree(SyntaxTree oldTree, SyntaxTree newTree)
{
    return this.ReplaceSyntaxTree((SyntaxTree)oldTree, (SyntaxTree)newTree);
}

protected override Compilation CommonWithOptions(CompilationOptions options)
{
    return this.WithOptions((CSharpCompilationOptions)options);
}

protected override Compilation CommonWithPreviousSubmission(Compilation newPreviousSubmission)
{
    return this.WithPreviousSubmission((CSharpCompilation)newPreviousSubmission);
}

protected override bool CommonContainsSyntaxTree(SyntaxTree syntaxTree)
{
    return this.ContainsSyntaxTree((SyntaxTree)syntaxTree);
}

protected override ISymbol CommonGetAssemblyOrModuleSymbol(MetadataReference reference)
{
    return this.GetAssemblyOrModuleSymbol(reference);
}

protected override Compilation CommonClone()
{
    return this.Clone();
}

protected override IModuleSymbol CommonSourceModule
{
    get { return this.SourceModule; }
}

protected override INamedTypeSymbol CommonGetSpecialType(SpecialType specialType)
{
    return this.GetSpecialType(specialType);
}

protected override INamespaceSymbol CommonGetCompilationNamespace(INamespaceSymbol namespaceSymbol)
{
    return this.GetCompilationNamespace(namespaceSymbol);
}

protected override INamedTypeSymbol CommonGetTypeByMetadataName(string metadataName)
{
    return this.GetTypeByMetadataName(metadataName);
}

protected override INamedTypeSymbol CommonScriptClass
{
    get { return this.ScriptClass; }
}

protected override IArrayTypeSymbol CommonCreateArrayTypeSymbol(ITypeSymbol elementType, int rank)
{
    // EnsureCSharpSymbolOrNull validates that a foreign ITypeSymbol actually is a C# TypeSymbol.
    return CreateArrayTypeSymbol(elementType.EnsureCSharpSymbolOrNull<ITypeSymbol, TypeSymbol>("elementType"), rank);
}

protected override IPointerTypeSymbol CommonCreatePointerTypeSymbol(ITypeSymbol elementType)
{
    return CreatePointerTypeSymbol(elementType.EnsureCSharpSymbolOrNull<ITypeSymbol, TypeSymbol>("elementType"));
}

protected override ITypeSymbol CommonDynamicType
{
    get { return DynamicType; }
}

protected override INamedTypeSymbol CommonObjectType
{
    get { return this.ObjectType; }
}

protected override IMethodSymbol CommonGetEntryPoint(CancellationToken cancellationToken)
{
    return this.GetEntryPoint(cancellationToken);
}
// Orders two in-source locations: first by the ordering of their syntax trees within
// the compilation, then by start position within the same tree.
internal override int CompareSourceLocations(Location loc1, Location loc2)
{
    Debug.Assert(loc1.IsInSource);
    Debug.Assert(loc2.IsInSource);
    var comparison = CompareSyntaxTreeOrdering(loc1.SourceTree, loc2.SourceTree);
    if (comparison != 0)
    {
        return comparison;
    }

    // Span starts are non-negative ints, so this subtraction cannot overflow.
    return loc1.SourceSpan.Start - loc2.SourceSpan.Start;
}
/// <summary>
/// Return true if there is a source declaration symbol name that meets given predicate.
/// </summary>
/// <param name="predicate">Predicate applied to candidate symbol names; required.</param>
/// <param name="filter">Kinds of symbols to consider; must not be <see cref="SymbolFilter.None"/>.</param>
/// <param name="cancellationToken">Token used to cancel the search.</param>
public override bool ContainsSymbolsWithName(Func<string, bool> predicate, SymbolFilter filter = SymbolFilter.TypeAndMember, CancellationToken cancellationToken = default(CancellationToken))
{
    if (predicate == null)
    {
        throw new ArgumentNullException(nameof(predicate));
    }

    if (filter == SymbolFilter.None)
    {
        throw new ArgumentException(CSharpResources.NoNoneSearchCriteria, nameof(filter));
    }

    // Name check runs over the declaration table only, without realizing symbols.
    return this.Declarations.ContainsName(predicate, filter, cancellationToken);
}
/// <summary>
/// Return source declaration symbols whose name meets given predicate.
/// </summary>
/// <param name="predicate">Predicate applied to candidate symbol names; required.</param>
/// <param name="filter">Kinds of symbols to return; must not be <see cref="SymbolFilter.None"/>.</param>
/// <param name="cancellationToken">Token used to cancel the search.</param>
public override IEnumerable<ISymbol> GetSymbolsWithName(Func<string, bool> predicate, SymbolFilter filter = SymbolFilter.TypeAndMember, CancellationToken cancellationToken = default(CancellationToken))
{
    if (predicate == null)
    {
        throw new ArgumentNullException(nameof(predicate));
    }

    if (filter == SymbolFilter.None)
    {
        throw new ArgumentException(CSharpResources.NoNoneSearchCriteria, nameof(filter));
    }

    // Unlike ContainsSymbolsWithName, this walk realizes the matching symbols.
    return new SymbolSearcher(this).GetSymbolsWithName(predicate, filter, cancellationToken);
}
#endregion
/// <summary>
/// Returns if the compilation has all of the members necessary to emit metadata about
/// dynamic types.
/// </summary>
/// <returns>True when both DynamicAttribute constructors (parameterless and the
/// transform-flags overload) are resolvable well-known members.</returns>
internal bool HasDynamicEmitAttributes()
{
    return
        (object)GetWellKnownTypeMember(WellKnownMember.System_Runtime_CompilerServices_DynamicAttribute__ctor) != null &&
        (object)GetWellKnownTypeMember(WellKnownMember.System_Runtime_CompilerServices_DynamicAttribute__ctorTransformFlags) != null;
}
// Creates the C#-specific analyzer driver, keyed on SyntaxKind for node registration.
internal override AnalyzerDriver AnalyzerForLanguage(ImmutableArray<DiagnosticAnalyzer> analyzers, AnalyzerManager analyzerManager)
{
    return new AnalyzerDriver<SyntaxKind>(analyzers, n => n.Kind(), analyzerManager);
}
// Publishes a symbol-declared event to the compilation's event queue, when one exists.
internal void SymbolDeclaredEvent(Symbol symbol)
{
    if (EventQueue != null) EventQueue.Enqueue(new SymbolDeclaredCompilationEvent(this, symbol));
}
/// <summary>
/// Determine if enum arrays can be initialized using block initialization.
/// </summary>
/// <returns>True if it's safe to use block initialization for enum arrays.</returns>
/// <remarks>
/// In NetFx 4.0, block array initializers do not work on all combinations of {32/64 X Debug/Retail} when array elements are enums.
/// This is fixed in 4.5 thus enabling block array initialization for a very common case.
/// We look for the presence of <see cref="System.Runtime.GCLatencyMode.SustainedLowLatency"/> which was introduced in .Net 4.5
/// </remarks>
internal bool EnableEnumArrayBlockInitialization
{
    get
    {
        // The member must come from the core library itself; a forwarded or
        // user-defined copy would not prove the runtime is 4.5+.
        var sustainedLowLatency = GetWellKnownTypeMember(WellKnownMember.System_Runtime_GCLatencyMode__SustainedLowLatency);
        return sustainedLowLatency != null && sustainedLowLatency.ContainingAssembly == Assembly.CorLibrary;
    }
}
// Walks the merged declaration table looking for source symbols whose names satisfy
// a predicate, realizing symbols lazily and caching declaration->symbol mappings so
// repeated containment-chain ("spine") lookups stay cheap.
private class SymbolSearcher
{
    // Maps a merged declaration to the symbol realized for it.
    private readonly Dictionary<Declaration, NamespaceOrTypeSymbol> _cache;
    private readonly CSharpCompilation _compilation;

    public SymbolSearcher(CSharpCompilation compilation)
    {
        _cache = new Dictionary<Declaration, NamespaceOrTypeSymbol>();
        _compilation = compilation;
    }

    public IEnumerable<ISymbol> GetSymbolsWithName(Func<string, bool> predicate, SymbolFilter filter, CancellationToken cancellationToken)
    {
        var result = new HashSet<ISymbol>();
        var spine = new List<MergedNamespaceOrTypeDeclaration>();

        AppendSymbolsWithName(spine, _compilation.Declarations.MergedRoot, predicate, filter, result, cancellationToken);

        return result;
    }

    // Recursively visits 'current', adding matching namespace/type/member symbols to
    // 'set'. 'spine' holds the chain of containing declarations above 'current'.
    private void AppendSymbolsWithName(
        List<MergedNamespaceOrTypeDeclaration> spine, MergedNamespaceOrTypeDeclaration current,
        Func<string, bool> predicate, SymbolFilter filter, HashSet<ISymbol> set, CancellationToken cancellationToken)
    {
        var includeNamespace = (filter & SymbolFilter.Namespace) == SymbolFilter.Namespace;
        var includeType = (filter & SymbolFilter.Type) == SymbolFilter.Type;
        var includeMember = (filter & SymbolFilter.Member) == SymbolFilter.Member;

        if (current.Kind == DeclarationKind.Namespace)
        {
            if (includeNamespace && predicate(current.Name))
            {
                var container = GetSpineSymbol(spine);
                set.Add(GetSymbol(container, current));
            }
        }
        else
        {
            if (includeType && predicate(current.Name))
            {
                var container = GetSpineSymbol(spine);
                set.Add(GetSymbol(container, current));
            }

            if (includeMember)
            {
                AppendMemberSymbolsWithName(spine, current, predicate, set, cancellationToken);
            }
        }

        spine.Add(current);

        foreach (var child in current.Children.OfType<MergedNamespaceOrTypeDeclaration>())
        {
            if (includeMember || includeType)
            {
                AppendSymbolsWithName(spine, child, predicate, filter, set, cancellationToken);
                continue;
            }

            // Namespace-only searches never need to descend into type declarations.
            if (child.Kind == DeclarationKind.Namespace)
            {
                AppendSymbolsWithName(spine, child, predicate, filter, set, cancellationToken);
            }
        }

        // pop last one
        spine.RemoveAt(spine.Count - 1);
    }

    private void AppendMemberSymbolsWithName(
        List<MergedNamespaceOrTypeDeclaration> spine, MergedNamespaceOrTypeDeclaration current,
        Func<string, bool> predicate, HashSet<ISymbol> set, CancellationToken cancellationToken)
    {
        spine.Add(current);

        var container = GetSpineSymbol(spine);
        foreach (var member in container.GetMembers())
        {
            // Nested types are handled by the declaration walk. Members normally must
            // be referenceable by name, but explicit interface implementations and
            // indexers are deliberately searchable as well.
            if (!member.IsTypeOrTypeAlias() &&
                (member.CanBeReferencedByName || member.IsExplicitInterfaceImplementation() || member.IsIndexer()) &&
                predicate(member.Name))
            {
                set.Add(member);
            }
        }

        spine.RemoveAt(spine.Count - 1);
    }

    // Resolves the symbol for the innermost declaration on the spine, rebuilding the
    // chain from the global namespace when it is not cached yet.
    private NamespaceOrTypeSymbol GetSpineSymbol(List<MergedNamespaceOrTypeDeclaration> spine)
    {
        if (spine.Count == 0)
        {
            return null;
        }

        var symbol = GetCachedSymbol(spine[spine.Count - 1]);
        if (symbol != null)
        {
            return symbol;
        }

        // spine[0] is the merged root, so realization starts at index 1.
        var current = _compilation.GlobalNamespace as NamespaceOrTypeSymbol;
        for (var i = 1; i < spine.Count; i++)
        {
            current = GetSymbol(current, spine[i]);
        }

        return current;
    }

    private NamespaceOrTypeSymbol GetCachedSymbol(MergedNamespaceOrTypeDeclaration declaration)
    {
        NamespaceOrTypeSymbol symbol;
        if (_cache.TryGetValue(declaration, out symbol))
        {
            return symbol;
        }

        return null;
    }

    // Finds 'declaration' within 'container', caching all same-named candidates so
    // later lookups in the same container hit the cache.
    private NamespaceOrTypeSymbol GetSymbol(NamespaceOrTypeSymbol container, MergedNamespaceOrTypeDeclaration declaration)
    {
        if (container == null)
        {
            return _compilation.GlobalNamespace;
        }

        if (declaration.Kind == DeclarationKind.Namespace)
        {
            AddCache(container.GetMembers(declaration.Name).OfType<NamespaceOrTypeSymbol>());
        }
        else
        {
            AddCache(container.GetTypeMembers(declaration.Name));
        }

        return GetCachedSymbol(declaration);
    }

    // Maps each realized symbol back to its merged declaration and records it.
    private void AddCache(IEnumerable<NamespaceOrTypeSymbol> symbols)
    {
        foreach (var symbol in symbols)
        {
            var mergedNamespace = symbol as MergedNamespaceSymbol;
            if (mergedNamespace != null)
            {
                // Key merged namespaces by the declaration of one of their source constituents.
                _cache[mergedNamespace.ConstituentNamespaces.OfType<SourceNamespaceSymbol>().First().MergedDeclaration] = symbol;
                continue;
            }

            var sourceNamespace = symbol as SourceNamespaceSymbol;
            if (sourceNamespace != null)
            {
                _cache[sourceNamespace.MergedDeclaration] = sourceNamespace;
                continue;
            }

            var sourceType = symbol as SourceMemberContainerTypeSymbol;
            if (sourceType != null)
            {
                _cache[sourceType.MergedDeclaration] = sourceType;
            }
        }
    }
}
}
}
| EricArndt/roslyn | src/Compilers/CSharp/Portable/Compilation/CSharpCompilation.cs | C# | apache-2.0 | 126,286 |
/*******************************************************************************
* Copyright 2015 Software Evolution and Architecture Lab, University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package CloudWave;
/**
 * Singleton JNI bridge to the native "cloudwavejni" library.
 *
 * <p>Implemented as a single-constant enum so the JVM guarantees exactly one
 * instance and loads the native library once, when the constant is initialized.
 * Native code delivers events back through the static {@link #callback(String)}.
 */
public enum CloudWaveJNI {
    instance;

    public static final String CLOUDWAVE_LIB = "cloudwavejni";

    // Runs once during enum-constant initialization; loads the native library.
    CloudWaveJNI() {System.loadLibrary(CLOUDWAVE_LIB);}

    public static CloudWaveJNI getInstance(){return instance;}

    /**
     * Initializes the native side of the bridge.
     *
     * @throws CloudWaveException when the native initJNI call returns a
     *         negative status code
     */
    public void init() throws CloudWaveException{
        int r = initJNI();
        if (r<0) {
            System.err.println("initJNI returned " + r);
            throw new CloudWaveException();
        }
    }

    // Releases the native resources acquired by init().
    public void free(){
        freeJNI();
    }

    // Handler invoked for events delivered from native code; may be null.
    protected IEventHandler eventHandler;

    public IEventHandler getEventHandler() {
        return eventHandler;
    }

    // Synchronized so a handler swap cannot race with an in-flight doEvent().
    public void setEventHandler(IEventHandler eh) {
        synchronized(this){ eventHandler = eh;}
    }

    // Forwards an event string to the registered handler, if any.
    public void doEvent(String event){
        synchronized(this) {
            if (eventHandler!=null)
                eventHandler.doEvent(event);
        }
    }

    // Entry point called from native code to deliver an event to Java.
    protected synchronized static void callback(String event){
        instance.doEvent(event);
    }

    //#: Init/Free
    public native int initJNI();
    protected native int freeJNI();
    //:#

    //#: Log
    protected native int initLog();
    protected native int freeLog();
    protected native int setLogId(String id);
    protected native String getLogId();
    protected native int recordLog(int level, String message);
    protected native int recordLogL(int level, String message, long id);
    //:#

    //#: Metric
    protected native int initMetric();
    protected native int freeMetric();
    protected native int recordMetricL(int source, String name, String mdata, String munit, int type, long value);
    protected native int recordMetricD(int source, String name, String mdata, String munit, int type, double value);
    protected native int recordMetricS(int source, String name, String mdata, String munit, int type, String value);
    protected native int recordEventL(int source, String name, String mdata, String munit, int type, long value);
    protected native int recordEventD(int source, String name, String mdata, String munit, int type, double value);
    protected native int recordEventS(int source, String name, String mdata, String munit, int type, String value);
    //:#

    //#: Events
    protected native int initEvent();
    protected native int freeEvent();
    protected native int postEvent(String event_json);
    protected native long subscribe(String event_id);
    protected native int unsubscribe(long id);
    //:#
}
| harinigunabalan/PerformanceHat | cw-feedback-handler/src/main/java/CloudWave/CloudWaveJNI.java | Java | apache-2.0 | 3,270 |
const NamingMixin = {
_name: null,
getName() {
return this._name;
},
_shortName: null,
getShortName() {
return this._shortName || this.getName();
},
_abbreviation: null,
getAbbreviation() {
return this._abbreviation || this.getShortName();
},
};
export default NamingMixin;
| kjirou/reversi-tactics | src/mixins/NamingMixin.js | JavaScript | apache-2.0 | 309 |
# Development entry point: builds the Flask app via the factory and serves it.
from mainapp import create_app

app = create_app()

if __name__ == '__main__':
    # Bind to all interfaces so the dev server is reachable from outside the host.
    app.run(host='0.0.0.0')
| jonaubf/flask-mongo-testapp | testapp/run.py | Python | apache-2.0 | 107 |
/*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.entitlement.filter.callback;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.entitlement.filter.exception.EntitlementFilterException;
import javax.servlet.http.HttpServletRequest;
/**
 * Callback handler that extracts the username from an HTTP Basic
 * {@code Authorization} header ("Basic base64(user:password)").
 *
 * <p>Bug fix: the original constructor threw {@link EntitlementFilterException}
 * unconditionally whenever an Authorization header was present — even after the
 * username had been parsed and stored successfully. The exception is now raised
 * only when the header exists but cannot be parsed.
 */
public class BasicAuthCallBackHandler extends EntitlementFilterCallBackHandler {

    private static final Log log = LogFactory.getLog(BasicAuthCallBackHandler.class);

    public BasicAuthCallBackHandler(HttpServletRequest request) throws EntitlementFilterException {
        String authHeader = request.getHeader("Authorization");

        // No credentials supplied: leave the username unset, as before.
        if (authHeader == null || "null".equals(authHeader)) {
            return;
        }

        // Expected form: "Basic <base64(user:password)>".
        String[] schemeAndToken = authHeader.split(" ");
        if (schemeAndToken.length == 2) {
            String decoded = new String(Base64.decodeBase64(schemeAndToken[1].getBytes()));
            String[] userAndPassword = decoded.split(":");
            if (userAndPassword.length == 2) {
                setUserName(userAndPassword[0]);
                return; // parsed successfully; do not throw
            }
        }

        // Header present but malformed or undecodable.
        throw new EntitlementFilterException("Unable to retrieve username from Authorization header");
    }
}
| wattale/carbon-identity | components/identity/org.wso2.carbon.identity.entitlement.filter/src/main/java/org/wso2/carbon/identity/entitlement/filter/callback/BasicAuthCallBackHandler.java | Java | apache-2.0 | 1,973 |
package main
import (
"github.com/ActiveState/tail"
"github.com/ugorji/go/codec"
"io/ioutil"
"log"
"os"
"reflect"
"regexp"
"strconv"
"strings"
"time"
)
// inputTail tails a log file and turns each appended line into a message pack.
type inputTail struct {
	path          string // file to tail
	format        string // "json" or a /regexp/ with named capture groups
	tag           string // tag stamped onto every emitted message
	pos_file      string // file persisting the read offset across restarts
	offset        int64  // byte offset to resume tailing from
	sync_interval int    // seconds between offset flushes to pos_file
	codec         *codec.JsonHandle // configured in Init when format == "json"
	time_key      string // JSON field promoted to the message timestamp
}
// Init configures the tailer from the plugin's key/value settings and restores
// the persisted read offset from pos_file (clamped to the current file size).
//
// Fixes over the original: the tailed file opened for Stat() is now closed
// (it leaked before), and a failed os.Open no longer leads to calling Stat()
// on a nil *os.File (which would panic); the offset falls back to 0 instead.
func (self *inputTail) Init(f map[string]string) error {
	self.sync_interval = 2

	value := f["path"]
	if len(value) > 0 {
		self.path = value
	}

	value = f["format"]
	if len(value) > 0 {
		self.format = value
		if value == "json" {
			// Decode JSON objects into plain map[string]interface{} values.
			_codec := codec.JsonHandle{}
			_codec.MapType = reflect.TypeOf(map[string]interface{}(nil))
			self.codec = &_codec

			value = f["time_key"]
			if len(value) > 0 {
				self.time_key = value
			} else {
				self.time_key = "time"
			}
		}
	}

	value = f["tag"]
	if len(value) > 0 {
		self.tag = value
	}

	value = f["pos_file"]
	if len(value) > 0 {
		self.pos_file = value

		str, err := ioutil.ReadFile(self.pos_file)
		if err != nil {
			log.Println("ioutil.ReadFile:", err)
		}

		file, err := os.Open(self.path)
		if err != nil {
			log.Println("os.Open:", err)
			self.offset = 0
		} else {
			info, err := file.Stat()
			if err != nil {
				log.Println("f.Stat:", err)
				self.offset = 0
			} else {
				// Never resume past EOF (the file may have been truncated/rotated).
				offset, _ := strconv.Atoi(string(str))
				if int64(offset) > info.Size() {
					self.offset = info.Size()
				} else {
					self.offset = int64(offset)
				}
			}
			file.Close() // was leaked in the original
		}
	}

	value = f["sync_interval"]
	if len(value) > 0 {
		sync_interval, err := strconv.Atoi(value)
		if err != nil {
			return err
		}
		self.sync_interval = sync_interval
	}

	return nil
}
// Run tails the configured file, converts each line into a message pack routed to
// the runner, and periodically persists the current read offset to pos_file.
//
// Fix: the recover handler called logs.Fatalln, but no "logs" package is imported
// in this file (only the standard "log" package is); corrected to log.Fatalln.
func (self *inputTail) Run(runner InputRunner) error {
	defer func() {
		if err := recover(); err != nil {
			log.Fatalln("recover panic at err:", err)
		}
	}()

	// Resume from the saved offset when one exists, otherwise start at EOF.
	var seek int
	if self.offset > 0 {
		seek = os.SEEK_SET
	} else {
		seek = os.SEEK_END
	}

	t, err := tail.TailFile(self.path, tail.Config{
		Poll:      true,
		ReOpen:    true,
		Follow:    true,
		MustExist: false,
		Location:  &tail.SeekInfo{int64(self.offset), seek}})
	if err != nil {
		return err
	}

	f, err := os.OpenFile(self.pos_file, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0600)
	if err != nil {
		log.Fatalln("os.OpenFile", err)
	}
	defer f.Close()

	// A format wrapped in slashes is a regexp with named captures; translate the
	// fluentd-style (?<name>...) group syntax into Go's (?P<name>...).
	var re regexp.Regexp
	if string(self.format[0]) == string("/") || string(self.format[len(self.format)-1]) == string("/") {
		format := strings.Trim(self.format, "/")
		trueformat := regexp.MustCompile("\\(\\?<").ReplaceAllString(format, "(?P<")
		if trueformat != format {
			log.Printf("pos_file:%s, format:%s", self.path, trueformat)
		}
		re = *regexp.MustCompile(trueformat)
		self.format = "regexp"
	} else if self.format == "json" {
	}

	tick := time.NewTicker(time.Second * time.Duration(self.sync_interval))
	count := 0

	for {
		select {
		case <-tick.C:
			{
				// Flush the offset only when new lines arrived since the last sync.
				if count > 0 {
					offset, err := t.Tell()
					if err != nil {
						log.Println("Tell return error: ", err)
						continue
					}
					str := strconv.Itoa(int(offset))
					_, err = f.WriteAt([]byte(str), 0)
					if err != nil {
						log.Println("f.WriteAt", err)
						return err
					}
					count = 0
				}
			}
		case line := <-t.Lines:
			{
				pack := <-runner.InChan()
				pack.MsgBytes = []byte(line.Text)
				pack.Msg.Tag = self.tag
				pack.Msg.Timestamp = line.Time.Unix()
				if self.format == "regexp" {
					// Drop lines that do not match; recycle the pack for reuse.
					text := re.FindSubmatch([]byte(line.Text))
					if text == nil {
						pack.Recycle()
						continue
					}
					for i, name := range re.SubexpNames() {
						if len(name) > 0 {
							pack.Msg.Data[name] = string(text[i])
						}
					}
				} else if self.format == "json" {
					dec := codec.NewDecoderBytes([]byte(line.Text), self.codec)
					err := dec.Decode(&pack.Msg.Data)
					if err != nil {
						log.Println("json.Unmarshal", err)
						pack.Recycle()
						continue
					} else {
						// Promote the configured time field out of the payload into
						// the message timestamp; reject non-integer time values.
						t, ok := pack.Msg.Data[self.time_key]
						if ok {
							if time, xx := t.(uint64); xx {
								pack.Msg.Timestamp = int64(time)
								delete(pack.Msg.Data, self.time_key)
							} else if time64, oo := t.(int64); oo {
								pack.Msg.Timestamp = time64
								delete(pack.Msg.Data, self.time_key)
							} else {
								log.Println("time is not int64, ", t, " typeof:", reflect.TypeOf(t))
								pack.Recycle()
								continue
							}
						}
					}
				}
				count++
				runner.RouterChan() <- pack
			}
		}
	}

	// NOTE(review): unreachable — the select loop above never breaks; kept to
	// preserve the original control flow.
	err = t.Wait()
	if err != nil {
		return err
	}
	return err
}
// Registers this plugin under the "tail" input type at package load time.
func init() {
	RegisterInput("tail", func() interface{} {
		return new(inputTail)
	})
}
| hnlq715/gofluent | in_tail.go | GO | apache-2.0 | 4,521 |
/*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.logging;
import org.apache.commons.logging.Log;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
/**
 * Tests for {@link DeferredLog}.
 *
 * <p>Each test records a message (optionally with a throwable) on the deferred
 * log, replays it onto a mock {@link Log}, and verifies that the call is
 * forwarded at the same level with the same arguments.
 *
 * @author Phillip Webb
 */
public class DeferredLogTests {

	private DeferredLog deferredLog = new DeferredLog();

	private Object message = "Message";

	private Throwable throwable = new IllegalStateException();

	// Destination used to verify what gets replayed.
	private Log log = mock(Log.class);

	// Every level reports enabled so no message is dropped before replay.
	@Test
	public void isTraceEnabled() throws Exception {
		assertThat(this.deferredLog.isTraceEnabled()).isTrue();
	}

	@Test
	public void isDebugEnabled() throws Exception {
		assertThat(this.deferredLog.isDebugEnabled()).isTrue();
	}

	@Test
	public void isInfoEnabled() throws Exception {
		assertThat(this.deferredLog.isInfoEnabled()).isTrue();
	}

	@Test
	public void isWarnEnabled() throws Exception {
		assertThat(this.deferredLog.isWarnEnabled()).isTrue();
	}

	@Test
	public void isErrorEnabled() throws Exception {
		assertThat(this.deferredLog.isErrorEnabled()).isTrue();
	}

	@Test
	public void isFatalEnabled() throws Exception {
		assertThat(this.deferredLog.isFatalEnabled()).isTrue();
	}

	// Messages recorded without a throwable replay with a null throwable.
	@Test
	public void trace() throws Exception {
		this.deferredLog.trace(this.message);
		this.deferredLog.replayTo(this.log);
		verify(this.log).trace(this.message, null);
	}

	@Test
	public void traceWithThrowable() throws Exception {
		this.deferredLog.trace(this.message, this.throwable);
		this.deferredLog.replayTo(this.log);
		verify(this.log).trace(this.message, this.throwable);
	}

	@Test
	public void debug() throws Exception {
		this.deferredLog.debug(this.message);
		this.deferredLog.replayTo(this.log);
		verify(this.log).debug(this.message, null);
	}

	@Test
	public void debugWithThrowable() throws Exception {
		this.deferredLog.debug(this.message, this.throwable);
		this.deferredLog.replayTo(this.log);
		verify(this.log).debug(this.message, this.throwable);
	}

	@Test
	public void info() throws Exception {
		this.deferredLog.info(this.message);
		this.deferredLog.replayTo(this.log);
		verify(this.log).info(this.message, null);
	}

	@Test
	public void infoWithThrowable() throws Exception {
		this.deferredLog.info(this.message, this.throwable);
		this.deferredLog.replayTo(this.log);
		verify(this.log).info(this.message, this.throwable);
	}

	@Test
	public void warn() throws Exception {
		this.deferredLog.warn(this.message);
		this.deferredLog.replayTo(this.log);
		verify(this.log).warn(this.message, null);
	}

	@Test
	public void warnWithThrowable() throws Exception {
		this.deferredLog.warn(this.message, this.throwable);
		this.deferredLog.replayTo(this.log);
		verify(this.log).warn(this.message, this.throwable);
	}

	@Test
	public void error() throws Exception {
		this.deferredLog.error(this.message);
		this.deferredLog.replayTo(this.log);
		verify(this.log).error(this.message, null);
	}

	@Test
	public void errorWithThrowable() throws Exception {
		this.deferredLog.error(this.message, this.throwable);
		this.deferredLog.replayTo(this.log);
		verify(this.log).error(this.message, this.throwable);
	}

	@Test
	public void fatal() throws Exception {
		this.deferredLog.fatal(this.message);
		this.deferredLog.replayTo(this.log);
		verify(this.log).fatal(this.message, null);
	}

	@Test
	public void fatalWithThrowable() throws Exception {
		this.deferredLog.fatal(this.message, this.throwable);
		this.deferredLog.replayTo(this.log);
		verify(this.log).fatal(this.message, this.throwable);
	}

	// Replaying must drain the buffer: a second replay to another log gets nothing.
	@Test
	public void clearsOnReplayTo() throws Exception {
		this.deferredLog.info("1");
		this.deferredLog.fatal("2");
		Log log2 = mock(Log.class);
		this.deferredLog.replayTo(this.log);
		this.deferredLog.replayTo(log2);
		verify(this.log).info("1", null);
		verify(this.log).fatal("2", null);
		verifyNoMoreInteractions(this.log);
		verifyZeroInteractions(log2);
	}

}
| vakninr/spring-boot | spring-boot-project/spring-boot/src/test/java/org/springframework/boot/logging/DeferredLogTests.java | Java | apache-2.0 | 4,690 |
# Copyright 2012 OpenStack Foundation
# Copyright 2013 Nebula Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Identity v2 EC2 Credentials action implementations"""
import logging
import six
from cliff import command
from cliff import lister
from cliff import show
from openstackclient.common import utils
from openstackclient.i18n import _ # noqa
class CreateEC2Creds(show.ShowOne):
    """Create EC2 credentials"""

    log = logging.getLogger(__name__ + ".CreateEC2Creds")

    def get_parser(self, prog_name):
        # --project/--user only take effect for admin tokens; otherwise the
        # current auth context is used in take_action below.
        parser = super(CreateEC2Creds, self).get_parser(prog_name)
        parser.add_argument(
            '--project',
            metavar='<project>',
            help=_('Specify a project [admin only]'),
        )
        parser.add_argument(
            '--user',
            metavar='<user>',
            help=_('Specify a user [admin only]'),
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)', parsed_args)
        identity_client = self.app.client_manager.identity

        if parsed_args.project:
            project = utils.find_resource(
                identity_client.tenants,
                parsed_args.project,
            ).id
        else:
            # Get the project from the current auth
            project = identity_client.auth_tenant_id

        if parsed_args.user:
            user = utils.find_resource(
                identity_client.users,
                parsed_args.user,
            ).id
        else:
            # Get the user from the current auth
            user = identity_client.auth_user_id

        creds = identity_client.ec2.create(user, project)

        # Flatten the credential into sorted (columns, values) pairs for display.
        info = {}
        info.update(creds._info)
        return zip(*sorted(six.iteritems(info)))
class DeleteEC2Creds(command.Command):
    """Delete EC2 credentials"""

    log = logging.getLogger(__name__ + '.DeleteEC2Creds')

    def get_parser(self, prog_name):
        parser = super(DeleteEC2Creds, self).get_parser(prog_name)
        parser.add_argument('access_key', metavar='<access-key>',
                            help=_('Credentials access key'))
        parser.add_argument('--user', metavar='<user>',
                            help=_('Specify a user [admin only]'))
        return parser

    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)', parsed_args)
        identity_client = self.app.client_manager.identity

        # An explicit --user (admin only) wins; otherwise delete credentials
        # belonging to the currently authenticated user.
        user = (
            utils.find_resource(identity_client.users, parsed_args.user).id
            if parsed_args.user
            else identity_client.auth_user_id
        )

        identity_client.ec2.delete(user, parsed_args.access_key)
class ListEC2Creds(lister.Lister):
    """List EC2 credentials"""

    log = logging.getLogger(__name__ + '.ListEC2Creds')

    def get_parser(self, prog_name):
        parser = super(ListEC2Creds, self).get_parser(prog_name)
        parser.add_argument(
            '--user',
            metavar='<user>',
            help=_('Specify a user [admin only]'),
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)', parsed_args)
        identity_client = self.app.client_manager.identity

        if parsed_args.user:
            user = utils.find_resource(
                identity_client.users,
                parsed_args.user,
            ).id
        else:
            # Get the user from the current auth
            user = identity_client.auth_user_id

        # Project attribute names on the credential objects onto display columns.
        columns = ('access', 'secret', 'tenant_id', 'user_id')
        column_headers = ('Access', 'Secret', 'Project ID', 'User ID')
        data = identity_client.ec2.list(user)

        return (column_headers,
                (utils.get_item_properties(
                    s, columns,
                    formatters={},
                ) for s in data))
class ShowEC2Creds(show.ShowOne):
    """Show EC2 credentials"""

    log = logging.getLogger(__name__ + '.ShowEC2Creds')

    def get_parser(self, prog_name):
        parser = super(ShowEC2Creds, self).get_parser(prog_name)
        parser.add_argument(
            'access_key',
            metavar='<access-key>',
            help=_('Credentials access key'),
        )
        parser.add_argument(
            '--user',
            metavar='<user>',
            help=_('Specify a user [admin only]'),
        )
        return parser

    def take_action(self, parsed_args):
        self.log.debug('take_action(%s)', parsed_args)
        identity_client = self.app.client_manager.identity

        if parsed_args.user:
            user = utils.find_resource(
                identity_client.users,
                parsed_args.user,
            ).id
        else:
            # Get the user from the current auth
            user = identity_client.auth_user_id

        creds = identity_client.ec2.get(user, parsed_args.access_key)

        # Flatten the credential into sorted (columns, values) pairs for display.
        info = {}
        info.update(creds._info)
        return zip(*sorted(six.iteritems(info)))
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.runtime.pipeline;
/**
 * A pipeline component that both receives and emits data: it simply
 * combines the {@code Emitter}, {@code Receiver} and {@code Stage}
 * contracts and declares no additional methods of its own.
 *
 * <p>This api is experimental and thus the classes and the interfaces returned are subject to change.</p>
 */
public interface Transformer
    extends
    Emitter,
    Receiver,
    Stage {
}
| mariofusco/droolsjbpm-integration | drools-pipeline/src/main/java/org/drools/runtime/pipeline/Transformer.java | Java | apache-2.0 | 836 |
<div class="container userContainer">
<div class="row">
<input type="text" ng-model="model.data.search" placeholder="Search user..." ng-change="model.search()" autofocus>
</div>
<div class="row userContainer-row" ng-click="model.userSelect(user)" ng-show="model.users.length" ng-repeat="user in model.users">
{{user.firstName}} {{user.lastName}}
</div>
<div class="row userContainer-row" ng-show="!model.users.length">
No user found ...
</div>
</div>
| liviuignat/PlayScalaAngularOAuth | app/assets/client/src/app/search/searchuser.tpl.html | HTML | apache-2.0 | 486 |
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package testpkg
import scala.concurrent.duration._
// starts svr using server-test/completions and perform sbt/completion tests
object ServerCompletionsTest extends AbstractServerTest {
  override val testDirectory: String = "completions"

  test("return basic completions on request") { _ =>
    // An empty query must still produce a (non-empty) items array.
    val completionStr = """{ "query": "" }"""
    svr.sendJsonRpc(
      s"""{ "jsonrpc": "2.0", "id": 15, "method": "sbt/completion", "params": $completionStr }"""
    )
    // NOTE: removed a stray debug println(s) that echoed every server
    // message scanned by waitForString and spammed the test log.
    assert(svr.waitForString(10.seconds) { s =>
      s contains """"result":{"items":["""
    })
  }

  test("return completion for custom tasks") { _ =>
    // A prefix of a task defined by the test build should complete to it.
    val completionStr = """{ "query": "hell" }"""
    svr.sendJsonRpc(
      s"""{ "jsonrpc": "2.0", "id": 16, "method": "sbt/completion", "params": $completionStr }"""
    )
    assert(svr.waitForString(10.seconds) { s =>
      s contains """"result":{"items":["hello"]"""
    })
  }

  test("return completions for user classes") { _ =>
    // testOnly completion must discover compiled test classes.
    val completionStr = """{ "query": "testOnly org." }"""
    svr.sendJsonRpc(
      s"""{ "jsonrpc": "2.0", "id": 17, "method": "sbt/completion", "params": $completionStr }"""
    )
    assert(svr.waitForString(10.seconds) { s =>
      s contains """"result":{"items":["testOnly org.sbt.ExampleSpec"]"""
    })
  }
}
| xuwei-k/xsbt | server-test/src/test/scala/testpkg/ServerCompletionsTest.scala | Scala | apache-2.0 | 1,442 |
/*******************************************************************************
* Copyright (c) 2015 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.ibm.ws.lars.rest;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import com.ibm.ws.lars.rest.model.Asset;
import com.ibm.ws.lars.rest.model.AssetList;
/**
*
*/
public class TestUtils {

    /**
     * Drains the given InputStream completely and returns everything read
     * as a single byte array.
     */
    public static byte[] slurp(InputStream is) throws IOException {
        ByteArrayOutputStream collected = new ByteArrayOutputStream();
        byte[] chunk = new byte[1024];
        for (int read = is.read(chunk); read != -1; read = is.read(chunk)) {
            collected.write(chunk, 0, read);
        }
        return collected.toByteArray();
    }

    /**
     * Assert that an AssetList contains exactly the given list of assets
     * <p>
     * This method assumes that all assets have an ID and there are no duplicates in the asset list.
     */
    public static void assertAssetList(AssetList list, Asset... assets) {
        // Index the expected assets by id, rejecting duplicate expectations.
        Map<String, Asset> expectedById = new HashMap<>();
        for (Asset expected : assets) {
            Asset previous = expectedById.put(expected.get_id(), expected);
            if (previous != null) {
                throw new AssertionError("Duplicate found in list of expected assets:\n" + expected.toJson());
            }
        }
        // Each asset in the list must match (and consume) one expectation.
        for (Asset actual : list) {
            if (expectedById.remove(actual.get_id()) == null) {
                throw new AssertionError("Unexpected asset found in the asset list:\n" + actual.toJson());
            }
        }
        // Anything left over was expected but never seen.
        if (!expectedById.isEmpty()) {
            StringBuilder message = new StringBuilder("Assets missing from asset list:\n");
            for (Asset missing : expectedById.values()) {
                message.append(missing.toJson());
                message.append("\n");
            }
            throw new AssertionError(message.toString());
        }
    }
}
| antelder/tool.lars | server/src/test/java/com/ibm/ws/lars/rest/TestUtils.java | Java | apache-2.0 | 2,655 |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.topic.impl.reliable;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.partition.MigrationState;
import com.hazelcast.partition.MigrationListener;
import com.hazelcast.partition.ReplicaMigrationEvent;
import com.hazelcast.ringbuffer.impl.RingbufferService;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.OverridePropertyRule;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import com.hazelcast.topic.ITopic;
import com.hazelcast.topic.Message;
import com.hazelcast.topic.MessageListener;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.assertTrue;
@Category({QuickTest.class, ParallelJVMTest.class})
@RunWith(HazelcastParallelClassRunner.class)
public class SubscriptionMigrationTest extends HazelcastTestSupport {
    // Only two partitions, so each reliable topic's backing ringbuffer can be
    // pinned to a known partition via generateKeyForPartition() below.
    @Rule
    public OverridePropertyRule overridePropertyRule = OverridePropertyRule.set("hazelcast.partition.count", "2");
    // gh issue: https://github.com/hazelcast/hazelcast/issues/13602
    @Test
    public void testListenerReceivesMessagesAfterPartitionIsMigratedBack() {
        TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
        HazelcastInstance instance1 = factory.newHazelcastInstance();
        // One reliable topic per partition.
        final String rtNameOnPartition0 = generateReliableTopicNameForPartition(instance1, 0);
        final String rtNameOnPartition1 = generateReliableTopicNameForPartition(instance1, 1);
        ITopic<String> topic0 = instance1.getReliableTopic(rtNameOnPartition0);
        ITopic<String> topic1 = instance1.getReliableTopic(rtNameOnPartition1);
        final CountingMigrationListener migrationListener = new CountingMigrationListener();
        instance1.getPartitionService().addMigrationListener(migrationListener);
        final PayloadMessageListener<String> listener0 = new PayloadMessageListener<String>();
        final PayloadMessageListener<String> listener1 = new PayloadMessageListener<String>();
        topic0.addMessageListener(listener0);
        topic1.addMessageListener(listener1);
        // Publish before any migration happens.
        topic0.publish("itemA");
        topic1.publish("item1");
        // Starting a second member migrates partition data off instance1.
        HazelcastInstance instance2 = factory.newHazelcastInstance();
        // 1 primary, 1 backup migration
        assertEqualsEventually(2, migrationListener.partitionMigrationCount);
        // Shutting the second member down migrates the partition back.
        instance2.shutdown();
        assertEqualsEventually(3, migrationListener.partitionMigrationCount);
        topic0.publish("itemB");
        topic1.publish("item2");
        // Listeners must have received both pre- and post-migration messages.
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                assertTrue(listener0.isReceived("itemA"));
                assertTrue(listener0.isReceived("itemB"));
                assertTrue(listener1.isReceived("item1"));
                assertTrue(listener1.isReceived("item2"));
            }
        });
    }
    // Records every payload it receives so tests can check containment later.
    public class PayloadMessageListener<V> implements MessageListener<V> {
        private Collection<V> receivedMessages = new HashSet<V>();
        @Override
        public void onMessage(Message<V> message) {
            receivedMessages.add(message.getMessageObject());
        }
        boolean isReceived(V message) {
            return receivedMessages.contains(message);
        }
    }
    // Counts completed replica migrations (primary and backup alike).
    public class CountingMigrationListener implements MigrationListener {
        AtomicInteger partitionMigrationCount = new AtomicInteger();
        @Override
        public void migrationStarted(MigrationState state) {
        }
        @Override
        public void migrationFinished(MigrationState state) {
        }
        @Override
        public void replicaMigrationCompleted(ReplicaMigrationEvent event) {
            partitionMigrationCount.incrementAndGet();
        }
        @Override
        public void replicaMigrationFailed(ReplicaMigrationEvent event) {
        }
    }
    // Builds a reliable topic name whose backing ringbuffer key hashes to the
    // requested partition.
    private String generateReliableTopicNameForPartition(HazelcastInstance instance, int partitionId) {
        return generateKeyForPartition(instance, RingbufferService.TOPIC_RB_PREFIX, partitionId);
    }
}
| mdogan/hazelcast | hazelcast/src/test/java/com/hazelcast/topic/impl/reliable/SubscriptionMigrationTest.java | Java | apache-2.0 | 5,092 |
# Copyright 2012 SINA Corporation
# Copyright 2014 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Extracts OpenStack config option info from module(s)."""
from __future__ import print_function
import argparse
import imp
import os
import re
import socket
import sys
import textwrap
from oslo.config import cfg
import six
import stevedore.named
from climate.openstack.common import gettextutils
from climate.openstack.common import importutils
# Install the gettext '_' function for the 'climate' domain so translated
# strings resolve when option modules are imported below.
gettextutils.install('climate')
# Class-name fragments used to recognize each oslo.config option type from
# str(type(opt)) (see OPTION_REGEX and _print_opt).
STROPT = "StrOpt"
BOOLOPT = "BoolOpt"
INTOPT = "IntOpt"
FLOATOPT = "FloatOpt"
LISTOPT = "ListOpt"
DICTOPT = "DictOpt"
MULTISTROPT = "MultiStrOpt"
# Human-readable type descriptions appended to each option's help text.
OPT_TYPES = {
    STROPT: 'string value',
    BOOLOPT: 'boolean value',
    INTOPT: 'integer value',
    FLOATOPT: 'floating point value',
    LISTOPT: 'list value',
    DICTOPT: 'dict value',
    MULTISTROPT: 'multi valued',
}
# Matches any of the known option type names inside str(type(opt)).
OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT,
                                              FLOATOPT, LISTOPT, DICTOPT,
                                              MULTISTROPT]))
# Suffix used by generate() to sort top-level modules before packages.
PY_EXT = ".py"
# Repository root, resolved relative to this file's location.
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                       "../../../../"))
# Column width at which generated help comments are wrapped.
WORDWRAP_WIDTH = 60
def generate(argv):
    """Print a sample configuration file assembled from the given sources.

    ``argv`` may contain repeated ``-m <module>`` options, repeated
    ``-l <library>`` options (entry-point based discovery via stevedore)
    and positional source file paths. All discovered options are grouped
    and written to stdout, DEFAULT group first.
    """
    parser = argparse.ArgumentParser(
        description='generate sample configuration file',
    )
    parser.add_argument('-m', dest='modules', action='append')
    parser.add_argument('-l', dest='libraries', action='append')
    parser.add_argument('srcfiles', nargs='*')
    parsed_args = parser.parse_args(argv)
    # Map top-level package name -> dotted module paths derived from the
    # positional source file paths.
    mods_by_pkg = dict()
    for filepath in parsed_args.srcfiles:
        pkg_name = filepath.split(os.sep)[1]
        mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]),
                            os.path.basename(filepath).split('.')[0]])
        mods_by_pkg.setdefault(pkg_name, list()).append(mod_str)
    # NOTE(lzyeval): place top level modules before packages
    pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT))
    ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names)
    pkg_names.extend(ext_names)
    # opts_by_group is a mapping of group name to an options list
    # The options list is a list of (module, options) tuples
    opts_by_group = {'DEFAULT': []}
    if parsed_args.modules:
        for module_name in parsed_args.modules:
            module = _import_module(module_name)
            if module:
                for group, opts in _list_opts(module):
                    opts_by_group.setdefault(group, []).append((module_name,
                                                                opts))
    # Look for entry points defined in libraries (or applications) for
    # option discovery, and include their return values in the output.
    #
    # Each entry point should be a function returning an iterable
    # of pairs with the group name (or None for the default group)
    # and the list of Opt instances for that group.
    if parsed_args.libraries:
        loader = stevedore.named.NamedExtensionManager(
            'oslo.config.opts',
            names=list(set(parsed_args.libraries)),
            invoke_on_load=False,
        )
        for ext in loader:
            for group, opts in ext.plugin():
                opt_list = opts_by_group.setdefault(group or 'DEFAULT', [])
                opt_list.append((ext.name, opts))
    # Import every module found via the positional source files; failure to
    # import any of them is fatal.
    for pkg_name in pkg_names:
        mods = mods_by_pkg.get(pkg_name)
        mods.sort()
        for mod_str in mods:
            if mod_str.endswith('.__init__'):
                mod_str = mod_str[:mod_str.rfind(".")]
            mod_obj = _import_module(mod_str)
            if not mod_obj:
                raise RuntimeError("Unable to import module %s" % mod_str)
            for group, opts in _list_opts(mod_obj):
                opts_by_group.setdefault(group, []).append((mod_str, opts))
    # DEFAULT is always emitted first; remaining groups alphabetically.
    print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', []))
    for group in sorted(opts_by_group.keys()):
        print_group_opts(group, opts_by_group[group])
def _import_module(mod_str):
    """Import ``mod_str`` and return the module object, or None on failure.

    Names prefixed with ``bin.`` refer to extension-less scripts in the
    ``bin/`` directory and are loaded from their file path instead of the
    regular import machinery. Import errors are reported on stderr.
    """
    try:
        if mod_str.startswith('bin.'):
            imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:]))
            return sys.modules[mod_str[4:]]
        else:
            return importutils.import_module(mod_str)
    except Exception as e:
        sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e)))
        return None
def _is_in_group(opt, group):
"Check if opt is in group."
for value in group._opts.values():
# NOTE(llu): Temporary workaround for bug #1262148, wait until
# newly released oslo.config support '==' operator.
if not(value['opt'] != opt):
return True
return False
def _guess_groups(opt, mod_obj):
    """Return the name of the config group ``opt`` is registered under.

    Checks the DEFAULT group first, then every named group registered on
    the global cfg.CONF. Raises RuntimeError if the option cannot be found
    (e.g. it was registered twice in the same group).
    """
    # is it in the DEFAULT group?
    if _is_in_group(opt, cfg.CONF):
        return 'DEFAULT'
    # what other groups is it in?
    for value in cfg.CONF.values():
        # GroupAttr entries wrap the named option groups on CONF.
        if isinstance(value, cfg.CONF.GroupAttr):
            if _is_in_group(opt, value._group):
                return value._group.name
    raise RuntimeError(
        "Unable to find group for option %s, "
        "maybe it's defined twice in the same group?"
        % opt.name
    )
def _list_opts(obj):
    """Return (group name, [options]) pairs for every option found on obj."""

    def _is_opt(candidate):
        # SubCommandOpt instances carry no configurable value, skip them.
        return (isinstance(candidate, cfg.Opt) and
                not isinstance(candidate, cfg.SubCommandOpt))

    # Collect bare Opt attributes as well as lists made up purely of Opts.
    found = []
    for attr_name in dir(obj):
        attr = getattr(obj, attr_name)
        if _is_opt(attr):
            found.append(attr)
        elif isinstance(attr, list) and all(_is_opt(item) for item in attr):
            found.extend(attr)

    # Bucket the options by the config group they are registered under.
    grouped = {}
    for option in found:
        grouped.setdefault(_guess_groups(option, obj), []).append(option)
    return grouped.items()
def print_group_opts(group, opts_by_module):
    """Print one ``[group]`` section with its options to stdout.

    ``opts_by_module`` is a list of (module name, [options]) tuples; each
    module gets a comment banner followed by its options.
    """
    print("[%s]" % group)
    print('')
    for mod, opts in opts_by_module:
        print('#')
        print('# Options defined in %s' % mod)
        print('#')
        print('')
        for opt in opts:
            _print_opt(opt)
        print('')
def _get_my_ip():
    """Best-effort detection of the local IP address; None on failure.

    Opens a UDP socket towards a public address purely to let the kernel
    pick the outgoing interface, then reads the local address back.
    """
    try:
        probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        probe.connect(('8.8.8.8', 80))
        addr = probe.getsockname()[0]
        probe.close()
        return addr
    except socket.error:
        return None
def _sanitize_default(name, value):
    """Set up a reasonably sensible default for pybasedir, my_ip and host."""
    # NOTE: branch order matters — the first matching rewrite wins.
    if value.startswith(sys.prefix):
        # NOTE(jd) Don't use os.path.join, because it is likely to think the
        # second part is an absolute pathname and therefore drop the first
        # part.
        value = os.path.normpath("/usr/" + value[len(sys.prefix):])
    elif value.startswith(BASEDIR):
        # Paths under the repo root map to a generic install location.
        return value.replace(BASEDIR, '/usr/lib/python/site-packages')
    elif BASEDIR in value:
        return value.replace(BASEDIR, '')
    elif value == _get_my_ip():
        # Don't leak this machine's address into the sample config.
        return '10.0.0.1'
    elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name:
        # Host-like options default to the project name, not this machine.
        return 'climate'
    elif value.strip() != value:
        # Quote values with leading/trailing whitespace so it is visible.
        return '"%s"' % value
    return value
def _print_opt(opt):
    """Print one option as commented-out sample config lines on stdout.

    Emits the wrapped help text (with the option type appended), any
    deprecated group/name aliases, and a ``#name=default`` line formatted
    according to the option's type. Exits the process on malformed options.
    """
    opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help
    if not opt_help:
        sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
        opt_help = ""
    opt_type = None
    try:
        # The option type is recovered from the class name, e.g. 'StrOpt'.
        opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
    except (ValueError, AttributeError) as err:
        sys.stderr.write("%s\n" % str(err))
        sys.exit(1)
    opt_help = u'%s (%s)' % (opt_help,
                             OPT_TYPES[opt_type])
    print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH)))
    if opt.deprecated_opts:
        for deprecated_opt in opt.deprecated_opts:
            if deprecated_opt.name:
                deprecated_group = (deprecated_opt.group if
                                    deprecated_opt.group else "DEFAULT")
                print('# Deprecated group/name - [%s]/%s' %
                      (deprecated_group,
                       deprecated_opt.name))
    try:
        # Format the default per option type; the asserts double-check that
        # the declared type matches the actual default value.
        if opt_default is None:
            print('#%s=<None>' % opt_name)
        elif opt_type == STROPT:
            assert(isinstance(opt_default, six.string_types))
            print('#%s=%s' % (opt_name, _sanitize_default(opt_name,
                                                          opt_default)))
        elif opt_type == BOOLOPT:
            assert(isinstance(opt_default, bool))
            print('#%s=%s' % (opt_name, str(opt_default).lower()))
        elif opt_type == INTOPT:
            assert(isinstance(opt_default, int) and
                   not isinstance(opt_default, bool))
            print('#%s=%s' % (opt_name, opt_default))
        elif opt_type == FLOATOPT:
            assert(isinstance(opt_default, float))
            print('#%s=%s' % (opt_name, opt_default))
        elif opt_type == LISTOPT:
            assert(isinstance(opt_default, list))
            print('#%s=%s' % (opt_name, ','.join(opt_default)))
        elif opt_type == DICTOPT:
            assert(isinstance(opt_default, dict))
            opt_default_strlist = [str(key) + ':' + str(value)
                                   for (key, value) in opt_default.items()]
            print('#%s=%s' % (opt_name, ','.join(opt_default_strlist)))
        elif opt_type == MULTISTROPT:
            assert(isinstance(opt_default, list))
            if not opt_default:
                opt_default = ['']
            for default in opt_default:
                print('#%s=%s' % (opt_name, default))
        print('')
    except Exception:
        sys.stderr.write('Error in option "%s"\n' % opt_name)
        sys.exit(1)
def main():
    # CLI entry point: forward all arguments after the program name.
    generate(sys.argv[1:])
if __name__ == '__main__':
    main()
| frossigneux/blazar | climate/openstack/common/config/generator.py | Python | apache-2.0 | 10,412 |
-- Static stat/config table for NPC id 1007 (the file name suggests "Mina",
-- a gypsy jewel seller). This is pure data handed to the engine via
-- registerNpc; field semantics are defined by the server's NPC API.
registerNpc(1007, {
  walk_speed = 0,
  run_speed = 0,
  scale = 130,
  r_weapon = 0,
  l_weapon = 0,
  level = 10,
  hp = 100,
  attack = 100,
  hit = 100,
  def = 100,
  res = 100,
  avoid = 100,
  attack_spd = 100,
  is_magic_damage = 0,
  ai_type = 0,
  give_exp = 39,
  drop_type = 58,
  drop_money = 0,
  drop_item = 0,
  union_number = 0,
  -- NOTE(review): need_summon_count and sell_tab0 share the value 225;
  -- verify against the NPC data sheet that this is intentional.
  need_summon_count = 225,
  sell_tab0 = 225,
  sell_tab1 = 0,
  sell_tab2 = 0,
  sell_tab3 = 0,
  can_target = 0,
  attack_range = 200,
  npc_type = 999,
  hit_material_type = 0,
  face_icon = 17,
  summon_mob_type = 17,
  quest_type = 0,
  height = 0
});
-- Engine lifecycle callbacks for this NPC. These are default stubs; the
-- boolean returns appear to signal success to the caller (TODO confirm
-- against the scripting API).
function OnInit(entity)
  return true
end
function OnCreate(entity)
  return true
end
function OnDelete(entity)
  return true
end
-- Intentionally empty: no special behaviour on death.
function OnDead(entity)
end
function OnDamaged(entity)
end | RavenX8/osIROSE-new | scripts/npcs/ai/[gypsy_jewel_seller]_mina.lua | Lua | apache-2.0 | 1,070 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import java.util.List;
import java.util.Set;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.cli.ConverterHint;
import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.CliUtil;
import org.apache.geode.management.internal.cli.domain.SubscriptionQueueSizeResult;
import org.apache.geode.management.internal.cli.functions.GetSubscriptionQueueSizeFunction;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.result.ResultBuilder;
import org.apache.geode.management.internal.security.ResourceOperation;
import org.apache.geode.security.ResourcePermission;
/**
 * Gfsh command reporting the subscription queue event count for a durable
 * client, optionally narrowed to a single durable CQ.
 */
public class CountDurableCQEventsCommand extends InternalGfshCommand {
  DurableClientCommandsResultBuilder builder = new DurableClientCommandsResultBuilder();
  @CliCommand(value = CliStrings.COUNT_DURABLE_CQ_EVENTS,
      help = CliStrings.COUNT_DURABLE_CQ_EVENTS__HELP)
  @CliMetaData()
  @ResourceOperation(resource = ResourcePermission.Resource.CLUSTER,
      operation = ResourcePermission.Operation.READ)
  public Result countDurableCqEvents(
      @CliOption(key = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CLIENT__ID, mandatory = true,
          help = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CLIENT__ID__HELP) final String durableClientId,
      @CliOption(key = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CQ__NAME,
          help = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CQ__NAME__HELP) final String cqName,
      @CliOption(key = {CliStrings.MEMBER, CliStrings.MEMBERS},
          help = CliStrings.COUNT_DURABLE_CQ_EVENTS__MEMBER__HELP,
          optionContext = ConverterHint.MEMBERIDNAME) final String[] memberNameOrId,
      @CliOption(key = {CliStrings.GROUP, CliStrings.GROUPS},
          help = CliStrings.COUNT_DURABLE_CQ_EVENTS__GROUP__HELP,
          optionContext = ConverterHint.MEMBERGROUP) final String[] group) {
    Result result;
    try {
      // Resolve the target members from the --group/--member filters.
      Set<DistributedMember> targetMembers = findMembers(group, memberNameOrId);
      if (targetMembers.isEmpty()) {
        return ResultBuilder.createUserErrorResult(CliStrings.NO_MEMBERS_FOUND_MESSAGE);
      }
      // Function arguments: [0] durable client id, [1] durable CQ name
      // (may be null when counting for the whole client).
      String[] params = new String[2];
      params[0] = durableClientId;
      params[1] = cqName;
      final ResultCollector<?, ?> rc =
          CliUtil.executeFunction(new GetSubscriptionQueueSizeFunction(), params, targetMembers);
      final List<SubscriptionQueueSizeResult> funcResults =
          (List<SubscriptionQueueSizeResult>) rc.getResult();
      // The column header mentions the CQ when one was given, else the
      // durable client id.
      String queueSizeColumnName;
      if (cqName != null && !cqName.isEmpty()) {
        queueSizeColumnName = CliStrings
            .format(CliStrings.COUNT_DURABLE_CQ_EVENTS__SUBSCRIPTION__QUEUE__SIZE__CLIENT, cqName);
      } else {
        queueSizeColumnName = CliStrings.format(
            CliStrings.COUNT_DURABLE_CQ_EVENTS__SUBSCRIPTION__QUEUE__SIZE__CLIENT, durableClientId);
      }
      result = builder.buildTableResultForQueueSize(funcResults, queueSizeColumnName);
    } catch (Exception e) {
      // Any failure is surfaced to the gfsh user as an error result.
      result = ResultBuilder.createGemFireErrorResult(e.getMessage());
    }
    return result;
  }
}
| pdxrunner/geode | geode-core/src/main/java/org/apache/geode/management/internal/cli/commands/CountDurableCQEventsCommand.java | Java | apache-2.0 | 4,255 |
/**
* @author Oleksandr Prunyak (987456987p@gmail.com)
* @version $Id$
* @since 0.1
*/
package ru.job4j.loop; | Alex0889/oprunyak | chapter_001/src/test/java/ru/job4j/loop/package-info.java | Java | apache-2.0 | 110 |
/**
* @license
* Copyright 2018 Palantir Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as Lint from "../../index";
// tslint:disable: object-literal-sort-keys
// Example set for the `prefer-while` rule: each entry pairs a rule
// configuration with snippets the rule must accept (`pass`) and flag (`fail`).
export const codeExamples = [
    {
        description: "Prefer `while` loops instead of `for` loops without an initializer and incrementor.",
        config: Lint.Utils.dedent`
            "rules": { "prefer-while": true }
        `,
        // Compliant: each loop keeps an initializer and/or incrementor.
        pass: Lint.Utils.dedent`
            for(let i = 1; i < 10; i++) {
                console.log(i);
            }
            for (let i = 0; i < 10; i+=1) {
                console.log(i);
            }
            for (let i = 0; i < 10;) {
                i += 1;
            }
        `,
        // Violations: no initializer and no incrementor — should be `while`.
        fail: Lint.Utils.dedent`
            for(;;) {
                console.log('Hello World');
            }
            for(;true===true;) {
                console.log('Hello World');
            }
        `,
    },
];
| andy-hanson/tslint | src/rules/code-examples/preferWhile.examples.ts | TypeScript | apache-2.0 | 1,470 |
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.properties;
import org.junit.Test;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.customproperties.CustomElement;
import org.kie.workbench.common.stunner.bpmn.definition.property.general.SLADueDate;
import static junit.framework.TestCase.assertTrue;
import static org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.Factories.bpmn2;
/**
 * Tests for {@link SubProcessPropertyWriter} covering the custom (extension)
 * elements written for embedded subprocesses.
 */
public class EmbeddedSubprocessPropertyWriterTest {
    // Writer under test, wrapping a freshly created BPMN2 SubProcess.
    private SubProcessPropertyWriter tested = new SubProcessPropertyWriter(bpmn2.createSubProcess(),
                                                                           new FlatVariableScope());
    @Test
    public void testSetIsAsync() {
        // setAsync(true) must be reflected in the 'async' custom element.
        tested.setAsync(Boolean.TRUE);
        assertTrue(CustomElement.async.of(tested.getFlowElement()).get());
    }
    @Test
    public void testSetSlaDueDate() {
        // The serialized SLA due-date custom element must contain the date.
        String slaDueDate = "12/25/1983";
        tested.setSlaDueDate(new SLADueDate(slaDueDate));
        assertTrue(CustomElement.slaDueDate.of(tested.getFlowElement()).get().contains(slaDueDate));
    }
}
| Rikkola/kie-wb-common | kie-wb-common-stunner/kie-wb-common-stunner-sets/kie-wb-common-stunner-bpmn/kie-wb-common-stunner-bpmn-marshalling/src/test/java/org/kie/workbench/common/stunner/bpmn/client/marshall/converters/fromstunner/properties/EmbeddedSubprocessPropertyWriterTest.java | Java | apache-2.0 | 1,772 |
{% macro warnings_and_loader() -%}
<div ng-cloak>
<div class="oppia-toast-container toast-top-center">
<div ng-repeat="warning in (alertsService.warnings | limitTo:5) track by $index" class="toast toast-warning oppia-toast">
<button type="button" class="toast-close-button" ng-click="alertsService.deleteWarning(warning)" role="button">×</button>
<div class="toast-message">
<[warning.content]>
</div>
</div>
</div>
<div>
<div ng-repeat="message in alertsService.messages track by $index">
<alert-message message-object="message" message-index="$index"></alert-message>
</div>
</div>
<div ng-show="loadingMessage" class="oppia-loading-fullpage">
<div class="oppia-align-center">
<span translate="<[loadingMessage]>"></span>
<span class="oppia-loading-dot-one">.</span>
<span class="oppia-loading-dot-two">.</span>
<span class="oppia-loading-dot-three">.</span>
</div>
</div>
<div ng-show="!loadingMessage">
{% block content %}{% endblock %}
{% block footer %}{% endblock %}
</div>
</div>
{%- endmacro %}
<!DOCTYPE html>
<html ng-app="oppia" ng-controller="Base" itemscope itemtype="http://schema.org/Organization">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes">
<!-- Tiles for Internet Explorer. -->
<meta name="application-name" content="{{SITE_NAME}}">
<meta name="msapplication-TileColor" content="#ffffff">
<meta name="msapplication-square70x70logo" content="{{DOMAIN_URL}}/images/logo/msapplication-tiny.png">
<meta name="msapplication-square150x150logo" content="{{DOMAIN_URL}}/images/logo/msapplication-square.png">
<meta name="msapplication-wide310x150logo" content="{{DOMAIN_URL}}/images/logo/msapplication-wide.png">
<meta name="msapplication-square310x310logo" content="{{DOMAIN_URL}}/images/logo/msapplication-large.png">
<!-- The itemprops are for G+ sharing. -->
<meta itemprop="name" content="{{meta_name}}">
<meta itemprop="description" content="{{meta_description}}">
<!-- The og tags are for Facebook sharing. -->
<meta property="og:title" content="{{meta_name}}">
<meta property="og:site_name" content="Oppia">
<meta property="og:url" content="{{FULL_URL}}">
<meta property="og:description" content="{{meta_description}}">
<meta property="og:type" content="article">
<meta property="og:image" content="{{DOMAIN_URL}}/images/logo/288x288_logo_mint.png">
<link rel="apple-touch-icon" href="/images/logo/favicon.png">
<!-- The title is bound to the rootScope. The content of the block
maintitle can be a string or a translation id. If it is a translation it
will be replaced by its translation when the page is loading. If it is a
string it would be displayed as is. This is the only way to translate
the page title because the head of the file is outside the scope of
any other controller. -->
<title itemprop="name" translate="{% block maintitle %}Oppia{% endblock maintitle %}"></title>
{% block base_url %}{% endblock base_url %}
{% block header_css %}
{% include 'header_css_libs.html' %}
{% endblock header_css %}
<script>
var GLOBALS = {
ADDITIONAL_ANGULAR_MODULES: [],
csrf_token: JSON.parse('{{csrf_token|js_string}}'),
csrf_token_i18n: JSON.parse('{{csrf_token_i18n|js_string}}'),
csrf_token_create_exploration: JSON.parse('{{csrf_token_create_exploration|js_string}}'),
DEV_MODE: JSON.parse('{{DEV_MODE|js_string}}'),
INVALID_NAME_CHARS: JSON.parse('{{INVALID_NAME_CHARS|js_string}}'),
ACTIVITY_STATUS_PRIVATE: JSON.parse(
'{{ACTIVITY_STATUS_PRIVATE|js_string}}'),
ACTIVITY_STATUS_PUBLIC: JSON.parse(
'{{ACTIVITY_STATUS_PUBLIC|js_string}}'),
ACTIVITY_STATUS_PUBLICIZED: JSON.parse(
'{{ACTIVITY_STATUS_PUBLICIZED|js_string}}'),
ALL_CATEGORIES: JSON.parse('{{ALL_CATEGORIES|js_string}}'),
ALL_LANGUAGE_CODES: JSON.parse('{{ALL_LANGUAGE_CODES|js_string}}'),
DEFAULT_LANGUAGE_CODE: JSON.parse(
'{{DEFAULT_LANGUAGE_CODE|js_string}}'),
RTE_COMPONENT_SPECS: JSON.parse('{{RTE_COMPONENT_SPECS|js_string}}'),
CAN_SEND_ANALYTICS_EVENTS: JSON.parse(
'{{CAN_SEND_ANALYTICS_EVENTS|js_string}}'),
/* A list of functions to be called when an exploration is completed. */
POST_COMPLETION_HOOKS: [],
SYSTEM_USERNAMES: JSON.parse('{{SYSTEM_USERNAMES|js_string}}'),
userIsLoggedIn: JSON.parse('{{user_is_logged_in|js_string}}'),
preferredSiteLanguageCode: JSON.parse('{{preferred_site_language_code|js_string}}'),
SUPPORTED_SITE_LANGUAGES: JSON.parse('{{SUPPORTED_SITE_LANGUAGES|js_string}}')
};
{% if additional_angular_modules %}
GLOBALS.ADDITIONAL_ANGULAR_MODULES = JSON.parse('{{additional_angular_modules|js_string}}');
{% endif %}
</script>
{% block header_js %}
{% include 'header_js_libs.html' %}
{% endblock header_js %}
{{BEFORE_END_HEAD_TAG_HOOK}}
</head>
<body>
{% if iframed %}
{{ warnings_and_loader() }}
{% else %}
<div class="oppia-base-container" ng-class="{'oppia-sidebar-menu-open': sidebarIsShown, 'oppia-sidebar-menu-closed': !sidebarIsShown}">
<div class="oppia-content-container">
<div id="wrapper">
<div class="oppia-main-body">
<!-- Top navigation. -->
<nav class="navbar navbar-default oppia-navbar oppia-prevent-selection" role="navigation">
<div class="navbar-container">
<div class="navbar-header protractor-test-navbar-header pull-left">
<a ng-if="windowIsNarrow" ng-click="openSidebar()" class="navbar-brand oppia-navbar-menu oppia-transition-200">
<i class="material-icons oppia-navbar-menu-icon"></i>
</a>
<a class="oppia-navbar-brand-name oppia-transition-200" href="/" focus-on="<[LABEL_FOR_CLEARING_FOCUS]>">
<img src="/images/logo/288x128_logo_white.png" class="oppia-logo" ng-class="windowIsNarrow ? 'oppia-logo-small' : 'oppia-logo-wide'">
</a>
<!-- This is needed for the correct image to appear when an exploration is shared using G+. -->
<a style="display: none;">
<img src="/images/logo/288x128_logo_mint.png" itemprop="image">
</a>
</div>
{% if nav_mode != 'signup' %}
<div ng-cloak class="navbar-header pull-right">
<ul class="nav oppia-navbar-nav oppia-navbar-profile">
{% if username %}
<li class="dropdown pull-right">
<a class="dropdown-toggle oppia-navbar-dropdown-toggle" data-toggle="dropdown" ng-mouseover="onMouseoverProfilePictureOrDropdown($event)" ng-mouseleave="onMouseoutProfilePictureOrDropdown($event)">
<div class="oppia-navbar-profile-picture-container" ng-cloak>
{% if profile_picture_data_url %}
<img src="{{profile_picture_data_url}}" class="oppia-navbar-profile-picture img-circle">
<span class="caret" style="margin-top: 10px;"></span>
{% else %}
<i class="material-icons md-40" style="margin-top: -1px;"></i>
<span class="caret" style="margin-top: -26px;"></span>
{% endif %}
<div class="oppia-navbar-dashboard-indicator ng-cloak" ng-if="numUnseenNotifications > 0">
<span class="oppia-navbar-dashboard-indicator-text">
<[numUnseenNotifications]>
</span>
</div>
<div style="display: none;" class="oppia-user-email">
{{user_email}}
</div>
{% if is_admin or is_moderator %}
<div class="oppia-navbar-role-indicator">
{% if is_admin %}
<!-- "right: 4px;" is necessary here but not in moderator to prevent 'A' from appearing off-center because 'A' is slightly thinner than 'M' in this font -->
<span class="oppia-navbar-role-text" style="right: 4px;">A</span>
{% elif is_moderator %}
<span class="oppia-navbar-role-text">M</span>
{% endif %}
</div>
{% endif %}
</div>
</a>
<ul class="dropdown-menu ng-cloak oppia-navbar-dropdown" role="menu" ng-mouseover="onMouseoverProfilePictureOrDropdown($event)" ng-mouseleave="onMouseoutProfilePictureOrDropdown($event)" ng-show="profileDropdownIsActive">
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="/profile/{{username}}">
<strong>{{username}}</strong>
</a>
</li>
<hr class="oppia-top-right-menu-item-separator">
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="/dashboard">
<span translate="I18N_TOPNAV_DASHBOARD"></span>
</a>
</li>
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="/notifications_dashboard">
<span translate="I18N_TOPNAV_NOTIFICATIONS"></span>
<span ng-if="numUnseenNotifications > 0">
(<[numUnseenNotifications]>)
</span>
</a>
</li>
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="/preferences">
<span translate="I18N_TOPNAV_PREFERENCES"></span>
</a>
</li>
{% if is_moderator %}
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="/moderator" target="_blank">
<span translate="I18N_TOPNAV_MODERATOR_PAGE"></span>
</a>
</li>
{% endif %}
{% if is_super_admin %}
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="/admin" target="_blank">
<span translate="I18N_TOPNAV_ADMIN_PAGE"></span>
</a>
</li>
{% endif %}
<hr class="oppia-top-right-menu-item-separator">
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="{{logout_url}}">
<span translate="I18N_TOPNAV_LOGOUT"></span>
</a>
</li>
</ul>
</li>
{% else %}
<li class="dropdown oppia-navbar-clickable-dropdown pull-right">
<div class="oppia-navbar-button-container" style="margin-right: 10px;">
<button class="btn oppia-navbar-button"
ng-click="onLoginButtonClicked('{{login_url}}')">
<span translate="I18N_TOPNAV_SIGN_IN"></span>
<span class="caret"></span>
</button>
</div>
<ul class="dropdown-menu oppia-navbar-dropdown" role="menu" style="margin-right: 15px; padding: 0;" ng-mouseover="onMouseoverDropdownMenu($event)" ng-mouseleave="onMouseoutDropdownMenu($event)">
<li>
<a href style="padding: 0; width: 200px;" ng-click="onLoginButtonClicked('{{login_url}}')">
<img src="/images/signin/Red-signin-Long-base-44dp.png">
</a>
</li>
</ul>
</li>
{% endif %}
</ul>
<ul class="nav oppia-navbar-nav">
{% if nav_mode != 'create' and nav_mode != 'explore' %}
<ul ng-if="windowIsNarrow" class="nav oppia-navbar-tabs-narrow">
<create-activity-button></create-activity-button>
</ul>
<ul ng-if="!windowIsNarrow" class="nav oppia-navbar-tabs">
<create-activity-button></create-activity-button>
{% if SHOW_CUSTOM_PAGES %}
<li class="oppia-clickable-navbar-element pull-right">
<a class="oppia-navbar-tab" href="/forum" translate="I18N_TOPNAV_FORUM">
</a>
</li>
{% endif %}
<li class="dropdown oppia-navbar-clickable-dropdown pull-right">
<a class="oppia-navbar-tab">
<span translate="I18N_TOPNAV_ABOUT"></span>
<span class="caret"></span>
</a>
<ul class="dropdown-menu oppia-navbar-dropdown" ng-mouseover="onMouseoverDropdownMenu($event)" ng-mouseleave="onMouseoutDropdownMenu($event)">
<li><a href="/about" translate="I18N_TOPNAV_ABOUT_OPPIA"></a></li>
<li><a href="/teach" translate="I18N_TOPNAV_TEACH_WITH_OPPIA"></a></li>
{% for additional_link in SIDEBAR_MENU_ADDITIONAL_LINKS %}
<li><a href="{{additional_link['link']}}" target="_blank">{{additional_link['name']}}</a></li>
{% endfor %}
{% if SHOW_CUSTOM_PAGES %}
<li><a href="/terms" translate="I18N_TOPNAV_TERMS_OF_SERVICE"></a></li>
<li><a href="/privacy" translate="I18N_TOPNAV_PRIVACY_POLICY"></a></li>
{% endif %}
</ul>
</li>
<li class="oppia-clickable-navbar-element pull-right">
<a class="oppia-navbar-tab" href="/library" translate="I18N_TOPNAV_LIBRARY"></a>
</li>
</ul>
{% endif %}
</ul>
</div>
{% endif %}
<div class="collapse navbar-collapse ng-cloak">
{% block navbar_breadcrumb %}
{% endblock navbar_breadcrumb %}
{% block local_top_nav_options %}
{% endblock %}
</div>
</div>
</nav>
<div class="oppia-top-of-page-padding">
</div>
{{ warnings_and_loader() }}
</div>
<noscript>
<div class="oppia-page-cards-container">
<div class="md-default-theme oppia-page-card oppia-long-text">
<h2>
<span translate="I18N_SPLASH_JAVASCRIPT_ERROR_TITLE"></span>
<i class="material-icons"></i>
</h2>
<p translate="I18N_SPLASH_JAVASCRIPT_ERROR_DESCRIPTION"
translate-values="{hrefUrl: 'http://www.enable-javascript.com/'}"></p>
<p translate="I18N_SPLASH_JAVASCRIPT_ERROR_THANKS"></p>
</div>
</div>
</noscript>
{% include 'side_nav.html' %}
</div>
</div>
</div>
{% if DEV_MODE %}
<div class="oppia-dev-mode">
Dev Mode
</div>
{% endif %}
{% if SITE_FEEDBACK_FORM_URL %}
<a href="{{SITE_FEEDBACK_FORM_URL}}" target="_blank"
class="oppia-site-feedback oppia-transition-200">
<i class="material-icons md-18" style="vertical-align: middle;"></i>
<span translate="I18N_SPLASH_SITE_FEEDBACK"></span>
</a>
{% endif %}
{% endif %}
{% include 'directives.html' %}
{% include 'forms/form_builder_templates.html' %}
{% include 'footer_js_libs.html' %}
{% include 'components/collection_summary_tile_directive.html' %}
{% include 'components/exploration_summary_tile_directive.html' %}
{% include 'components/rating_display.html' %}
{% include 'components/create_activity_button_directive.html' %}
{% include 'components/activity_tiles_infinity_grid_directive.html' %}
{% include 'components/loading_dots_directive.html' %}
<script>
{{ include_js_file('app.js') }}
{{ include_js_file('base.js') }}
{{ include_js_file('directives.js') }}
{{ include_js_file('filters.js') }}
{{ include_js_file('i18n.js') }}
{{ include_js_file('forms/formBuilder.js') }}
{{ include_js_file('services/alertsService.js') }}
{{ include_js_file('services/explorationContextService.js') }}
{{ include_js_file('services/autoplayedVideosService.js') }}
{{ include_js_file('services/searchService.js') }}
{{ include_js_file('components/ActivityTilesInfinityGridDirective.js') }}
{{ include_js_file('components/AlertMessageDirective.js') }}
{{ include_js_file('components/CollectionCreationService.js') }}
{{ include_js_file('components/CollectionSummaryTileDirective.js') }}
{{ include_js_file('components/CreateActivityButtonDirective.js') }}
{{ include_js_file('components/ExplorationCreationService.js') }}
{{ include_js_file('components/ExplorationSummaryTileDirective.js') }}
{{ include_js_file('components/LoadingDotsDirective.js') }}
{{ include_js_file('components/ObjectEditorDirective.js') }}
{{ include_js_file('components/RatingComputationService.js')}}
{{ include_js_file('components/RatingDisplayDirective.js')}}
{{ include_js_file('components/Select2DropdownDirective.js') }}
{{ include_js_file('domain/utilities/UrlInterpolationService.js') }}
{{ include_js_file('expressions/expressionSyntaxTree.js') }}
{{ include_js_file('expressions/evaluator.js') }}
{{ include_js_file('expressions/parser.js') }}
{{ OBJECT_EDITORS_JS }}
</script>
{% block footer_js %}
{% endblock footer_js %}
{{BEFORE_END_BODY_TAG_HOOK}}
</body>
</html>
| mit0110/oppia | core/templates/dev/head/base.html | HTML | apache-2.0 | 20,182 |
<!DOCTYPE html><html lang="en"><head><title>src/Parser</title></head><meta http-equiv="Content-Type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0"><meta name="groc-relative-root" content="../"><meta name="groc-document-path" content="src/Parser.coffee"><meta name="groc-project-path" content="src/Parser.coffee"><meta name="groc-github-url" content="https://github.com/sjorek/goatee-rules.js"><link rel="stylesheet" type="text/css" media="all" href="../assets/style.css"><script type="text/javascript" src="../assets/behavior.js"></script><body><div id="meta"><div class="file-path"><a href="https://github.com/sjorek/goatee-rules.js/blob/master/src/Parser.coffee">src/Parser.coffee</a></div></div><div id="document"><div class="segment"><div class="code folded"><div class="wrapper marker"><span class="c1">### </span></div><div class="wrapper"><span class="hljs-comment">###
BSD 3-Clause License
Copyright (c) 2017, Stephan Jorek
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###</span>
<span class="hljs-keyword">try</span>
exports = <span class="hljs-built_in">require</span> <span class="hljs-string">'./ParserImpl'</span>
<span class="hljs-keyword">catch</span>
exports = <span class="hljs-literal">null</span>
<span class="hljs-keyword">if</span> exports <span class="hljs-keyword">is</span> <span class="hljs-literal">null</span>
Grammar = <span class="hljs-built_in">require</span> <span class="hljs-string">'./Grammar'</span>
exports = <span class="hljs-built_in">module</span>?.exports ? <span class="hljs-keyword">this</span></div></div></div><div class="segment"><div class="comments "><div class="wrapper"><h1 id="parser">Parser</h1>
<hr>
<p>A thin compatibillity layer providing an
“on-the-fly” generated goatee-rules parser.</p></div></div></div><div class="segment"><div class="comments doc-section doc-section-static"><div class="wrapper"><p><span class='doc-section-header'>Static property parser of type <em>Parser</em></span></p>
<hr></div></div><div class="code"><div class="wrapper"> exports.parser = parser = Grammar.createParser()</div></div></div><div class="segment"><div class="comments doc-section"><div class="wrapper"><p><span class='doc-section-header'> class Parser and namespace GoateeScript</span></p>
<hr></div></div><div class="code"><div class="wrapper"> exports.Parser = parser.Parser</div></div></div><div class="segment"><div class="comments doc-section doc-section-static"><div class="wrapper"><p><span class='doc-section-header'>Static function parse</span></p>
<hr></div></div><div class="code"><div class="wrapper"> exports.parse = <span class="hljs-function"><span class="hljs-params">()</span> -></span> parser.parse.apply(parser, arguments)</div></div></div><div class="segment"><div class="comments doc-section doc-section-static"><div class="wrapper"><p><span class='doc-section-header'>Static function main</span></p>
<hr>
<p>Parameters:</p>
<ul>
<li><strong>args must be an Array.</strong></li>
</ul></div></div><div class="code"><div class="wrapper"> exports.main = <span class="hljs-function"><span class="hljs-params">(args)</span> -></span>
<span class="hljs-keyword">if</span> <span class="hljs-keyword">not</span> args[<span class="hljs-number">1</span>]
<span class="hljs-built_in">console</span>.log <span class="hljs-string">"Usage: <span class="hljs-subst">#{args[<span class="hljs-number">0</span>]}</span> FILE"</span>
process.exit <span class="hljs-number">1</span>
source = <span class="hljs-built_in">require</span>(<span class="hljs-string">'fs'</span>).readFileSync(
<span class="hljs-built_in">require</span>(<span class="hljs-string">'path'</span>).normalize(args[<span class="hljs-number">1</span>]), <span class="hljs-string">"utf8"</span>
)
parser.parse(source)
<span class="hljs-built_in">module</span>.exports = exports</div></div></div><div class="segment"><div class="comments "><div class="wrapper"><p>execute main automatically</p></div></div><div class="code"><div class="wrapper"><span class="hljs-keyword">if</span> (<span class="hljs-built_in">module</span> <span class="hljs-keyword">isnt</span> <span class="hljs-literal">undefined</span> && <span class="hljs-built_in">require</span>.main <span class="hljs-keyword">is</span> <span class="hljs-built_in">module</span>)
exports.main process.argv.slice(<span class="hljs-number">1</span>)</div></div></div></div></body></html> | sjorek/goatee-rules | doc/src/Parser.coffee.html | HTML | apache-2.0 | 5,944 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_22) on Wed Sep 14 22:21:32 CEST 2011 -->
<META http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
<TITLE>
net.sourceforge.pmd.typeresolution.rules.imports Class Hierarchy (PMD 4.2.6 API)
</TITLE>
<META NAME="date" CONTENT="2011-09-14">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="net.sourceforge.pmd.typeresolution.rules.imports Class Hierarchy (PMD 4.2.6 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../net/sourceforge/pmd/typeresolution/rules/package-tree.html"><B>PREV</B></A>
<A HREF="../../../../../../net/sourceforge/pmd/typeresolution/visitors/package-tree.html"><B>NEXT</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?net/sourceforge/pmd/typeresolution/rules/imports/package-tree.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
Hierarchy For Package net.sourceforge.pmd.typeresolution.rules.imports
</H2>
</CENTER>
<DL>
<DT><B>Package Hierarchies:</B><DD><A HREF="../../../../../../overview-tree.html">All Packages</A></DL>
<HR>
<H2>
Class Hierarchy
</H2>
<UL>
<LI TYPE="circle">java.lang.Object<UL>
<LI TYPE="circle">net.sourceforge.pmd.<A HREF="../../../../../../net/sourceforge/pmd/CommonAbstractRule.html" title="class in net.sourceforge.pmd"><B>CommonAbstractRule</B></A> (implements net.sourceforge.pmd.<A HREF="../../../../../../net/sourceforge/pmd/Rule.html" title="interface in net.sourceforge.pmd">Rule</A>)
<UL>
<LI TYPE="circle">net.sourceforge.pmd.<A HREF="../../../../../../net/sourceforge/pmd/AbstractJavaRule.html" title="class in net.sourceforge.pmd"><B>AbstractJavaRule</B></A> (implements net.sourceforge.pmd.ast.<A HREF="../../../../../../net/sourceforge/pmd/ast/JavaParserVisitor.html" title="interface in net.sourceforge.pmd.ast">JavaParserVisitor</A>)
<UL>
<LI TYPE="circle">net.sourceforge.pmd.<A HREF="../../../../../../net/sourceforge/pmd/AbstractRule.html" title="class in net.sourceforge.pmd"><B>AbstractRule</B></A><UL>
<LI TYPE="circle">net.sourceforge.pmd.rules.imports.<A HREF="../../../../../../net/sourceforge/pmd/rules/imports/UnusedImportsRule.html" title="class in net.sourceforge.pmd.rules.imports"><B>UnusedImportsRule</B></A><UL>
<LI TYPE="circle">net.sourceforge.pmd.typeresolution.rules.imports.<A HREF="../../../../../../net/sourceforge/pmd/typeresolution/rules/imports/UnusedImports.html" title="class in net.sourceforge.pmd.typeresolution.rules.imports"><B>UnusedImports</B></A></UL>
</UL>
</UL>
</UL>
</UL>
</UL>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../net/sourceforge/pmd/typeresolution/rules/package-tree.html"><B>PREV</B></A>
<A HREF="../../../../../../net/sourceforge/pmd/typeresolution/visitors/package-tree.html"><B>NEXT</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?net/sourceforge/pmd/typeresolution/rules/imports/package-tree.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2002-2011 InfoEther. All Rights Reserved.
</BODY>
</HTML>
| iconnor/pmd | docs/apidocs/net/sourceforge/pmd/typeresolution/rules/imports/package-tree.html | HTML | apache-2.0 | 7,723 |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "Firestore/core/src/local/memory_persistence.h"
#include "Firestore/core/test/unit/local/bundle_cache_test.h"
#include "Firestore/core/test/unit/local/persistence_testing.h"
namespace firebase {
namespace firestore {
namespace local {
namespace {

// Factory handed to the parameterized BundleCacheTest suite below. Builds a
// fresh in-memory Persistence instance (eager-GC variant, per the helper's
// name) for each test.
std::unique_ptr<Persistence> PersistenceFactory() {
  return MemoryPersistenceWithEagerGcForTesting();
}

}  // namespace

// Instantiates every test in the shared BundleCacheTest suite against the
// memory-backed persistence produced by PersistenceFactory.
INSTANTIATE_TEST_SUITE_P(MemoryBundleCacheTest,
                         BundleCacheTest,
                         testing::Values(PersistenceFactory));

}  // namespace local
}  // namespace firestore
}  // namespace firebase
| firebase/firebase-ios-sdk | Firestore/core/test/unit/local/memory_bundle_cache_test.cc | C++ | apache-2.0 | 1,204 |
<?php
/**
* Copyright 2011 Crucial Web Studio, LLC or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* https://raw.githubusercontent.com/chargely/chargify-sdk-php/master/LICENSE.md
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
namespace Crucial\Service;
use GuzzleHttp\Client;
use GuzzleHttp\Psr7;
use GuzzleHttp\Psr7\Request;
use GuzzleHttp\Psr7\Response;
use GuzzleHttp\Exception\RequestException;
use GuzzleHttp\HandlerStack;
use Crucial\Service\Chargify\Exception\BadMethodCallException;
use Crucial\Service\Chargify\Adjustment;
use Crucial\Service\Chargify\Charge;
use Crucial\Service\Chargify\Component;
use Crucial\Service\Chargify\Coupon;
use Crucial\Service\Chargify\Customer;
use Crucial\Service\Chargify\Event;
use Crucial\Service\Chargify\Product;
use Crucial\Service\Chargify\Refund;
use Crucial\Service\Chargify\Statement;
use Crucial\Service\Chargify\Stats;
use Crucial\Service\Chargify\Subscription;
use Crucial\Service\Chargify\Transaction;
use Crucial\Service\Chargify\Webhook;
class Chargify
{
    /**
     * Library version.
     */
    const VERSION = '0.1.1';

    /**
     * Guzzle http client used for every API request.
     *
     * @var Client
     */
    private $httpClient;

    /**
     * The complete hostname; e.g. "my-app-subdomain.chargify.com",
     * not just "my-app-subdomain"
     *
     * @var string
     */
    protected $hostname;

    /**
     * Your http authentication password. The password is always "x".
     *
     * @var string
     */
    protected $password = 'x';

    /**
     * Your api key
     *
     * @var string
     */
    protected $apiKey;

    /**
     * Shared key
     *
     * @var string
     */
    protected $sharedKey;

    /**
     * Request timeout in seconds.
     *
     * @var int
     */
    protected $timeout = 10;

    /**
     * Response format appended to every request path; always 'json'.
     *
     * @var string
     */
    protected $format = 'json';

    /**
     * Config used in constructor.
     *
     * @var array
     */
    protected $config;

    /**
     * Last response received. FALSE when the request produced no response
     * (networking error, timeout, etc.); NULL before any request is sent.
     *
     * @var Response|false|null
     */
    protected $lastResponse;

    /**
     * Initialize the service
     *
     * @param array $config Required keys: 'hostname', 'api_key', 'shared_key'.
     *                      Optional: 'timeout' (seconds; defaults to 10).
     */
    public function __construct($config)
    {
        // store a copy
        $this->config = $config;

        // set individual properties
        $this->hostname  = trim($config['hostname'], '/');
        $this->apiKey    = $config['api_key'];
        $this->sharedKey = $config['shared_key'];

        if (!empty($config['timeout'])) {
            $this->timeout = $config['timeout'];
        }

        $this->httpClient = new Client([
            'base_uri'        => 'https://' . $this->hostname . '/',
            'handler'         => HandlerStack::create(),
            'timeout'         => $this->timeout,
            'allow_redirects' => false,
            // Chargify uses HTTP basic auth; the password is always "x".
            'auth'            => [$this->apiKey, $this->password],
            'headers'         => [
                'User-Agent'   => 'chargify-sdk-php/' . self::VERSION . ' (https://github.com/chargely/chargify-sdk-php)',
                'Content-Type' => 'application/' . $this->format
            ]
        ]);
    }

    /**
     * @return Client
     */
    public function getHttpClient()
    {
        return $this->httpClient;
    }

    /**
     * Returns config sent in constructor
     *
     * @return array
     */
    public function getConfig()
    {
        return $this->config;
    }

    /**
     * Send the request to Chargify
     *
     * @param string $path    URL path we are requesting such as: /subscriptions/<subscription_id>/adjustments
     * @param string $method  GET, POST, PUT, DELETE (case-insensitive)
     * @param string $rawData Request body; required for POST and PUT
     * @param array  $params  Query string parameters
     *
     * @return Response|false Response object, or FALSE if there was no
     *                        response (networking error, timeout, etc.)
     *
     * @throws BadMethodCallException When a POST or PUT is attempted without raw data
     */
    public function request($path, $method, $rawData = null, $params = [])
    {
        $method = strtoupper($method);
        $path   = ltrim($path, '/') . '.' . $this->format;
        $client = $this->getHttpClient();

        // POST and PUT must carry a request body.
        if (in_array($method, ['POST', 'PUT']) && null === $rawData) {
            throw new BadMethodCallException('You must send raw data in a POST or PUT request');
        }

        $options = [
            'query' => $params,
            'body'  => null,
        ];

        if (!empty($rawData)) {
            $options['body'] = Psr7\stream_for($rawData);
        }

        $request = new Request($method, $path);

        try {
            $response = $client->send($request, $options);
        } catch (RequestException $e) {
            // Keep the error response (4xx/5xx) when one exists so callers
            // can inspect it; FALSE signals "no response at all".
            $response = $e->hasResponse() ? $e->getResponse() : false;
        }

        $this->lastResponse = $response;

        return $response;
    }

    /**
     * Last response received, FALSE when the last request got none, or NULL
     * when no request has been made yet.
     *
     * @return Response|false|null
     */
    public function getLastResponse()
    {
        return $this->lastResponse;
    }

    /**
     * Helper for instantiating an instance of Customer
     *
     * @return Customer
     */
    public function customer()
    {
        return new Customer($this);
    }

    /**
     * Helper for instantiating an instance of Subscription
     *
     * @return Subscription
     */
    public function subscription()
    {
        return new Subscription($this);
    }

    /**
     * Helper for instantiating an instance of Product
     *
     * @return Product
     */
    public function product()
    {
        return new Product($this);
    }

    /**
     * Helper for instantiating an instance of Adjustment
     *
     * @return Adjustment
     */
    public function adjustment()
    {
        return new Adjustment($this);
    }

    /**
     * Helper for instantiating an instance of Charge
     *
     * @return Charge
     */
    public function charge()
    {
        return new Charge($this);
    }

    /**
     * Helper for instantiating an instance of Component
     *
     * @return Component
     */
    public function component()
    {
        return new Component($this);
    }

    /**
     * Helper for instantiating an instance of Coupon
     *
     * @return Coupon
     */
    public function coupon()
    {
        return new Coupon($this);
    }

    /**
     * Helper for instantiating an instance of Transaction
     *
     * @return Transaction
     */
    public function transaction()
    {
        return new Transaction($this);
    }

    /**
     * Helper for instantiating an instance of Refund
     *
     * @return Refund
     */
    public function refund()
    {
        return new Refund($this);
    }

    /**
     * Helper for instantiating an instance of Statement
     *
     * @return Statement
     */
    public function statement()
    {
        return new Statement($this);
    }

    /**
     * Helper for instantiating an instance of Event
     *
     * @return Event
     */
    public function event()
    {
        return new Event($this);
    }

    /**
     * Helper for instantiating an instance of Webhook
     *
     * @return Webhook
     */
    public function webhook()
    {
        return new Webhook($this);
    }

    /**
     * Helper for instantiating an instance of Stats
     *
     * @return Stats
     */
    public function stats()
    {
        return new Stats($this);
    }
}
| chargely/chargify-sdk-php | src/Crucial/Service/Chargify.php | PHP | apache-2.0 | 7,766 |
/*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.exprtree;
/**
 * Container of the expression-tree node classes, one per Soy operator.
 *
 * <p> Important: Do not use outside of Soy code (treat as superpackage-private).
 *
 * <p> Every node follows the same shape: a public constructor that fixes the
 * {@link Operator}, a protected copy constructor used by {@code clone()}, a
 * {@code getKind()} accessor, and a covariant {@code clone()} override.
 *
 * @author Kai Huang
 */
public class OperatorNodes {

  /** Not instantiable: pure container of nested node classes. */
  private OperatorNodes() {}

  /** Node representing the unary '-' (negative) operator. */
  public static class NegativeOpNode extends AbstractOperatorNode {

    public NegativeOpNode() {
      super(Operator.NEGATIVE);
    }

    protected NegativeOpNode(NegativeOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.NEGATIVE_OP_NODE;
    }

    @Override
    public NegativeOpNode clone() {
      return new NegativeOpNode(this);
    }
  }

  /** Node representing the 'not' operator. */
  public static class NotOpNode extends AbstractOperatorNode {

    public NotOpNode() {
      super(Operator.NOT);
    }

    protected NotOpNode(NotOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.NOT_OP_NODE;
    }

    @Override
    public NotOpNode clone() {
      return new NotOpNode(this);
    }
  }

  /** Node representing the '*' (times) operator. */
  public static class TimesOpNode extends AbstractOperatorNode {

    public TimesOpNode() {
      super(Operator.TIMES);
    }

    protected TimesOpNode(TimesOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.TIMES_OP_NODE;
    }

    @Override
    public TimesOpNode clone() {
      return new TimesOpNode(this);
    }
  }

  /** Node representing the '/' (divide by) operator. */
  public static class DivideByOpNode extends AbstractOperatorNode {

    public DivideByOpNode() {
      super(Operator.DIVIDE_BY);
    }

    protected DivideByOpNode(DivideByOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.DIVIDE_BY_OP_NODE;
    }

    @Override
    public DivideByOpNode clone() {
      return new DivideByOpNode(this);
    }
  }

  /** Node representing the '%' (mod) operator. */
  public static class ModOpNode extends AbstractOperatorNode {

    public ModOpNode() {
      super(Operator.MOD);
    }

    protected ModOpNode(ModOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.MOD_OP_NODE;
    }

    @Override
    public ModOpNode clone() {
      return new ModOpNode(this);
    }
  }

  /** Node representing the '+' (plus) operator. */
  public static class PlusOpNode extends AbstractOperatorNode {

    public PlusOpNode() {
      super(Operator.PLUS);
    }

    protected PlusOpNode(PlusOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.PLUS_OP_NODE;
    }

    @Override
    public PlusOpNode clone() {
      return new PlusOpNode(this);
    }
  }

  /** Node representing the binary '-' (minus) operator. */
  public static class MinusOpNode extends AbstractOperatorNode {

    public MinusOpNode() {
      super(Operator.MINUS);
    }

    protected MinusOpNode(MinusOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.MINUS_OP_NODE;
    }

    @Override
    public MinusOpNode clone() {
      return new MinusOpNode(this);
    }
  }

  /** Node representing the '&lt;' (less than) operator. */
  public static class LessThanOpNode extends AbstractOperatorNode {

    public LessThanOpNode() {
      super(Operator.LESS_THAN);
    }

    protected LessThanOpNode(LessThanOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.LESS_THAN_OP_NODE;
    }

    @Override
    public LessThanOpNode clone() {
      return new LessThanOpNode(this);
    }
  }

  /** Node representing the '&gt;' (greater than) operator. */
  public static class GreaterThanOpNode extends AbstractOperatorNode {

    public GreaterThanOpNode() {
      super(Operator.GREATER_THAN);
    }

    protected GreaterThanOpNode(GreaterThanOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.GREATER_THAN_OP_NODE;
    }

    @Override
    public GreaterThanOpNode clone() {
      return new GreaterThanOpNode(this);
    }
  }

  /** Node representing the '&lt;=' (less than or equal) operator. */
  public static class LessThanOrEqualOpNode extends AbstractOperatorNode {

    public LessThanOrEqualOpNode() {
      super(Operator.LESS_THAN_OR_EQUAL);
    }

    protected LessThanOrEqualOpNode(LessThanOrEqualOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.LESS_THAN_OR_EQUAL_OP_NODE;
    }

    @Override
    public LessThanOrEqualOpNode clone() {
      return new LessThanOrEqualOpNode(this);
    }
  }

  /** Node representing the '&gt;=' (greater than or equal) operator. */
  public static class GreaterThanOrEqualOpNode extends AbstractOperatorNode {

    public GreaterThanOrEqualOpNode() {
      super(Operator.GREATER_THAN_OR_EQUAL);
    }

    protected GreaterThanOrEqualOpNode(GreaterThanOrEqualOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.GREATER_THAN_OR_EQUAL_OP_NODE;
    }

    @Override
    public GreaterThanOrEqualOpNode clone() {
      return new GreaterThanOrEqualOpNode(this);
    }
  }

  /** Node representing the '==' (equal) operator. */
  public static class EqualOpNode extends AbstractOperatorNode {

    public EqualOpNode() {
      super(Operator.EQUAL);
    }

    protected EqualOpNode(EqualOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.EQUAL_OP_NODE;
    }

    @Override
    public EqualOpNode clone() {
      return new EqualOpNode(this);
    }
  }

  /** Node representing the '!=' (not equal) operator. */
  public static class NotEqualOpNode extends AbstractOperatorNode {

    public NotEqualOpNode() {
      super(Operator.NOT_EQUAL);
    }

    protected NotEqualOpNode(NotEqualOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.NOT_EQUAL_OP_NODE;
    }

    @Override
    public NotEqualOpNode clone() {
      return new NotEqualOpNode(this);
    }
  }

  /** Node representing the 'and' operator. */
  public static class AndOpNode extends AbstractOperatorNode {

    public AndOpNode() {
      super(Operator.AND);
    }

    protected AndOpNode(AndOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.AND_OP_NODE;
    }

    @Override
    public AndOpNode clone() {
      return new AndOpNode(this);
    }
  }

  /** Node representing the 'or' operator. */
  public static class OrOpNode extends AbstractOperatorNode {

    public OrOpNode() {
      super(Operator.OR);
    }

    protected OrOpNode(OrOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.OR_OP_NODE;
    }

    @Override
    public OrOpNode clone() {
      return new OrOpNode(this);
    }
  }

  /** Node representing the ternary '? :' (conditional) operator. */
  public static class ConditionalOpNode extends AbstractOperatorNode {

    public ConditionalOpNode() {
      super(Operator.CONDITIONAL);
    }

    protected ConditionalOpNode(ConditionalOpNode orig) {
      super(orig);
    }

    @Override
    public Kind getKind() {
      return Kind.CONDITIONAL_OP_NODE;
    }

    @Override
    public ConditionalOpNode clone() {
      return new ConditionalOpNode(this);
    }
  }
}
| Digaku/closure-template | java/src/com/google/template/soy/exprtree/OperatorNodes.java | Java | apache-2.0 | 7,510 |
/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.remote.work.artifact;
import com.thoughtworks.go.plugin.access.artifact.ArtifactExtensionConstants;
import com.thoughtworks.go.plugin.api.request.GoApiRequest;
import com.thoughtworks.go.plugin.api.response.DefaultGoApiResponse;
import com.thoughtworks.go.plugin.api.response.GoApiResponse;
import com.thoughtworks.go.plugin.infra.GoPluginApiRequestProcessor;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import com.thoughtworks.go.remote.work.artifact.ConsoleLogMessage.LogLevel;
import com.thoughtworks.go.util.command.*;
import com.thoughtworks.go.work.GoPublisher;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static java.lang.String.format;
public class ArtifactRequestProcessor implements GoPluginApiRequestProcessor {
private static final List<String> goSupportedVersions = ArtifactExtensionConstants.SUPPORTED_VERSIONS;
private final SafeOutputStreamConsumer safeOutputStreamConsumer;
private final ProcessType processType;
private enum ProcessType {
FETCH, PUBLISH
}
private static final Map<LogLevel, String> FETCH_ARTIFACT_LOG_LEVEL_TAG = new HashMap<LogLevel, String>() {{
put(LogLevel.INFO, TaggedStreamConsumer.OUT);
put(LogLevel.ERROR, TaggedStreamConsumer.ERR);
}};
private static final Map<LogLevel, String> PUBLISH_ARTIFACT_LOG_LEVEL_TAG = new HashMap<LogLevel, String>() {{
put(LogLevel.INFO, TaggedStreamConsumer.PUBLISH);
put(LogLevel.ERROR, TaggedStreamConsumer.PUBLISH_ERR);
}};
private ArtifactRequestProcessor(GoPublisher publisher, ProcessType processType, EnvironmentVariableContext environmentVariableContext) {
CompositeConsumer errorStreamConsumer = new CompositeConsumer(CompositeConsumer.ERR, publisher);
CompositeConsumer outputStreamConsumer = new CompositeConsumer(CompositeConsumer.OUT, publisher);
this.safeOutputStreamConsumer = new SafeOutputStreamConsumer(new ProcessOutputStreamConsumer(errorStreamConsumer, outputStreamConsumer));
safeOutputStreamConsumer.addSecrets(environmentVariableContext.secrets());
this.processType = processType;
}
public static ArtifactRequestProcessor forFetchArtifact(GoPublisher goPublisher, EnvironmentVariableContext environmentVariableContext) {
return new ArtifactRequestProcessor(goPublisher, ProcessType.FETCH, environmentVariableContext);
}
public static ArtifactRequestProcessor forPublishArtifact(GoPublisher goPublisher, EnvironmentVariableContext environmentVariableContext) {
return new ArtifactRequestProcessor(goPublisher, ProcessType.PUBLISH, environmentVariableContext);
}
@Override
public GoApiResponse process(GoPluginDescriptor pluginDescriptor, GoApiRequest request) {
validatePluginRequest(request);
switch (Request.fromString(request.api())) {
case CONSOLE_LOG:
return processConsoleLogRequest(pluginDescriptor, request);
default:
return DefaultGoApiResponse.error("Illegal api request");
}
}
private GoApiResponse processConsoleLogRequest(GoPluginDescriptor pluginDescriptor, GoApiRequest request) {
final ConsoleLogMessage consoleLogMessage = ConsoleLogMessage.fromJSON(request.requestBody());
final String message = format("[%s] %s", pluginDescriptor.id(), consoleLogMessage.getMessage());
Optional<String> parsedTag = parseTag(processType, consoleLogMessage.getLogLevel());
if (parsedTag.isPresent()) {
safeOutputStreamConsumer.taggedStdOutput(parsedTag.get(), message);
return DefaultGoApiResponse.success(null);
}
return DefaultGoApiResponse.error(format("Unsupported log level `%s`.", consoleLogMessage.getLogLevel()));
}
private Optional<String> parseTag(ProcessType requestType, LogLevel logLevel) {
switch (requestType) {
case FETCH:
return Optional.ofNullable(FETCH_ARTIFACT_LOG_LEVEL_TAG.get(logLevel));
case PUBLISH:
return Optional.ofNullable(PUBLISH_ARTIFACT_LOG_LEVEL_TAG.get(logLevel));
}
return Optional.empty();
}
private void validatePluginRequest(GoApiRequest goPluginApiRequest) {
if (!goSupportedVersions.contains(goPluginApiRequest.apiVersion())) {
throw new RuntimeException(format("Unsupported '%s' API version: %s. Supported versions: %s", goPluginApiRequest.api(), goPluginApiRequest.apiVersion(), goSupportedVersions));
}
}
public enum Request {
CONSOLE_LOG("go.processor.artifact.console-log");
private final String requestName;
Request(String requestName) {
this.requestName = requestName;
}
public static Request fromString(String requestName) {
if (requestName != null) {
for (Request request : Request.values()) {
if (requestName.equalsIgnoreCase(request.requestName)) {
return request;
}
}
}
return null;
}
public String requestName() {
return requestName;
}
}
}
| gocd/gocd | common/src/main/java/com/thoughtworks/go/remote/work/artifact/ArtifactRequestProcessor.java | Java | apache-2.0 | 5,906 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>com.cloudera.oryx.api.serving (Oryx 2.8.0 API)</title>
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<h1 class="bar"><a href="../../../../../com/cloudera/oryx/api/serving/package-summary.html" target="classFrame">com.cloudera.oryx.api.serving</a></h1>
<div class="indexContainer">
<h2 title="Interfaces">Interfaces</h2>
<ul title="Interfaces">
<li><a href="HasCSV.html" title="interface in com.cloudera.oryx.api.serving" target="classFrame"><span class="interfaceName">HasCSV</span></a></li>
<li><a href="ServingModel.html" title="interface in com.cloudera.oryx.api.serving" target="classFrame"><span class="interfaceName">ServingModel</span></a></li>
<li><a href="ServingModelManager.html" title="interface in com.cloudera.oryx.api.serving" target="classFrame"><span class="interfaceName">ServingModelManager</span></a></li>
</ul>
<h2 title="Classes">Classes</h2>
<ul title="Classes">
<li><a href="AbstractServingModelManager.html" title="class in com.cloudera.oryx.api.serving" target="classFrame">AbstractServingModelManager</a></li>
<li><a href="OryxResource.html" title="class in com.cloudera.oryx.api.serving" target="classFrame">OryxResource</a></li>
</ul>
<h2 title="Exceptions">Exceptions</h2>
<ul title="Exceptions">
<li><a href="OryxServingException.html" title="class in com.cloudera.oryx.api.serving" target="classFrame">OryxServingException</a></li>
</ul>
</div>
</body>
</html>
| OryxProject/oryx | docs/apidocs/com/cloudera/oryx/api/serving/package-frame.html | HTML | apache-2.0 | 1,774 |
package com.senseidb.search.node.impl;
import org.json.JSONObject;
import com.senseidb.search.node.SenseiQueryBuilder;
import com.senseidb.search.node.SenseiQueryBuilderFactory;
import com.senseidb.search.req.SenseiQuery;
import com.senseidb.util.JSONUtil.FastJSONObject;
/**
 * Query-builder factory that decodes a {@link SenseiQuery}'s UTF-8 byte
 * payload into a JSON object and delegates builder construction to
 * {@link #buildQueryBuilder(JSONObject)}. A null query is forwarded as a
 * null JSON object.
 */
public abstract class AbstractJsonQueryBuilderFactory implements SenseiQueryBuilderFactory {

  @Override
  public SenseiQueryBuilder getQueryBuilder(SenseiQuery query) throws Exception {
    if (query == null) {
      return buildQueryBuilder(null);
    }
    String rawJson = new String(query.toBytes(), SenseiQuery.utf8Charset);
    return buildQueryBuilder(new FastJSONObject(rawJson));
  }

  /** Subclasses turn the parsed JSON query (possibly null) into a builder. */
  public abstract SenseiQueryBuilder buildQueryBuilder(JSONObject jsonQuery);
}
| javasoze/sensei | sensei-core/src/main/java/com/senseidb/search/node/impl/AbstractJsonQueryBuilderFactory.java | Java | apache-2.0 | 774 |
/*
* Copyright (c) 2018 STMicroelectronics
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <zephyr.h>
#include <misc/printk.h>
#include <board.h>
#include <gpio.h>
#include <i2c.h>
#include <spi.h>
#include <sensor.h>
/* #define ARGONKEY_TEST_LOG 1 */
#define WHOAMI_REG 0x0F
#define WHOAMI_ALT_REG 0x4F
/* Collapse a sensor_value (integer + micro parts) into a single float. */
static inline float out_ev(struct sensor_value *val)
{
	float whole = (float)val->val1;
	float fraction = (float)val->val2 / 1000000;

	return whole + fraction;
}
/* Number of data-ready triggers handled so far; printed by main()'s loop. */
static int lsm6dsl_trig_cnt;

#ifdef CONFIG_LSM6DSL_TRIGGER
/*
 * Data-ready trigger callback for the LSM6DSL: counts the trigger and reads
 * accel/gyro (plus magnetometer/baro channels when the corresponding
 * sensor-hub configs are enabled). Readings are only printed when
 * ARGONKEY_TEST_LOG is defined; otherwise they are fetched and discarded.
 */
static void lsm6dsl_trigger_handler(struct device *dev,
				    struct sensor_trigger *trig)
{
#ifdef ARGONKEY_TEST_LOG
	char out_str[64];
#endif
	struct sensor_value accel_x, accel_y, accel_z;
	struct sensor_value gyro_x, gyro_y, gyro_z;
#if defined(CONFIG_LSM6DSL_EXT0_LIS2MDL)
	struct sensor_value magn_x, magn_y, magn_z;
#endif
#if defined(CONFIG_LSM6DSL_EXT0_LPS22HB)
	struct sensor_value press, temp;
#endif
	lsm6dsl_trig_cnt++;

	/* lsm6dsl accel */
	sensor_sample_fetch_chan(dev, SENSOR_CHAN_ACCEL_XYZ);
	sensor_channel_get(dev, SENSOR_CHAN_ACCEL_X, &accel_x);
	sensor_channel_get(dev, SENSOR_CHAN_ACCEL_Y, &accel_y);
	sensor_channel_get(dev, SENSOR_CHAN_ACCEL_Z, &accel_z);
#ifdef ARGONKEY_TEST_LOG
	sprintf(out_str, "accel (%f %f %f) m/s2", out_ev(&accel_x),
		out_ev(&accel_y),
		out_ev(&accel_z));
	printk("TRIG %s\n", out_str);
#endif

	/* lsm6dsl gyro */
	sensor_sample_fetch_chan(dev, SENSOR_CHAN_GYRO_XYZ);
	sensor_channel_get(dev, SENSOR_CHAN_GYRO_X, &gyro_x);
	sensor_channel_get(dev, SENSOR_CHAN_GYRO_Y, &gyro_y);
	sensor_channel_get(dev, SENSOR_CHAN_GYRO_Z, &gyro_z);
#ifdef ARGONKEY_TEST_LOG
	sprintf(out_str, "gyro (%f %f %f) dps", out_ev(&gyro_x),
		out_ev(&gyro_y),
		out_ev(&gyro_z));
	printk("TRIG %s\n", out_str);
#endif

#if defined(CONFIG_LSM6DSL_EXT0_LIS2MDL)
	/* lsm6dsl magn (read through the LSM6DSL sensor-hub) */
	sensor_sample_fetch_chan(dev, SENSOR_CHAN_MAGN_XYZ);
	sensor_channel_get(dev, SENSOR_CHAN_MAGN_X, &magn_x);
	sensor_channel_get(dev, SENSOR_CHAN_MAGN_Y, &magn_y);
	sensor_channel_get(dev, SENSOR_CHAN_MAGN_Z, &magn_z);
#ifdef ARGONKEY_TEST_LOG
	sprintf(out_str, "magn (%f %f %f) gauss", out_ev(&magn_x),
		out_ev(&magn_y),
		out_ev(&magn_z));
	printk("TRIG %s\n", out_str);
#endif
#endif

#if defined(CONFIG_LSM6DSL_EXT0_LPS22HB)
	/* lsm6dsl press/temp (read through the LSM6DSL sensor-hub) */
	sensor_sample_fetch_chan(dev, SENSOR_CHAN_PRESS);
	sensor_channel_get(dev, SENSOR_CHAN_PRESS, &press);

	sensor_sample_fetch_chan(dev, SENSOR_CHAN_AMBIENT_TEMP);
	sensor_channel_get(dev, SENSOR_CHAN_AMBIENT_TEMP, &temp);
#ifdef ARGONKEY_TEST_LOG
	sprintf(out_str, "press (%f) kPa - temp (%f) deg", out_ev(&press),
		out_ev(&temp));
	printk("%s\n", out_str);
#endif
#endif
}
#endif
/*
 * ArgonKey board test: blinks the LEDs at boot, binds every configured
 * sensor, applies optional runtime ODR/full-scale attributes to the LSM6DSL,
 * optionally installs the data-ready trigger, then loops forever printing
 * one reading per configured sensor every ~2 seconds.
 */
void main(void)
{
	int cnt = 0;
	char out_str[64];
	static struct device *led0, *led1;
	int i, on = 1;

	/* LED0 on steady as a power indicator. */
	led0 = device_get_binding(LED0_GPIO_CONTROLLER);
	gpio_pin_configure(led0, LED0_GPIO_PIN, GPIO_DIR_OUT);
	gpio_pin_write(led0, LED0_GPIO_PIN, 1);

	/* Blink LED1 five times (~200ms per toggle) as a boot indicator. */
	led1 = device_get_binding(LED1_GPIO_CONTROLLER);
	gpio_pin_configure(led1, LED1_GPIO_PIN, GPIO_DIR_OUT);

	for (i = 0; i < 5; i++) {
		gpio_pin_write(led1, LED1_GPIO_PIN, on);
		k_sleep(200);
		on = (on == 1) ? 0 : 1;
	}

	printk("ArgonKey test!!\n");

#ifdef CONFIG_LPS22HB
	/* barometer/temperature sensor */
	struct device *baro_dev = device_get_binding(CONFIG_LPS22HB_DEV_NAME);

	if (!baro_dev) {
		printk("Could not get pointer to %s sensor\n",
		       CONFIG_LPS22HB_DEV_NAME);
		return;
	}
#endif

#ifdef CONFIG_HTS221
	/* humidity sensor */
	struct device *hum_dev = device_get_binding(CONFIG_HTS221_NAME);

	if (!hum_dev) {
		printk("Could not get pointer to %s sensor\n",
		       CONFIG_HTS221_NAME);
		return;
	}
#endif

#ifdef CONFIG_LSM6DSL
	/* accelerometer/gyro (and sensor-hub master for magn/baro) */
	struct device *accel_dev = device_get_binding(CONFIG_LSM6DSL_DEV_NAME);

	if (!accel_dev) {
		printk("Could not get pointer to %s sensor\n",
		       CONFIG_LSM6DSL_DEV_NAME);
		return;
	}

	/* ODR/FS Kconfig value 0 means "set at runtime" for this driver,
	 * hence the attribute calls below are compiled only in that case.
	 */
#if defined(CONFIG_LSM6DSL_ACCEL_ODR) && (CONFIG_LSM6DSL_ACCEL_ODR == 0)
	struct sensor_value a_odr_attr;

	/* set sampling frequency to 104Hz for accel */
	a_odr_attr.val1 = 104;
	a_odr_attr.val2 = 0;

	if (sensor_attr_set(accel_dev, SENSOR_CHAN_ACCEL_XYZ,
			    SENSOR_ATTR_SAMPLING_FREQUENCY, &a_odr_attr) < 0) {
		printk("Cannot set sampling frequency for accelerometer.\n");
		return;
	}
#endif

#if defined(CONFIG_LSM6DSL_ACCEL_FS) && (CONFIG_LSM6DSL_ACCEL_FS == 0)
	struct sensor_value a_fs_attr;

	/* set full scale to 16g for accel */
	sensor_g_to_ms2(16, &a_fs_attr);

	if (sensor_attr_set(accel_dev, SENSOR_CHAN_ACCEL_XYZ,
			    SENSOR_ATTR_FULL_SCALE, &a_fs_attr) < 0) {
		printk("Cannot set fs for accelerometer.\n");
		return;
	}
#endif

#if defined(CONFIG_LSM6DSL_GYRO_ODR) && (CONFIG_LSM6DSL_GYRO_ODR == 0)
	struct sensor_value g_odr_attr;

	/* set sampling frequency to 104Hz for gyro */
	g_odr_attr.val1 = 104;
	g_odr_attr.val2 = 0;

	if (sensor_attr_set(accel_dev, SENSOR_CHAN_GYRO_XYZ,
			    SENSOR_ATTR_SAMPLING_FREQUENCY, &g_odr_attr) < 0) {
		printk("Cannot set sampling frequency for gyro.\n");
		return;
	}
#endif

#if defined(CONFIG_LSM6DSL_GYRO_FS) && (CONFIG_LSM6DSL_GYRO_FS == 0)
	struct sensor_value g_fs_attr;

	/* set full scale to 245dps for gyro.
	 * NOTE(review): sensor_g_to_ms2() converts g to m/s^2 (an accel
	 * unit); a dps full scale would be expected to use a degrees->rad
	 * conversion instead — confirm against the LSM6DSL driver's
	 * expected SENSOR_ATTR_FULL_SCALE units for gyro channels.
	 */
	sensor_g_to_ms2(245, &g_fs_attr);

	if (sensor_attr_set(accel_dev, SENSOR_CHAN_GYRO_XYZ,
			    SENSOR_ATTR_FULL_SCALE, &g_fs_attr) < 0) {
		printk("Cannot set fs for gyroscope.\n");
		return;
	}
#endif
#endif

#ifdef CONFIG_VL53L0X
	/* time-of-flight distance sensor */
	struct device *tof_dev = device_get_binding(CONFIG_VL53L0X_NAME);

	if (!tof_dev) {
		printk("Could not get pointer to %s sensor\n",
		       CONFIG_VL53L0X_NAME);
		return;
	}
#endif

#ifdef CONFIG_LSM6DSL_TRIGGER
	/* route accel data-ready interrupts to lsm6dsl_trigger_handler() */
	struct sensor_trigger trig;

	trig.type = SENSOR_TRIG_DATA_READY;
	trig.chan = SENSOR_CHAN_ACCEL_XYZ;
	sensor_trigger_set(accel_dev, &trig, lsm6dsl_trigger_handler);
#endif

	while (1) {
#ifdef CONFIG_LPS22HB
		struct sensor_value temp, press;
#endif
#ifdef CONFIG_HTS221
		struct sensor_value humidity;
#endif
#ifdef CONFIG_LSM6DSL
		struct sensor_value accel_x, accel_y, accel_z;
		struct sensor_value gyro_x, gyro_y, gyro_z;
#if defined(CONFIG_LSM6DSL_EXT0_LIS2MDL)
		struct sensor_value magn_x, magn_y, magn_z;
#endif
#if defined(CONFIG_LSM6DSL_EXT0_LPS22HB)
		/* NOTE(review): if CONFIG_LPS22HB is enabled as well, these
		 * duplicate the press/temp declarations above and will not
		 * compile — confirm the two configs are mutually exclusive.
		 */
		struct sensor_value press, temp;
#endif
#endif
#ifdef CONFIG_VL53L0X
		struct sensor_value prox;
#endif

#ifdef CONFIG_VL53L0X
		sensor_sample_fetch(tof_dev);
		sensor_channel_get(tof_dev, SENSOR_CHAN_PROX, &prox);
		printk("proxy: %d ;\n", prox.val1);
		sensor_channel_get(tof_dev, SENSOR_CHAN_DISTANCE, &prox);
		printk("distance: %d -- %3d mm;\n", prox.val1, prox.val2);
#endif

#ifdef CONFIG_LPS22HB
		sensor_sample_fetch(baro_dev);

		sensor_channel_get(baro_dev, SENSOR_CHAN_AMBIENT_TEMP, &temp);
		sensor_channel_get(baro_dev, SENSOR_CHAN_PRESS, &press);

		printk("temp: %d.%02d C; press: %d.%06d\n",
		       temp.val1, temp.val2, press.val1, press.val2);
#endif

#ifdef CONFIG_HTS221
		sensor_sample_fetch(hum_dev);

		sensor_channel_get(hum_dev, SENSOR_CHAN_HUMIDITY, &humidity);

		printk("humidity: %d.%06d\n",
		       humidity.val1, humidity.val2);
#endif

#ifdef CONFIG_LSM6DSL
		/* lsm6dsl accel */
		sensor_sample_fetch_chan(accel_dev, SENSOR_CHAN_ACCEL_XYZ);
		sensor_channel_get(accel_dev, SENSOR_CHAN_ACCEL_X, &accel_x);
		sensor_channel_get(accel_dev, SENSOR_CHAN_ACCEL_Y, &accel_y);
		sensor_channel_get(accel_dev, SENSOR_CHAN_ACCEL_Z, &accel_z);

		sprintf(out_str, "accel (%f %f %f) m/s2", out_ev(&accel_x),
			out_ev(&accel_y),
			out_ev(&accel_z));
		printk("%s\n", out_str);

		/* lsm6dsl gyro */
		sensor_sample_fetch_chan(accel_dev, SENSOR_CHAN_GYRO_XYZ);
		sensor_channel_get(accel_dev, SENSOR_CHAN_GYRO_X, &gyro_x);
		sensor_channel_get(accel_dev, SENSOR_CHAN_GYRO_Y, &gyro_y);
		sensor_channel_get(accel_dev, SENSOR_CHAN_GYRO_Z, &gyro_z);

		sprintf(out_str, "gyro (%f %f %f) dps", out_ev(&gyro_x),
			out_ev(&gyro_y),
			out_ev(&gyro_z));
		printk("%s\n", out_str);

#if defined(CONFIG_LSM6DSL_EXT0_LIS2MDL)
		/* lsm6dsl magn */
		sensor_sample_fetch_chan(accel_dev, SENSOR_CHAN_MAGN_XYZ);
		sensor_channel_get(accel_dev, SENSOR_CHAN_MAGN_X, &magn_x);
		sensor_channel_get(accel_dev, SENSOR_CHAN_MAGN_Y, &magn_y);
		sensor_channel_get(accel_dev, SENSOR_CHAN_MAGN_Z, &magn_z);

		sprintf(out_str, "magn (%f %f %f) gauss", out_ev(&magn_x),
			out_ev(&magn_y),
			out_ev(&magn_z));
		printk("%s\n", out_str);
#endif

#if defined(CONFIG_LSM6DSL_EXT0_LPS22HB)
		/* lsm6dsl press/temp */
		sensor_sample_fetch_chan(accel_dev, SENSOR_CHAN_PRESS);
		sensor_channel_get(accel_dev, SENSOR_CHAN_PRESS, &press);

		sensor_sample_fetch_chan(accel_dev, SENSOR_CHAN_AMBIENT_TEMP);
		sensor_channel_get(accel_dev, SENSOR_CHAN_AMBIENT_TEMP, &temp);

		sprintf(out_str, "press (%f) kPa - temp (%f) deg",
			out_ev(&press), out_ev(&temp));
		printk("%s\n", out_str);
#endif
#endif /* CONFIG_LSM6DSL */

		printk("- (%d) (trig_cnt: %d)\n\n", ++cnt, lsm6dsl_trig_cnt);
		k_sleep(2000);
	}
}
| mbolivar/zephyr | samples/boards/96b_argonkey/src/main.c | C | apache-2.0 | 8,806 |
// Auto-generated search index (javadoc/doxygen style): each entry maps a
// lowercased search term to [term, [link fragment, result count, label]].
// Do not hand-edit; regenerate with the documentation tool.
var searchData=
[
  ['value',['value',['../structguac__pool__int.html#af76ff5f21c6e0f69d95cdd1385ea24a4',1,'guac_pool_int']]],
  ['vguac_5fclient_5fabort',['vguac_client_abort',['../client_8h.html#a4c0eccd7d0ed3dbf3e7941ce297e0224',1,'client.h']]],
  ['vguac_5fclient_5flog',['vguac_client_log',['../client_8h.html#a37a0fa9cfc4c02236085e3852972f494',1,'client.h']]],
  ['vguac_5fprotocol_5fsend_5flog',['vguac_protocol_send_log',['../protocol_8h.html#a3a783d771e1727ba2a82b2298acf4ee4',1,'protocol.h']]],
  ['video_5fmimetypes',['video_mimetypes',['../structguac__client__info.html#aa58dc4ee1e3b8801e9b0abbf9135d8b6',1,'guac_client_info']]]
];
| mike-jumper/incubator-guacamole-website | doc/0.9.9/libguac/search/all_12.js | JavaScript | apache-2.0 | 644 |
package org.vertexium.util;
import org.vertexium.Authorizations;
import org.vertexium.Direction;
import org.vertexium.Vertex;
import java.util.Iterator;
/**
 * Lazily flattens a set of vertices into the ids of all edges touching them
 * (both directions), subject to the given authorizations.
 */
public class VerticesToEdgeIdsIterable implements Iterable<String> {
    private final Iterable<? extends Vertex> sourceVertices;
    private final Authorizations auths;

    public VerticesToEdgeIdsIterable(Iterable<? extends Vertex> vertices, Authorizations authorizations) {
        this.sourceVertices = vertices;
        this.auths = authorizations;
    }

    @Override
    public Iterator<String> iterator() {
        SelectManyIterable<Vertex, String> edgeIds =
            new SelectManyIterable<Vertex, String>(this.sourceVertices) {
                @Override
                public Iterable<String> getIterable(Vertex vertex) {
                    // BOTH: include edges where the vertex is source or target.
                    return vertex.getEdgeIds(Direction.BOTH, auths);
                }
            };
        return edgeIds.iterator();
    }
}
| visallo/vertexium | core/src/main/java/org/vertexium/util/VerticesToEdgeIdsIterable.java | Java | apache-2.0 | 855 |
package com.kit.db;
/**
 * Empty placeholder class; declares no state or behavior.
 */
public class Obj {
}
| BigAppOS/BigApp_Discuz_Android | libs/ZUtils/src/com/kit/db/Obj.java | Java | apache-2.0 | 43 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.test.functions.recompile;
import java.util.HashMap;
import org.junit.Assert;
import org.junit.Test;
import org.apache.sysds.conf.CompilerConfig;
import org.apache.sysds.hops.OptimizerUtils;
import org.apache.sysds.runtime.matrix.data.MatrixValue.CellIndex;
import org.apache.sysds.test.AutomatedTestBase;
import org.apache.sysds.test.TestConfiguration;
import org.apache.sysds.test.TestUtils;
import org.apache.sysds.utils.Statistics;
/**
 * Verifies dynamic recompilation of DML functions: runs the same script with
 * every combination of dynamic recompile and inter-procedural analysis (IPA),
 * checks the number of compiled/executed distributed jobs per combination,
 * and compares the DML result against an R reference implementation.
 */
public class FunctionRecompileTest extends AutomatedTestBase
{
	private final static String TEST_NAME1 = "funct_recompile";
	private final static String TEST_DIR = "functions/recompile/";
	private final static String TEST_CLASS_DIR = TEST_DIR + FunctionRecompileTest.class.getSimpleName() + "/";

	// numerical tolerance for DML-vs-R matrix comparison
	private final static double eps = 1e-10;

	// input matrix dimensions and density (fully dense)
	private final static int rows = 20;
	private final static int cols = 10;
	private final static double sparsity = 1.0;

	@Override
	public void setUp() {
		TestUtils.clearAssertionInformation();
		addTestConfiguration(TEST_NAME1,
			new TestConfiguration(TEST_CLASS_DIR, TEST_NAME1, new String[] { "Rout" }) );
	}

	@Test
	public void testFunctionWithoutRecompileWithoutIPA() {
		runFunctionTest(false, false);
	}

	@Test
	public void testFunctionWithoutRecompileWithIPA() {
		runFunctionTest(false, true);
	}

	@Test
	public void testFunctionWithRecompileWithoutIPA() {
		runFunctionTest(true, false);
	}

	@Test
	public void testFunctionWithRecompileWithIPA() {
		runFunctionTest(true, true);
	}

	/**
	 * Runs the DML script and the R reference with the given compiler flags,
	 * asserting the expected job counts and the result matrix.
	 *
	 * @param recompile enable dynamic recompilation
	 * @param IPA       enable inter-procedural analysis
	 */
	private void runFunctionTest( boolean recompile, boolean IPA )
	{
		// save global compiler flags so they can be restored in finally
		boolean oldFlagRecompile = CompilerConfig.FLAG_DYN_RECOMPILE;
		boolean oldFlagIPA = OptimizerUtils.ALLOW_INTER_PROCEDURAL_ANALYSIS;

		try
		{
			TestConfiguration config = getTestConfiguration(TEST_NAME1);
			config.addVariable("rows", rows);
			config.addVariable("cols", cols);
			loadTestConfiguration(config);

			String HOME = SCRIPT_DIR + TEST_DIR;
			fullDMLScriptName = HOME + TEST_NAME1 + ".dml";
			programArgs = new String[]{"-args", input("V"),
				Integer.toString(rows), Integer.toString(cols), output("R") };

			fullRScriptName = HOME + TEST_NAME1 + ".R";
			rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();

			long seed = System.nanoTime();
			double[][] V = getRandomMatrix(rows, cols, 0, 1, sparsity, seed);
			writeInputMatrix("V", V, true);

			CompilerConfig.FLAG_DYN_RECOMPILE = recompile;
			OptimizerUtils.ALLOW_INTER_PROCEDURAL_ANALYSIS = IPA;

			boolean exceptionExpected = false;
			runTest(true, exceptionExpected, null, -1);
			runRScript(true);

			//note: change from previous version due to fix in op selection (unknown size XtX and mapmult)

			// check number of compiled distributed jobs (counters report Spark
			// instructions; comments/messages retain the legacy "MR" naming)
			int expectNumCompiled = -1;
			if( IPA ) expectNumCompiled = 1; //reblock
			else expectNumCompiled = 5; //reblock, GMR,GMR,GMR,GMR (last two should piggybacked)
			Assert.assertEquals("Unexpected number of compiled MR jobs.",
				expectNumCompiled, Statistics.getNoOfCompiledSPInst());

			// check number of executed distributed jobs: with recompilation the
			// small matrices fall back to local in-memory operations (0 jobs)
			int expectNumExecuted = -1;
			if( recompile ) expectNumExecuted = 0;
			else if( IPA ) expectNumExecuted = 1; //reblock
			else expectNumExecuted = 41; //reblock, 10*(GMR,GMR,GMR, GMR) (last two should piggybacked)
			Assert.assertEquals("Unexpected number of executed MR jobs.",
				expectNumExecuted, Statistics.getNoOfExecutedSPInst());

			// compare DML output against the R reference within tolerance eps
			HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromOutputDir("R");
			HashMap<CellIndex, Double> rfile = readRMatrixFromExpectedDir("Rout");
			TestUtils.compareMatrices(dmlfile, rfile, eps, "DML", "R");
		}
		finally {
			// always restore global flags to avoid cross-test interference
			CompilerConfig.FLAG_DYN_RECOMPILE = oldFlagRecompile;
			OptimizerUtils.ALLOW_INTER_PROCEDURAL_ANALYSIS = oldFlagIPA;
		}
	}
}
| apache/incubator-systemml | src/test/java/org/apache/sysds/test/functions/recompile/FunctionRecompileTest.java | Java | apache-2.0 | 4,614 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.jobmanager;
import akka.actor.ActorSystem;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.akka.ListeningBehaviour;
import org.apache.flink.runtime.blob.BlobClient;
import org.apache.flink.runtime.blob.BlobKey;
import org.apache.flink.runtime.client.JobExecutionException;
import org.apache.flink.runtime.instance.ActorGateway;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings;
import org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings;
import org.apache.flink.runtime.jobgraph.tasks.JobCheckpointingSettings;
import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
import org.apache.flink.runtime.messages.JobManagerMessages;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.testtasks.NoOpInvokable;
import org.apache.flink.runtime.util.LeaderRetrievalUtils;
import org.apache.flink.util.NetUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import scala.Tuple2;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
 * Tests that the JobManager handles Jobs correctly that fail in
 * the initialization during the submit phase.
 */
public class JobSubmitTest {

    /** Timeout used for every Akka ask/await in these tests. */
    private static final FiniteDuration timeout = new FiniteDuration(60000, TimeUnit.MILLISECONDS);

    // Actor system hosting the JobManager under test (shared across tests).
    private static ActorSystem jobManagerSystem;
    // Gateway to the leading JobManager actor.
    private static ActorGateway jmGateway;
    // Configuration the JobManager was started with; reused by the BlobClient.
    private static Configuration jmConfig;

    @BeforeClass
    public static void setupJobManager() {
        jmConfig = new Configuration();

        // bind the JobManager actor system to localhost on a free port
        int port = NetUtils.getAvailablePort();
        jmConfig.setString(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY, "localhost");
        jmConfig.setInteger(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY, port);

        scala.Option<Tuple2<String, Object>> listeningAddress = scala.Option.apply(new Tuple2<String, Object>("localhost", port));
        jobManagerSystem = AkkaUtils.createActorSystem(jmConfig, listeningAddress);

        // only start JobManager (no ResourceManager)
        JobManager.startJobManagerActors(
            jmConfig,
            jobManagerSystem,
            TestingUtils.defaultExecutor(),
            TestingUtils.defaultExecutor(),
            JobManager.class,
            MemoryArchivist.class)._1();

        try {
            // resolve the leader gateway so tests can talk to the JobManager
            LeaderRetrievalService lrs = LeaderRetrievalUtils.createLeaderRetrievalService(jmConfig);

            jmGateway = LeaderRetrievalUtils.retrieveLeaderGateway(
                lrs,
                jobManagerSystem,
                timeout
            );
        } catch (Exception e) {
            fail("Could not retrieve the JobManager gateway. " + e.getMessage());
        }
    }

    @AfterClass
    public static void teardownJobmanager() {
        if (jobManagerSystem != null) {
            jobManagerSystem.shutdown();
        }
    }

    /**
     * Submits a job that references a blob which was deleted from the blob store
     * and expects the submission to fail with an {@link IOException} cause.
     */
    @Test
    public void testFailureWhenJarBlobsMissing() {
        try {
            // create a simple job graph
            JobVertex jobVertex = new JobVertex("Test Vertex");
            jobVertex.setInvokableClass(NoOpInvokable.class);
            JobGraph jg = new JobGraph("test job", jobVertex);

            // request the blob port from the job manager
            Future<Object> future = jmGateway.ask(JobManagerMessages.getRequestBlobManagerPort(), timeout);
            int blobPort = (Integer) Await.result(future, timeout);

            // upload two dummy bytes and add their keys to the job graph as dependencies
            BlobKey key1, key2;
            BlobClient bc = new BlobClient(new InetSocketAddress("localhost", blobPort), jmConfig);
            try {
                key1 = bc.put(new byte[10]);
                key2 = bc.put(new byte[10]);

                // delete one of the blobs to make sure that the startup failed
                bc.delete(key2);
            }
            finally {
                bc.close();
            }
            jg.addBlob(key1);
            jg.addBlob(key2);

            // submit the job
            Future<Object> submitFuture = jmGateway.ask(
                new JobManagerMessages.SubmitJob(
                    jg,
                    ListeningBehaviour.EXECUTION_RESULT),
                timeout);
            try {
                Await.result(submitFuture, timeout);
            }
            catch (JobExecutionException e) {
                // that is what we expect
                assertTrue(e.getCause() instanceof IOException);
            }
            catch (Exception e) {
                fail("Wrong exception type");
            }
        }
        catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Verifies a correct error message when vertices with master initialization
     * (input formats / output formats) fail.
     */
    @Test
    public void testFailureWhenInitializeOnMasterFails() {
        try {
            // create a simple job graph with a vertex that throws during master-side init
            JobVertex jobVertex = new JobVertex("Vertex that fails in initializeOnMaster") {

                private static final long serialVersionUID = -3540303593784587652L;

                @Override
                public void initializeOnMaster(ClassLoader loader) throws Exception {
                    throw new RuntimeException("test exception");
                }
            };

            jobVertex.setInvokableClass(NoOpInvokable.class);
            JobGraph jg = new JobGraph("test job", jobVertex);

            // submit the job
            Future<Object> submitFuture = jmGateway.ask(
                new JobManagerMessages.SubmitJob(
                    jg,
                    ListeningBehaviour.EXECUTION_RESULT),
                timeout);
            try {
                Await.result(submitFuture, timeout);
            }
            catch (JobExecutionException e) {
                // that is what we expect
                // test that the exception nesting is not too deep
                assertTrue(e.getCause() instanceof RuntimeException);
            }
            catch (Exception e) {
                fail("Wrong exception type");
            }
        }
        catch (Exception e) {
            e.printStackTrace();
            fail(e.getMessage());
        }
    }

    /**
     * Submits a detached job with a savepoint restore path that does not exist
     * and expects a JobResultFailure answer rather than a hang or wrong message.
     */
    @Test
    public void testAnswerFailureWhenSavepointReadFails() throws Exception {
        // create a simple job graph
        JobGraph jg = createSimpleJobGraph();
        jg.setSavepointRestoreSettings(SavepointRestoreSettings.forPath("pathThatReallyDoesNotExist..."));

        // submit the job
        Future<Object> submitFuture = jmGateway.ask(
            new JobManagerMessages.SubmitJob(jg, ListeningBehaviour.DETACHED), timeout);
        Object result = Await.result(submitFuture, timeout);
        assertEquals(JobManagerMessages.JobResultFailure.class, result.getClass());
    }

    /** Builds a one-vertex job graph with checkpointing enabled on that vertex. */
    private JobGraph createSimpleJobGraph() {
        JobVertex jobVertex = new JobVertex("Vertex");

        jobVertex.setInvokableClass(NoOpInvokable.class);
        List<JobVertexID> vertexIdList = Collections.singletonList(jobVertex.getID());

        JobGraph jg = new JobGraph("test job", jobVertex);
        jg.setSnapshotSettings(new JobCheckpointingSettings(vertexIdList, vertexIdList, vertexIdList,
            5000, 5000, 0L, 10, ExternalizedCheckpointSettings.none(), null, true));
        return jg;
    }
}
| hwstreaming/flink | flink-runtime/src/test/java/org/apache/flink/runtime/jobmanager/JobSubmitTest.java | Java | apache-2.0 | 7,730 |
#import "DockSquadImporter.h"

// iOS-specific specialization of DockSquadImporter.
// Declares no additional API; exists so iOS code can provide/override
// platform-specific behavior of the shared importer.
@interface DockSquadImporteriOS : DockSquadImporter
@end
| tblackwe/spacedock | ios/DockSquadImporteriOS.h | C | apache-2.0 | 89 |
/*
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.base.accumulators;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;
/**
 * An implementation of an accumulate function capable of counting occurrences.
 *
 * <p>The running count lives in the {@link CountData} context; the function
 * itself is stateless, so its own externalization methods are empty.
 */
public class CountAccumulateFunction extends AbstractAccumulateFunction<CountAccumulateFunction.CountData> {

    public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
        // stateless function: nothing to restore
    }

    public void writeExternal(ObjectOutput out) throws IOException {
        // stateless function: nothing to persist
    }

    /** Accumulation context holding the running occurrence count. */
    protected static class CountData implements Externalizable {
        public long count = 0;

        public CountData() {}

        public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
            count = in.readLong();
        }

        public void writeExternal(ObjectOutput out) throws IOException {
            out.writeLong(count);
        }
    }

    /**
     * Creates a fresh accumulation context.
     */
    public CountData createContext() {
        return new CountData();
    }

    /**
     * Resets the running count to zero.
     */
    public void init(CountData data) {
        data.count = 0;
    }

    /**
     * Counts one more occurrence; the matched value itself is irrelevant.
     */
    public void accumulate(CountData data,
                           Object value) {
        data.count++;
    }

    /**
     * Retracts one occurrence when a previously matched value is removed.
     */
    public void reverse(CountData data,
                        Object value) {
        data.count--;
    }

    /**
     * Returns the current count as a {@link Long}.
     */
    public Object getResult(CountData data) {
        // Long.valueOf uses the boxed-value cache; new Long(...) is deprecated
        return Long.valueOf(data.count);
    }

    /**
     * Counting is trivially reversible, so incremental retraction is supported.
     */
    public boolean supportsReverse() {
        return true;
    }

    /**
     * {@inheritDoc}
     */
    public Class<?> getResultType() {
        return Long.class;
    }
}
| ngs-mtech/drools | drools-core/src/main/java/org/drools/core/base/accumulators/CountAccumulateFunction.java | Java | apache-2.0 | 2,915 |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_51) on Fri Jul 19 02:59:04 EDT 2013 -->
<META http-equiv="Content-Type" content="text/html; charset=utf-8">
<TITLE>
Uses of Class org.apache.solr.client.solrj.response.RangeFacet.Count (Solr 4.4.0 API)
</TITLE>
<META NAME="date" CONTENT="2013-07-19">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.solr.client.solrj.response.RangeFacet.Count (Solr 4.4.0 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.Count.html" title="class in org.apache.solr.client.solrj.response"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../../index.html?org/apache/solr/client/solrj/response//class-useRangeFacet.Count.html" target="_top"><B>FRAMES</B></A>
<A HREF="RangeFacet.Count.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.solr.client.solrj.response.RangeFacet.Count</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.Count.html" title="class in org.apache.solr.client.solrj.response">RangeFacet.Count</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.solr.client.solrj.response"><B>org.apache.solr.client.solrj.response</B></A></TD>
<TD>Convenience classes for dealing with various types of Solr responses. </TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.solr.client.solrj.response"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.Count.html" title="class in org.apache.solr.client.solrj.response">RangeFacet.Count</A> in <A HREF="../../../../../../../org/apache/solr/client/solrj/response/package-summary.html">org.apache.solr.client.solrj.response</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../../../org/apache/solr/client/solrj/response/package-summary.html">org.apache.solr.client.solrj.response</A> that return types with arguments of type <A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.Count.html" title="class in org.apache.solr.client.solrj.response">RangeFacet.Count</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="http://download.oracle.com/javase/6/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</A><<A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.Count.html" title="class in org.apache.solr.client.solrj.response">RangeFacet.Count</A>></CODE></FONT></TD>
<TD><CODE><B>RangeFacet.</B><B><A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.html#getCounts()">getCounts</A></B>()</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.Count.html" title="class in org.apache.solr.client.solrj.response"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../../index.html?org/apache/solr/client/solrj/response//class-useRangeFacet.Count.html" target="_top"><B>FRAMES</B></A>
<A HREF="RangeFacet.Count.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
<i>Copyright © 2000-2013 Apache Software Foundation. All Rights Reserved.</i>
<script src='../../../../../../../prettify.js' type='text/javascript'></script>
<script type='text/javascript'>
(function(){
var oldonload = window.onload;
if (typeof oldonload != 'function') {
window.onload = prettyPrint;
} else {
window.onload = function() {
oldonload();
prettyPrint();
}
}
})();
</script>
</BODY>
</HTML>
| tenaciousjzh/titan-solr-cloud-test | solr-4.4.0/docs/solr-solrj/org/apache/solr/client/solrj/response/class-use/RangeFacet.Count.html | HTML | apache-2.0 | 8,940 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.type;
import com.facebook.presto.operator.scalar.AbstractTestFunctions;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.google.common.net.InetAddresses;
import io.airlift.slice.Slices;
import org.testng.annotations.Test;
import static com.facebook.presto.spi.function.OperatorType.HASH_CODE;
import static com.facebook.presto.spi.function.OperatorType.INDETERMINATE;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.type.IpAddressType.IPADDRESS;
import static com.facebook.presto.type.IpPrefixType.IPPREFIX;
import static java.lang.System.arraycopy;
/**
 * Operator tests for the IPPREFIX type: casts to/from VARCHAR and IPADDRESS,
 * comparison operators, and the hash/indeterminate operators.
 */
public class TestIpPrefixOperators
        extends AbstractTestFunctions
{
    /**
     * Casting VARCHAR to IPPREFIX must canonicalize the address (zero the bits
     * below the prefix length, collapse IPv6, map IPv4-mapped IPv6 to IPv4)
     * and reject malformed addresses or out-of-range prefix lengths.
     */
    @Test
    public void testVarcharToIpPrefixCast()
    {
        assertFunction("CAST('::ffff:1.2.3.4/24' AS IPPREFIX)", IPPREFIX, "1.2.3.0/24");
        assertFunction("CAST('192.168.0.0/24' AS IPPREFIX)", IPPREFIX, "192.168.0.0/24");
        assertFunction("CAST('255.2.3.4/0' AS IPPREFIX)", IPPREFIX, "0.0.0.0/0");
        assertFunction("CAST('255.2.3.4/1' AS IPPREFIX)", IPPREFIX, "128.0.0.0/1");
        assertFunction("CAST('255.2.3.4/2' AS IPPREFIX)", IPPREFIX, "192.0.0.0/2");
        assertFunction("CAST('255.2.3.4/4' AS IPPREFIX)", IPPREFIX, "240.0.0.0/4");
        assertFunction("CAST('1.2.3.4/8' AS IPPREFIX)", IPPREFIX, "1.0.0.0/8");
        assertFunction("CAST('1.2.3.4/16' AS IPPREFIX)", IPPREFIX, "1.2.0.0/16");
        assertFunction("CAST('1.2.3.4/24' AS IPPREFIX)", IPPREFIX, "1.2.3.0/24");
        assertFunction("CAST('1.2.3.255/25' AS IPPREFIX)", IPPREFIX, "1.2.3.128/25");
        assertFunction("CAST('1.2.3.255/26' AS IPPREFIX)", IPPREFIX, "1.2.3.192/26");
        assertFunction("CAST('1.2.3.255/28' AS IPPREFIX)", IPPREFIX, "1.2.3.240/28");
        assertFunction("CAST('1.2.3.255/30' AS IPPREFIX)", IPPREFIX, "1.2.3.252/30");
        assertFunction("CAST('1.2.3.255/32' AS IPPREFIX)", IPPREFIX, "1.2.3.255/32");
        assertFunction("CAST('2001:0db8:0000:0000:0000:ff00:0042:8329/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
        assertFunction("CAST('2001:db8::ff00:42:8329/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
        assertFunction("CAST('2001:db8:0:0:1:0:0:1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
        assertFunction("CAST('2001:db8:0:0:1::1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
        assertFunction("CAST('2001:db8::1:0:0:1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
        assertFunction("CAST('2001:DB8::FF00:ABCD:12EF/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:abcd:12ef/128");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/0' AS IPPREFIX)", IPPREFIX, "::/0");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/1' AS IPPREFIX)", IPPREFIX, "8000::/1");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/2' AS IPPREFIX)", IPPREFIX, "c000::/2");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/4' AS IPPREFIX)", IPPREFIX, "f000::/4");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/8' AS IPPREFIX)", IPPREFIX, "ff00::/8");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/16' AS IPPREFIX)", IPPREFIX, "ffff::/16");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/32' AS IPPREFIX)", IPPREFIX, "ffff:ffff::/32");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/48' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff::/48");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/64' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff::/64");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/80' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff::/80");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/96' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff::/96");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/112' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:0/112");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/120' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff00/120");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/124' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0/124");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/126' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffc/126");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/127' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/127");
        assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128");
        assertFunction("IPPREFIX '10.0.0.0/32'", IPPREFIX, "10.0.0.0/32");
        assertFunction("IPPREFIX '64:ff9b::10.0.0.0/128'", IPPREFIX, "64:ff9b::a00:0/128");
        assertInvalidCast("CAST('facebook.com/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: facebook.com/32");
        assertInvalidCast("CAST('localhost/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: localhost/32");
        assertInvalidCast("CAST('2001:db8::1::1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 2001:db8::1::1/128");
        assertInvalidCast("CAST('2001:zxy::1::1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 2001:zxy::1::1/128");
        assertInvalidCast("CAST('789.1.1.1/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 789.1.1.1/32");
        assertInvalidCast("CAST('192.1.1.1' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 192.1.1.1");
        assertInvalidCast("CAST('192.1.1.1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 192.1.1.1/128");
    }

    /**
     * Casting IPPREFIX back to VARCHAR must render the canonical short form
     * (IPv4 dotted quad for IPv4-mapped addresses, compressed IPv6 otherwise).
     */
    @Test
    public void testIpPrefixToVarcharCast()
    {
        assertFunction("CAST(IPPREFIX '::ffff:1.2.3.4/32' AS VARCHAR)", VARCHAR, "1.2.3.4/32");
        assertFunction("CAST(IPPREFIX '::ffff:102:304/32' AS VARCHAR)", VARCHAR, "1.2.3.4/32");
        assertFunction("CAST(IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' AS VARCHAR)", VARCHAR, "2001:db8::ff00:42:8329/128");
        assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/128' AS VARCHAR)", VARCHAR, "2001:db8::ff00:42:8329/128");
        assertFunction("CAST(IPPREFIX '2001:db8:0:0:1:0:0:1/128' AS VARCHAR)", VARCHAR, "2001:db8::1:0:0:1/128");
        assertFunction("CAST(CAST('1.2.3.4/32' AS IPPREFIX) AS VARCHAR)", VARCHAR, "1.2.3.4/32");
        assertFunction("CAST(CAST('2001:db8:0:0:1::1/128' AS IPPREFIX) AS VARCHAR)", VARCHAR, "2001:db8::1:0:0:1/128");
        assertFunction("CAST(CAST('64:ff9b::10.0.0.0/128' AS IPPREFIX) AS VARCHAR)", VARCHAR, "64:ff9b::a00:0/128");
    }

    /**
     * Casting IPPREFIX to IPADDRESS yields the network (base) address of the prefix.
     */
    @Test
    public void testIpPrefixToIpAddressCast()
    {
        assertFunction("CAST(IPPREFIX '1.2.3.4/32' AS IPADDRESS)", IPADDRESS, "1.2.3.4");
        assertFunction("CAST(IPPREFIX '1.2.3.4/24' AS IPADDRESS)", IPADDRESS, "1.2.3.0");
        assertFunction("CAST(IPPREFIX '::1/128' AS IPADDRESS)", IPADDRESS, "::1");
        assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/128' AS IPADDRESS)", IPADDRESS, "2001:db8::ff00:42:8329");
        assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/64' AS IPADDRESS)", IPADDRESS, "2001:db8::");
    }

    /**
     * Casting IPADDRESS to IPPREFIX produces a full-length (/32 or /128) prefix.
     */
    @Test
    public void testIpAddressToIpPrefixCast()
    {
        assertFunction("CAST(IPADDRESS '1.2.3.4' AS IPPREFIX)", IPPREFIX, "1.2.3.4/32");
        assertFunction("CAST(IPADDRESS '::ffff:102:304' AS IPPREFIX)", IPPREFIX, "1.2.3.4/32");
        assertFunction("CAST(IPADDRESS '::1' AS IPPREFIX)", IPPREFIX, "::1/128");
        assertFunction("CAST(IPADDRESS '2001:db8::ff00:42:8329' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
    }

    /**
     * Equality compares the canonical network address plus the prefix length;
     * hosts within the same prefix compare equal, differing lengths do not.
     */
    @Test
    public void testEquals()
    {
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' = IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, true);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) = CAST('::ffff:1.2.3.4/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("IPPREFIX '192.168.0.0/32' = IPPREFIX '::ffff:192.168.0.0/32'", BOOLEAN, true);
        assertFunction("IPPREFIX '10.0.0.0/32' = IPPREFIX '::ffff:a00:0/32'", BOOLEAN, true);
        assertFunction("CAST('1.2.3.4/24' AS IPPREFIX) = IPPREFIX '1.2.3.5/24'", BOOLEAN, true);
        assertFunction("IPPREFIX '2001:db8::ff00:42:8329/128' = IPPREFIX '2001:db8::ff00:42:8300/128'", BOOLEAN, false);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) = IPPREFIX '1.2.3.5/32'", BOOLEAN, false);
        assertFunction("CAST('1.2.0.0/24' AS IPPREFIX) = IPPREFIX '1.2.0.0/25'", BOOLEAN, false);
    }

    /**
     * IS DISTINCT FROM treats two NULLs as not distinct and a NULL vs. a value
     * as distinct, unlike plain equality.
     */
    @Test
    public void testDistinctFrom()
    {
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, false);
        assertFunction("CAST(NULL AS IPPREFIX) IS DISTINCT FROM CAST(NULL AS IPPREFIX)", BOOLEAN, false);
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8328/128'", BOOLEAN, true);
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM CAST(NULL AS IPPREFIX)", BOOLEAN, true);
        assertFunction("CAST(NULL AS IPPREFIX) IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8328/128'", BOOLEAN, true);
    }

    /**
     * Inequality via both != and <> spellings, including IPv4-mapped equivalence.
     */
    @Test
    public void testNotEquals()
    {
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' != IPPREFIX '1.2.3.4/32'", BOOLEAN, true);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) <> CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) != IPPREFIX '1.2.3.4/32'", BOOLEAN, false);
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' <> IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, false);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) <> CAST('::ffff:1.2.3.4/32' AS IPPREFIX)", BOOLEAN, false);
    }

    /**
     * Ordering: IPv4 prefixes sort below IPv6; within the same address a longer
     * prefix length sorts after a shorter one; BETWEEN composes from <= / >=.
     */
    @Test
    public void testOrderOperators()
    {
        assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' > IPPREFIX '1.2.3.4/32'", BOOLEAN, true);
        assertFunction("IPPREFIX '1.2.3.4/32' > IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128'", BOOLEAN, false);
        assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) < CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("CAST('1.2.3.5/32' AS IPPREFIX) < CAST('1.2.3.4/32' AS IPPREFIX)", BOOLEAN, false);
        assertFunction("CAST('1.2.0.0/24' AS IPPREFIX) < CAST('1.2.0.0/25' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("IPPREFIX '::1/128' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("IPPREFIX '1.2.3.5/32' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
        assertFunction("IPPREFIX '1.2.3.6/32' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, false);
        assertFunction("IPPREFIX '::1/128' >= IPPREFIX '::/128'", BOOLEAN, true);
        assertFunction("IPPREFIX '::1/128' >= IPPREFIX '::1/128'", BOOLEAN, true);
        assertFunction("IPPREFIX '::/128' >= IPPREFIX '::1/128'", BOOLEAN, false);
        assertFunction("IPPREFIX '::1/128' BETWEEN IPPREFIX '::/128' AND IPPREFIX '::1234/128'", BOOLEAN, true);
        assertFunction("IPPREFIX '::2222/128' BETWEEN IPPREFIX '::/128' AND IPPREFIX '::1234/128'", BOOLEAN, false);
    }

    /**
     * The INDETERMINATE operator is true only for NULL values.
     */
    @Test
    public void testIndeterminate()
    {
        assertOperator(INDETERMINATE, "CAST(null AS IPPREFIX)", BOOLEAN, true);
        assertOperator(INDETERMINATE, "IPPREFIX '::2222/128'", BOOLEAN, false);
    }

    /**
     * HASH_CODE of NULL is NULL; otherwise it must match the type's own hash
     * of the binary representation built in hashFromType.
     */
    @Test
    public void testHash()
    {
        assertOperator(HASH_CODE, "CAST(null AS IPPREFIX)", BIGINT, null);
        assertOperator(HASH_CODE, "IPPREFIX '::2222/128'", BIGINT, hashFromType("::2222/128"));
    }

    /**
     * Computes the expected hash for an "address/length" string by writing the
     * 16-byte address followed by a 1-byte prefix length into an IPPREFIX block
     * and asking the type for its hash of that position.
     */
    private static long hashFromType(String address)
    {
        BlockBuilder blockBuilder = IPPREFIX.createBlockBuilder(null, 1);
        String[] parts = address.split("/");
        byte[] bytes = new byte[IPPREFIX.getFixedSize()];
        byte[] addressBytes = InetAddresses.forString(parts[0]).getAddress();
        arraycopy(addressBytes, 0, bytes, 0, 16);
        // last byte of the fixed-size slice carries the prefix length
        bytes[IPPREFIX.getFixedSize() - 1] = (byte) Integer.parseInt(parts[1]);
        IPPREFIX.writeSlice(blockBuilder, Slices.wrappedBuffer(bytes));
        Block block = blockBuilder.build();
        return IPPREFIX.hash(block, 0);
    }
}
| ptkool/presto | presto-main/src/test/java/com/facebook/presto/type/TestIpPrefixOperators.java | Java | apache-2.0 | 13,240 |
---
external help file: Microsoft.Azure.Commands.HDInsight.dll-Help.xml
ms.assetid: 4ED47646-542B-4983-B46B-B603BE33D499
online version:
schema: 2.0.0
---
# New-AzureRmHDInsightSqoopJobDefinition
## SYNOPSIS
Creates a Sqoop job object.
## SYNTAX
```
New-AzureRmHDInsightSqoopJobDefinition [-Files <String[]>] [-StatusFolder <String>] [-File <String>]
[-Command <String>] [-LibDir <String>] [<CommonParameters>]
```
## DESCRIPTION
The **New-AzureRmHDInsightSqoopJobDefinition** cmdlet defines a Sqoop job object for use with an Azure HDInsight cluster.
## EXAMPLES
### Example 1: Create a Sqoop job definition
```
PS C:\># Cluster info
PS C:\>$clusterName = "your-hadoop-001"
PS C:\>$clusterCreds = Get-Credential

PS C:\># Sqoop job details
PS C:\>$statusFolder = "<status folder>"
PS C:\>$sqoopCommand = "import --connect <connection string> --table <table name>"

PS C:\>New-AzureRmHDInsightSqoopJobDefinition -StatusFolder $statusFolder `
            -Command $sqoopCommand `
        | Start-AzureRmHDInsightJob -ClusterName $clusterName `
            -ClusterCredential $clusterCreds
```
This command creates a Sqoop job definition.
## PARAMETERS
### -Command
Specifies the Sqoop command.
```yaml
Type: String
Parameter Sets: (All)
Aliases:
Required: False
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### -File
Specifies the path to a file that contains the query to run.
The file must be available on the Storage account associated with the cluster.
You can use this parameter instead of the *Query* parameter.
```yaml
Type: String
Parameter Sets: (All)
Aliases:
Required: False
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### -Files
Specifies a collection of files that are associated with a Hive job.
```yaml
Type: String[]
Parameter Sets: (All)
Aliases:
Required: False
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### -LibDir
Specifies the library directory for the Sqoop job.
```yaml
Type: String
Parameter Sets: (All)
Aliases:
Required: False
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### -StatusFolder
Specifies the location of the folder that contains standard outputs and error outputs for a job.
```yaml
Type: String
Parameter Sets: (All)
Aliases:
Required: False
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### CommonParameters
This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see about_CommonParameters (http://go.microsoft.com/fwlink/?LinkID=113216).
## INPUTS
## OUTPUTS
### Microsoft.Azure.Commands.HDInsight.Models.AzureHDInsightSqoopJobDefinition
## NOTES
## RELATED LINKS
[Start-AzureRmHDInsightJob](./Start-AzureRmHDInsightJob.md)
| hungmai-msft/azure-powershell | src/ResourceManager/HDInsight/Commands.HDInsight/help/New-AzureRmHDInsightSqoopJobDefinition.md | Markdown | apache-2.0 | 2,929 |
"""
IP Types
"""
import logging
from ipaddress import ip_address
from socket import AF_INET, AF_INET6
from vpp_papi import VppEnum
from vpp_object import VppObject
# Python 2/3 compatibility shim: ``unicode`` exists only on Python 2.
# On Python 3 the lookup raises NameError and we fall back to ``str``,
# so ``text_type`` always names the native text string type.
try:
    text_type = unicode
except NameError:
    text_type = str

# Module-level logger for these IP helper types.
_log = logging.getLogger(__name__)
class DpoProto:
    """DPO protocol identifiers, mirroring VPP's ``dpo_proto_t`` enum."""
    DPO_PROTO_IP4 = 0
    DPO_PROTO_IP6 = 1
    DPO_PROTO_MPLS = 2
    DPO_PROTO_ETHERNET = 3
    DPO_PROTO_BIER = 4
    DPO_PROTO_NSH = 5


# Sentinel used by the VPP API for "no index".
INVALID_INDEX = 0xffffffff


def get_dpo_proto(addr):
    """Map an IP address (string or ipaddress object) to its DPO protocol.

    :param addr: address whose family selects the protocol
    :returns: ``DpoProto.DPO_PROTO_IP6`` for IPv6, else
        ``DpoProto.DPO_PROTO_IP4``.
    """
    family = ip_address(addr).version
    return (DpoProto.DPO_PROTO_IP6 if family == 6
            else DpoProto.DPO_PROTO_IP4)
class VppIpAddressUnion():
    """Wrapper for a textual IP address, mirroring VPP's
    ``vl_api_address_union_t``.

    Holds both the original string and the parsed :mod:`ipaddress`
    object so callers can compare against either form.
    """

    def __init__(self, addr):
        # Keep the caller's original value and a parsed copy.
        self.addr = addr
        self.ip_addr = ip_address(text_type(self.addr))

    def encode(self):
        """Return the API union dict keyed by address family."""
        if self.version == 6:
            return {'ip6': self.ip_addr}
        else:
            return {'ip4': self.ip_addr}

    @property
    def version(self):
        """IP version of the wrapped address (4 or 6)."""
        return self.ip_addr.version

    @property
    def address(self):
        """The original address value passed to the constructor."""
        return self.addr

    @property
    def length(self):
        """Maximum prefix length for the address family (32 or 128)."""
        return self.ip_addr.max_prefixlen

    @property
    def bytes(self):
        """Packed network-order byte representation of the address."""
        return self.ip_addr.packed

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            return self.ip_addr == other.ip_addr
        elif hasattr(other, "ip4") and hasattr(other, "ip6"):
            # vl_api_address_union_t
            if 4 == self.version:
                return self.ip_addr == other.ip4
            else:
                return self.ip_addr == other.ip6
        else:
            # Fix: Exception() does not apply printf-style arguments, so
            # the original call left the "%s" placeholders unfilled.
            # Interpolate the message eagerly instead.
            raise Exception("Comparing VppIpAddressUnions:%s"
                            " with incomparable type: %s" %
                            (self, other))

    def __ne__(self, other):
        return not (self == other)

    def __str__(self):
        return str(self.ip_addr)
class VppIpMPrefix():
    """An IP multicast (S,G) prefix — source address, group address and
    group prefix length — matching VPP's ``vl_api_mprefix_t``.

    :raises ValueError: if the source and group addresses are not of the
        same address family.
    """

    def __init__(self, saddr, gaddr, glen):
        self.saddr = saddr
        self.gaddr = gaddr
        self.glen = glen
        if ip_address(self.saddr).version != \
           ip_address(self.gaddr).version:
            raise ValueError('Source and group addresses must be of the '
                             'same address family.')

    def encode(self):
        """Return the ``vl_api_mprefix_t`` dict representation."""
        return {
            'af': ip_address(self.gaddr).vapi_af,
            'grp_address': {
                ip_address(self.gaddr).vapi_af_name: self.gaddr
            },
            'src_address': {
                ip_address(self.saddr).vapi_af_name: self.saddr
            },
            'grp_address_length': self.glen,
        }

    @property
    def length(self):
        """Group prefix length in bits."""
        return self.glen

    @property
    def version(self):
        """IP version of the group address (4 or 6)."""
        return ip_address(self.gaddr).version

    def __str__(self):
        return "(%s,%s)/%d" % (self.saddr, self.gaddr, self.glen)

    def __eq__(self, other):
        if isinstance(other, self.__class__):
            # Bug fix: the original compared self.saddr to other.gaddr,
            # so two identical (S,G) prefixes never compared equal.
            return (self.glen == other.glen and
                    self.gaddr == other.gaddr and
                    self.saddr == other.saddr)
        elif (hasattr(other, "grp_address_length") and
              hasattr(other, "grp_address") and
              hasattr(other, "src_address")):
            # vl_api_mprefix_t
            if 4 == self.version:
                return (self.glen == other.grp_address_length and
                        self.gaddr == str(other.grp_address.ip4) and
                        self.saddr == str(other.src_address.ip4))
            else:
                return (self.glen == other.grp_address_length and
                        self.gaddr == str(other.grp_address.ip6) and
                        self.saddr == str(other.src_address.ip6))
        return NotImplemented
class VppIpPuntPolicer(VppObject):
    """Attaches a policer to the IP punt path (IPv4 or IPv6)."""

    def __init__(self, test, policer_index, is_ip6=False):
        self._test = test
        self._policer_index = policer_index
        self._is_ip6 = is_ip6

    def add_vpp_config(self):
        """Program the punt policer into VPP."""
        self._test.vapi.ip_punt_police(policer_index=self._policer_index,
                                       is_ip6=self._is_ip6, is_add=True)

    def remove_vpp_config(self):
        """Remove the punt policer from VPP."""
        self._test.vapi.ip_punt_police(policer_index=self._policer_index,
                                       is_ip6=self._is_ip6, is_add=False)

    def query_vpp_config(self):
        # No dump API is available for punt policers, so we cannot verify
        # the configuration; report "not configured".  The original body
        # was the bare expression ``NotImplemented``, which had no effect
        # and fell through to an implicit ``return None`` — make the
        # falsy result explicit.
        return False
class VppIpPuntRedirect(VppObject):
    """Redirects punted IP traffic from an RX interface to a next-hop
    reachable via a TX interface."""

    def __init__(self, test, rx_index, tx_index, nh_addr):
        self._test = test
        self._rx_index = rx_index
        self._tx_index = tx_index
        self._nh_addr = ip_address(nh_addr)

    def encode(self):
        """Build the ``punt`` dict consumed by the redirect API."""
        return {
            "rx_sw_if_index": self._rx_index,
            "tx_sw_if_index": self._tx_index,
            "nh": self._nh_addr,
        }

    def add_vpp_config(self):
        """Program the redirect and register it for test cleanup."""
        self._test.vapi.ip_punt_redirect(punt=self.encode(), is_add=True)
        self._test.registry.register(self, self._test.logger)

    def remove_vpp_config(self):
        """Remove the redirect from VPP."""
        self._test.vapi.ip_punt_redirect(punt=self.encode(), is_add=False)

    def get_vpp_config(self):
        """Dump the redirects currently programmed for the RX interface."""
        return self._test.vapi.ip_punt_redirect_dump(
            sw_if_index=self._rx_index,
            is_ipv6=(self._nh_addr.version == 6))

    def query_vpp_config(self):
        """True iff VPP reports at least one redirect for our interface."""
        return bool(self.get_vpp_config())
class VppIpPathMtu(VppObject):
    """A per-peer IP path-MTU entry in a given FIB table."""

    def __init__(self, test, nh, pmtu, table_id=0):
        self._test = test
        self.nh = nh
        self.pmtu = pmtu
        self.table_id = table_id

    def _encode(self, path_mtu):
        # Build the vl_api_ip_path_mtu_t dict in one place; this was
        # previously duplicated across add/modify/remove.
        return {'nh': self.nh,
                'table_id': self.table_id,
                'path_mtu': path_mtu}

    def add_vpp_config(self):
        """Program the path-MTU entry and register it for cleanup."""
        self._test.vapi.ip_path_mtu_update(pmtu=self._encode(self.pmtu))
        self._test.registry.register(self, self._test.logger)
        return self

    def modify(self, pmtu):
        """Change the path-MTU value of this entry in place."""
        self.pmtu = pmtu
        self._test.vapi.ip_path_mtu_update(pmtu=self._encode(self.pmtu))
        return self

    def remove_vpp_config(self):
        # A path-MTU of 0 deletes the entry.
        self._test.vapi.ip_path_mtu_update(pmtu=self._encode(0))

    def query_vpp_config(self):
        """True iff VPP's dump contains an entry matching nh/table/pmtu."""
        ds = list(self._test.vapi.vpp.details_iter(
            self._test.vapi.ip_path_mtu_get))
        for d in ds:
            if self.nh == str(d.pmtu.nh) \
               and self.table_id == d.pmtu.table_id \
               and self.pmtu == d.pmtu.path_mtu:
                return True
        return False

    def object_id(self):
        return ("ip-path-mtu-%d-%s-%d" % (self.table_id,
                                          self.nh,
                                          self.pmtu))

    def __str__(self):
        return self.object_id()
| FDio/vpp | test/vpp_ip.py | Python | apache-2.0 | 6,921 |
package org.plasma.provisioning.rdb.mysql.v5_5.query;
import org.plasma.provisioning.rdb.mysql.v5_5.TableColumnConstraint;
import org.plasma.query.DataProperty;
import org.plasma.query.Expression;
import org.plasma.query.dsl.DataNode;
import org.plasma.query.dsl.DomainRoot;
import org.plasma.query.dsl.PathNode;
import org.plasma.sdo.helper.PlasmaTypeHelper;
/**
 * Generated Domain Specific Language (DSL) implementation class representing
 * the domain model entity <b>TableColumnConstraint</b>.
 *
 * <p>
 * </p>
 * <b>Data Store Mapping:</b> Corresponds to the physical data store entity
 * <b>REFERENTIAL_CONSTRAINTS</b>.
 *
 */
public class QTableColumnConstraint extends DomainRoot {

  /**
   * Private no-arg constructor; query roots are obtained via
   * {@link #newQuery()}.
   */
  private QTableColumnConstraint() {
    super(PlasmaTypeHelper.INSTANCE.getType(TableColumnConstraint.class));
  }

  /**
   * Constructor which instantiates a domain query path node. A path may span
   * multiple namespaces and therefore Java implementation packages based on the
   * <a href=
   * "http://docs.plasma-sdo.org/api/org/plasma/config/PlasmaConfiguration.html"
   * >Configuration</a>. Note: while this constructor is public, it is not for
   * application use!
   *
   * @param source
   *          the source path node
   * @param sourceProperty
   *          the source property logical name
   */
  public QTableColumnConstraint(PathNode source, String sourceProperty) {
    super(source, sourceProperty);
  }

  /**
   * Constructor which instantiates a domain query path node. A path may span
   * multiple namespaces and therefore Java implementation packages based on the
   * <a href=
   * "http://docs.plasma-sdo.org/api/org/plasma/config/PlasmaConfiguration.html"
   * >Configuration</a>. Note: while this constructor is public, it is not for
   * application use!
   *
   * @param source
   *          the source path node
   * @param sourceProperty
   *          the source property logical name
   * @param expr
   *          the path predicate expression
   */
  public QTableColumnConstraint(PathNode source, String sourceProperty, Expression expr) {
    super(source, sourceProperty, expr);
  }

  /**
   * Returns a new DSL query for <a
   * href="http://docs.plasma-sdo.org/api/org/plasma/sdo/PlasmaType.html"
   * >Type</a> <b>TableColumnConstraint</b> which can be used either as a query
   * root or as the start (entry point) for a new path predicate expression.
   *
   * @return a new DSL query
   */
  public static QTableColumnConstraint newQuery() {
    return new QTableColumnConstraint();
  }

  /**
   * Returns a DSL data element for property, <b>name</b>.
   *
   * @return a DSL data element for property, <b>name</b>.
   */
  public DataProperty name() {
    return new DataNode(this, TableColumnConstraint.PROPERTY.name.name());
  }

  /**
   * Returns a DSL data element for property, <b>owner</b>.
   *
   * @return a DSL data element for property, <b>owner</b>.
   */
  public DataProperty owner() {
    return new DataNode(this, TableColumnConstraint.PROPERTY.owner.name());
  }

  /**
   * Returns a DSL query element for reference property, <b>table</b>.
   *
   * @return a DSL query element for reference property, <b>table</b>.
   */
  public QTable table() {
    return new QTable(this, TableColumnConstraint.PROPERTY.table.name());
  }
}
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: hapi/chart/metadata.proto
package chart
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
// Metadata_Engine enumerates the template engines a chart may request.
type Metadata_Engine int32

const (
	// Metadata_UNKNOWN means no engine was specified.
	Metadata_UNKNOWN Metadata_Engine = 0
	// Metadata_GOTPL selects the Go text/template engine.
	Metadata_GOTPL Metadata_Engine = 1
)

// Metadata_Engine_name maps enum values to their proto names.
var Metadata_Engine_name = map[int32]string{
	0: "UNKNOWN",
	1: "GOTPL",
}

// Metadata_Engine_value maps proto names back to enum values.
var Metadata_Engine_value = map[string]int32{
	"UNKNOWN": 0,
	"GOTPL":   1,
}

// String returns the proto name for the enum value.
func (x Metadata_Engine) String() string {
	return proto.EnumName(Metadata_Engine_name, int32(x))
}

// EnumDescriptor returns the raw file descriptor bytes and the index
// path of this enum within it.
func (Metadata_Engine) EnumDescriptor() ([]byte, []int) {
	return fileDescriptor_metadata_d6c714c73a051dcb, []int{1, 0}
}
// Maintainer describes a Chart maintainer.
type Maintainer struct {
	// Name is a user name or organization name
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// Email is an optional email address to contact the named maintainer
	Email string `protobuf:"bytes,2,opt,name=email,proto3" json:"email,omitempty"`
	// Url is an optional URL to an address for the named maintainer
	Url                  string   `protobuf:"bytes,3,opt,name=url,proto3" json:"url,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *Maintainer) Reset() { *m = Maintainer{} }

// String renders the message in the compact proto text format.
func (m *Maintainer) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks Maintainer as a protobuf message.
func (*Maintainer) ProtoMessage() {}

// Descriptor returns the raw file descriptor bytes and the message index.
func (*Maintainer) Descriptor() ([]byte, []int) {
	return fileDescriptor_metadata_d6c714c73a051dcb, []int{0}
}

// The XXX_* methods below are generated wire-format plumbing used by the
// proto runtime; application code should not call them directly.
func (m *Maintainer) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_Maintainer.Unmarshal(m, b)
}
func (m *Maintainer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_Maintainer.Marshal(b, m, deterministic)
}
func (dst *Maintainer) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Maintainer.Merge(dst, src)
}
func (m *Maintainer) XXX_Size() int {
	return xxx_messageInfo_Maintainer.Size(m)
}
func (m *Maintainer) XXX_DiscardUnknown() {
	xxx_messageInfo_Maintainer.DiscardUnknown(m)
}

var xxx_messageInfo_Maintainer proto.InternalMessageInfo

// The Get* accessors below are generated nil-safe getters: each returns
// the field's zero value when the receiver or field is unset.
func (m *Maintainer) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}
func (m *Maintainer) GetEmail() string {
	if m != nil {
		return m.Email
	}
	return ""
}
func (m *Maintainer) GetUrl() string {
	if m != nil {
		return m.Url
	}
	return ""
}
// Metadata for a Chart file. This models the structure of a Chart.yaml file.
//
// Spec: https://k8s.io/helm/blob/master/docs/design/chart_format.md#the-chart-file
type Metadata struct {
	// The name of the chart
	Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
	// The URL to a relevant project page, git repo, or contact person
	Home string `protobuf:"bytes,2,opt,name=home,proto3" json:"home,omitempty"`
	// Source is the URL to the source code of this chart
	Sources []string `protobuf:"bytes,3,rep,name=sources,proto3" json:"sources,omitempty"`
	// A SemVer 2 conformant version string of the chart
	Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"`
	// A one-sentence description of the chart
	Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"`
	// A list of string keywords
	Keywords []string `protobuf:"bytes,6,rep,name=keywords,proto3" json:"keywords,omitempty"`
	// A list of name and URL/email address combinations for the maintainer(s)
	Maintainers []*Maintainer `protobuf:"bytes,7,rep,name=maintainers,proto3" json:"maintainers,omitempty"`
	// The name of the template engine to use. Defaults to 'gotpl'.
	Engine string `protobuf:"bytes,8,opt,name=engine,proto3" json:"engine,omitempty"`
	// The URL to an icon file.
	Icon string `protobuf:"bytes,9,opt,name=icon,proto3" json:"icon,omitempty"`
	// The API Version of this chart.
	ApiVersion string `protobuf:"bytes,10,opt,name=apiVersion,proto3" json:"apiVersion,omitempty"`
	// The condition to check to enable chart
	Condition string `protobuf:"bytes,11,opt,name=condition,proto3" json:"condition,omitempty"`
	// The tags to check to enable chart
	Tags string `protobuf:"bytes,12,opt,name=tags,proto3" json:"tags,omitempty"`
	// The version of the application enclosed inside of this chart.
	AppVersion string `protobuf:"bytes,13,opt,name=appVersion,proto3" json:"appVersion,omitempty"`
	// Whether or not this chart is deprecated
	Deprecated bool `protobuf:"varint,14,opt,name=deprecated,proto3" json:"deprecated,omitempty"`
	// TillerVersion is a SemVer constraints on what version of Tiller is required.
	// See SemVer ranges here: https://github.com/Masterminds/semver#basic-comparisons
	TillerVersion string `protobuf:"bytes,15,opt,name=tillerVersion,proto3" json:"tillerVersion,omitempty"`
	// Annotations are additional mappings uninterpreted by Tiller,
	// made available for inspection by other applications.
	Annotations map[string]string `protobuf:"bytes,16,rep,name=annotations,proto3" json:"annotations,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
	// KubeVersion is a SemVer constraint specifying the version of Kubernetes required.
	KubeVersion          string   `protobuf:"bytes,17,opt,name=kubeVersion,proto3" json:"kubeVersion,omitempty"`
	XXX_NoUnkeyedLiteral struct{} `json:"-"`
	XXX_unrecognized     []byte   `json:"-"`
	XXX_sizecache        int32    `json:"-"`
}

// Reset clears the message to its zero value.
func (m *Metadata) Reset() { *m = Metadata{} }

// String renders the message in the compact proto text format.
func (m *Metadata) String() string { return proto.CompactTextString(m) }

// ProtoMessage marks Metadata as a protobuf message.
func (*Metadata) ProtoMessage() {}

// Descriptor returns the raw file descriptor bytes and the message index.
func (*Metadata) Descriptor() ([]byte, []int) {
	return fileDescriptor_metadata_d6c714c73a051dcb, []int{1}
}

// The XXX_* methods below are generated wire-format plumbing used by the
// proto runtime; application code should not call them directly.
func (m *Metadata) XXX_Unmarshal(b []byte) error {
	return xxx_messageInfo_Metadata.Unmarshal(m, b)
}
func (m *Metadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
	return xxx_messageInfo_Metadata.Marshal(b, m, deterministic)
}
func (dst *Metadata) XXX_Merge(src proto.Message) {
	xxx_messageInfo_Metadata.Merge(dst, src)
}
func (m *Metadata) XXX_Size() int {
	return xxx_messageInfo_Metadata.Size(m)
}
func (m *Metadata) XXX_DiscardUnknown() {
	xxx_messageInfo_Metadata.DiscardUnknown(m)
}

var xxx_messageInfo_Metadata proto.InternalMessageInfo

// The Get* accessors below are generated nil-safe getters: each returns
// the field's zero value when the receiver or field is unset.
func (m *Metadata) GetName() string {
	if m != nil {
		return m.Name
	}
	return ""
}
func (m *Metadata) GetHome() string {
	if m != nil {
		return m.Home
	}
	return ""
}
func (m *Metadata) GetSources() []string {
	if m != nil {
		return m.Sources
	}
	return nil
}
func (m *Metadata) GetVersion() string {
	if m != nil {
		return m.Version
	}
	return ""
}
func (m *Metadata) GetDescription() string {
	if m != nil {
		return m.Description
	}
	return ""
}
func (m *Metadata) GetKeywords() []string {
	if m != nil {
		return m.Keywords
	}
	return nil
}
func (m *Metadata) GetMaintainers() []*Maintainer {
	if m != nil {
		return m.Maintainers
	}
	return nil
}
func (m *Metadata) GetEngine() string {
	if m != nil {
		return m.Engine
	}
	return ""
}
func (m *Metadata) GetIcon() string {
	if m != nil {
		return m.Icon
	}
	return ""
}
func (m *Metadata) GetApiVersion() string {
	if m != nil {
		return m.ApiVersion
	}
	return ""
}
func (m *Metadata) GetCondition() string {
	if m != nil {
		return m.Condition
	}
	return ""
}
func (m *Metadata) GetTags() string {
	if m != nil {
		return m.Tags
	}
	return ""
}
func (m *Metadata) GetAppVersion() string {
	if m != nil {
		return m.AppVersion
	}
	return ""
}
func (m *Metadata) GetDeprecated() bool {
	if m != nil {
		return m.Deprecated
	}
	return false
}
func (m *Metadata) GetTillerVersion() string {
	if m != nil {
		return m.TillerVersion
	}
	return ""
}
func (m *Metadata) GetAnnotations() map[string]string {
	if m != nil {
		return m.Annotations
	}
	return nil
}
func (m *Metadata) GetKubeVersion() string {
	if m != nil {
		return m.KubeVersion
	}
	return ""
}
// init registers the message types, the Annotations map entry type and
// the Engine enum with the proto runtime under their fully-qualified
// "hapi.chart.*" names.
func init() {
	proto.RegisterType((*Maintainer)(nil), "hapi.chart.Maintainer")
	proto.RegisterType((*Metadata)(nil), "hapi.chart.Metadata")
	proto.RegisterMapType((map[string]string)(nil), "hapi.chart.Metadata.AnnotationsEntry")
	proto.RegisterEnum("hapi.chart.Metadata_Engine", Metadata_Engine_name, Metadata_Engine_value)
}
// init registers the compressed file descriptor under the proto file path.
func init() {
	proto.RegisterFile("hapi/chart/metadata.proto", fileDescriptor_metadata_d6c714c73a051dcb)
}

// fileDescriptor_metadata_d6c714c73a051dcb holds the gzip-compressed
// FileDescriptorProto for hapi/chart/metadata.proto.
var fileDescriptor_metadata_d6c714c73a051dcb = []byte{
	// 435 bytes of a gzipped FileDescriptorProto
	0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x52, 0x5d, 0x6b, 0xd4, 0x40,
	0x14, 0x35, 0xcd, 0x66, 0x77, 0x73, 0x63, 0x35, 0x0e, 0x52, 0xc6, 0x22, 0x12, 0x16, 0x85, 0x7d,
	0xda, 0x82, 0xbe, 0x14, 0x1f, 0x04, 0x85, 0x52, 0x41, 0xbb, 0x95, 0xe0, 0x07, 0xf8, 0x36, 0x4d,
	0x2e, 0xdd, 0x61, 0x93, 0x99, 0x30, 0x99, 0xad, 0xec, 0xaf, 0xf0, 0x2f, 0xcb, 0xdc, 0x64, 0x9a,
	0xac, 0xf4, 0xed, 0x9e, 0x73, 0x66, 0xce, 0xcc, 0xbd, 0xf7, 0xc0, 0x8b, 0x8d, 0x68, 0xe4, 0x59,
	0xb1, 0x11, 0xc6, 0x9e, 0xd5, 0x68, 0x45, 0x29, 0xac, 0x58, 0x35, 0x46, 0x5b, 0xcd, 0xc0, 0x49,
	0x2b, 0x92, 0x16, 0x9f, 0x01, 0xae, 0x84, 0x54, 0x56, 0x48, 0x85, 0x86, 0x31, 0x98, 0x28, 0x51,
	0x23, 0x0f, 0xb2, 0x60, 0x19, 0xe7, 0x54, 0xb3, 0xe7, 0x10, 0x61, 0x2d, 0x64, 0xc5, 0x8f, 0x88,
	0xec, 0x00, 0x4b, 0x21, 0xdc, 0x99, 0x8a, 0x87, 0xc4, 0xb9, 0x72, 0xf1, 0x37, 0x82, 0xf9, 0x55,
	0xff, 0xd0, 0x83, 0x46, 0x0c, 0x26, 0x1b, 0x5d, 0x63, 0xef, 0x43, 0x35, 0xe3, 0x30, 0x6b, 0xf5,
	0xce, 0x14, 0xd8, 0xf2, 0x30, 0x0b, 0x97, 0x71, 0xee, 0xa1, 0x53, 0xee, 0xd0, 0xb4, 0x52, 0x2b,
	0x3e, 0xa1, 0x0b, 0x1e, 0xb2, 0x0c, 0x92, 0x12, 0xdb, 0xc2, 0xc8, 0xc6, 0x3a, 0x35, 0x22, 0x75,
	0x4c, 0xb1, 0x53, 0x98, 0x6f, 0x71, 0xff, 0x47, 0x9b, 0xb2, 0xe5, 0x53, 0xb2, 0xbd, 0xc7, 0xec,
	0x1c, 0x92, 0xfa, 0xbe, 0xe1, 0x96, 0xcf, 0xb2, 0x70, 0x99, 0xbc, 0x3d, 0x59, 0x0d, 0x23, 0x59,
	0x0d, 0xf3, 0xc8, 0xc7, 0x47, 0xd9, 0x09, 0x4c, 0x51, 0xdd, 0x4a, 0x85, 0x7c, 0x4e, 0x4f, 0xf6,
	0xc8, 0xf5, 0x25, 0x0b, 0xad, 0x78, 0xdc, 0xf5, 0xe5, 0x6a, 0xf6, 0x0a, 0x40, 0x34, 0xf2, 0x67,
	0xdf, 0x00, 0x90, 0x32, 0x62, 0xd8, 0x4b, 0x88, 0x0b, 0xad, 0x4a, 0x49, 0x1d, 0x24, 0x24, 0x0f,
	0x84, 0x73, 0xb4, 0xe2, 0xb6, 0xe5, 0x8f, 0x3b, 0x47, 0x57, 0x77, 0x8e, 0x8d, 0x77, 0x3c, 0xf6,
	0x8e, 0x9e, 0x71, 0x7a, 0x89, 0x8d, 0xc1, 0x42, 0x58, 0x2c, 0xf9, 0x93, 0x2c, 0x58, 0xce, 0xf3,
	0x11, 0xc3, 0x5e, 0xc3, 0xb1, 0x95, 0x55, 0x85, 0xc6, 0x5b, 0x3c, 0x25, 0x8b, 0x43, 0x92, 0x5d,
	0x42, 0x22, 0x94, 0xd2, 0x56, 0xb8, 0x7f, 0xb4, 0x3c, 0xa5, 0xe9, 0xbc, 0x39, 0x98, 0x8e, 0xcf,
	0xd2, 0xc7, 0xe1, 0xdc, 0x85, 0xb2, 0x66, 0x9f, 0x8f, 0x6f, 0xba, 0x25, 0x6d, 0x77, 0x37, 0xe8,
	0x1f, 0x7b, 0xd6, 0x2d, 0x69, 0x44, 0x9d, 0x7e, 0x80, 0xf4, 0x7f, 0x0b, 0x97, 0xaa, 0x2d, 0xee,
	0xfb, 0xd4, 0xb8, 0xd2, 0xa5, 0xef, 0x4e, 0x54, 0x3b, 0x9f, 0x9a, 0x0e, 0xbc, 0x3f, 0x3a, 0x0f,
	0x16, 0x19, 0x4c, 0x2f, 0xba, 0x05, 0x24, 0x30, 0xfb, 0xb1, 0xfe, 0xb2, 0xbe, 0xfe, 0xb5, 0x4e,
	0x1f, 0xb1, 0x18, 0xa2, 0xcb, 0xeb, 0xef, 0xdf, 0xbe, 0xa6, 0xc1, 0xa7, 0xd9, 0xef, 0x88, 0xfe,
	0x7c, 0x33, 0xa5, 0xdc, 0xbf, 0xfb, 0x17, 0x00, 0x00, 0xff, 0xff, 0x36, 0xf9, 0x0d, 0xa6, 0x14,
	0x03, 0x00, 0x00,
}
| appscode/helm | pkg/proto/hapi/chart/metadata.pb.go | GO | apache-2.0 | 11,564 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.