repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
SumoLogic/epigraph | idea-plugin/src/main/java/ws/epigraph/schema/parser/psi/stubs/SchemaListTypeDefStubElementType.java | 2214 | /*
* Copyright 2016 Sumo Logic
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ws.epigraph.schema.parser.psi.stubs;
import com.intellij.psi.stubs.StubElement;
import com.intellij.psi.stubs.StubInputStream;
import ws.epigraph.lang.Qn;
import ws.epigraph.schema.parser.psi.SchemaListTypeDef;
import ws.epigraph.schema.parser.psi.impl.SchemaListTypeDefImpl;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.util.List;
/**
* @author <a href="mailto:konstantin.sobolev@gmail.com">Konstantin Sobolev</a>
*/
/**
 * Stub element type for {@code list} type definitions in an Epigraph schema file.
 * Bridges between the PSI node ({@link SchemaListTypeDef}) and its serialized
 * stub ({@link SchemaListTypeDefStub}); the shared (de)serialization logic lives
 * in {@code SchemaTypeDefStubElementTypeBase}.
 */
public class SchemaListTypeDefStubElementType
    extends SchemaTypeDefStubElementTypeBase<SchemaListTypeDefStub, SchemaListTypeDef> {

  public SchemaListTypeDefStubElementType(@NotNull @NonNls String debugName) {
    // "listtypedef" is the external id suffix used for this stub element type.
    super(debugName, "listtypedef");
  }

  @Override
  public SchemaListTypeDef createPsi(@NotNull SchemaListTypeDefStub stub) {
    // Rebuild the PSI node backed by the given stub.
    return new SchemaListTypeDefImpl(stub, this);
  }

  @Override
  public SchemaListTypeDefStub createStub(@NotNull SchemaListTypeDef typeDef, StubElement parentStub) {
    // Capture just the indexable bits of the type def: simple name, namespace, extends refs.
    final String name = typeDef.getName();
    final String namespace = Qn.toNullableString(typeDef.getNamespace());
    return new SchemaListTypeDefStubImpl(parentStub, name, namespace, getSerializedExtendsTypeRefs(typeDef));
  }

  @NotNull
  @Override
  protected SchemaListTypeDefStub deserialize(
      @NotNull StubInputStream dataStream,
      StubElement parentStub,
      String name, String namespace,
      @Nullable final List<SerializedFqnTypeRef> extendsTypeRefs) throws IOException {
    // Common fields were already read by the base class; just assemble the stub.
    return new SchemaListTypeDefStubImpl(parentStub, name, namespace, extendsTypeRefs);
  }
}
| apache-2.0 |
bovlov/anothervote | common/mahonia/mahoniconv/mahoniconv.go | 707 | package main
import (
"flag"
"github.com/bovlov/anothervote/common/mahonia"
"io"
"log"
"os"
)
// An iconv workalike using mahonia.
// Source and destination character sets; both default to utf-8, which
// makes the corresponding conversion direction a no-op.
var from = flag.String("f", "utf-8", "source character set")
var to = flag.String("t", "utf-8", "destination character set")

// main pipes stdin to stdout, decoding from the -f charset and/or
// encoding to the -t charset via mahonia, iconv-style.
func main() {
	flag.Parse()

	var r io.Reader = os.Stdin
	var w io.Writer = os.Stdout

	// Wrap stdin in a decoder unless the input is already UTF-8.
	if *from != "utf-8" {
		decode := mahonia.NewDecoder(*from)
		if decode == nil {
			log.Fatalf("Could not create decoder for %s", *from)
		}
		r = decode.NewReader(r)
	}

	// Wrap stdout in an encoder unless the output should stay UTF-8.
	if *to != "utf-8" {
		encode := mahonia.NewEncoder(*to)
		if encode == nil {
			// BUG FIX: this message previously said "decoder"; it is the encoder that failed.
			log.Fatalf("Could not create encoder for %s", *to)
		}
		w = encode.NewWriter(w)
	}

	// BUG FIX: the error returned by io.Copy was silently discarded.
	if _, err := io.Copy(w, r); err != nil {
		log.Fatalf("conversion failed: %v", err)
	}
}
| apache-2.0 |
flipper83/protohipster | protohipster/src/main/java/com/flipper83/protohipster/globalutils/cache/Cache.java | 255 | package com.flipper83.protohipster.globalutils.cache;
import java.util.List;
/**
 * Contract implemented by every cache in the application.
 *
 * @param <T> the type of values held by the cache; keys are always strings
 */
public interface Cache<T> {
/** Looks up the value stored under {@code key}. Behavior for an absent key is implementation-defined (typically {@code null} — confirm per implementation). */
T get(String key);
/** Stores {@code value} under {@code key}, replacing any previous mapping for that key. */
void put(String key, T value);
/** Returns all values currently held by the cache. */
List<T> getAllValues();
}
| apache-2.0 |
wartremover/wartremover-contrib | core/src/test/scala/wartremover/contrib/warts/NoNeedForMonadTest.scala | 2795 | package org.wartremover
package contrib.test
import org.wartremover.contrib.warts.NoNeedForMonad
import org.wartremover.test.WartTestTraverser
import org.scalatest.funsuite.AnyFunSuite
// Tests for the NoNeedForMonad contrib wart, which flags monadic code
// (flatMap / multi-generator for-comprehensions) where Applicative would
// suffice, i.e. where later generators do not depend on earlier ones.
// NOTE: the code inside each WartTestTraverser(...) block is the fixture
// being analyzed — its exact shape is significant, do not "clean it up".
class NoNeedForMonadTest extends AnyFunSuite with ResultAssertions {
test("Report cases where Applicative is enough") {
// Both statements below are independent in their second step, so the wart
// should fire twice (once per statement).
val withWarnings = WartTestTraverser(NoNeedForMonad) {
for {
x <- List(1, 2, 3)
y <- List(2, 3, 4)
} yield x * y
Option(1).flatMap(i => Option(2).map(j => i + j))
}
// Here each second step uses the first binding (x / i), so Monad is required.
val noWarnings = WartTestTraverser(NoNeedForMonad) {
for {
x <- List(1, 2, 3)
y <- x to 3
} yield x * y
Option(1).flatMap(i => Option(i + 1).map(j => i + j))
}
assertWarnings(withWarnings)(NoNeedForMonad.message, 2)
assertEmpty(noWarnings)
}
test("Work properly with function literals, eta-expanded functions, objects with apply methods") {
// Eta-expanded `fun` and the pattern-matching literal are both independent
// of the outer binding, so a single warning is expected for this block.
val etaExpanded = WartTestTraverser(NoNeedForMonad) {
def fun(in: Int) = 14
val xs = for {
y <- Nil
x <- Option(3) map fun
} yield x
Option(3).flatMap { case t => Some(t) }
}
// Applying an object (Function1 subclass or bare apply method) to the
// previous binding is a genuine monadic dependency: no warnings expected.
val extendsFunction = WartTestTraverser(NoNeedForMonad) {
object test extends Function1[Int, Option[Int]] {
def apply(i: Int) = Option(i + 2)
}
object test2 {
def apply(i: Int) = Option(i + 4)
}
for {
x <- Option(1)
res <- test(x)
} yield res
for {
x <- Option(2)
res <- test2(x)
} yield res
}
assertWarnings(etaExpanded)(NoNeedForMonad.message, 1)
assertEmpty(extendsFunction)
}
test("Handles unapply in for-comprehension") {
// Destructuring patterns (Some(x), tuples) bind names used downstream,
// so the traverser must treat them as dependencies, not false positives.
val noWarnings = WartTestTraverser(NoNeedForMonad) {
for {
Some(x) <- List(Option(1), Option(2))
(y, z) <- (0 to x).zipWithIndex
(a, _) <- (0 to y).zipWithIndex
} yield x + y * z
}
assertEmpty(noWarnings)
}
test("NoNeedForMonad wart obeys SuppressWarnings") {
// Same offending code as the first test, but annotated: must be silent.
val result = WartTestTraverser(NoNeedForMonad) {
@SuppressWarnings(Array("org.wartremover.contrib.warts.NoNeedForMonad"))
val foo = {
for {
x <- List(1, 2, 3)
y <- List(2, 3, 4)
} yield x * y
Option(1).flatMap(i => Option(2).map(j => i + j))
}
}
assertEmpty(result)
}
test("Does not produce false positives in one-level flatMaps") {
// A single flatMap with no nested map is not a Monad-vs-Applicative case.
val result = WartTestTraverser(NoNeedForMonad) {
case class Group(singles: Seq[Int])
val groups = Seq(Group(Seq(1, 2)), Group(Seq(3, 4)))
groups flatMap (_.singles)
}
assertEmpty(result)
}
test("should not cause MatchError") {
// Regression test: a `b = a` value definition inside the for-comprehension
// used to crash the traverser; we only assert it completes.
WartTestTraverser(NoNeedForMonad) {
for {
a <- List(1)
b = a
} yield b
}
}
}
| apache-2.0 |
julienledem/redelm | parquet-hadoop/src/main/java/parquet/hadoop/PrintFooter.java | 9085 | /**
* Copyright 2012 Twitter, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package parquet.hadoop;
import static parquet.hadoop.ParquetFileWriter.PARQUET_SUMMARY;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Deque;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingDeque;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import parquet.column.ColumnDescriptor;
import parquet.hadoop.metadata.BlockMetaData;
import parquet.hadoop.metadata.ColumnChunkMetaData;
import parquet.hadoop.metadata.ParquetMetadata;
import parquet.schema.MessageType;
/**
* Utility to print footer information
* @author Julien Le Dem
*
*/
public class PrintFooter {
// Entry point. Accepts a single path (file or directory); reads parquet
// footers — from the summary file if present, otherwise per data file —
// then prints per-column and overall size/record statistics.
public static void main(String[] args) throws Exception {
if (args.length != 1) {
System.err.println("usage PrintFooter <path>");
return;
}
Path path = new Path(new URI(args[0]));
final Configuration configuration = new Configuration();
final FileSystem fs = path.getFileSystem(configuration);
FileStatus fileStatus = fs.getFileStatus(path);
// Prefer the pre-aggregated summary file when the target is a directory that has one.
Path summary = new Path(fileStatus.getPath(), PARQUET_SUMMARY);
if (fileStatus.isDir() && fs.exists(summary)) {
System.out.println("reading summary file");
FileStatus summaryStatus = fs.getFileStatus(summary);
List<Footer> readSummaryFile = ParquetFileReader.readSummaryFile(configuration, summaryStatus);
for (Footer footer : readSummaryFile) {
add(footer.getParquetMetadata());
}
} else {
// No summary file: enumerate the data files (skipping "_"-prefixed
// metadata files) and read each footer individually.
List<FileStatus> statuses;
if (fileStatus.isDir()) {
System.out.println("listing files in " + fileStatus.getPath());
statuses = Arrays.asList(fs.listStatus(fileStatus.getPath(), new PathFilter() {
@Override
public boolean accept(Path path) {
return !path.getName().startsWith("_");
}
}));
} else {
statuses = new ArrayList<FileStatus>();
statuses.add(fileStatus);
}
System.out.println("opening " + statuses.size() + " files");
int i = 0;
// Footers are read concurrently on a small fixed-size pool.
ExecutorService threadPool = Executors.newFixedThreadPool(5);
try {
long t0 = System.currentTimeMillis();
Deque<Future<ParquetMetadata>> footers = new LinkedBlockingDeque<Future<ParquetMetadata>>();
for (final FileStatus currentFile : statuses) {
footers.add(threadPool.submit(new Callable<ParquetMetadata>() {
@Override
public ParquetMetadata call() throws Exception {
try {
ParquetMetadata footer = ParquetFileReader.readFooter(configuration, currentFile);
return footer;
} catch (Exception e) {
throw new RuntimeException("could not read footer", e);
}
}
}));
}
// Draw an n-character-wide progress bar frame, then backspace the
// cursor to its start so '*' ticks can be printed in place.
int previousPercent = 0;
int n = 60;
System.out.print("0% [");
for (int j = 0; j < n; j++) {
System.out.print(" ");
}
System.out.print("] 100%");
for (int j = 0; j < n + 6; j++) {
System.out.print('\b');
}
// Busy-polls the futures: unfinished ones are re-queued at the back,
// finished ones are consumed and advance the progress bar.
while (!footers.isEmpty()) {
Future<ParquetMetadata> futureFooter = footers.removeFirst();
if (!futureFooter.isDone()) {
footers.addLast(futureFooter);
continue;
}
ParquetMetadata footer = futureFooter.get();
int currentPercent = (++i * n / statuses.size());
while (currentPercent > previousPercent) {
System.out.print("*");
previousPercent ++;
}
add(footer);
}
System.out.println("");
long t1 = System.currentTimeMillis();
System.out.println("read all footers in " + (t1 - t0) + " ms");
} finally {
threadPool.shutdownNow();
}
}
// Aggregate the per-column accumulators into grand totals and report.
Set<Entry<ColumnDescriptor, ColStats>> entries = stats.entrySet();
long total = 0;
long totalUnc = 0;
for (Entry<ColumnDescriptor, ColStats> entry : entries) {
ColStats colStats = entry.getValue();
total += colStats.allStats.total;
totalUnc += colStats.uncStats.total;
}
for (Entry<ColumnDescriptor, ColStats> entry : entries) {
ColStats colStats = entry.getValue();
System.out.println(entry.getKey() +" " + percent(colStats.allStats.total, total) + "% of all space " + colStats);
}
System.out.println("number of blocks: " + blockCount);
System.out.println("total data size: " + humanReadable(total) + " (raw " + humanReadable(totalUnc) + ")");
System.out.println("total record: " + humanReadable(recordCount));
// NOTE(review): divides by blockCount — an input with zero row groups
// would throw ArithmeticException here; confirm whether that is acceptable.
System.out.println("average block size: " + humanReadable(total/blockCount) + " (raw " + humanReadable(totalUnc/blockCount) + ")");
System.out.println("average record count: " + humanReadable(recordCount/blockCount));
}
// Folds one footer's row groups into the global counters and per-column stats.
private static void add(ParquetMetadata footer) {
for (BlockMetaData blockMetaData : footer.getBlocks()) {
++ blockCount;
MessageType schema = footer.getFileMetaData().getSchema();
recordCount += blockMetaData.getRowCount();
List<ColumnChunkMetaData> columns = blockMetaData.getColumns();
for (ColumnChunkMetaData columnMetaData : columns) {
ColumnDescriptor desc = schema.getColumnDescription(columnMetaData.getPath());
add(
desc,
columnMetaData.getValueCount(),
columnMetaData.getTotalSize(),
columnMetaData.getTotalUncompressedSize());
}
}
}
// NOTE(review): currently unused within this class; kept for ad-hoc reporting.
private static void printTotalString(String message, long total, long totalUnc) {
System.out.println("total "+message+": " + humanReadable(total) + " (raw "+humanReadable(totalUnc)+" saved "+percentComp(totalUnc, total)+"%)");
}
// Percentage saved going from raw to compressed size.
private static float percentComp(long raw, long compressed) {
return percent(raw - compressed, raw);
}
// numerator/denominator as a percentage with one decimal digit (integer math then /10).
private static float percent(long numerator, long denominator) {
return ((float)((numerator)*1000/denominator))/10;
}
// Renders a count in short human form with decimal (1000-based) K/M/G/T/P units.
private static String humanReadable(long size) {
if (size < 1000) {
return String.valueOf(size);
}
long currentSize = size;
long previousSize = size * 1000;
int count = 0;
String[] unit = {"", "K", "M", "G", "T", "P"};
while (currentSize >= 1000) {
previousSize = currentSize;
currentSize = currentSize / 1000;
++ count;
}
// previousSize is the value one division step back, giving three significant digits.
return ((float)previousSize/1000) + unit[count];
}
// Per-column accumulators, keyed by descriptor in first-seen (schema) order.
private static Map<ColumnDescriptor, ColStats> stats = new LinkedHashMap<ColumnDescriptor, ColStats>();
// Total number of row groups (blocks) seen across all footers.
private static int blockCount = 0;
// Total number of records across all row groups.
private static long recordCount = 0;
// Min/max/total accumulator for a single long-valued measurement.
private static class Stats {
long min = Long.MAX_VALUE;
long max = Long.MIN_VALUE;
long total = 0;
public void add(long length) {
min = Math.min(length, min);
max = Math.max(length, max);
total += length;
}
public String toString(int blocks) {
return
"min: " + humanReadable(min) +
" max: " + humanReadable(max) +
" average: " + humanReadable(total/blocks) +
" total: " + humanReadable(total);
}
}
// Aggregated value-count / compressed-size / uncompressed-size stats for one column.
private static class ColStats {
Stats valueCountStats = new Stats();
Stats allStats = new Stats();
Stats uncStats = new Stats();
int blocks = 0;
public void add(long valueCount, long size, long uncSize) {
++blocks;
valueCountStats.add(valueCount);
allStats.add(size);
uncStats.add(uncSize);
}
@Override
public String toString() {
long raw = uncStats.total;
long compressed = allStats.total;
return allStats.toString(blocks) + " (raw data: " + humanReadable(raw) + (raw == 0 ? "" : " saving " + (raw - compressed)*100/raw + "%") + ")\n"
+ " values: "+valueCountStats.toString(blocks) + "\n"
+ " uncompressed: "+uncStats.toString(blocks);
}
}
// Records one column chunk's stats, lazily creating the per-column entry.
private static void add(ColumnDescriptor desc, long valueCount, long size, long uncSize) {
ColStats colStats = stats.get(desc);
if (colStats == null) {
colStats = new ColStats();
stats.put(desc, colStats);
}
colStats.add(valueCount, size, uncSize);
}
}
| apache-2.0 |
IHTSDO/snow-owl | dependencies/org.eclipse.emf.cdo/src/org/eclipse/emf/internal/cdo/session/remote/CDORemoteSessionManagerImpl.java | 9952 | /*
* Copyright (c) 2004 - 2012 Eike Stepper (Berlin, Germany) and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Eike Stepper - initial API and implementation
*/
package org.eclipse.emf.internal.cdo.session.remote;
import org.eclipse.emf.cdo.session.remote.CDORemoteSession;
import org.eclipse.emf.cdo.session.remote.CDORemoteSessionEvent;
import org.eclipse.emf.cdo.session.remote.CDORemoteSessionManager;
import org.eclipse.emf.cdo.session.remote.CDORemoteSessionMessage;
import org.eclipse.net4j.util.collection.ArrayIterator;
import org.eclipse.net4j.util.container.Container;
import org.eclipse.net4j.util.container.ContainerEvent;
import org.eclipse.net4j.util.container.IContainerDelta;
import org.eclipse.net4j.util.event.Event;
import org.eclipse.net4j.util.event.IEvent;
import org.eclipse.emf.spi.cdo.InternalCDORemoteSession;
import org.eclipse.emf.spi.cdo.InternalCDORemoteSessionManager;
import org.eclipse.emf.spi.cdo.InternalCDOSession;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* @author Eike Stepper
*/
// Client-side manager of the remote sessions known to a CDO repository session.
// Locking discipline: all mutable state (subscribed, forceSubscription,
// remoteSessions) is guarded by synchronized(this); events are collected while
// holding the lock but fired only AFTER releasing it, to avoid calling listener
// code under the monitor. Preserve that ordering when editing.
public class CDORemoteSessionManagerImpl extends Container<CDORemoteSession> implements InternalCDORemoteSessionManager
{
// The local session this manager belongs to; used to reach the session protocol.
private InternalCDOSession localSession;
// When true, stay subscribed on the server even with no local listeners.
private boolean forceSubscription;
// Whether we currently receive remote-session notifications from the server.
private boolean subscribed;
// Known remote sessions keyed by session ID; only populated while subscribed.
private Map<Integer, CDORemoteSession> remoteSessions = new HashMap<Integer, CDORemoteSession>();
public CDORemoteSessionManagerImpl()
{
}
public InternalCDOSession getLocalSession()
{
return localSession;
}
public void setLocalSession(InternalCDOSession localSession)
{
this.localSession = localSession;
}
// Returns the cached sessions when subscribed; otherwise queries the server
// on demand (without subscribing).
public CDORemoteSession[] getRemoteSessions()
{
Collection<CDORemoteSession> remoteSessions;
synchronized (this)
{
if (subscribed)
{
remoteSessions = this.remoteSessions.values();
}
else
{
remoteSessions = localSession.getSessionProtocol().getRemoteSessions(this, false);
}
}
return remoteSessions.toArray(new CDORemoteSession[remoteSessions.size()]);
}
public CDORemoteSession[] getElements()
{
return getRemoteSessions();
}
public boolean isSubscribed()
{
synchronized (this)
{
return subscribed;
}
}
public boolean isForceSubscription()
{
synchronized (this)
{
return forceSubscription;
}
}
// Toggles forced subscription; may subscribe/unsubscribe as a side effect.
// Events produced under the lock are fired after it is released.
public void setForceSubscription(boolean forceSubscription)
{
IEvent[] events = null;
synchronized (this)
{
this.forceSubscription = forceSubscription;
if (forceSubscription)
{
if (!subscribed)
{
events = subscribe();
}
}
else
{
if (!hasListeners())
{
events = unsubscribe();
}
}
}
fireEvents(events);
}
public Set<CDORemoteSession> sendMessage(CDORemoteSessionMessage message, CDORemoteSession... recipients)
{
return sendMessage(message, new ArrayIterator<CDORemoteSession>(recipients));
}
public Set<CDORemoteSession> sendMessage(CDORemoteSessionMessage message, Collection<CDORemoteSession> recipients)
{
return sendMessage(message, recipients.iterator());
}
// Sends the message to the subscribed subset of the recipients and returns
// the sessions the server confirmed as actually reached.
private Set<CDORemoteSession> sendMessage(CDORemoteSessionMessage message, Iterator<CDORemoteSession> recipients)
{
List<CDORemoteSession> subscribed = new ArrayList<CDORemoteSession>();
while (recipients.hasNext())
{
CDORemoteSession recipient = recipients.next();
if (recipient.isSubscribed())
{
subscribed.add(recipient);
}
}
if (subscribed.isEmpty())
{
return Collections.emptySet();
}
Set<Integer> sessionIDs = localSession.getSessionProtocol().sendRemoteMessage(message, subscribed);
Set<CDORemoteSession> result = new HashSet<CDORemoteSession>();
for (CDORemoteSession recipient : subscribed)
{
if (sessionIDs.contains(recipient.getSessionID()))
{
result.add(recipient);
}
}
return result;
}
public InternalCDORemoteSession createRemoteSession(int sessionID, String userID, boolean subscribed)
{
InternalCDORemoteSession remoteSession = new CDORemoteSessionImpl(this, sessionID, userID);
remoteSession.setSubscribed(subscribed);
return remoteSession;
}
// Server notification: a new remote session opened. Cache it and fire ADDED.
public void handleRemoteSessionOpened(int sessionID, String userID)
{
CDORemoteSession remoteSession = createRemoteSession(sessionID, userID, false);
synchronized (this)
{
remoteSessions.put(sessionID, remoteSession);
}
fireElementAddedEvent(remoteSession);
}
// Server notification: a remote session closed. Remove it and fire REMOVED.
public void handleRemoteSessionClosed(int sessionID)
{
CDORemoteSession remoteSession = null;
synchronized (this)
{
remoteSession = remoteSessions.remove(sessionID);
}
if (remoteSession != null)
{
fireElementRemovedEvent(remoteSession);
}
}
// Server notification: a remote session changed its own subscription state.
public void handleRemoteSessionSubscribed(int sessionID, boolean subscribed)
{
IEvent event = null;
synchronized (this)
{
InternalCDORemoteSession remoteSession = (InternalCDORemoteSession)remoteSessions.get(sessionID);
if (remoteSession != null)
{
remoteSession.setSubscribed(subscribed);
event = new SubscriptionChangedEventImpl(remoteSession, subscribed);
}
}
if (event != null)
{
fireEvent(event);
}
}
// Server notification: a custom message arrived from a remote session.
// Unknown session IDs are silently ignored.
public void handleRemoteSessionMessage(int sessionID, final CDORemoteSessionMessage message)
{
IEvent event = null;
synchronized (this)
{
final CDORemoteSessionManager source = this;
final InternalCDORemoteSession remoteSession = (InternalCDORemoteSession)remoteSessions.get(sessionID);
if (remoteSession != null)
{
event = new CDORemoteSessionEvent.MessageReceived()
{
public CDORemoteSessionManager getSource()
{
return source;
}
public CDORemoteSession getRemoteSession()
{
return remoteSession;
}
public CDORemoteSessionMessage getMessage()
{
return message;
}
};
}
}
if (event != null)
{
fireEvent(event);
}
}
// Container hook: subscribe lazily when the first listener registers.
@Override
protected void firstListenerAdded()
{
IEvent[] events = null;
synchronized (this)
{
if (!subscribed)
{
events = subscribe();
}
}
fireEvents(events);
}
// Container hook: unsubscribe when the last listener leaves, unless forced.
@Override
protected void lastListenerRemoved()
{
IEvent[] events = null;
synchronized (this)
{
if (!forceSubscription)
{
events = unsubscribe();
}
}
fireEvents(events);
}
/**
 * Subscribes on the server, seeds the local cache with the returned sessions
 * and prepares (but does not fire) the corresponding events.
 * Needs to be synchronized externally.
 */
private IEvent[] subscribe()
{
List<CDORemoteSession> result = localSession.getSessionProtocol().getRemoteSessions(this, true);
ContainerEvent<CDORemoteSession> event = new ContainerEvent<CDORemoteSession>(this);
for (CDORemoteSession remoteSession : result)
{
remoteSessions.put(remoteSession.getSessionID(), remoteSession);
event.addDelta(remoteSession, IContainerDelta.Kind.ADDED);
}
subscribed = true;
IEvent[] events = { new LocalSubscriptionChangedEventImpl(true), event.isEmpty() ? null : event };
return events;
}
/**
 * Unsubscribes on the server, clears the local cache and prepares (but does
 * not fire) the corresponding events.
 * Needs to be synchronized externally.
 */
private IEvent[] unsubscribe()
{
localSession.getSessionProtocol().unsubscribeRemoteSessions();
ContainerEvent<CDORemoteSession> event = new ContainerEvent<CDORemoteSession>(this);
for (CDORemoteSession remoteSession : remoteSessions.values())
{
event.addDelta(remoteSession, IContainerDelta.Kind.REMOVED);
}
remoteSessions.clear();
subscribed = false;
IEvent[] events = { new LocalSubscriptionChangedEventImpl(false), event.isEmpty() ? null : event };
return events;
}
// Fires the non-null entries of the array in order; tolerates a null array.
private void fireEvents(IEvent[] events)
{
if (events != null)
{
for (int i = 0; i < events.length; i++)
{
IEvent event = events[i];
if (event != null)
{
fireEvent(event);
}
}
}
}
/**
 * Event: this manager's own subscription state changed.
 * @author Eike Stepper
 */
private final class LocalSubscriptionChangedEventImpl extends Event implements LocalSubscriptionChangedEvent
{
private static final long serialVersionUID = 1L;
private boolean subscribed;
public LocalSubscriptionChangedEventImpl(boolean subscribed)
{
super(CDORemoteSessionManagerImpl.this);
this.subscribed = subscribed;
}
@Override
public CDORemoteSessionManager getSource()
{
return (CDORemoteSessionManager)super.getSource();
}
public boolean isSubscribed()
{
return subscribed;
}
}
/**
 * Event: a particular remote session changed its subscription state.
 * @author Eike Stepper
 */
private final class SubscriptionChangedEventImpl extends Event implements CDORemoteSessionEvent.SubscriptionChanged
{
private static final long serialVersionUID = 1L;
private InternalCDORemoteSession remoteSession;
private boolean subscribed;
public SubscriptionChangedEventImpl(InternalCDORemoteSession remoteSession, boolean subscribed)
{
super(CDORemoteSessionManagerImpl.this);
this.remoteSession = remoteSession;
this.subscribed = subscribed;
}
@Override
public CDORemoteSessionManager getSource()
{
return (CDORemoteSessionManager)super.getSource();
}
public CDORemoteSession getRemoteSession()
{
return remoteSession;
}
public boolean isSubscribed()
{
return subscribed;
}
}
}
| apache-2.0 |
airingursb/two-life | src/components/react-native-calendars/src/calendar-list/item.js | 1875 | import React, {Component} from 'react';
import {Text, View} from 'react-native';
import Calendar from '../calendar';
import styleConstructor from './style';
// One page of a scrolling calendar list. `props.item` is either a date-like
// object (rendered as a full <Calendar/>) or a plain value rendered as
// placeholder text while the real month is off-screen.
class CalendarListItem extends Component {
constructor(props) {
super(props);
this.style = styleConstructor(props.theme);
}
// Re-render only when the item represents a different month, or when the
// parent bumped `propbump` to force a refresh of an otherwise-equal item.
// NOTE(review): toString('yyyy MM') implies a formatting-capable date type
// (looks like XDate) — confirm against the list's data source.
shouldComponentUpdate(nextProps) {
const r1 = this.props.item;
const r2 = nextProps.item;
return r1.toString('yyyy MM') !== r2.toString('yyyy MM') || !!(r2.propbump && r2.propbump !== r1.propbump);
}
render() {
const row = this.props.item;
// Duck-typing: only real date objects have getTime(); anything else is a placeholder.
if (row.getTime) {
return (
<Calendar
theme={this.props.theme}
style={[{height: this.props.calendarHeight, width: this.props.calendarWidth}, this.style.calendar]}
current={row}
hideArrows
hideExtraDays={this.props.hideExtraDays === undefined ? true : this.props.hideExtraDays}
disableMonthChange
markedDates={this.props.markedDates}
markingType={this.props.markingType}
hideDayNames={this.props.hideDayNames}
onDayPress={this.props.onDayPress}
displayLoadingIndicator={this.props.displayLoadingIndicator}
minDate={this.props.minDate}
maxDate={this.props.maxDate}
firstDay={this.props.firstDay}
monthFormat={this.props.monthFormat}
dayComponent={this.props.dayComponent}
disabledByDefault={this.props.disabledByDefault}
showWeekNumbers={this.props.showWeekNumbers}
/>);
} else {
// Off-screen month: render the item's string form in a fixed-size placeholder.
const text = row.toString();
return (
<View style={[{height: this.props.calendarHeight, width: this.props.calendarWidth}, this.style.placeholder]}>
<Text allowFontScaling={false} style={this.style.placeholderText}>{text}</Text>
</View>
);
}
}
}
| apache-2.0 |
ktakagaki/breeze | src/main/scala/breeze/linalg/DenseVectorOps.scala | 7599 | package breeze.linalg
import operators._
import breeze.generic._
import breeze.linalg.support._
import breeze.math.{Complex, TensorSpace, Semiring, Ring}
import breeze.util.{ArrayUtil, Isomorphism}
import breeze.storage.DefaultArrayValue
import scala.reflect.ClassTag
import com.github.fommil.netlib.BLAS.{getInstance => blas}
import com.github.fommil.netlib.LAPACK.{getInstance => lapack}
import breeze.macros.expand
import breeze.numerics.IntMath
import scala.math.BigInt
/**
 * Macro-expanded element-wise and inner-product operators for DenseVector.
 * Each `@expand`-annotated implicit is expanded per scalar type T and per
 * operator Op; the `@expand.sequence` supplies the concrete scalar op for
 * each Op in the same order as the Op type args. Every generated instance
 * registers itself ONCE, at construction time, with the dynamic-dispatch
 * registry for `Vector[T]` so that generic Vector code finds it.
 */
trait DenseVectorOps extends DenseVector_GenericOps { this: DenseVector.type =>
  import breeze.math.PowImplicits._

  /** DenseVector op DenseVector, producing a fresh DenseVector. */
  @expand
  @expand.exclude(Complex, OpMod)
  @expand.exclude(BigInt, OpPow)
  @expand.valify
  implicit def dv_dv_Op[@expand.args(Int, Double, Float, Long, BigInt, Complex) T,
  @expand.args(OpAdd, OpSub, OpMulScalar, OpDiv, OpSet, OpMod, OpPow) Op <: OpType]
  (implicit @expand.sequence[Op]({_ + _}, {_ - _}, {_ * _}, {_ / _}, {(a,b) => b}, {_ % _}, {_ pow _})
  op: BinaryOp[T, T, Op, T]):BinaryOp[DenseVector[T], DenseVector[T], Op, DenseVector[T]] = {
    new BinaryOp[DenseVector[T], DenseVector[T], Op, DenseVector[T]] {
      def apply(a: DenseVector[T], b: DenseVector[T]): DenseVector[T] = {
        // Fail fast on mismatched lengths (previously this could silently
        // truncate or throw ArrayIndexOutOfBoundsException mid-loop).
        require(b.length == a.length, "Vectors must be the same length!")
        val ad = a.data
        val bd = b.data
        var aoff = a.offset
        var boff = b.offset
        val result = DenseVector.zeros[T](a.length)
        val rd = result.data
        var i = 0
        while (i < a.length) {
          rd(i) = op(ad(aoff), bd(boff))
          aoff += a.stride
          boff += b.stride
          i += 1
        }
        result
      }
      implicitly[BinaryRegistry[Vector[T], Vector[T], Op, Vector[T]]].register(this)
    }
  }

  /** DenseVector op generic Vector, producing a fresh DenseVector. */
  @expand
  @expand.valify
  @expand.exclude(Complex, OpMod)
  @expand.exclude(BigInt, OpPow)
  implicit def dv_v_Op[@expand.args(Int, Double, Float, Long, BigInt, Complex) T,
  @expand.args(OpAdd, OpSub, OpMulScalar, OpDiv, OpSet, OpMod, OpPow) Op <: OpType]
  (implicit @expand.sequence[Op]({_ + _}, {_ - _}, {_ * _}, {_ / _}, {(a,b) => b}, {_ % _}, {_ pow _})
  op: BinaryOp[T, T, Op, T]):BinaryOp[DenseVector[T], Vector[T], Op, DenseVector[T]] = new BinaryOp[DenseVector[T], Vector[T], Op, DenseVector[T]] {
    def apply(a: DenseVector[T], b: Vector[T]): DenseVector[T] = {
      val ad = a.data
      var aoff = a.offset
      val result = DenseVector.zeros[T](a.length)
      val rd = result.data
      var i = 0
      while (i < a.length) {
        rd(i) = op(ad(aoff), b(i))
        aoff += a.stride
        i += 1
      }
      result
    }
    implicitly[BinaryRegistry[Vector[T], Vector[T], Op, Vector[T]]].register(this)
  }

  /** DenseVector op scalar, producing a fresh DenseVector. */
  @expand
  @expand.valify
  @expand.exclude(Complex, OpMod)
  @expand.exclude(BigInt, OpPow)
  implicit def dv_s_Op[@expand.args(Int, Double, Float, Long, BigInt, Complex) T,
  @expand.args(OpAdd, OpSub, OpMulScalar, OpMulMatrix, OpDiv, OpSet, OpMod, OpPow) Op <: OpType]
  (implicit @expand.sequence[Op]({_ + _}, {_ - _}, {_ * _}, {_ * _}, {_ / _}, {(a,b) => b}, {_ % _}, {_ pow _})
  op: BinaryOp[T, T, Op, T]):BinaryOp[DenseVector[T], T, Op, DenseVector[T]] = new BinaryOp[DenseVector[T], T, Op, DenseVector[T]] {
    def apply(a: DenseVector[T], b: T): DenseVector[T] = {
      val ad = a.data
      var aoff = a.offset
      val result = DenseVector.zeros[T](a.length)
      val rd = result.data
      var i = 0
      while (i < a.length) {
        rd(i) = op(ad(aoff), b)
        aoff += a.stride
        i += 1
      }
      result
    }
    implicitly[BinaryRegistry[Vector[T], T, Op, Vector[T]]].register(this)
  }

  /** In-place DenseVector op= DenseVector. */
  @expand
  @expand.valify
  @expand.exclude(Complex, OpMod)
  @expand.exclude(BigInt, OpPow)
  implicit def dv_dv_UpdateOp[@expand.args(Int, Double, Float, Long, BigInt, Complex) T,
  @expand.args(OpAdd, OpSub, OpMulScalar, OpDiv, OpSet, OpMod, OpPow) Op <: OpType]
  (implicit @expand.sequence[Op]({_ + _}, {_ - _}, {_ * _}, {_ / _}, {(a,b) => b}, {_ % _}, {_ pow _})
  op: BinaryOp[T, T, Op, T]):BinaryUpdateOp[DenseVector[T], DenseVector[T], Op] = new BinaryUpdateOp[DenseVector[T], DenseVector[T], Op] {
    def apply(a: DenseVector[T], b: DenseVector[T]): Unit = {
      // Fail fast on mismatched lengths, mirroring the dot-product ops.
      require(b.length == a.length, "Vectors must be the same length!")
      val ad = a.data
      val bd = b.data
      var aoff = a.offset
      var boff = b.offset
      var i = 0
      while (i < a.length) {
        ad(aoff) = op(ad(aoff), bd(boff))
        aoff += a.stride
        boff += b.stride
        i += 1
      }
    }
    implicitly[BinaryUpdateRegistry[Vector[T], Vector[T], Op]].register(this)
  }

  /** In-place DenseVector op= scalar. */
  @expand
  @expand.valify
  @expand.exclude(Complex, OpMod)
  @expand.exclude(BigInt, OpPow)
  implicit def dv_s_UpdateOp[@expand.args(Int, Double, Float, Long, BigInt, Complex) T,
  @expand.args(OpAdd, OpSub, OpMulScalar, OpMulMatrix, OpDiv, OpSet, OpMod) Op <: OpType]
  (implicit @expand.sequence[Op]({_ + _}, {_ - _}, {_ * _}, {_ * _}, {_ / _}, {(a,b) => b}, {_ % _})
  op: BinaryOp[T, T, Op, T]):BinaryUpdateOp[DenseVector[T], T, Op] = new BinaryUpdateOp[DenseVector[T], T, Op] {
    def apply(a: DenseVector[T], b: T): Unit = {
      val ad = a.data
      var aoff = a.offset
      var i = 0
      while (i < a.length) {
        ad(aoff) = op(ad(aoff), b)
        aoff += a.stride
        i += 1
      }
    }
    // BUG FIX: this registration used to sit INSIDE apply(), so the op
    // re-registered itself with the registry on every single update call.
    // It must run exactly once, when the implicit instance is constructed,
    // as every sibling op in this trait does.
    implicitly[BinaryUpdateRegistry[Vector[T], T, Op]].register(this)
  }

  /** Dot product of two DenseVectors (Double/Float go through BLAS elsewhere). */
  @expand
  @expand.valify
  implicit def canDot_DV_DV[@expand.args(Int, Long, BigInt, Complex) T](implicit @expand.sequence[T](0, 0l, BigInt(0), Complex.zero) zero: T): BinaryOp[DenseVector[T], DenseVector[T], breeze.linalg.operators.OpMulInner, T] = {
    new BinaryOp[DenseVector[T], DenseVector[T], breeze.linalg.operators.OpMulInner, T] {
      def apply(a: DenseVector[T], b: DenseVector[T]) = {
        require(b.length == a.length, "Vectors must be the same length!")
        val ad = a.data
        val bd = b.data
        var aoff = a.offset
        var boff = b.offset
        var result: T = zero
        var i = 0
        while (i < a.length) {
          result += ad(aoff) * bd(boff)
          aoff += a.stride
          boff += b.stride
          i += 1
        }
        result
      }
      implicitly[BinaryRegistry[Vector[T], Vector[T], OpMulInner, T]].register(this)
    }
  }

  /** Dot product of a DenseVector with a generic Vector. */
  @expand
  @expand.valify
  implicit def canDot_DV_V[@expand.args(Int, Double, Float, Long, BigInt, Complex) T](implicit @expand.sequence[T](0, 0.0, 0.0f, 0l, BigInt(0), Complex.zero) zero: T): BinaryOp[DenseVector[T], Vector[T], breeze.linalg.operators.OpMulInner, T] = {
    new BinaryOp[DenseVector[T], Vector[T], breeze.linalg.operators.OpMulInner, T] {
      def apply(a: DenseVector[T], b: Vector[T]) = {
        require(b.length == a.length, "Vectors must be the same length!")
        val ad = a.data
        var aoff = a.offset
        var result: T = zero
        var i = 0
        while (i < a.length) {
          result += ad(aoff) * b(i)
          aoff += a.stride
          i += 1
        }
        result
      }
      implicitly[BinaryRegistry[Vector[T], Vector[T], OpMulInner, T]].register(this)
    }
  }

  /** a += s * b (generic axpy for non-BLAS scalar types). */
  @expand
  implicit def axpy[@expand.args(Int, Double, Float, Long, BigInt, Complex) V]: CanAxpy[V, DenseVector[V], DenseVector[V]] = {
    new CanAxpy[V, DenseVector[V], DenseVector[V]] {
      def apply(s: V, b: DenseVector[V], a: DenseVector[V]) {
        require(b.length == a.length, "Vectors must be the same length!")
        val ad = a.data
        val bd = b.data
        var aoff = a.offset
        var boff = b.offset
        var i = 0
        while (i < a.length) {
          ad(aoff) += s * bd(boff)
          aoff += a.stride
          boff += b.stride
          i += 1
        }
      }
    }
  }
}
abcodeworks/WebShortcutUtil-Java | src/main/java/com/abcodeworks/webshortcututil/write/WeblocBinaryShortcutWriter.java | 1573 | /**
* Copyright 2014 by Andre Beckus
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.abcodeworks.webshortcututil.write;
import java.io.IOException;
import java.io.OutputStream;
import com.dd.plist.NSObject;
import com.dd.plist.PropertyListParser;
/**
* Writes Webloc Binary shortcuts (Apple/Mac/OSX shortcuts with a .webloc extension).
*
*/
public class WeblocBinaryShortcutWriter extends WeblocShortcutWriter {

    /**
     * Serializes the property-list root node to the given stream using
     * Apple's binary plist format.
     *
     * Note that the PropertyListParser closes the stream itself, so no
     * explicit close is performed here.
     *
     * @param stream destination stream for the shortcut contents
     * @param root   root node of the property list to serialize
     * @throws ShortcutWriteException if writing the plist fails
     */
    @Override
    protected void writePlist(OutputStream stream, NSObject root)
            throws ShortcutWriteException {
        try {
            PropertyListParser.saveAsBinary(root, stream);
        } catch (IOException ioException) {
            throw new ShortcutWriteException(ioException);
        }
    }
}
| apache-2.0 |
RasaHQ/rasa_core | rasa/cli/train.py | 4908 | import argparse
import tempfile
import typing
from typing import List, Optional, Text
from rasa.cli.default_arguments import (
add_config_param, add_domain_param, add_nlu_data_param, add_stories_param)
from rasa.cli.utils import get_validated_path
from rasa.constants import (
DEFAULT_CONFIG_PATH, DEFAULT_DATA_PATH, DEFAULT_DOMAIN_PATH,
DEFAULT_MODELS_PATH)
if typing.TYPE_CHECKING:
from rasa_nlu.model import Interpreter
# noinspection PyProtectedMember
def add_subparser(subparsers: argparse._SubParsersAction,
                  parents: List[argparse.ArgumentParser]):
    """Register the ``train`` command and its ``core``/``nlu`` subcommands.

    Args:
        subparsers: the CLI's top-level subparser collection to attach to.
        parents: parent parsers whose arguments should be inherited.
    """
    import rasa.core.cli.train as core_cli

    train_parser = subparsers.add_parser(
        "train", help="Train the Rasa bot")
    train_subparsers = train_parser.add_subparsers()
    # `rasa train core` - trains only the dialogue model.
    train_core_parser = train_subparsers.add_parser(
        "core",
        conflict_handler="resolve",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        help="Train Rasa Core")
    train_core_parser.set_defaults(func=train_core)
    # `rasa train nlu` - trains only the NLU model.
    # NOTE(review): only this subcommand inherits `parents`; confirm whether
    # the `core` subcommand was also meant to receive them.
    train_nlu_parser = train_subparsers.add_parser(
        "nlu",
        parents=parents,
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
        help="Train Rasa NLU")
    train_nlu_parser.set_defaults(func=train_nlu)
    # Arguments shared by all three parsers.
    for p in [train_parser, train_core_parser, train_nlu_parser]:
        add_general_arguments(p)
    # Domain and Core-specific arguments apply to `train` and `train core`.
    for p in [train_core_parser, train_parser]:
        add_domain_param(p)
        core_cli.add_general_args(p)
    add_stories_param(train_core_parser)
    _add_core_compare_arguments(train_core_parser)
    add_nlu_data_param(train_nlu_parser)
    add_joint_parser_arguments(train_parser)
    train_parser.set_defaults(func=train)
def add_joint_parser_arguments(parser: argparse.ArgumentParser):
    """Register the arguments that only apply to the combined `train` command."""
    parser.add_argument(
        "--force",
        action="store_true",
        help="Force a model training even if the data "
             "has not changed.")
    parser.add_argument(
        "--data",
        nargs='+',
        default=[DEFAULT_DATA_PATH],
        help="Paths to the Core and NLU training files.")
def add_general_arguments(parser: argparse.ArgumentParser):
    """Register arguments shared by every `train` (sub)command."""
    add_config_param(parser)
    parser.add_argument(
        "-o",
        "--out",
        type=str,
        default=DEFAULT_MODELS_PATH,
        help="Directory where your models are stored")
def _add_core_compare_arguments(parser: argparse.ArgumentParser):
    """Register arguments for Core policy-comparison experiments."""
    parser.add_argument(
        "--percentages",
        nargs="*",
        type=int,
        default=[0, 5, 25, 50, 70, 90, 95],
        help="Range of exclusion percentages")
    parser.add_argument(
        "--runs",
        type=int,
        default=3,
        help="Number of runs for experiments")
    parser.add_argument(
        "-c",
        "--config",
        nargs='+',
        default=[DEFAULT_CONFIG_PATH],
        help="The policy and NLU pipeline configuration of your bot."
             "If multiple configuration files are provided, multiple dialogue "
             "models are trained to compare policies.")
def train(args: argparse.Namespace) -> Optional[Text]:
    """Train a combined Core and NLU model from the parsed CLI arguments."""
    import rasa

    validated_domain = get_validated_path(args.domain, "domain",
                                          DEFAULT_DOMAIN_PATH)
    validated_config = get_validated_path(args.config, "config",
                                          DEFAULT_CONFIG_PATH)
    validated_data = []
    for data_path in args.data:
        validated_data.append(
            get_validated_path(data_path, "data", DEFAULT_DATA_PATH))

    return rasa.train(validated_domain, validated_config, validated_data,
                      args.out, args.force)
def train_core(args: argparse.Namespace,
               train_path: Optional[Text] = None) -> Optional[Text]:
    """Train a Rasa Core (dialogue) model.

    Args:
        args: parsed CLI arguments (`domain`, `stories`, `config`, `out`, ...).
        train_path: optional directory to train into; when given it also
            becomes the output directory.

    Returns:
        The path of the trained model, or None in comparison-training mode.
    """
    from rasa.train import train_core

    output = train_path or args.out

    args.domain = get_validated_path(args.domain, "domain", DEFAULT_DOMAIN_PATH)
    stories = get_validated_path(args.stories, "stories", DEFAULT_DATA_PATH)

    # Policies might be a list for the compare training. Do normal training
    # if only one list item was passed.
    if not isinstance(args.config, list) or len(args.config) == 1:
        if isinstance(args.config, list):
            args.config = args.config[0]

        config = get_validated_path(args.config, "config", DEFAULT_CONFIG_PATH)
        # NOTE(review): the original code also computed
        # `_train_path = train_path or tempfile.mkdtemp()` but never used it,
        # leaving an orphan temp directory behind on every call; the dead
        # assignment was removed. Confirm whether `_train_path` was meant to
        # be passed here instead of `train_path`.
        return train_core(args.domain, config, stories, output, train_path)
    else:
        from rasa.core.train import do_compare_training
        import asyncio

        # The event loop is only needed for the comparison-training branch.
        loop = asyncio.get_event_loop()
        loop.run_until_complete(do_compare_training(args, stories, None))
        return None
def train_nlu(args: argparse.Namespace, train_path: Optional[Text] = None
              ) -> Optional["Interpreter"]:
    """Train a Rasa NLU model from the parsed CLI arguments."""
    from rasa.train import train_nlu

    target_directory = train_path if train_path else args.out
    validated_config = get_validated_path(args.config, "config",
                                          DEFAULT_CONFIG_PATH)
    validated_nlu_data = get_validated_path(args.nlu, "nlu", DEFAULT_DATA_PATH)

    return train_nlu(validated_config, validated_nlu_data, target_directory,
                     train_path)
| apache-2.0 |
yannstad/minicluster | src/main/java/com/minicluster/cluster/node/Nodes.java | 176 | package com.minicluster.cluster.node;
/**
* Factory
*/
public class Nodes {

    /**
     * Creates a TCP-based cluster node with the given identifier.
     *
     * @param nodeId identifier of the node to create
     * @return a new TCP-backed {@link Node} implementation
     */
    public static Node newTcpNode(Integer nodeId) {
        return new NodeTcp(nodeId);
    }
}
| apache-2.0 |
jhwhetstone/cdsWebserver | directoryServer/src/main/java/org/pesc/api/repository/UserRepository.java | 1386 | /*
* Copyright (c) 2017. California Community Colleges Technology Center
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.pesc.api.repository;
import org.pesc.api.model.DirectoryUser;
import org.springframework.data.jpa.repository.JpaSpecificationExecutor;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;
import java.util.List;
/**
* Created by James Whetstone (jwhetstone@ccctechcenter.org) on 3/21/16.
*/
@Repository
public interface UserRepository extends CrudRepository<DirectoryUser, Integer>, JpaSpecificationExecutor {

    /**
     * Finds all directory users whose display name exactly matches the given value.
     *
     * @param name display name to match
     * @return matching users; empty list when none match
     */
    @Query("from DirectoryUser where name = ?1")
    List<DirectoryUser> findByName(String name);

    /**
     * Finds all directory users with the given login username.
     *
     * @param username login name to match
     * @return matching users; empty list when none match
     */
    @Query("from DirectoryUser where username = ?1")
    List<DirectoryUser> findByUserName(String username);
}
| apache-2.0 |
chr78rm/tracelogger | src/main/java/de/christofreichardt/diagnosis/ThreadMap.java | 3883 | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package de.christofreichardt.diagnosis;
import java.util.NoSuchElementException;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* This class maps threads on {@link TracingContext}s. It provides methods to set, access and remove the {@link TracingContext}
* of the current thread. Furthermore to be monitored methods can be pushed on or popped from the method stack of the current
* {@link TracingContext}. For internal use primarily.
*
* @author Christof Reichardt
*/
public class ThreadMap extends AbstractThreadMap {

  /** Maps each thread to its tracing context. */
  private final ConcurrentMap<Thread, TracingContext> tracingContextMap = new ConcurrentHashMap<>();

  /**
   * Returns the method stack size of the current thread's tracing context.
   * The value -1 indicates that the current thread isn't registered.
   *
   * @return the current stack size, or -1 when no context is registered
   */
  @Override
  public int getCurrentStackSize() {
    // Single map lookup: ConcurrentHashMap never stores null values, so a
    // null result is equivalent to the former containsKey() check and avoids
    // the check-then-act race between two separate lookups.
    TracingContext tracingContext = this.tracingContextMap.get(Thread.currentThread());
    return tracingContext != null ? tracingContext.getMethodStack().size() : -1;
  }

  /**
   * @return the tracing context of the current thread, or null when none is registered
   */
  @Override
  TracingContext getCurrentTracingContext() {
    // Resolves the original TODO: Map.get() already returns null for absent keys.
    return this.tracingContextMap.get(Thread.currentThread());
  }

  /**
   * Associates the given tracing context with the current thread.
   */
  @Override
  void setCurrentTracingContext(TracingContext tracingContext) {
    this.tracingContextMap.put(Thread.currentThread(), tracingContext);
  }

  /**
   * Removes and returns the tracing context of the current thread.
   *
   * @return the removed context, or null when none was registered
   */
  @Override
  TracingContext removeCurrentTracingContext() {
    return this.tracingContextMap.remove(Thread.currentThread());
  }

  /**
   * Pushes a method onto the stack of the current thread.
   *
   * @param traceMethod the method to be pushed
   * @return true when the method was pushed, false when the current thread has
   *         no (usable) tracing context
   * @throws ThreadMap.RuntimeException when the stack limit STACK_SIZE is
   *         exceeded; the context is marked corrupted first
   */
  @Override
  boolean push(TraceMethod traceMethod) {
    TracingContext currentTracingContext = getCurrentTracingContext();
    if (currentTracingContext == null || currentTracingContext.isCorrupted()) {
      return false;
    }
    if (currentTracingContext.getMethodStack().size() >= STACK_SIZE) {
      // Mark the context corrupted so subsequent push/pop calls become no-ops.
      currentTracingContext.setCorrupted(true);
      throw new ThreadMap.RuntimeException("Stacksize is exceeded.");
    }
    currentTracingContext.getMethodStack().push(traceMethod);
    return true;
  }

  /**
   * Pops a method from the stack of the current thread and stops its timer.
   *
   * @return the popped method, or null when the current thread has no (usable)
   *         tracing context
   * @throws ThreadMap.RuntimeException when the stack is unexpectedly empty;
   *         the context is marked corrupted first
   */
  @Override
  TraceMethod pop() {
    TracingContext currentTracingContext = getCurrentTracingContext();
    if (currentTracingContext == null || currentTracingContext.isCorrupted()) {
      return null;
    }
    try {
      TraceMethod traceMethod = currentTracingContext.getMethodStack().pop();
      traceMethod.stopTime();
      return traceMethod;
    }
    catch (NoSuchElementException ex)
    {
      currentTracingContext.setCorrupted(true);
      throw new ThreadMap.RuntimeException(ex);
    }
  }
}
| apache-2.0 |
googleads/google-ads-php | src/Google/Ads/GoogleAds/V9/Services/GetMobileDeviceConstantRequest.php | 2195 | <?php
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v9/services/mobile_device_constant_service.proto
namespace Google\Ads\GoogleAds\V9\Services;
use Google\Protobuf\Internal\GPBType;
use Google\Protobuf\Internal\RepeatedField;
use Google\Protobuf\Internal\GPBUtil;
/**
* Request message for [MobileDeviceConstantService.GetMobileDeviceConstant][google.ads.googleads.v9.services.MobileDeviceConstantService.GetMobileDeviceConstant].
*
* Generated from protobuf message <code>google.ads.googleads.v9.services.GetMobileDeviceConstantRequest</code>
*/
class GetMobileDeviceConstantRequest extends \Google\Protobuf\Internal\Message
{
    // NOTE: this class is generated from the .proto file referenced in the
    // file header; regenerate rather than editing by hand.
    /**
     * Required. Resource name of the mobile device to fetch.
     *
     * Generated from protobuf field <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = {</code>
     */
    protected $resource_name = '';

    /**
     * Constructor.
     *
     * @param array $data {
     *     Optional. Data for populating the Message object.
     *
     *     @type string $resource_name
     *           Required. Resource name of the mobile device to fetch.
     * }
     */
    public function __construct($data = NULL) {
        // Registers the containing proto file's descriptor pool before the
        // message is populated.
        \GPBMetadata\Google\Ads\GoogleAds\V9\Services\MobileDeviceConstantService::initOnce();
        parent::__construct($data);
    }

    /**
     * Required. Resource name of the mobile device to fetch.
     *
     * Generated from protobuf field <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = {</code>
     * @return string
     */
    public function getResourceName()
    {
        return $this->resource_name;
    }

    /**
     * Required. Resource name of the mobile device to fetch.
     *
     * Generated from protobuf field <code>string resource_name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = {</code>
     * @param string $var
     * @return $this
     */
    public function setResourceName($var)
    {
        // Validates that $var is a UTF-8 string before assignment.
        GPBUtil::checkString($var, True);
        $this->resource_name = $var;

        return $this;
    }

}
| apache-2.0 |
jamesagnew/whereismystreetcar | src/ca/wimsc/client/common/select/NearbyStopSelectionPanel.java | 5305 | package ca.wimsc.client.common.select;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import ca.wimsc.client.common.model.NearbyStop;
import ca.wimsc.client.common.util.HistoryUtil;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.user.client.ui.FlexTable;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.Hyperlink;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.RadioButton;
public class NearbyStopSelectionPanel extends FlexTable {

	// Direction tag of the currently expanded route/direction row, or null.
	private String myOpenDirectionTag = null;
	// Route tag of the currently expanded route/direction row, or null.
	private String myOpenRouteTag = null;
	// Table row index of the inserted "stops" row, or null when none is shown.
	private Integer myOpenRowIndex = null;
	// All direction radio buttons currently in the table (radio group "routeDir").
	private List<RadioButton> myRadioButtons = new ArrayList<RadioButton>();
	// Nearby stops being displayed, sorted by route tag then direction title.
	private List<NearbyStop> myResults;

	public NearbyStopSelectionPanel() {
	}

	/**
	 * Sets or updates the results displayed in this table. Builds one radio
	 * button per distinct route/direction pair and, if a pair was previously
	 * expanded, re-expands it.
	 */
	public void setResults(List<NearbyStop> theResults) {
		myResults = new ArrayList<NearbyStop>(theResults);
		Collections.sort(myResults, new MyNearbyStopRouteAndDirectionComparator());

		// Clear any previously rendered rows.
		while (getRowCount() > 0) {
			removeRow(0);
		}

		int nextRow = 0;
		String lastRouteTag = null;
		String lastDirTag = null;
		myRadioButtons.clear();
		for (NearbyStop nearbyStop : myResults) {

			// Results are sorted, so duplicates of the same route/direction
			// pair are adjacent and can be skipped.
			boolean sameRoute = nearbyStop.getRouteTag().equals(lastRouteTag);
			if (sameRoute) {
				if (nearbyStop.getDirectionTag().equals(lastDirTag)) {
					continue;
				}
			}

			// First row of a new route gets the route title in column 0.
			if (!sameRoute) {
				Label routeLabel = new Label(nearbyStop.getRouteTitle());
				setWidget(nextRow, 0, routeLabel);
			}

			lastRouteTag = nearbyStop.getRouteTag();
			lastDirTag = nearbyStop.getDirectionTag();

			RadioButton radioButton = new RadioButton("routeDir", nearbyStop.getDirectionTitle());
			myRadioButtons.add(radioButton);
			radioButton.addValueChangeHandler(new MyRadioButtonValueChangeHandler(nextRow, lastRouteTag, lastDirTag));
			setWidget(nextRow, 1, radioButton);

			// Restore the previously expanded pair; showResultsRow inserts an
			// extra row below this one, so skip an additional index.
			if (lastDirTag.equals(myOpenDirectionTag) && lastRouteTag.equals(myOpenRouteTag)) {
				radioButton.setValue(true, false);
				showResultsRow(nextRow, lastRouteTag, lastDirTag);
				nextRow++;
			}

			nextRow++;
		}

	}

	// Orders stops by route tag, breaking ties by direction title.
	private final class MyNearbyStopRouteAndDirectionComparator implements Comparator<NearbyStop> {
		@Override
		public int compare(NearbyStop theO1, NearbyStop theO2) {
			int retVal = theO1.getRouteTag().compareTo(theO2.getRouteTag());
			if (retVal == 0) {
				retVal = theO1.getDirectionTitle().compareTo(theO2.getDirectionTitle());
			}
			return retVal;
		}
	}

	// Expands/collapses the stop list when a direction radio button is selected.
	public class MyRadioButtonValueChangeHandler implements ValueChangeHandler<Boolean> {

		private String myDirTag;
		private String myRouteTag;
		// Row index of this radio button as of construction time.
		// NOTE(review): row indices shift when a results row is inserted or
		// removed above this row; verify the captured index stays valid after
		// expanding/collapsing other rows.
		private int myRowIndex;

		public MyRadioButtonValueChangeHandler(int theRowIndex, String theRouteTag, String theDirTag) {
			myRowIndex = theRowIndex;
			myRouteTag = theRouteTag;
			myDirTag = theDirTag;
		}

		@Override
		public void onValueChange(ValueChangeEvent<Boolean> theEvent) {
			// Only react to selection, not deselection.
			if (theEvent.getValue() != Boolean.TRUE) {
				return;
			}

			// Deselect all other radio buttons without firing their handlers.
			for (RadioButton next : myRadioButtons) {
				if (next != theEvent.getSource()) {
					next.setValue(false, false);
				}
			}

			// Collapse the previously expanded row, then expand this one.
			if (myOpenRowIndex != null) {
				removeRow(myOpenRowIndex);
			}

			int rowIndex = myRowIndex;
			String dirTag = myDirTag;
			String routeTag = myRouteTag;
			showResultsRow(rowIndex, routeTag, dirTag);
		}

	}

	/**
	 * Inserts a row directly below theRowIndex containing hyperlinks for every
	 * stop of the given route/direction pair, and records it as the open row.
	 */
	private void showResultsRow(int theRowIndex, String theRouteTag, String theDirTag) {
		myOpenRowIndex = theRowIndex + 1;
		myOpenDirectionTag = theDirTag;
		myOpenRouteTag = theRouteTag;

		insertRow(myOpenRowIndex);
		// Single centered cell spanning both columns.
		getFlexCellFormatter().setColSpan(myOpenRowIndex, 0, 2);
		getFlexCellFormatter().setHorizontalAlignment(myOpenRowIndex, 0, HasHorizontalAlignment.ALIGN_CENTER);

		FlowPanel panel = new FlowPanel();
		setWidget(myOpenRowIndex, 0, panel);

		for (NearbyStop next : myResults) {
			if (next.getRouteTag().equals(myOpenRouteTag)) {
				if (next.getDirectionTag().equals(myOpenDirectionTag)) {
					// History token navigates to the stop's detail view.
					panel.add(new Hyperlink(next.getTitle(), HistoryUtil.getTokenForNewStop(theRouteTag, next.getStopTag())));
				}
			}
		}

	}

}
| apache-2.0 |
marcelocataoca/PAMH | WebService/ProjetosExemplo/checkWeatherRest/www/js/controllers.js | 422 | angular.module('app.controllers', [])
.controller('checkWeatherRestCtrl', ['$scope', '$stateParams', // The following is the constructor function for this page's controller. See https://docs.angularjs.org/guide/controller
// You can include any angular dependencies as parameters for this function
// TIP: Access Route Parameters for your page via $stateParams.parameterName
function ($scope, $stateParams) {
}])
| apache-2.0 |
ZuInnoTe/hadoopcryptoledger | inputformat/src/main/java/org/zuinnote/hadoop/ethereum/format/exception/EthereumBlockReadException.java | 824 | /**
* Copyright 2017 ZuInnoTe (Jörn Franke) <zuinnote@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
**/
package org.zuinnote.hadoop.ethereum.format.exception;
/**
 * Thrown when an Ethereum block cannot be read or parsed from its raw
 * serialized representation.
 */
public class EthereumBlockReadException extends Exception {

    // Exceptions are Serializable via Throwable; pin the id explicitly.
    private static final long serialVersionUID = 1L;

    /**
     * Creates the exception with a descriptive message.
     *
     * @param message description of the read/parse failure
     */
    public EthereumBlockReadException(String message) {
        super(message);
    }

    /**
     * Creates the exception with a message and the underlying cause, so the
     * original stack trace is preserved instead of being dropped.
     *
     * @param message description of the read/parse failure
     * @param cause   underlying exception that triggered this failure
     */
    public EthereumBlockReadException(String message, Throwable cause) {
        super(message, cause);
    }
}
| apache-2.0 |
android-libraries/android_image_loader | library/src/glide/java/com/albedinsky/android/imageloader/ImageLoaderImpl.java | 3112 | /*
* =================================================================================================
* Copyright (C) 2016 Martin Albedinsky
* =================================================================================================
* Licensed under the Apache License, Version 2.0 or later (further "License" only).
* -------------------------------------------------------------------------------------------------
* You may use this file only in compliance with the License. More details and copy of this License
* you may obtain at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* You can redistribute, modify or publish any part of the code written within this file but as it
* is described in the License, the software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES or CONDITIONS OF ANY KIND.
*
* See the License for the specific language governing permissions and limitations under the License.
* =================================================================================================
*/
package com.albedinsky.android.imageloader;
import android.support.annotation.NonNull;
import com.bumptech.glide.RequestManager;
/**
* A {@link BaseImageLoader} implementation that wraps instance of {@link RequestManager} loader used
* to perform loading for {@link ImageTask ImageTasks}.
*
* @author Martin Albedinsky
*/
final class ImageLoaderImpl extends BaseImageLoader<RequestManager> {

	/**
	 * Interface ===================================================================================
	 */

	/**
	 * Constants ===================================================================================
	 */

	/**
	 * Log TAG.
	 */
	// private static final String TAG = "ImageLoader#Glide";

	/**
	 * Static members ==============================================================================
	 */

	/**
	 * Members =====================================================================================
	 */

	/**
	 * Constructors ================================================================================
	 */

	/**
	 * Creates a new instance of ImageLoaderImpl to wrap the given <var>loader</var>.
	 *
	 * @param loader The loader to be used to perform images loading.
	 */
	protected ImageLoaderImpl(@NonNull RequestManager loader) {
		super(loader);
	}

	/**
	 * Methods =====================================================================================
	 */

	/**
	 * Delegates to {@link RequestManager#onStart()} of the wrapped loader.
	 */
	@Override
	public void start() {
		mLoader.onStart();
	}

	/**
	 * Same as {@link #start()}; Glide's RequestManager has no separate resume hook.
	 */
	@Override
	public void resume() {
		start();
	}

	/**
	 * Same as {@link #stop()}; Glide's RequestManager has no separate pause hook.
	 */
	@Override
	public void pause() {
		stop();
	}

	/**
	 * Delegates to {@link RequestManager#onStop()} of the wrapped loader.
	 */
	@Override
	public void stop() {
		mLoader.onStop();
	}

	/**
	 * Delegates to {@link RequestManager#onDestroy()} of the wrapped loader.
	 */
	@Override
	public void destroy() {
		mLoader.onDestroy();
	}

	/**
	 * Forwards the memory-trim request to the wrapped loader.
	 */
	@Override
	public void onTrimMemory(int level) {
		mLoader.onTrimMemory(level);
	}

	/**
	 * Forwards the low-memory notification to the wrapped loader.
	 */
	@Override
	public void onLowMemory() {
		mLoader.onLowMemory();
	}

	/**
	 * Inner classes ===============================================================================
	 */
}
| apache-2.0 |
LesserGiraffe/BunnyHop | App/BhDefine/Functions/event/Exp/ColorExpCnct.js | 273 | (function() {
let section = bhReplacedNewNode.findSymbolInDescendants('*');
let sectionName = null;
if (section !== null) {
sectionName = String(section.getSymbolName());
}
return sectionName === 'ColorExpSctn' ||
sectionName === 'AnyExpSctn';
})(); | apache-2.0 |
vadimv/PlatypusJS | designer/PlatypusForms/src/com/bearsoft/org/netbeans/modules/form/editors/IconEditor.java | 11198 | /*
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS HEADER.
*
* Copyright 1997-2010 Oracle and/or its affiliates. All rights reserved.
*
* Oracle and Java are registered trademarks of Oracle and/or its affiliates.
* Other names may be trademarks of their respective owners.
*
* The contents of this file are subject to the terms of either the GNU
* General Public License Version 2 only ("GPL") or the Common
* Development and Distribution License("CDDL") (collectively, the
* "License"). You may not use this file except in compliance with the
* License. You can obtain a copy of the License at
* http://www.netbeans.org/cddl-gplv2.html
* or nbbuild/licenses/CDDL-GPL-2-CP. See the License for the
* specific language governing permissions and limitations under the
* License. When distributing the software, include this License Header
* Notice in each file and include the License file at
* nbbuild/licenses/CDDL-GPL-2-CP. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the GPL Version 2 section of the License file that
* accompanied this code. If applicable, add the following below the
* License Header, with the fields enclosed by brackets [] replaced by
* your own identifying information:
* "Portions Copyrighted [year] [name of copyright owner]"
*
* Contributor(s):
*
* The Original Software is NetBeans. The Initial Developer of the Original
* Software is Sun Microsystems, Inc. Portions Copyright 1997-2006 Sun
* Microsystems, Inc. All Rights Reserved.
*
* If you wish your version of this file to be governed by only the CDDL
* or only the GPL Version 2, indicate your decision by adding
* "[Contributor] elects to include this software in this distribution
* under the [CDDL or GPL Version 2] license." If you do not indicate a
* single choice of license, a recipient has the option to distribute
* your version of this file under either the CDDL, the GPL Version 2 or
* to extend the choice of license to its licensees as provided above.
* However, if you add GPL Version 2 code and therefore, elected the GPL
* Version 2 license, then the option applies only if the new code is
* made subject to such option by the copyright holder.
*/
package com.bearsoft.org.netbeans.modules.form.editors;
import com.bearsoft.org.netbeans.modules.form.FormCookie;
import com.bearsoft.org.netbeans.modules.form.FormModel;
import com.bearsoft.org.netbeans.modules.form.FormProperty;
import com.bearsoft.org.netbeans.modules.form.PlatypusFormDataObject;
import java.awt.Component;
import java.awt.Image;
import java.beans.*;
import java.io.IOException;
import java.net.URL;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.swing.*;
import org.openide.ErrorManager;
import org.openide.explorer.propertysheet.ExPropertyEditor;
import org.openide.explorer.propertysheet.PropertyEnv;
import org.openide.filesystems.FileObject;
import org.openide.nodes.AbstractNode;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.PropertyEditorRegistration;
/**
* PropertyEditor for Icons. Depends on existing DataObject for images. Images
* must be represented by some DataObject which returns itself as cookie, and
* has image file as a primary file. File extensions for images is specified in
* isImage method.
*
* @author Jan Jancura, Jan Stola, Tomas Pavek
*/
@PropertyEditorRegistration(targetType = {javax.swing.Icon.class, java.awt.Image.class, javax.swing.ImageIcon.class})
public class IconEditor extends PropertyEditorSupport implements ExPropertyEditor {

    /**
     * Type constant for icons from URL.
     */
    public static final int TYPE_URL = 1;
    /**
     * Type constant for icons from file.
     */
    public static final int TYPE_FILE = 2;
    public static final String RESOURCES_IMAGES_ANCHOR = "thumbs.cp";
    //
    // Cache of image file names found in the project source folder.
    // NOTE(review): this cache is static and never invalidated, so it is
    // shared across editor instances (and projects) and can go stale when
    // files are added or removed — verify whether that is intended.
    private static String[] currentFiles;
    // Data object of the form currently being edited; set in attachEnv().
    private PlatypusFormDataObject dataObject;

    /**
     * Explorer node that renders a single image file, using the image itself
     * as the node icon.
     */
    protected static class ImageNode extends AbstractNode {

        protected FileObject imageFile;
        // Lazily loaded icon for the image file.
        protected ImageIcon icon;

        public ImageNode(FileObject aImageFile) {
            super(Children.LEAF);
            imageFile = aImageFile;
        }

        @Override
        public Image getIcon(int type) {
            try {
                // Load the image bytes on first access and cache the icon.
                if (icon == null) {
                    icon = new ImageIcon(imageFile.asBytes());
                }
                return icon.getImage();
            } catch (IOException ex) {
                ErrorManager.getDefault().notify(ex);
                return null;
            }
        }

        @Override
        public String getDisplayName() {
            return getName();
        }

        @Override
        public String getName() {
            return imageFile.getNameExt();
        }
    }

    /**
     * Children keyed by image file name; creates one ImageNode per file in
     * the project source folder.
     */
    protected class FileChildren extends Children.Keys<String> {

        public FileChildren() throws Exception {
            super();
            setKeys(getCurrentFileNames());
        }

        @Override
        protected Node[] createNodes(String key) {
            try {
                return new Node[]{new ImageNode(getProjectSrcFolder().getFileObject(key))};
            } catch (Exception ex) {
                // File may have disappeared since the keys were computed.
                return new Node[]{};
            }
        }
    }

    /**
     * Captures the form data object from the property environment so the
     * editor can resolve image resources relative to the project.
     */
    @Override
    public void attachEnv(PropertyEnv aEnv) {
        aEnv.getFeatureDescriptor().setValue("canEditAsText", Boolean.TRUE); // NOI18N
        Object bean = aEnv.getBeans()[0];
        if (bean instanceof Node) {
            Node node = (Node) bean;
            FormCookie formCookie = node.getLookup().lookup(FormCookie.class);
            if (formCookie != null && aEnv.getFeatureDescriptor() instanceof FormProperty<?>) {
                dataObject = formCookie.getFormModel().getDataObject();
                //(FormProperty<?>) aEnv.getFeatureDescriptor());
            }
        }
    }

    // Only fires a change when the new value actually differs (by icon name).
    @Override
    public void setValue(Object value) {
        if (!sameValue(value, getValue())) {
            super.setValue(value);
        }
    }

    // Two values are "the same" only when both are null or both are
    // NbImageIcons with equal names.
    private static boolean sameValue(Object val1, Object val2) {
        if (val1 == null && val2 == null) {
            return true;
        }
        if (val1 instanceof NbImageIcon && val2 instanceof NbImageIcon) {
            return sameIcon((NbImageIcon) val1, (NbImageIcon) val2);
        }
        return false;
    }

    // Null-safe comparison of the icons' resource names.
    private static boolean sameIcon(NbImageIcon nbIcon1, NbImageIcon nbIcon2) {
        if ((nbIcon1.getName() == null) ? (nbIcon2.getName() != null) : !nbIcon1.getName().equals(nbIcon2.getName())) {
            return false;
        }
        return true;
    }

    /**
     * @return the icon's resource name, or an empty string when unset
     */
    @Override
    public String getAsText() {
        Object val = getValue();
        if (val instanceof NbImageIcon) {
            NbImageIcon nbIcon = (NbImageIcon) val;
            return nbIcon.getName();
        }
        return ""; // NOI18N
    }

    /**
     * Parses a resource name or URL into an NbImageIcon value.
     */
    @Override
    public void setAsText(String aValue) throws IllegalArgumentException {
        try {
            setValue(createIconFromText(aValue));
        } catch (Exception ex) {
            throw new IllegalArgumentException(ex);
        }
    }

    @Override
    public String getJavaInitializationString() {
        if (getValue() instanceof NbImageIcon) {
            NbImageIcon ii = (NbImageIcon) getValue();
            return ii.getName();
        }
        return "null"; // NOI18N
    }

    @Override
    public boolean supportsCustomEditor() {
        return true;
    }

    @Override
    public Component getCustomEditor() {
        try {
            return new CustomIconEditor(this);
        } catch (Exception ex) {
            ErrorManager.getDefault().notify(ex);
            return null;
        }
    }

    // Source root of the project owning the edited form.
    public FileObject getProjectSrcFolder() throws Exception {
        return dataObject.getProject().getSrcRoot();
    }
    /*
    private Node getCurrentRootNode() throws Exception {
    if (rootNode == null) {
    rootNode = new AbstractNode(new FileChildren());
    }
    return rootNode;
    }
    */

    /**
     * @return names of files (without path) available in current folder
     */
    public String[] getCurrentFileNames() throws Exception {
        // Populate the static cache on first use (see staleness note above).
        if (currentFiles == null) {
            FileObject folder = getProjectSrcFolder();
            assert folder != null;
            List<String> list = new ArrayList<>();
            for (FileObject fo : folder.getChildren()) {
                if (isImageFile(fo)) {
                    list.add(fo.getNameExt());
                }
            }
            currentFiles = new String[list.size()];
            list.toArray(currentFiles);
            Arrays.sort(currentFiles);
        }
        return currentFiles;
    }

    // True for non-folder files whose extension looks like a supported image.
    static boolean isImageFile(FileObject fo) {
        return fo.isFolder() ? false : isImageFileName(fo.getNameExt());
    }

    // Case-insensitive check against the supported image extensions.
    static boolean isImageFileName(String name) {
        name = name.toLowerCase();
        return name.endsWith(".gif") || name.endsWith(".jpg") || name.endsWith(".png") // NOI18N
                || name.endsWith(".jpeg") || name.endsWith(".jpe"); // NOI18N
    }

    // Returns null for blank input, otherwise resolves the name via
    // iconFromResourceName (which may also return null when unresolved).
    private NbImageIcon createIconFromText(String aIconName) throws Exception {
        if (aIconName == null || "".equals(aIconName.trim())) {
            return null;
        }
        NbImageIcon nbIcon = iconFromResourceName(dataObject, aIconName);
        if (nbIcon != null) {
            return nbIcon;
        }
        return null;
    }
    // Matches http:// and https:// URLs.
    private static final Pattern pattern = Pattern.compile("https?://.*");

    /**
     * Resolves a resource name to an icon: http(s) names become URL icons,
     * anything else is looked up relative to the project source root.
     * Returns null for empty names or unresolved files.
     */
    public static NbImageIcon iconFromResourceName(PlatypusFormDataObject dataObject, String resName) throws Exception {
        if (resName != null && !resName.isEmpty()) {
            Matcher htppMatcher = pattern.matcher(resName);
            if (htppMatcher.matches()) {
                return new NbImageIcon(dataObject, new URL(resName), TYPE_URL, resName);
            } else {
                FileObject fo = dataObject.getProject().getSrcRoot().getFileObject(resName);
                if (fo != null) {
                    return new NbImageIcon(dataObject, fo.toURL(), TYPE_FILE, resName);
                }else{
                    return null;
                }
            }
        } else {
            return null;
        }
    }

    /**
     * ImageIcon that remembers its origin (URL vs. project file), its
     * resource name and the owning form data object.
     */
    public static class NbImageIcon extends ImageIcon {

        // One of TYPE_URL / TYPE_FILE.
        private final int type;
        /**
         * Name of the icon in icon library.
         */
        private final String name;
        protected PlatypusFormDataObject dataObject;

        public NbImageIcon(PlatypusFormDataObject aDataObject, URL aURL, int aType, String aName) {
            super(aURL);
            dataObject = aDataObject;
            type = aType;
            name = aName;
        }

        public int getType() {
            return type;
        }

        public String getName() {
            return name;
        }

        @Override
        public Image getImage() {
            return super.getImage();
        }

        // Re-resolves this icon's name against the project (fresh instance).
        public NbImageIcon copy() throws Exception {
            return iconFromResourceName(dataObject, name);
        }
    }
}
| apache-2.0 |
manishshanker/facetedSearch | test/unit/karma.conf.js | 2664 | // Karma configuration
// Generated on Thu Dec 12 2013 13:03:40 GMT+0000 (GMT)
module.exports = function (config) {
config.set({
// base path, that will be used to resolve files and exclude
basePath: '',
// frameworks to use
frameworks: ['jasmine'],
// list of files / patterns to load in the browser
files: [
'**/*test.js',
'../../src/lib/js/vendor/jquery.min.js',
'../../src/lib/js/vendor/handlebars-v1.1.2.js',
'../../src/lib/js/vendor/Mettle.min.js',
'../../src/lib/js/vendor/**/*.js',
'../../src/lib/js/app/config.js',
'../../src/lib/js/app/serviceURLs.js',
'../../src/lib/js/app/**/*.js',
"../mockServices/**/*.js"
],
preprocessors: {
// source files, that you wanna generate coverage for
// do not include tests or libraries
// (these files will be instrumented by Istanbul)
'../../src/lib/js/app/**/*.js': ['coverage']
},
// list of files to exclude
exclude: [
'../../src/lib/js/app/main.js',
'../../src/lib/js/**/conditional/*'
],
coverageReporter: {
type: 'teamcity',
dir: 'coverage/'
},
// test results reporter to use
// possible values: 'dots', 'progress', 'junit', 'growl', 'coverage'
reporters: ['progress', 'teamcity', 'coverage'],
// web server port
port: 9876,
// enable / disable colors in the output (reporters and logs)
colors: true,
// level of logging
// possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG
logLevel: config.LOG_WARN,
// enable / disable watching file and executing tests whenever any file changes
autoWatch: false,
// Start these browsers, currently available:
// - Chrome
// - ChromeCanary
// - Firefox
// - Opera (has to be installed with `npm install karma-opera-launcher`)
// - Safari (only Mac; has to be installed with `npm install karma-safari-launcher`)
// - PhantomJS
// - IE (only Windows; has to be installed with `npm install karma-ie-launcher`)
browsers: ['PhantomJS'],
// browsers: ['Chrome'],
// If browser does not capture in given timeout [ms], kill it
captureTimeout: 60000,
// Continuous Integration mode
// if true, it capture browsers, run tests and exit
singleRun: false
});
};
| apache-2.0 |
wso2/devstudio-tooling-esb | plugins/org.wso2.developerstudio.visualdatamapper/src/org/wso2/developerstudio/datamapper/util/DataMapperAdapterFactory.java | 36374 | /**
*/
package org.wso2.developerstudio.datamapper.util;
import java.util.Map;
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.Notifier;
import org.eclipse.emf.common.notify.impl.AdapterFactoryImpl;
import org.eclipse.emf.ecore.EObject;
import org.wso2.developerstudio.datamapper.*;
/**
* <!-- begin-user-doc -->
* The <b>Adapter Factory</b> for the model.
* It provides an adapter <code>createXXX</code> method for each class of the model.
* <!-- end-user-doc -->
* @see org.wso2.developerstudio.datamapper.DataMapperPackage
* @generated
*/
public class DataMapperAdapterFactory extends AdapterFactoryImpl {
/**
* The cached model package.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected static DataMapperPackage modelPackage;
/**
* Creates an instance of the adapter factory.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public DataMapperAdapterFactory() {
    // Cache the model package singleton on first construction;
    // shared by all factory instances (standard EMF generated pattern).
    if (modelPackage == null) {
        modelPackage = DataMapperPackage.eINSTANCE;
    }
}
/**
* Returns whether this factory is applicable for the type of the object.
* <!-- begin-user-doc -->
* This implementation returns <code>true</code> if the object is either the model's package or is an instance object of the model.
* <!-- end-user-doc -->
* @return whether this factory is applicable for the type of the object.
* @generated
*/
@Override
public boolean isFactoryForType(Object object) {
    // The model package object itself is always handled by this factory.
    if (object == modelPackage) {
        return true;
    }
    // Model instances are recognized by their metamodel package identity
    // (reference equality, per the EMF generated convention).
    if (object instanceof EObject) {
        return ((EObject)object).eClass().getEPackage() == modelPackage;
    }
    return false;
}
/**
* The switch that delegates to the <code>createXXX</code> methods.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
protected DataMapperSwitch<Adapter> modelSwitch =
new DataMapperSwitch<Adapter>() {
@Override
public Adapter caseDataMapperNode(DataMapperNode object) {
return createDataMapperNodeAdapter();
}
@Override
public Adapter caseTrim(Trim object) {
return createTrimAdapter();
}
@Override
public Adapter caseClone(Clone object) {
return createCloneAdapter();
}
@Override
public Adapter caseReplace(Replace object) {
return createReplaceAdapter();
}
@Override
public Adapter caseMatch(Match object) {
return createMatchAdapter();
}
@Override
public Adapter caseMin(Min object) {
return createMinAdapter();
}
@Override
public Adapter caseMax(Max object) {
return createMaxAdapter();
}
@Override
public Adapter caseCustomFunction(CustomFunction object) {
return createCustomFunctionAdapter();
}
@Override
public Adapter caseProperties(Properties object) {
return createPropertiesAdapter();
}
@Override
public Adapter caseCompare(Compare object) {
return createCompareAdapter();
}
@Override
public Adapter caseStringToNumber(StringToNumber object) {
return createStringToNumberAdapter();
}
@Override
public Adapter caseStringToBoolean(StringToBoolean object) {
return createStringToBooleanAdapter();
}
@Override
public Adapter caseDataMapperRoot(DataMapperRoot object) {
return createDataMapperRootAdapter();
}
@Override
public Adapter caseStringLength(StringLength object) {
return createStringLengthAdapter();
}
@Override
public Adapter caseStartsWith(StartsWith object) {
return createStartsWithAdapter();
}
@Override
public Adapter caseEndsWith(EndsWith object) {
return createEndsWithAdapter();
}
@Override
public Adapter caseSubstring(Substring object) {
return createSubstringAdapter();
}
@Override
public Adapter caseIfElse(IfElse object) {
return createIfElseAdapter();
}
@Override
public Adapter caseAND(AND object) {
return createANDAdapter();
}
@Override
public Adapter caseOR(OR object) {
return createORAdapter();
}
@Override
public Adapter caseInput(Input object) {
return createInputAdapter();
}
@Override
public Adapter caseNOT(NOT object) {
return createNOTAdapter();
}
@Override
public Adapter caseOutput(Output object) {
return createOutputAdapter();
}
@Override
public Adapter caseOperator(Operator object) {
return createOperatorAdapter();
}
@Override
public Adapter caseOperatorBasicContainer(OperatorBasicContainer object) {
return createOperatorBasicContainerAdapter();
}
@Override
public Adapter caseOperatorLeftContainer(OperatorLeftContainer object) {
return createOperatorLeftContainerAdapter();
}
@Override
public Adapter caseOperatorLeftConnector(OperatorLeftConnector object) {
return createOperatorLeftConnectorAdapter();
}
@Override
public Adapter caseOperatorRightContainer(OperatorRightContainer object) {
return createOperatorRightContainerAdapter();
}
@Override
public Adapter caseOperatorRightConnector(OperatorRightConnector object) {
return createOperatorRightConnectorAdapter();
}
@Override
public Adapter caseElement(Element object) {
return createElementAdapter();
}
@Override
public Adapter caseTreeNode(TreeNode object) {
return createTreeNodeAdapter();
}
@Override
public Adapter caseSubtract(Subtract object) {
return createSubtractAdapter();
}
@Override
public Adapter caseInNode(InNode object) {
return createInNodeAdapter();
}
@Override
public Adapter caseOutNode(OutNode object) {
return createOutNodeAdapter();
}
@Override
public Adapter caseDataMapperLink(DataMapperLink object) {
return createDataMapperLinkAdapter();
}
@Override
public Adapter caseConcat(Concat object) {
return createConcatAdapter();
}
@Override
public Adapter caseAbsoluteValue(AbsoluteValue object) {
return createAbsoluteValueAdapter();
}
@Override
public Adapter caseEqual(Equal object) {
return createEqualAdapter();
}
@Override
public Adapter caseConstant(Constant object) {
return createConstantAdapter();
}
@Override
public Adapter caseAdd(Add object) {
return createAddAdapter();
}
@Override
public Adapter caseDivide(Divide object) {
return createDivideAdapter();
}
@Override
public Adapter caseFloor(Floor object) {
return createFloorAdapter();
}
@Override
public Adapter caseRound(Round object) {
return createRoundAdapter();
}
@Override
public Adapter caseMultiply(Multiply object) {
return createMultiplyAdapter();
}
@Override
public Adapter caseSetPrecision(SetPrecision object) {
return createSetPrecisionAdapter();
}
@Override
public Adapter caseSplit(Split object) {
return createSplitAdapter();
}
@Override
public Adapter caseLowerCase(LowerCase object) {
return createLowerCaseAdapter();
}
@Override
public Adapter caseCeli(Celi object) {
return createCeliAdapter();
}
@Override
public Adapter caseUpperCase(UpperCase object) {
return createUpperCaseAdapter();
}
@Override
public Adapter casePropertyKeyValuePair(PropertyKeyValuePair object) {
return createPropertyKeyValuePairAdapter();
}
@Override
public Adapter caseContains(Contains object) {
return createContainsAdapter();
}
@Override
public Adapter caseToString(ToString object) {
return createToStringAdapter();
}
@Override
public Adapter caseGlobalVariable(GlobalVariable object) {
return createGlobalVariableAdapter();
}
@Override
public Adapter caseAdvancedCustomFunction(AdvancedCustomFunction object) {
return createAdvancedCustomFunctionAdapter();
}
@Override
public Adapter defaultCase(EObject object) {
return createEObjectAdapter();
}
};
/**
* Creates an adapter for the <code>target</code>.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @param target the object to adapt.
* @return the adapter for the <code>target</code>.
* @generated
*/
@Override
public Adapter createAdapter(Notifier target) {
    // Delegate to the generated switch, which dispatches on the
    // target's EClass to the matching createXXXAdapter() method.
    return modelSwitch.doSwitch((EObject)target);
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.DataMapperNode <em>Node</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.DataMapperNode
* @generated
*/
public Adapter createDataMapperNodeAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Trim <em>Trim</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Trim
* @generated
*/
public Adapter createTrimAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Clone <em>Clone</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Clone
* @generated
*/
public Adapter createCloneAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Replace <em>Replace</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Replace
* @generated
*/
public Adapter createReplaceAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Match <em>Match</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Match
* @generated
*/
public Adapter createMatchAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Min <em>Min</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Min
* @generated
*/
public Adapter createMinAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Max <em>Max</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Max
* @generated
*/
public Adapter createMaxAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.CustomFunction <em>Custom Function</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.CustomFunction
* @generated
*/
public Adapter createCustomFunctionAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Properties <em>Properties</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Properties
* @generated
*/
public Adapter createPropertiesAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Compare <em>Compare</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Compare
* @generated
*/
public Adapter createCompareAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.StringToNumber <em>String To Number</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.StringToNumber
* @generated
*/
public Adapter createStringToNumberAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.StringToBoolean <em>String To Boolean</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.StringToBoolean
* @generated
*/
public Adapter createStringToBooleanAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.DataMapperRoot <em>Root</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.DataMapperRoot
* @generated
*/
public Adapter createDataMapperRootAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.StringLength <em>String Length</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.StringLength
* @generated
*/
public Adapter createStringLengthAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.StartsWith <em>Starts With</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.StartsWith
* @generated
*/
public Adapter createStartsWithAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.EndsWith <em>Ends With</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.EndsWith
* @generated
*/
public Adapter createEndsWithAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Substring <em>Substring</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Substring
* @generated
*/
public Adapter createSubstringAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.IfElse <em>If Else</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.IfElse
* @generated
*/
public Adapter createIfElseAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.AND <em>AND</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.AND
* @generated
*/
public Adapter createANDAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.OR <em>OR</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.OR
* @generated
*/
public Adapter createORAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Input <em>Input</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Input
* @generated
*/
public Adapter createInputAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.NOT <em>NOT</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.NOT
* @generated
*/
public Adapter createNOTAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Output <em>Output</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Output
* @generated
*/
public Adapter createOutputAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Operator <em>Operator</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Operator
* @generated
*/
public Adapter createOperatorAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.OperatorBasicContainer <em>Operator Basic Container</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.OperatorBasicContainer
* @generated
*/
public Adapter createOperatorBasicContainerAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.OperatorLeftContainer <em>Operator Left Container</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.OperatorLeftContainer
* @generated
*/
public Adapter createOperatorLeftContainerAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.OperatorLeftConnector <em>Operator Left Connector</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.OperatorLeftConnector
* @generated
*/
public Adapter createOperatorLeftConnectorAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.OperatorRightContainer <em>Operator Right Container</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.OperatorRightContainer
* @generated
*/
public Adapter createOperatorRightContainerAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.OperatorRightConnector <em>Operator Right Connector</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.OperatorRightConnector
* @generated
*/
public Adapter createOperatorRightConnectorAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Element <em>Element</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Element
* @generated
*/
public Adapter createElementAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.TreeNode <em>Tree Node</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.TreeNode
* @generated
*/
public Adapter createTreeNodeAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Subtract <em>Subtract</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Subtract
* @generated
*/
public Adapter createSubtractAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.InNode <em>In Node</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.InNode
* @generated
*/
public Adapter createInNodeAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.OutNode <em>Out Node</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.OutNode
* @generated
*/
public Adapter createOutNodeAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.DataMapperLink <em>Link</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.DataMapperLink
* @generated
*/
public Adapter createDataMapperLinkAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Concat <em>Concat</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Concat
* @generated
*/
public Adapter createConcatAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.AbsoluteValue <em>Absolute Value</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.AbsoluteValue
* @generated
*/
public Adapter createAbsoluteValueAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Equal <em>Equal</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Equal
* @generated
*/
public Adapter createEqualAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Constant <em>Constant</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Constant
* @generated
*/
public Adapter createConstantAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Add <em>Add</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Add
* @generated
*/
public Adapter createAddAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Divide <em>Divide</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Divide
* @generated
*/
public Adapter createDivideAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Floor <em>Floor</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Floor
* @generated
*/
public Adapter createFloorAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Round <em>Round</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Round
* @generated
*/
public Adapter createRoundAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Multiply <em>Multiply</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Multiply
* @generated
*/
public Adapter createMultiplyAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.SetPrecision <em>Set Precision</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.SetPrecision
* @generated
*/
public Adapter createSetPrecisionAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Split <em>Split</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Split
* @generated
*/
public Adapter createSplitAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.LowerCase <em>Lower Case</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.LowerCase
* @generated
*/
public Adapter createLowerCaseAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Celi <em>Celi</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Celi
* @generated
*/
public Adapter createCeliAdapter() {
return null;
}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.UpperCase <em>Upper Case</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.UpperCase
* @generated
*/
	public Adapter createUpperCaseAdapter() {
		// Generated EMF stub: null means callers simply ignore UpperCase objects.
		return null;
	}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.PropertyKeyValuePair <em>Property Key Value Pair</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.PropertyKeyValuePair
* @generated
*/
	public Adapter createPropertyKeyValuePairAdapter() {
		// Generated EMF stub: null means callers simply ignore PropertyKeyValuePair objects.
		return null;
	}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.Contains <em>Contains</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.Contains
* @generated
*/
	public Adapter createContainsAdapter() {
		// Generated EMF stub: null means callers simply ignore Contains objects.
		return null;
	}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.ToString <em>To String</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.ToString
* @generated
*/
	public Adapter createToStringAdapter() {
		// Generated EMF stub: null means callers simply ignore ToString objects.
		return null;
	}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.GlobalVariable <em>Global Variable</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.GlobalVariable
* @generated
*/
	public Adapter createGlobalVariableAdapter() {
		// Generated EMF stub: null means callers simply ignore GlobalVariable objects.
		return null;
	}
/**
* Creates a new adapter for an object of class '{@link org.wso2.developerstudio.datamapper.AdvancedCustomFunction <em>Advanced Custom Function</em>}'.
* <!-- begin-user-doc -->
* This default implementation returns null so that we can easily ignore cases;
* it's useful to ignore a case when inheritance will catch all the cases anyway.
* <!-- end-user-doc -->
* @return the new adapter.
* @see org.wso2.developerstudio.datamapper.AdvancedCustomFunction
* @generated
*/
	public Adapter createAdvancedCustomFunctionAdapter() {
		// Generated EMF stub: null means callers simply ignore AdvancedCustomFunction objects.
		return null;
	}
/**
* Creates a new adapter for the default case.
* <!-- begin-user-doc -->
* This default implementation returns null.
* <!-- end-user-doc -->
* @return the new adapter.
* @generated
*/
	public Adapter createEObjectAdapter() {
		// Generated EMF stub for the default case: no adapter is attached.
		return null;
	}
} //DataMapperAdapterFactory
| apache-2.0 |
spasam/terremark-api | src/main/java/com/terremark/api/ProtocolTypeEnum.java | 2319 | /**
* Copyright 2012 Terremark Worldwide Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.terremark.api;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlEnumValue;
import javax.xml.bind.annotation.XmlType;
/**
* Protocol types.
* <p>
* Java class for ProtocolTypeEnum.
* <p>
* The following schema fragment specifies the expected content contained within this class.
* <p>
*
* <pre>
* <simpleType name="ProtocolTypeEnum">
* <restriction base="{http://www.w3.org/2001/XMLSchema}string">
* <enumeration value="HTTP"/>
* <enumeration value="HTTPS"/>
* <enumeration value="TCP"/>
* <enumeration value="UDP"/>
* <enumeration value="IPSEC"/>
* <enumeration value="FTP"/>
* <enumeration value="Any"/>
* </restriction>
* </simpleType>
* </pre>
*/
@XmlType(name = "ProtocolTypeEnum")
@XmlEnum
public enum ProtocolTypeEnum {
    /** Hypertext transfer protocol. */
    HTTP("HTTP"),
    /** Secure hypertext transfer protocol. */
    HTTPS("HTTPS"),
    /** Transmission control protocol. */
    TCP("TCP"),
    /** User datagram protocol. */
    UDP("UDP"),
    /** Internet protocol security. */
    IPSEC("IPSEC"),
    /** File transfer protocol. */
    FTP("FTP"),
    /**
     * Any protocol.
     */
    @XmlEnumValue("Any")
    ANY("Any");
    // XML lexical value; matches the constant name for all values except ANY,
    // which maps to "Any" via @XmlEnumValue.
    private final String value;
    ProtocolTypeEnum(final String v) {
        value = v;
    }
    /** Returns the XML lexical value of this constant. */
    public String value() {
        return value;
    }
    /**
     * Resolves an XML lexical value to its enum constant.
     *
     * @throws IllegalArgumentException if no constant has the given value
     */
    public static ProtocolTypeEnum fromValue(final String v) {
        for (final ProtocolTypeEnum c : ProtocolTypeEnum.values()) {
            if (c.value.equals(v)) {
                return c;
            }
        }
        throw new IllegalArgumentException(v);
    }
}
| apache-2.0 |
antoniardot/DockBot-Eve | lib/lejos/robotics/filter/LowPassFilter.java | 1572 | package lejos.robotics.filter;
import lejos.robotics.SampleProvider;
/**
* Provides a low-pass filter for samples <br>
*
* @see <a
* href=http://en.wikipedia.org/wiki/Low-pass_filter>http://en.wikipedia.
* org/wiki/Low-pass_filter</a>
* @author Aswin
*
*/
public class LowPassFilter extends AbstractFilter {
  // Last smoothed value per axis; seeded from the first fetched sample.
  float[] smoothed;
  // Wall-clock time (ms) of the previous fetch; 0 means "no sample seen yet".
  long lastTime = 0;
  // Filter time constant in seconds; 0 disables smoothing (pass-through).
  float timeConstant;

  /**
   * Constructor
   *
   * @param source
   *          The source for getting samples
   * @param timeConstant
   *          The cut-off frequency for the filter
   */
  public LowPassFilter(SampleProvider source, float timeConstant) {
    super(source);
    smoothed = new float[sampleSize];
    this.timeConstant = timeConstant;
  }

  /**
   * Fetches a sample from the source and low-passes it.
   * <p>
   * Uses the exponential-smoothing formulation described at
   * http://en.wikipedia.org/wiki/Low-pass_filter
   */
  public void fetchSample(float[] dst, int off) {
    super.fetchSample(dst, off);
    // FIX: read the clock once per fetch. The original read
    // System.currentTimeMillis() twice (once for dt, once for lastTime),
    // so the time elapsed between the two reads was silently lost on
    // every call, skewing subsequent dt values.
    long now = System.currentTimeMillis();
    if (lastTime == 0 || timeConstant == 0) {
      // First sample (or smoothing disabled): pass the sample through unchanged.
      for (int axis = 0; axis < sampleSize; axis++) {
        smoothed[axis] = dst[off + axis];
      }
    }
    else {
      float dt = (now - lastTime) / 1000f;
      // Smoothing factor in (0,1]: larger dt or smaller timeConstant -> less smoothing.
      float a = dt / (timeConstant + dt);
      for (int axis = 0; axis < sampleSize; axis++) {
        smoothed[axis] = (1f - a) * smoothed[axis] + a * (dst[off + axis]);
        dst[axis + off] = smoothed[axis];
      }
    }
    lastTime = now;
  }

  public void setTimeConstant(float timeConstant) {
    this.timeConstant = timeConstant;
  }
}
| apache-2.0 |
Bitergia/allura | Allura/allura/tests/model/test_discussion.py | 10840 | # -*- coding: utf-8 -*-
"""
Model tests for artifact
"""
from cStringIO import StringIO
import time
from datetime import datetime, timedelta
from cgi import FieldStorage
from pylons import c, g, request, response
from nose.tools import assert_raises, assert_equals, with_setup
import mock
from mock import patch
from ming.orm import session, ThreadLocalORMSession
from webob import Request, Response, exc
from allura import model as M
from allura.lib.app_globals import Globals
from allura.lib import helpers as h
from allura.tests import TestController
from alluratest.controller import setup_global_objects
def setUp():
    """Boot a test app, load the wiki tool and point the thread context at it."""
    controller = TestController()
    controller.setUp()
    # Hitting the page ensures the wiki tool is fully installed before tests run.
    controller.app.get('/wiki/Home/')
    setup_global_objects()
    ThreadLocalORMSession.close_all()
    h.set_context('test', 'wiki', neighborhood='Projects')
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
def tearDown():
    """Drop any leftover ORM sessions so state cannot leak between tests."""
    ThreadLocalORMSession.close_all()
@with_setup(setUp, tearDown)
def test_discussion_methods():
    """Smoke-test Discussion's class hooks, URL/index helpers and deletion."""
    d = M.Discussion(shortname='test', name='test')
    assert d.thread_class() == M.Thread
    assert d.post_class() == M.Post
    assert d.attachment_class() == M.DiscussionAttachment
    ThreadLocalORMSession.flush_all()
    d.update_stats()
    ThreadLocalORMSession.flush_all()
    # A fresh discussion has no posts, no subscription, an empty post query.
    assert d.last_post == None
    assert d.url().endswith('wiki/_discuss/')
    assert d.index()['name_s'] == 'test'
    assert d.subscription() == None
    assert d.find_posts().count() == 0
    jsn = d.__json__()
    assert jsn['name'] == d.name
    d.delete()
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
@with_setup(setUp, tearDown)
def test_thread_methods():
    """Exercise Thread posting, querying, subscription and spam/delete paths."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
    assert t.discussion_class() == M.Discussion
    assert t.post_class() == M.Post
    assert t.attachment_class() == M.DiscussionAttachment
    p0 = t.post('This is a post')
    p1 = t.post('This is another post')
    # Sleep so the reply gets a strictly later timestamp than p0/p1.
    time.sleep(0.25)
    p2 = t.post('This is a reply', parent_id=p0._id)
    ThreadLocalORMSession.flush_all()
    ThreadLocalORMSession.close_all()
    d = M.Discussion.query.get(shortname='test')
    t = d.threads[0]
    assert d.last_post is not None
    assert t.last_post is not None
    t.create_post_threads(t.posts)
    posts0 = t.find_posts(page=0, limit=10, style='threaded')
    posts1 = t.find_posts(page=0, limit=10, style='timestamp')
    assert posts0 != posts1
    # Round the timestamp down to millisecond precision (Mongo granularity).
    ts = p0.timestamp.replace(
        microsecond=int(p0.timestamp.microsecond // 1000) * 1000)
    posts2 = t.find_posts(page=0, limit=10, style='threaded', timestamp=ts)
    assert len(posts2) > 0
    assert 'wiki/_discuss/' in t.url()
    assert t.index()['views_i'] == 0
    assert not t.subscription
    t.subscription = True
    assert t.subscription
    t.subscription = False
    assert not t.subscription
    assert t.top_level_posts().count() == 2
    assert t.post_count == 3
    jsn = t.__json__()
    assert '_id' in jsn
    assert_equals(len(jsn['posts']), 3)
    # BUG FIX: the original wrote `(p.approve() for p in (p0, p1))`, a
    # generator expression that was never iterated, so approve() never ran.
    # Iterate explicitly so both posts are really approved.
    for post in (p0, p1):
        post.approve()
    assert t.num_replies == 2
    t.spam()
    assert t.num_replies == 0
    ThreadLocalORMSession.flush_all()
    assert len(t.find_posts()) == 0
    t.delete()
@with_setup(setUp, tearDown)
def test_thread_new():
    """Thread.new should retry on a duplicate nonce-based _id.

    The patched nonce yields 'deadbeef' twice, then 'beefdead' — so the
    second thread apparently collides once and retries with the next nonce.
    """
    with mock.patch('allura.model.discuss.h.nonce') as nonce:
        nonce.side_effect = ['deadbeef', 'deadbeef', 'beefdead']
        d = M.Discussion(shortname='test', name='test')
        t1 = M.Thread.new(discussion_id=d._id, subject='Test Thread One')
        t2 = M.Thread.new(discussion_id=d._id, subject='Test Thread Two')
        ThreadLocalORMSession.flush_all()
        # Expunge, then reload from the DB to verify the persisted documents.
        session(t1).expunge(t1)
        session(t2).expunge(t2)
        t1_2 = M.Thread.query.get(_id=t1._id)
        t2_2 = M.Thread.query.get(_id=t2._id)
        assert_equals(t1._id, 'deadbeef')
        assert_equals(t2._id, 'beefdead')
        assert_equals(t1_2.subject, 'Test Thread One')
        assert_equals(t2_2.subject, 'Test Thread Two')
@with_setup(setUp, tearDown)
def test_post_methods():
    """Exercise Post helpers (URLs, history, JSON) and spam/delete counting."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
    p = t.post('This is a post')
    p2 = t.post('This is another post')
    assert p.discussion_class() == M.Discussion
    assert p.thread_class() == M.Thread
    assert p.attachment_class() == M.DiscussionAttachment
    p.commit()
    assert p.parent is None
    assert p.subject == 'Test Thread'
    assert p.attachments.count() == 0
    assert 'Test Admin' in p.summary()
    assert 'wiki/_discuss' in p.url()
    assert p.reply_subject() == 'Re: Test Thread'
    assert p.link_text() == p.subject
    ss = p.history().first()
    assert 'Version' in ss.index()['title_s']
    assert '#' in ss.shorthand_id()
    jsn = p.__json__()
    assert jsn["thread_id"] == t._id
    # BUG FIX: the original wrote `(p.approve() for p in (p, p2))`, a
    # generator expression that was never iterated, so approve() never ran.
    # Use a distinct loop variable so `p` itself is not rebound.
    for post in (p, p2):
        post.approve()
    assert t.num_replies == 1
    p2.spam()
    assert t.num_replies == 0
    p.spam()
    assert t.num_replies == 0
    p.delete()
    assert t.num_replies == 0
@with_setup(setUp, tearDown)
def test_attachment_methods():
    """Attachments can hang off a post, a thread or a discussion, and an
    attached file shows up in the comment notification e-mail."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
    p = t.post('This is a post')
    # Same attach() API, progressively less specific scope:
    p_att = p.attach('foo.text', StringIO('Hello, world!'),
                        discussion_id=d._id,
                        thread_id=t._id,
                        post_id=p._id)
    t_att = p.attach('foo2.text', StringIO('Hello, thread!'),
                        discussion_id=d._id,
                        thread_id=t._id)
    d_att = p.attach('foo3.text', StringIO('Hello, discussion!'),
                        discussion_id=d._id)
    ThreadLocalORMSession.flush_all()
    assert p_att.post == p
    assert p_att.thread == t
    assert p_att.discussion == d
    for att in (p_att, t_att, d_att):
        assert 'wiki/_discuss' in att.url()
        assert 'attachment/' in att.url()
    # Test notification in mail
    t = M.Thread.new(discussion_id=d._id, subject='Test comment notification')
    # Simulate a browser file upload with a cgi.FieldStorage.
    fs = FieldStorage()
    fs.name='file_info'
    fs.filename='fake.txt'
    fs.type = 'text/plain'
    fs.file=StringIO('this is the content of the fake file\n')
    p = t.post(text=u'test message', forum= None, subject= '', file_info=fs)
    ThreadLocalORMSession.flush_all()
    n = M.Notification.query.get(subject=u'[test:wiki] Test comment notification')
    assert_equals(u'test message\n\n\nAttachment: fake.txt (37 Bytes; text/plain)', n.text)
@with_setup(setUp, tearDown)
def test_discussion_delete():
    """Deleting a discussion should also remove its ArtifactReference."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
    p = t.post('This is a post')
    p.attach('foo.text', StringIO(''),
                discussion_id=d._id,
                thread_id=t._id,
                post_id=p._id)
    # Called for its side effect: creates the reference document for d.
    # (The return value `r` itself is unused.)
    r = M.ArtifactReference.from_artifact(d)
    rid = d.index_id()
    ThreadLocalORMSession.flush_all()
    d.delete()
    ThreadLocalORMSession.flush_all()
    assert_equals(M.ArtifactReference.query.find(dict(_id=rid)).count(), 0)
@with_setup(setUp, tearDown)
def test_thread_delete():
    """A thread with a post and an attachment deletes without error."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
    p = t.post('This is a post')
    p.attach('foo.text', StringIO(''),
                discussion_id=d._id,
                thread_id=t._id,
                post_id=p._id)
    ThreadLocalORMSession.flush_all()
    t.delete()
@with_setup(setUp, tearDown)
def test_post_delete():
    """A post with an attachment deletes without error."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
    p = t.post('This is a post')
    p.attach('foo.text', StringIO(''),
                discussion_id=d._id,
                thread_id=t._id,
                post_id=p._id)
    ThreadLocalORMSession.flush_all()
    p.delete()
@with_setup(setUp, tearDown)
def test_post_permission_check():
    """Anonymous users may not post unless the security check is bypassed."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
    c.user = M.User.anonymous()
    # IDIOM: use nose's assert_raises (already imported at module level)
    # instead of the manual try/except/assert-False dance.
    assert_raises(exc.HTTPUnauthorized,
                  t.post, 'This post will fail the check.')
    # ignore_security=True bypasses the permission check entirely.
    t.post('This post will pass the check.', ignore_security=True)
@with_setup(setUp, tearDown)
def test_post_url_paginated():
    """url_paginated() must point at the page containing the post, honouring
    both the default limit and the user's results_per_page preference."""
    d = M.Discussion(shortname='test', name='test')
    t = M.Thread(discussion_id=d._id, subject='Test Thread')
    p = [] # posts in display order
    ts = datetime.utcnow() - timedelta(days=1)
    # Five top-level posts, one minute apart.
    for i in range(5):
        ts += timedelta(minutes=1)
        p.append(t.post('This is a post #%s' % i, timestamp=ts))
    # Interleave replies at the list positions where threaded display shows them.
    ts += timedelta(minutes=1)
    p.insert(1, t.post(
        'This is reply #0 to post #0', parent_id=p[0]._id, timestamp=ts))
    ts += timedelta(minutes=1)
    p.insert(2, t.post(
        'This is reply #1 to post #0', parent_id=p[0]._id, timestamp=ts))
    ts += timedelta(minutes=1)
    p.insert(4, t.post(
        'This is reply #0 to post #1', parent_id=p[3]._id, timestamp=ts))
    ts += timedelta(minutes=1)
    p.insert(6, t.post(
        'This is reply #0 to post #2', parent_id=p[5]._id, timestamp=ts))
    ts += timedelta(minutes=1)
    p.insert(7, t.post(
        'This is reply #1 to post #2', parent_id=p[5]._id, timestamp=ts))
    ts += timedelta(minutes=1)
    p.insert(8, t.post(
        'This is reply #0 to reply #1 to post #2',
        parent_id=p[7]._id, timestamp=ts))
    # with default paging limit
    for _p in p:
        url = t.url() + '?limit=50#' + _p.slug
        assert _p.url_paginated() == url, _p.url_paginated()
    # with user paging limit
    limit = 3
    c.user.set_pref('results_per_page', limit)
    for i, _p in enumerate(p):
        # Python 2 integer division: post i lands on page i // limit.
        page = i / limit
        url = t.url() + '?limit=%s' % limit
        if page > 0:
            url += '&page=%s' % page
        url += '#' + _p.slug
        assert _p.url_paginated() == url, _p.url_paginated()
@with_setup(setUp, tearDown)
def test_post_notify():
    """Posting e-mails the monitoring address, unless the project has
    notifications disabled."""
    d = M.Discussion(shortname='test', name='test')
    d.monitoring_email = 'darthvader@deathstar.org'
    t = M.Thread.new(discussion_id=d._id, subject='Test Thread')
    with patch('allura.model.notification.Notification.send_simple') as send:
        t.post('This is a post')
        send.assert_called_with(d.monitoring_email)
    c.app.config.project.notifications_disabled = True
    with patch('allura.model.notification.Notification.send_simple') as send:
        t.post('Another post')
        # Inverted mock check: assert_called_with must *fail* (raise
        # AssertionError) because send_simple should not have been called.
        try:
            send.assert_called_with(d.monitoring_email)
        except AssertionError:
            pass # method not called, as expected
        else:
            assert False, 'send_simple must not be called'
| apache-2.0 |
jdgwartney/vsphere-ws | java/JAXWS/samples/com/vmware/vim25/SuspendedRelocateNotSupported.java | 825 |
package com.vmware.vim25;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for SuspendedRelocateNotSupported complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="SuspendedRelocateNotSupported">
* <complexContent>
* <extension base="{urn:vim25}MigrationFault">
* <sequence>
* </sequence>
* </extension>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "SuspendedRelocateNotSupported")
public class SuspendedRelocateNotSupported
    extends MigrationFault
{
    // Marker fault: the schema type adds no elements beyond MigrationFault,
    // so the class body is intentionally empty.
}
| apache-2.0 |
jaymarvels/outpan-api-csharp | Outpan Get/Properties/Resources.Designer.cs | 2789 | //------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace OutPanApiGet.Properties {
using System;
/// <summary>
/// A strongly-typed resource class, for looking up localized strings, etc.
/// </summary>
// This class was auto-generated by the StronglyTypedResourceBuilder
// class via a tool like ResGen or Visual Studio.
// To add or remove a member, edit your .ResX file then rerun ResGen
// with the /str option, or rebuild your VS project.
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "4.0.0.0")]
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute()]
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    internal class Resources {
        private static global::System.Resources.ResourceManager resourceMan;
        private static global::System.Globalization.CultureInfo resourceCulture;
        [global::System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1811:AvoidUncalledPrivateCode")]
        internal Resources() {
        }
        /// <summary>
        ///   Returns the cached ResourceManager instance used by this class.
        /// </summary>
        // NOTE(review): standard generated pattern — the lazy init is not
        // synchronized; a race can at worst create two ResourceManager
        // instances, which is benign (last write wins).
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Resources.ResourceManager ResourceManager {
            get {
                if (object.ReferenceEquals(resourceMan, null)) {
                    global::System.Resources.ResourceManager temp = new global::System.Resources.ResourceManager("OutPanApiGet.Properties.Resources", typeof(Resources).Assembly);
                    resourceMan = temp;
                }
                return resourceMan;
            }
        }
        /// <summary>
        ///   Overrides the current thread's CurrentUICulture property for all
        ///   resource lookups using this strongly typed resource class.
        /// </summary>
        [global::System.ComponentModel.EditorBrowsableAttribute(global::System.ComponentModel.EditorBrowsableState.Advanced)]
        internal static global::System.Globalization.CultureInfo Culture {
            get {
                return resourceCulture;
            }
            set {
                resourceCulture = value;
            }
        }
    }
| apache-2.0 |
kisskys/incubator-asterixdb | asterixdb/asterix-external-data/src/main/java/org/apache/asterix/external/operators/IndexInfoOperatorDescriptor.java | 4395 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.asterix.external.operators;
import org.apache.hyracks.api.context.IHyracksTaskContext;
import org.apache.hyracks.api.dataflow.ActivityId;
import org.apache.hyracks.api.dataflow.IOperatorNodePushable;
import org.apache.hyracks.api.dataflow.value.INullWriterFactory;
import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
import org.apache.hyracks.api.exceptions.HyracksDataException;
import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
import org.apache.hyracks.storage.am.common.api.IIndexLifecycleManagerProvider;
import org.apache.hyracks.storage.am.common.api.IModificationOperationCallbackFactory;
import org.apache.hyracks.storage.am.common.api.ISearchOperationCallbackFactory;
import org.apache.hyracks.storage.am.common.api.ITupleFilterFactory;
import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory;
import org.apache.hyracks.storage.am.common.dataflow.IIndexOperatorDescriptor;
import org.apache.hyracks.storage.common.IStorageManagerInterface;
import org.apache.hyracks.storage.common.file.ILocalResourceFactoryProvider;
/*
 * This is a hack used to obtain multiple index instances in a single operator.
 * It is never scheduled as a real operator: only the three fields set in the
 * constructor are meaningful; every other IIndexOperatorDescriptor method is a
 * stub returning null/false.
 */
public class IndexInfoOperatorDescriptor implements IIndexOperatorDescriptor{
    private static final long serialVersionUID = 1L;
    private final IFileSplitProvider fileSplitProvider;
    private final IStorageManagerInterface storageManager;
    private final IIndexLifecycleManagerProvider lifecycleManagerProvider;
    public IndexInfoOperatorDescriptor(IFileSplitProvider fileSplitProvider,IStorageManagerInterface storageManager,
            IIndexLifecycleManagerProvider lifecycleManagerProvider){
        this.fileSplitProvider = fileSplitProvider;
        this.lifecycleManagerProvider = lifecycleManagerProvider;
        this.storageManager = storageManager;
    }
    // --- Unused operator machinery: never invoked for this pseudo-operator. ---
    @Override
    public ActivityId getActivityId() {
        return null;
    }
    @Override
    public IOperatorNodePushable createPushRuntime(IHyracksTaskContext ctx,
            IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) throws HyracksDataException {
        return null;
    }
    // --- The three accessors below are the whole point of this class. ---
    @Override
    public IFileSplitProvider getFileSplitProvider() {
        return fileSplitProvider;
    }
    @Override
    public IStorageManagerInterface getStorageManager() {
        return storageManager;
    }
    @Override
    public IIndexLifecycleManagerProvider getLifecycleManagerProvider() {
        return lifecycleManagerProvider;
    }
    @Override
    public RecordDescriptor getRecordDescriptor() {
        return null;
    }
    @Override
    public IIndexDataflowHelperFactory getIndexDataflowHelperFactory() {
        return null;
    }
    @Override
    public boolean getRetainInput() {
        return false;
    }
    @Override
    public ISearchOperationCallbackFactory getSearchOpCallbackFactory() {
        return null;
    }
    @Override
    public IModificationOperationCallbackFactory getModificationOpCallbackFactory() {
        return null;
    }
    @Override
    public ITupleFilterFactory getTupleFilterFactory() {
        return null;
    }
    @Override
    public ILocalResourceFactoryProvider getLocalResourceFactoryProvider() {
        return null;
    }
    @Override
    public boolean getRetainNull() {
        return false;
    }
    @Override
    public INullWriterFactory getNullWriterFactory() {
        return null;
    }
}
| apache-2.0 |
consulo/consulo | modules/base/lang-impl/src/main/java/consulo/ide/macro/ModuleTestOutputDirPathMacro.java | 1275 | /*
* Copyright 2013-2016 consulo.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package consulo.ide.macro;
import com.intellij.ide.IdeBundle;
import consulo.roots.ContentFolderTypeProvider;
import consulo.roots.impl.TestContentFolderTypeProvider;
import javax.annotation.Nonnull;
/**
* @author VISTALL
* @since 18.08.14
*/
public class ModuleTestOutputDirPathMacro extends ModuleOutputDirPathMacro {
  @Nonnull
  @Override
  public ContentFolderTypeProvider getContentFolderTypeProvider() {
    // Narrows the inherited output-dir macro to the module's *test* output root.
    return TestContentFolderTypeProvider.getInstance();
  }
  @Override
  public String getName() {
    // Macro identifier as typed by users, e.g. $ModuleTestOutputDirPath$.
    return "ModuleTestOutputDirPath";
  }
  @Override
  public String getDescription() {
    return IdeBundle.message("macro.module.test.output.dir.path");
  }
}
| apache-2.0 |
jacekkopecky/parkjam | src/com/hp/hpl/jena/shared/AssertionFailureException.java | 2127 | /*
(c) Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
[See end of file]
$Id: AssertionFailureException.java,v 1.1 2009/06/29 08:55:34 castagna Exp $
*/
package com.hp.hpl.jena.shared;
/**
Exception to throw when an assertion fails. Probably obsolete in
Java's with the assert statement ...
@author kers
*/
public class AssertionFailureException extends JenaException
    {
    /**
        Construct the exception; <code>message</code> describes the assertion
        that failed.
    */
    public AssertionFailureException( String message )
        { super( message ); }
    }
/*
(c) Copyright 2003, 2004, 2005, 2006, 2007, 2008, 2009 Hewlett-Packard Development Company, LP
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. The name of the author may not be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/ | apache-2.0 |
linkedin/parseq | subprojects/parseq/src/main/java/com/linkedin/parseq/ParSeqGlobalConfiguration.java | 3776 | /*
* Copyright 2012 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.parseq;
/**
* Global parseq configuration, applies to all Engine and Task instances
*
* @author Oleg Anashkin (oanashkin@linkedin.com)
* @author Jaroslaw Odzga (jodzga@linkedin.com)
*/
public final class ParSeqGlobalConfiguration {

  // Both switches are volatile so a change made by one thread is immediately
  // visible to all tasks in the process without further synchronization.
  private static volatile boolean _crossThreadStackTracesEnabled = false;
  private static volatile boolean _trampolineEnabled = false;

  /** Static-only holder; never instantiated. */
  private ParSeqGlobalConfiguration() {
  }

  /**
   * Tells whether cross-thread (cross-task) stack tracing is currently on.
   * <p>
   * Because a task typically runs on a different thread, and at a later time,
   * than the code that created it, an exception thrown from a task normally
   * carries a stack that bottoms out inside the execution engine. With this
   * feature on, the creating thread's stack trace is captured in the task
   * constructor and appended to any exception the task throws. Capturing the
   * trace has a small cost on every task creation, even for tasks that never
   * fail.
   *
   * @return whether cross-thread stack tracing is on
   */
  public static boolean isCrossThreadStackTracesEnabled() {
    return _crossThreadStackTracesEnabled;
  }

  /**
   * Turns cross-thread (cross-task) stack tracing on or off. Takes effect
   * immediately for every task created afterwards in this process.
   * <p>
   * See {@link #isCrossThreadStackTracesEnabled()} for what the feature does
   * and its performance trade-off.
   *
   * @param enabled whether cross-thread stack tracing should be on
   */
  public static void setCrossThreadStackTracesEnabled(boolean enabled) {
    _crossThreadStackTracesEnabled = enabled;
  }

  /**
   * Tells whether the trampoline is currently on.
   * <p>
   * The trampoline guards against stack overflow on extremely deep ParSeq
   * plans. Since such plans are rare, the trampoline can usually be left off
   * as a performance optimization.
   *
   * @return whether the trampoline is on
   */
  public static boolean isTrampolineEnabled() {
    return _trampolineEnabled;
  }

  /**
   * Turns the trampoline on or off. Takes effect immediately for all tasks in
   * this process.
   * <p>
   * See {@link #isTrampolineEnabled()} for the trade-off involved.
   *
   * @param enabled whether the trampoline should be on
   */
  public static void setTrampolineEnabled(boolean enabled) {
    _trampolineEnabled = enabled;
  }
}
| apache-2.0 |
yyitsz/myjavastudio | SimpleCrm/SimpleCrm/Utils/FormHelper.cs | 4639 | using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Windows.Forms;
using SimpleCrm.Facade;
namespace SimpleCrm.Utils
{
    /// <summary>
    /// Extension helpers for showing MDI child forms, owned non-modal forms and
    /// modal dialogs, reusing an already-open instance when one exists.
    /// </summary>
    public static class FormHelper
    {
        /// <summary>
        /// Shows a singleton MDI child form of type <typeparamref name="T"/>,
        /// or re-activates it if one is already open.
        /// </summary>
        public static void ShowMdiChildForm<T>(this Form owner)
            where T : Form, new()
        {
            T form = FindOpenedForm<T>();
            if (form == null)
            {
                form = new T();
                form.MdiParent = AppFacade.Facade.MainForm;
                form.StartPosition = FormStartPosition.CenterScreen;
                form.WindowState = FormWindowState.Maximized;
                form.Show();
            }
            else
            {
                // Already open: bring to front and restore if minimized.
                form.Activate();
                if (form.WindowState == FormWindowState.Minimized)
                {
                    form.WindowState = FormWindowState.Normal;
                }
            }
        }
        /// <summary>
        /// Shows an MDI child form built by <paramref name="creator"/> unless one
        /// matching <paramref name="predicate"/> is already open (then activates it).
        /// <paramref name="action"/>, if given, runs after the form closes.
        /// </summary>
        public static void ShowMdiChildForm<T>(this Form owner, Func<T> creator, Predicate<T> predicate = null, Action<T> action = null)
            where T : Form
        {
            T form = FindOpenedForm(predicate);
            if (form == default(T))
            {
                form = creator();
                form.MdiParent = AppFacade.Facade.MainForm;
                form.StartPosition = FormStartPosition.CenterScreen;
                form.WindowState = FormWindowState.Maximized;
                form.Show();
                if (action != null)
                {
                    form.FormClosed += (sender, e) =>
                        {
                            // Skip the callback if the owner is already gone, and
                            // defer it through the UI message loop so it runs after
                            // the close sequence completes.
                            // NOTE(review): SynchronizationContext.Current could be
                            // null off the UI thread — presumably always called on it.
                            if (owner.IsDisposed == false)
                            {
                                WindowsFormsSynchronizationContext.Current.Post(o => action((T)o), sender);
                            }
                        };
                }
            }
            else
            {
                form.Activate();
                if (form.WindowState == FormWindowState.Minimized)
                {
                    form.WindowState = FormWindowState.Normal;
                }
            }
        }
        /// <summary>
        /// Shows <paramref name="form"/> as a non-modal form owned by
        /// <paramref name="owner"/>; <paramref name="action"/> runs on close.
        /// </summary>
        public static void ShowNonModalForm<T>(this Form owner, T form, Action<T> action = null)
            where T : Form
        {
            owner.AddOwnedForm(form);
            form.Show();
            if (action != null)
            {
                form.FormClosed += (sender, e) => action((T)sender);
            }
        }
        /// <summary>
        /// Returns the first owned form of type <typeparamref name="T"/> matching
        /// <paramref name="predicate"/>, or null if none is open.
        /// </summary>
        public static T FindNonModalForm<T>(this Form owner, Predicate<T> predicate) where T : Form
        {
            foreach (var form in owner.OwnedForms)
            {
                T tForm = form as T;
                if (tForm != null && predicate(tForm))
                {
                    return (T)form;
                }
            }
            return default(T);
        }
        /// <summary>
        /// Shows a non-modal owned form built by <paramref name="creator"/> unless
        /// one matching <paramref name="predicate"/> already exists (then activates it).
        /// </summary>
        public static void ShowNonModalForm<T>(this Form owner, Func<T> creator, Predicate<T> predicate, Action<T> action = null)
            where T : Form
        {
            T form = owner.FindNonModalForm(predicate);
            if (form == default(T))
            {
                form = creator();
                owner.AddOwnedForm(form);
                form.StartPosition = FormStartPosition.CenterScreen;
                form.Show();
                if (action != null)
                {
                    form.FormClosed += (sender, e) => action((T)sender);
                }
            }
            else
            {
                form.Activate();
                if (form.WindowState == FormWindowState.Minimized)
                {
                    form.WindowState = FormWindowState.Normal;
                }
            }
        }
        /// <summary>
        /// Shows a fixed, centered modal dialog of type <typeparamref name="T"/>;
        /// <paramref name="beforeShow"/> can customize it first.
        /// </summary>
        public static DialogResult ShowDialogForm<T>(Action<T> beforeShow = null)
            where T : Form, new()
        {
            T form = new T();
            form.StartPosition = FormStartPosition.CenterScreen;
            form.MaximizeBox = false;
            form.MinimizeBox = false;
            form.FormBorderStyle = FormBorderStyle.FixedDialog;
            if (beforeShow != null)
            {
                beforeShow(form);
            }
            return form.ShowDialog();
        }
        /// <summary>
        /// Returns the first open MDI child of type <typeparamref name="T"/>
        /// (optionally matching <paramref name="predicate"/>), or null.
        /// </summary>
        public static T FindOpenedForm<T>(Predicate<T> predicate = null) where T : Form
        {
            foreach (Form form in AppFacade.Facade.MainForm.MdiChildren)
            {
                T tForm = form as T;
                if (tForm != null &&
                    (predicate == null || predicate(tForm)))
                {
                    return tForm;
                }
            }
            return default(T);
        }
    }
}
| apache-2.0 |
balanced/omnibus-balanced | config/software/gfortran.rb | 2517 | #
# Copyright:: Copyright (c) 2014 Balanced, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Omnibus software definition: builds gfortran from the GCC 4.8.2 sources into
# the embedded prefix, with only the C and Fortran front ends enabled.
name "gfortran"
version "4.8.2"
# GCC's build prerequisites, all provided from the embedded prefix.
dependency "gmp"
dependency "mpc"
dependency "mpfr"
dependency "cloog"
dependency "isl"
source url: "http://ftpmirror.gnu.org/gcc/gcc-4.8.2/gcc-4.8.2.tar.bz2",
md5: "a3d7d63b9cb6b6ea049469a0c4a43c9d"
relative_path "gcc-build"
# Point compiler/linker search paths and PATH at the embedded prefix so the
# bundled gmp/mpfr/mpc/cloog/isl are picked up instead of system copies.
configure_env = {
"LDFLAGS" => [
"-Wl,-rpath #{install_dir}/embedded/lib",
"-L#{install_dir}/embedded/lib",
"-I#{install_dir}/embedded/include"].join(" "),
"CFLAGS" => "-L#{install_dir}/embedded/lib -I#{install_dir}/embedded/include",
"PATH" => "#{install_dir}/embedded/bin:#{ENV['PATH']}"
}
build do
block do
# GCC is built out of tree: move the unpacked sources under the gcc-build
# working directory and configure from there.
FileUtils.mkdir_p(File.join(Omnibus.config.source_dir, "gcc-build"))
FileUtils.move(File.join(Omnibus.config.source_dir, "gcc-#{version}/"),
File.join(Omnibus.config.source_dir, "gcc-build/"))
# Added by Mahmoud:
# - This is a quick hack to try to get all the paths to work
# correctly. For some reason, even when passing linker flags down
# stuff gets linked to system path. I don't know how to fix.
FileUtils.ln_s(
"#{install_dir}/embedded/lib",
"#{install_dir}/embedded/lib64",
:force => true
)
end
command(["./gcc-#{version}/configure",
"--prefix=#{install_dir}/embedded",
"--enable-languages=c,fortran",
"--with-gmp=#{install_dir}/embedded",
"--with-mpfr=#{install_dir}/embedded",
"--with-mpc=#{install_dir}/embedded",
"--with-cloog=#{install_dir}/embedded",
"--with-isl=#{install_dir}/embedded",
"--enable-checking=release",
"--disable-bootstrap",
"--disable-multilib",
"--disable-build-poststage1-with-cxx",
"--disable-libstdcxx-pch"].join(" "),
env: configure_env)
command "make", env: configure_env
# NOTE(review): the check/install steps run without configure_env — verify
# this is intentional and not relying on inherited shell state.
command "make check-fortran"
command "make install"
end
| apache-2.0 |
atricore/node-uma | lib/resourcesetRead.js | 2772 | /**
* Copyright 2015-present Atricore Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var error = require('./error'),
runner = require('./runner');
module.exports = ResourceSetRead;
/**
 * Ordered list of pipeline steps executed by the runner for a resource set
 * read: validate the bearer-token client, load the resource set, then write
 * the JSON response.
 *
 * @type {Array}
 */
var fns = [
checkClient,
getResourceSet,
sendResponse
];
/**
 * ResourceSet Read
 *
 * Wires up the state shared by the pipeline steps and hands control to the
 * runner, which executes the steps in `fns` in order.
 *
 * @param {Object} config Instance of OAuth object
 * @param {Object} req
 * @param {Object} res
 * @param {Function} next
 */
function ResourceSetRead(config, req, res, next) {
    this.req = req;
    this.res = res;
    this.config = config;
    this.model = config.model;
    this.now = new Date();

    runner(fns, this, next);
}
/**
 * Check extracted client against model
 *
 * Looks up the client referenced by the bearer token; fails the pipeline on a
 * model error or an unknown client, otherwise exposes the validated client on
 * the request.
 *
 * @param {Function} done
 * @this OAuth
 */
function checkClient(done) {
    var self = this;
    var clientId = this.req.oauth.bearerToken.client;

    this.model.getClient(clientId, null, function (err, client) {
        if (err) {
            return done(error('server_error', false, err));
        }
        if (!client) {
            return done(error('invalid_client', 'Client credentials are invalid'));
        }

        // Expose validated client
        self.req.oauth.client = client;
        return done();
    });
}
/**
 * Fetches the supplied resource set
 *
 * Reads the resource set id from the route params, validates its presence,
 * loads the resource set from the model and exposes it on the request for
 * sendResponse.
 *
 * @param {Function} done
 * @this OAuth
 */
function getResourceSet(done) {
    var self = this;
    var id = this.req.params.id;

    if (!id) {
        return done(error('missing_required_fields',
            'Resource request was missing one or more required fields'));
    }

    this.model.getResourceSet(id, function (err, rs) {
        // Bug fix: also reject when the model returns no error but no resource
        // set — without this guard sendResponse dereferences `undefined`.
        if (err || !rs) {
            return done(error('invalid_resource_set_requested', 'Invalid Resource Set has been requested'));
        }
        self.req.resourceset = rs;
        done();
    });
}
/**
 * Return retrieved resource set.
 *
 * Disables caching, sets a 200 code and writes the resource set as JSONP.
 * The pipeline only continues when configured to do so.
 *
 * @param {Function} done
 * @this OAuth
 */
function sendResponse(done) {
    var rs = this.req.resourceset;

    this.res.set({'Cache-Control': 'no-store', 'Pragma': 'no-cache'});
    this.res.code = 200;

    var payload = {
        '_id': rs.id,
        'name': rs.name,
        'icon_uri': rs.iconUri,
        'type': rs.type,
        'scopes': rs.scopes,
        'uri': rs.uri
    };
    this.res.jsonp(payload);

    if (this.config.continueAfterResponse) {
        done();
    }
}
| apache-2.0 |
codelibs/cl-struts | src/share/org/apache/struts/tiles/actions/ViewDefinitionsAction.java | 2839 | /*
* $Id: ViewDefinitionsAction.java 54929 2004-10-16 16:38:42Z germuska $
*
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.struts.tiles.actions;
import java.io.PrintWriter;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts.action.Action;
import org.apache.struts.action.ActionForm;
import org.apache.struts.action.ActionForward;
import org.apache.struts.action.ActionMapping;
import org.apache.struts.tiles.DefinitionsFactory;
import org.apache.struts.tiles.TilesUtil;
/**
* <p>An <strong>Action</strong> that writes the
* definitions of the Tiles factory.
* Useful to check what is effectivly loaded in a
* Tiles factory
*/
public class ViewDefinitionsAction extends Action {

    /**
     * Writes the string form of the Tiles definitions factory to the response
     * as plain text, or a "FAIL - ..." line when the factory cannot be read.
     *
     * @param mapping The ActionMapping used to select this instance
     * @param form The optional ActionForm bean for this request (if any)
     * @param request The HTTP request we are processing
     * @param response The HTTP response we are creating
     *
     * @exception Exception if the application business logic throws
     *  an exception
     * @since Struts 1.1
     */
    public ActionForward execute(ActionMapping mapping,
                                 ActionForm form,
                                 HttpServletRequest request,
                                 HttpServletResponse response)
        throws Exception
    {
        response.setContentType("text/plain");
        PrintWriter out = response.getWriter();
        try {
            ServletContext servletContext = getServlet().getServletContext();
            DefinitionsFactory definitionsFactory =
                TilesUtil.getDefinitionsFactory(request, servletContext);
            out.println(definitionsFactory.toString());
        } catch (Exception e) {
            // Report the failure in the response body and the servlet log;
            // the response itself still completes normally.
            out.println("FAIL - " + e.toString());
            getServlet().log("ReloadAction", e);
        }
        out.flush();
        out.close();
        return (null);
    }
}
| apache-2.0 |
bigtester/automation-test-engine | org.bigtester.ate.core/src/main/java/org/bigtester/ate/xmlschema/AlertDialogFindInFocusBeanDefinitionParser.java | 2289 | /*******************************************************************************
* ATE, Automation Test Engine
*
* Copyright 2014, Montreal PROT, or individual contributors as
* indicated by the @author tags or express copyright attribution
* statements applied by the authors. All third-party contributions are
* distributed under license by Montreal PROT.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package org.bigtester.ate.xmlschema;
import org.bigtester.ate.GlobalUtils;
import org.bigtester.ate.model.page.elementfind.AlertDialogFindCurrentInFocus;
import org.eclipse.jdt.annotation.Nullable;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.beans.factory.support.BeanDefinitionBuilder;
import org.springframework.beans.factory.xml.AbstractBeanDefinitionParser;
import org.springframework.beans.factory.xml.ParserContext;
import org.w3c.dom.Element;
// TODO: Auto-generated Javadoc
/**
* This class SimpleDateFormatBeanDefinitionParser defines ....
*
* @author Peidong Hu
*
*/
public class AlertDialogFindInFocusBeanDefinitionParser extends
AbstractBeanDefinitionParser {
/**
* {@inheritDoc}
*/
@Override
protected @Nullable AbstractBeanDefinition parseInternal(@Nullable Element element,
@Nullable ParserContext parserContext) {
// Here we parse the Spring elements such as < property>
if (parserContext==null || element == null ) throw GlobalUtils.createNotInitializedException("element and parserContext");
BeanDefinitionBuilder factory = BeanDefinitionBuilder.rootBeanDefinition(AlertDialogFindCurrentInFocus.class);
return factory.getBeanDefinition();
}
}
| apache-2.0 |
miekg/coredns | plugin/cache/handler.go | 4987 | package cache
import (
"context"
"math"
"time"
"github.com/coredns/coredns/plugin"
"github.com/coredns/coredns/plugin/metrics"
"github.com/coredns/coredns/request"
"github.com/miekg/dns"
)
// ServeDNS implements the plugin.Handler interface.
func (c *Cache) ServeDNS(ctx context.Context, w dns.ResponseWriter, r *dns.Msg) (int, error) {
rc := r.Copy() // We potentially modify r, to prevent other plugins from seeing this (r is a pointer), copy r into rc.
state := request.Request{W: w, Req: rc}
do := state.Do()
// Queries outside every configured zone bypass the cache entirely.
zone := plugin.Zones(c.Zones).Matches(state.Name())
if zone == "" {
return plugin.NextOrFailure(c.Name(), c.Next, ctx, w, rc)
}
now := c.now().UTC()
server := metrics.WithServer(ctx)
// On cache miss, if the request has the OPT record and the DO bit set we leave the message as-is. If there isn't a DO bit
// set we will modify the request to _add_ one. This means we will always do DNSSEC lookups on cache misses.
// When writing to cache, any DNSSEC RRs in the response are written to cache with the response.
// When sending a response to a non-DNSSEC client, we remove DNSSEC RRs from the response. We use a 2048 buffer size, which is
// less than 4096 (and older default) and more than 1024 which may be too small. We might need to tweaks this
// value to be smaller still to prevent UDP fragmentation?
ttl := 0
i := c.getIgnoreTTL(now, state, server)
if i != nil {
ttl = i.ttl(now)
}
// Miss: forward the query; the wrapping ResponseWriter caches the answer.
if i == nil {
crr := &ResponseWriter{ResponseWriter: w, Cache: c, state: state, server: server, do: do}
return c.doRefresh(ctx, state, crr)
}
if ttl < 0 {
// Stale hit (allowed by getIgnoreTTL's staleUpTo check): serve it with a
// zero TTL and refresh the entry in the background.
servedStale.WithLabelValues(server).Inc()
// Adjust the time to get a 0 TTL in the reply built from a stale item.
now = now.Add(time.Duration(ttl) * time.Second)
cw := newPrefetchResponseWriter(server, state, c)
go c.doPrefetch(ctx, state, cw, i, now)
} else if c.shouldPrefetch(i, now) {
// Hot entry close to expiry: refresh in the background while still
// answering from cache.
cw := newPrefetchResponseWriter(server, state, c)
go c.doPrefetch(ctx, state, cw, i, now)
}
resp := i.toMsg(r, now, do)
w.WriteMsg(resp)
return dns.RcodeSuccess, nil
}
// doPrefetch refreshes the cached answer for state in the background and
// carries the accumulated hit frequency over into the new cache entry.
func (c *Cache) doPrefetch(ctx context.Context, state request.Request, cw *ResponseWriter, i *item, now time.Time) {
	cachePrefetches.WithLabelValues(cw.server).Inc()
	c.doRefresh(ctx, state, cw)

	// Refreshing discards the old item i together with the frequency counts
	// it gathered so far, so copy them into the freshly stored item.
	fresh := c.exists(state)
	if fresh != nil {
		fresh.Freq.Reset(now, i.Freq.Hits())
	}
}
// doRefresh forwards the query down the plugin chain through cw. The DO bit
// is always set first (see setDo) so DNSSEC records are fetched and cached
// alongside the answer.
func (c *Cache) doRefresh(ctx context.Context, state request.Request, cw *ResponseWriter) (int, error) {
if !state.Do() {
setDo(state.Req)
}
return plugin.NextOrFailure(c.Name(), c.Next, ctx, cw, state.Req)
}
// shouldPrefetch reports whether item i is popular enough and close enough to
// expiry to warrant a background refresh. It also records the current hit in
// i's frequency tracker as a side effect.
func (c *Cache) shouldPrefetch(i *item, now time.Time) bool {
	// A non-positive prefetch setting disables prefetching entirely.
	if c.prefetch <= 0 {
		return false
	}
	i.Freq.Update(c.duration, now)

	// Refresh once the remaining TTL drops to the configured percentage of
	// the original TTL.
	ttlCutoff := int(math.Ceil(float64(c.percentage) / 100 * float64(i.origTTL)))
	return i.Freq.Hits() >= c.prefetch && i.ttl(now) <= ttlCutoff
}
// Name implements the Handler interface. The returned name is also passed to
// plugin.NextOrFailure when handing queries down the chain.
func (c *Cache) Name() string { return "cache" }
// get returns a still-valid cached item for the request, consulting the
// denial cache before the positive cache, and records hit/miss metrics.
func (c *Cache) get(now time.Time, state request.Request, server string) (*item, bool) {
	k := hash(state.Name(), state.QType())

	if raw, ok := c.ncache.Get(k); ok {
		if itm := raw.(*item); itm.ttl(now) > 0 {
			cacheHits.WithLabelValues(server, Denial).Inc()
			return itm, true
		}
	}

	if raw, ok := c.pcache.Get(k); ok {
		if itm := raw.(*item); itm.ttl(now) > 0 {
			cacheHits.WithLabelValues(server, Success).Inc()
			return itm, true
		}
	}

	cacheMisses.WithLabelValues(server).Inc()
	return nil, false
}
// getIgnoreTTL returns a cached item even past its TTL, as long as it is
// either still fresh or within the configured staleUpTo window; otherwise nil.
func (c *Cache) getIgnoreTTL(now time.Time, state request.Request, server string) *item {
	k := hash(state.Name(), state.QType())

	// servable decides whether an entry may still be handed out: its TTL has
	// not expired, or serving stale is enabled and the entry has not been
	// stale for longer than staleUpTo.
	servable := func(raw interface{}) *item {
		itm := raw.(*item)
		ttl := itm.ttl(now)
		if ttl > 0 || (c.staleUpTo > 0 && -ttl < int(c.staleUpTo.Seconds())) {
			return itm
		}
		return nil
	}

	if raw, ok := c.ncache.Get(k); ok {
		if itm := servable(raw); itm != nil {
			cacheHits.WithLabelValues(server, Denial).Inc()
			return itm
		}
	}
	if raw, ok := c.pcache.Get(k); ok {
		if itm := servable(raw); itm != nil {
			cacheHits.WithLabelValues(server, Success).Inc()
			return itm
		}
	}

	cacheMisses.WithLabelValues(server).Inc()
	return nil
}
// exists returns the cached item for the request regardless of TTL, checking
// the denial cache first, or nil when neither cache holds an entry.
func (c *Cache) exists(state request.Request) *item {
	k := hash(state.Name(), state.QType())
	if raw, ok := c.ncache.Get(k); ok {
		return raw.(*item)
	}
	if raw, ok := c.pcache.Get(k); ok {
		return raw.(*item)
	}
	return nil
}
// setDo sets the DO bit and UDP buffer size in the message m, first adding an
// OPT RR when the message does not yet carry one.
func setDo(m *dns.Msg) {
	opt := m.IsEdns0()
	if opt == nil {
		opt = &dns.OPT{Hdr: dns.RR_Header{Name: ".", Rrtype: dns.TypeOPT}}
		m.Extra = append(m.Extra, opt)
	}
	opt.SetDo()
	opt.SetUDPSize(defaultUDPBufSize)
}
// defaultUDPBufSize is the EDNS0 buffer size the cache plugin advertises on
// outgoing requests that don't have an OPT RR.
const defaultUDPBufSize = 2048
| apache-2.0 |
GuardTime/ksi-net-sdk | ksi-net-api-test/Signature/Verification/Rule/SignaturePublicationRecordPublicationHashRuleTests.cs | 2808 | /*
* Copyright 2013-2018 Guardtime, Inc.
*
* This file is part of the Guardtime client SDK.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES, CONDITIONS, OR OTHER LICENSES OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
* "Guardtime" and "KSI" are trademarks or registered trademarks of
* Guardtime, Inc., and no license to trademarks is granted; Guardtime
* reserves and retains all trademark rights.
*/
using Guardtime.KSI.Signature.Verification;
using Guardtime.KSI.Signature.Verification.Rule;
using Guardtime.KSI.Test.Properties;
using NUnit.Framework;
namespace Guardtime.KSI.Test.Signature.Verification.Rule
{
[TestFixture]
public class SignaturePublicationRecordPublicationHashRuleTests : RuleTestsBase
{
// The rule exercised by the RuleTestsBase-driven tests below.
public override VerificationRule Rule => new SignaturePublicationRecordPublicationHashRule();
[Test]
public void TestSignatureMissingCalendarHashChain()
{
// A signature that has a publication record but no calendar hash chain.
TestSignatureMissingCalendarHashChain(new TestKsiSignature()
{
PublicationRecord = TestUtil.GetSignature(Resources.KsiSignature_Ok_With_Publication_Record).PublicationRecord
});
}
[Test]
public void TestSignatureMissingPublicationRecord()
{
// Check signature without publication record
CreateSignatureAndVerify(Resources.KsiSignature_Ok, VerificationResultCode.Ok);
}
[Test]
public void TestRfc3161SignaturePublicationRecordHash()
{
// Check legacy signature with publication record
CreateSignatureAndVerify(Resources.KsiSignature_Legacy_Ok_With_Publication_Record, VerificationResultCode.Ok);
}
[Test]
public void TestSignaturePublicationRecordHash()
{
// Check signature with publication record
CreateSignatureAndVerify(Resources.KsiSignature_Ok_With_Publication_Record, VerificationResultCode.Ok);
}
[Test]
public void TestSignatureInvalidPublicationRecordHash()
{
// Check invalid signature with invalid publication record
CreateSignatureAndVerify(Resources.KsiSignature_Invalid_With_Invalid_Publication_Record_Hash, VerificationResultCode.Fail, VerificationError.Int09);
}
}
} | apache-2.0 |
sryza/spark | core/src/main/scala/org/apache/spark/deploy/worker/CommandUtils.scala | 3413 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.worker
import java.io.{File, FileOutputStream, InputStream, IOException}
import java.lang.System._
import org.apache.spark.Logging
import org.apache.spark.deploy.Command
import org.apache.spark.util.Utils
/**
 * Utilities for running commands with the spark classpath.
 */
private[spark]
object CommandUtils extends Logging {
// Builds the full java command line: the java executable (from JAVA_HOME when
// available), JVM options, the main class, then the command's own arguments.
def buildCommandSeq(command: Command, memory: Int, sparkHome: String): Seq[String] = {
val runner = getEnv("JAVA_HOME", command).map(_ + "/bin/java").getOrElse("java")
// SPARK-698: do not call the run.cmd script, as process.destroy()
// fails to kill a process tree on Windows
Seq(runner) ++ buildJavaOpts(command, memory, sparkHome) ++ Seq(command.mainClass) ++
command.arguments
}
// Looks up key first in the command's environment map, then in this
// process's own environment.
private def getEnv(key: String, command: Command): Option[String] =
command.environment.get(key).orElse(Option(System.getenv(key)))
/**
 * Attention: this must always be aligned with the environment variables in the run scripts and
 * the way the JAVA_OPTS are assembled there.
 */
def buildJavaOpts(command: Command, memory: Int, sparkHome: String): Seq[String] = {
val libraryOpts = getEnv("SPARK_LIBRARY_PATH", command)
.map(p => List("-Djava.library.path=" + p))
.getOrElse(Nil)
// SPARK_JAVA_OPTS from this process's environment only...
val workerLocalOpts = Option(getenv("SPARK_JAVA_OPTS"))
.map(Utils.splitCommandString).getOrElse(Nil)
// ...and from the command's environment (falling back to the process env).
val userOpts = getEnv("SPARK_JAVA_OPTS", command).map(Utils.splitCommandString).getOrElse(Nil)
val memoryOpts = Seq(s"-Xms${memory}M", s"-Xmx${memory}M")
// Figure out our classpath with the external compute-classpath script
val ext = if (System.getProperty("os.name").startsWith("Windows")) ".cmd" else ".sh"
val classPath = Utils.executeAndGetOutput(
Seq(sparkHome + "/bin/compute-classpath" + ext),
extraEnvironment=command.environment)
Seq("-cp", classPath) ++ libraryOpts ++ workerLocalOpts ++ userOpts ++ memoryOpts
}
/** Spawn a thread that will redirect a given stream to a file (appending). */
def redirectStream(in: InputStream, file: File) {
val out = new FileOutputStream(file, true)
// TODO: It would be nice to add a shutdown hook here that explains why the output is
// terminating. Otherwise if the worker dies the executor logs will silently stop.
new Thread("redirect output to " + file) {
override def run() {
try {
Utils.copyStream(in, out, true)
} catch {
case e: IOException =>
logInfo("Redirection to " + file + " closed: " + e.getMessage)
}
}
}.start()
}
}
| apache-2.0 |
welterde/ewok | com/planet_ink/coffee_mud/Commands/NoANSI.java | 2368 | package com.planet_ink.coffee_mud.Commands;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2010 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings("unchecked")
public class NoANSI extends StdCommand
{
    public NoANSI(){}

    private String[] access={"NOANSI","NOCOLOR","NOCOLOUR"};
    public String[] getAccessWords(){return access;}

    /**
     * Turns ANSI colour off for the invoking player: clears the account flag
     * (when an account exists), clears the MOB's ANSI attribute bit, and
     * disables both telnet ANSI modes on the session. Monsters are ignored.
     */
    public boolean execute(MOB mob, Vector commands, int metaFlags)
        throws java.io.IOException
    {
        if(mob.isMonster())
            return false;

        PlayerAccount acct = (mob.playerStats()==null) ? null : mob.playerStats().getAccount();
        if(acct != null)
            acct.setFlag(PlayerAccount.FLAG_ANSI, false);

        if(CMath.bset(mob.getBitmap(),MOB.ATT_ANSI))
        {
            mob.setBitmap(CMath.unsetb(mob.getBitmap(),MOB.ATT_ANSI));
            mob.tell("ANSI colour disabled.\n\r");
        }
        else
            mob.tell("ANSI is already disabled.\n\r");

        mob.session().setClientTelnetMode(Session.TELNET_ANSI,false);
        mob.session().setServerTelnetMode(Session.TELNET_ANSI,false);
        return false;
    }

    public boolean canBeOrdered(){return true;}
}
| apache-2.0 |
mintzhao/yoyo | pkg/conf/conf_test.go | 3012 | // Copyright © 2017 mintzhao <mint.zhao.chiu@gmail.com>
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package conf
import (
"os"
"path/filepath"
"testing"
"time"
"github.com/spf13/viper"
)
// TestInitConfig checks that InitConfig rejects an empty path and accepts an
// existing (empty) YAML file.
func TestInitConfig(t *testing.T) {
	defer viper.Reset()

	if err := InitConfig(""); err == nil {
		t.Fail()
	}

	tmpFilepath := filepath.Join(os.TempDir(), "yoyo_pkg_conf.yml")
	f, err := os.Create(tmpFilepath)
	defer os.Remove(tmpFilepath)
	if err != nil {
		t.Error(err)
	}
	// Bug fix: close the handle right away — the original leaked the file
	// descriptor, which also makes the deferred os.Remove fail on Windows.
	if f != nil {
		f.Close()
	}

	if err := InitConfig(tmpFilepath); err != nil {
		t.Error(err)
	}
}
// TestGet: a set key is returned non-nil, an unset key returns nil.
func TestGet(t *testing.T) {
defer viper.Reset()
viper.Set("conf", "string")
if Get("conf") == nil {
t.Fail()
}
if Get("conf1") != nil {
t.Fail()
}
}
// TestGetString: round-trips a string value.
func TestGetString(t *testing.T) {
defer viper.Reset()
viper.Set("conf", "string")
val := GetString("conf")
if val == "" {
t.Fail()
}
if val != "string" {
t.Fail()
}
}
// TestGetBool: round-trips a boolean value.
func TestGetBool(t *testing.T) {
defer viper.Reset()
viper.Set("conf", true)
if GetBool("conf") == false {
t.Fail()
}
}
// TestGetInt: round-trips an int value.
func TestGetInt(t *testing.T) {
defer viper.Reset()
viper.Set("conf", int(1))
if GetInt("conf") != int(1) {
t.Fail()
}
}
// TestGetInt64: round-trips an int64 value.
func TestGetInt64(t *testing.T) {
defer viper.Reset()
viper.Set("conf", int64(1))
if GetInt64("conf") != int64(1) {
t.Fail()
}
}
// TestGetFloat64: round-trips a float64 value.
func TestGetFloat64(t *testing.T) {
defer viper.Reset()
viper.Set("conf", 1.0)
if GetFloat64("conf") != float64(1.0) {
t.Fail()
}
}
// TestGetTime: a stored time.Time is returned unchanged.
func TestGetTime(t *testing.T) {
defer viper.Reset()
now := time.Now()
viper.Set("conf", now)
if GetTime("conf").Sub(now) != time.Duration(0) {
t.Fail()
}
}
// TestGetDuration: a duration string is parsed into a time.Duration.
func TestGetDuration(t *testing.T) {
defer viper.Reset()
viper.Set("conf", "10s")
if GetDuration("conf") != time.Duration(10)*time.Second {
t.Fail()
}
}
// TestGetStringSlice: round-trips a []string preserving order.
func TestGetStringSlice(t *testing.T) {
defer viper.Reset()
viper.Set("conf", []string{"string1", "string2"})
if ss := GetStringSlice("conf"); ss == nil || ss[0] != "string1" {
t.Fail()
}
}
// TestGetStringMap: a map[string]interface{} is returned non-nil.
func TestGetStringMap(t *testing.T) {
defer viper.Reset()
viper.Set("conf", map[string]interface{}{"map1": "map1"})
if GetStringMap("conf") == nil {
t.Fail()
}
}
// TestGetStringMapString: round-trips a map[string]string.
func TestGetStringMapString(t *testing.T) {
defer viper.Reset()
viper.Set("conf", map[string]string{"map1": "map1"})
if m := GetStringMapString("conf"); m == nil || m["map1"] != "map1" {
t.Fail()
}
}
// TestGetStringMapStringSlice: round-trips a map[string][]string.
func TestGetStringMapStringSlice(t *testing.T) {
	defer viper.Reset()

	viper.Set("conf", map[string][]string{"map1": {"a", "b"}})
	// Bug fix: this test previously called GetStringMapString instead of the
	// function it is named after, so GetStringMapStringSlice was never tested.
	if m := GetStringMapStringSlice("conf"); m == nil || len(m["map1"]) != 2 {
		t.Fail()
	}
}
| apache-2.0 |
hirayama968/xx | Source/Scene/Expression.js | 81842 | define([
'../Core/Cartesian2',
'../Core/Cartesian3',
'../Core/Cartesian4',
'../Core/Check',
'../Core/Color',
'../Core/defined',
'../Core/defineProperties',
'../Core/DeveloperError',
'../Core/isArray',
'../Core/Math',
'../Core/RuntimeError',
'../ThirdParty/jsep',
'./ExpressionNodeType'
], function(
Cartesian2,
Cartesian3,
Cartesian4,
Check,
Color,
defined,
defineProperties,
DeveloperError,
isArray,
CesiumMath,
RuntimeError,
jsep,
ExpressionNodeType) {
'use strict';
/**
* An expression for a style applied to a {@link Cesium3DTileset}.
* <p>
* Evaluates an expression defined using the
* {@link https://github.com/AnalyticalGraphicsInc/3d-tiles/tree/master/Styling|3D Tiles Styling language}.
* </p>
* <p>
* Implements the {@link StyleExpression} interface.
* </p>
*
* @alias Expression
* @constructor
*
* @param {String} [expression] The expression defined using the 3D Tiles Styling language.
* @param {Object} [defines] Defines in the style.
*
* @example
* var expression = new Cesium.Expression('(regExp("^Chest").test(${County})) && (${YearBuilt} >= 1970)');
* expression.evaluate(frameState, feature); // returns true or false depending on the feature's properties
*
* @example
* var expression = new Cesium.Expression('(${Temperature} > 90) ? color("red") : color("white")');
* expression.evaluateColor(frameState, feature, result); // returns a Cesium.Color object
*/
function Expression(expression, defines) {
    //>>includeStart('debug', pragmas.debug);
    Check.typeOf.string('expression', expression);
    //>>includeEnd('debug');

    // Keep the raw expression for the `expression` getter before any rewriting.
    this._expression = expression;

    var processed = replaceDefines(expression, defines);
    processed = replaceVariables(removeBackslashes(processed));

    // customize jsep operators
    jsep.addBinaryOp('=~', 0);
    jsep.addBinaryOp('!~', 0);

    var ast;
    try {
        ast = jsep(processed);
    } catch (e) {
        // Surface parse failures as a Cesium RuntimeError.
        throw new RuntimeError(e);
    }

    this._runtimeAst = createRuntimeAst(this, ast);
}
defineProperties(Expression.prototype, {
/**
 * Gets the expression defined in the 3D Tiles Styling language.
 * This is the raw string passed to the constructor, before defines and
 * variables are substituted.
 *
 * @memberof Expression.prototype
 *
 * @type {String}
 * @readonly
 *
 * @default undefined
 */
expression : {
get : function() {
return this._expression;
}
}
});
// Scratch storage manager while evaluating deep expressions.
// For example, an expression like dot(vec4(${red}), vec4(${green}) * vec4(${blue}) requires 3 scratch Cartesian4's
// The pools grow on demand and are recycled by calling reset() before each evaluation.
var scratchStorage = {
arrayIndex : 0,
arrayArray : [[]],
cartesian2Index : 0,
cartesian3Index : 0,
cartesian4Index : 0,
cartesian2Array : [new Cartesian2()],
cartesian3Array : [new Cartesian3()],
cartesian4Array : [new Cartesian4()],
// Rewind all pools; previously handed-out scratch objects may be reused after this.
reset : function() {
this.arrayIndex = 0;
this.cartesian2Index = 0;
this.cartesian3Index = 0;
this.cartesian4Index = 0;
},
// Return an empty scratch array, growing the pool when exhausted.
getArray : function() {
if (this.arrayIndex >= this.arrayArray.length) {
this.arrayArray.push([]);
}
var array = this.arrayArray[this.arrayIndex++];
array.length = 0;
return array;
},
// Return a scratch Cartesian2 (contents are stale; caller must overwrite).
getCartesian2 : function() {
if (this.cartesian2Index >= this.cartesian2Array.length) {
this.cartesian2Array.push(new Cartesian2());
}
return this.cartesian2Array[this.cartesian2Index++];
},
// Return a scratch Cartesian3 (contents are stale; caller must overwrite).
getCartesian3 : function() {
if (this.cartesian3Index >= this.cartesian3Array.length) {
this.cartesian3Array.push(new Cartesian3());
}
return this.cartesian3Array[this.cartesian3Index++];
},
// Return a scratch Cartesian4 (contents are stale; caller must overwrite).
getCartesian4 : function() {
if (this.cartesian4Index >= this.cartesian4Array.length) {
this.cartesian4Array.push(new Cartesian4());
}
return this.cartesian4Array[this.cartesian4Index++];
}
};
/**
* Evaluates the result of an expression, optionally using the provided feature's properties. If the result of
* the expression in the
* {@link https://github.com/AnalyticalGraphicsInc/3d-tiles/tree/master/Styling|3D Tiles Styling language}
* is of type <code>Boolean</code>, <code>Number</code>, or <code>String</code>, the corresponding JavaScript
* primitive type will be returned. If the result is a <code>RegExp</code>, a Javascript <code>RegExp</code>
* object will be returned. If the result is a <code>Cartesian2</code>, <code>Cartesian3</code>, or <code>Cartesian4</code>,
* a {@link Cartesian2}, {@link Cartesian3}, or {@link Cartesian4} object will be returned. If the <code>result</code> argument is
* a {@link Color}, the {@link Cartesian4} value is converted to a {@link Color} and then returned.
*
* @param {FrameState} frameState The frame state.
* @param {Cesium3DTileFeature} feature The feature whose properties may be used as variables in the expression.
* @param {Object} [result] The object onto which to store the result.
* @returns {Boolean|Number|String|RegExp|Cartesian2|Cartesian3|Cartesian4|Color} The result of evaluating the expression.
*/
Expression.prototype.evaluate = function(frameState, feature, result) {
    scratchStorage.reset();
    var value = this._runtimeAst.evaluate(frameState, feature);

    // A Cartesian4 result is reinterpreted as a Color when the caller asks for one.
    if ((value instanceof Cartesian4) && (result instanceof Color)) {
        return Color.fromCartesian4(value, result);
    }

    var isCartesian = (value instanceof Cartesian2) ||
                      (value instanceof Cartesian3) ||
                      (value instanceof Cartesian4);
    if (isCartesian) {
        // Clone out of scratch storage so the caller owns the returned object.
        return value.clone(result);
    }
    return value;
};
/**
 * Evaluates the result of a Color expression, optionally using the provided feature's properties.
 * <p>
 * This is equivalent to {@link Expression#evaluate} but always returns a {@link Color} object.
 * </p>
 *
 * @param {FrameState} frameState The frame state.
 * @param {Cesium3DTileFeature} feature The feature whose properties may be used as variables in the expression.
 * @param {Color} [result] The object in which to store the result
 * @returns {Color} The modified result parameter or a new Color instance if one was not provided.
 */
Expression.prototype.evaluateColor = function(frameState, feature, result) {
    // Reset the scratch pool, evaluate to a Cartesian4, and convert.
    scratchStorage.reset();
    var cartesian = this._runtimeAst.evaluate(frameState, feature);
    return Color.fromCartesian4(cartesian, result);
};
/**
 * Gets the shader function for this expression.
 * Returns undefined if the shader function can't be generated from this expression.
 *
 * @param {String} functionName Name to give to the generated function.
 * @param {String} attributePrefix Prefix that is added to any variable names to access vertex attributes.
 * @param {Object} shaderState Stores information about the generated shader function, including whether it is translucent.
 * @param {String} returnType The return type of the generated function.
 *
 * @returns {String} The shader function.
 *
 * @private
 */
Expression.prototype.getShaderFunction = function(functionName, attributePrefix, shaderState, returnType) {
    var shaderExpression = this.getShaderExpression(attributePrefix, shaderState);
    // Honor the documented contract: when no shader expression can be
    // generated, return undefined instead of emitting a GLSL body that
    // literally reads "return undefined;".
    if (!defined(shaderExpression)) {
        return undefined;
    }
    shaderExpression = returnType + ' ' + functionName + '() \n' +
        '{ \n' +
        '    return ' + shaderExpression + '; \n' +
        '} \n';
    return shaderExpression;
};
/**
 * Gets the shader expression for this expression.
 * Returns undefined if the shader expression can't be generated from this expression.
 *
 * @param {String} attributePrefix Prefix that is added to any variable names to access vertex attributes.
 * @param {Object} shaderState Stores information about the generated shader function, including whether it is translucent.
 *
 * @returns {String} The shader expression.
 *
 * @private
 */
Expression.prototype.getShaderExpression = function(attributePrefix, shaderState) {
    // Delegate to the root of the runtime AST, which recurses over the tree.
    var root = this._runtimeAst;
    return root.getShaderExpression(attributePrefix, shaderState);
};
// Operators accepted by createRuntimeAst; any other operator in the parsed
// AST raises a RuntimeError. '=~' / '!~' are styling-language regex-match
// operators, not JavaScript operators.
var unaryOperators = ['!', '-', '+'];
var binaryOperators = ['+', '-', '*', '/', '%', '===', '!==', '>', '>=', '<', '<=', '&&', '||', '!~', '=~'];
var variableRegex = /\${(.*?)}/g; // Matches ${variable_name}
var backslashRegex = /\\/g;
// Sentinel that temporarily stands in for literal backslashes while the
// expression text is being parsed (see removeBackslashes/replaceBackslashes).
var backslashReplacement = '@#%';
var replacementRegex = /@#%/g;
// Shared scratch Color for _evaluateLiteralColor; valid only until the next use.
var scratchColor = new Color();
// One-argument styling functions, applied componentwise to numbers/vectors
// (except length/normalize, which have their own vector semantics).
var unaryFunctions = {
    abs : getEvaluateUnaryComponentwise(Math.abs),
    sqrt : getEvaluateUnaryComponentwise(Math.sqrt),
    cos : getEvaluateUnaryComponentwise(Math.cos),
    sin : getEvaluateUnaryComponentwise(Math.sin),
    tan : getEvaluateUnaryComponentwise(Math.tan),
    acos : getEvaluateUnaryComponentwise(Math.acos),
    asin : getEvaluateUnaryComponentwise(Math.asin),
    atan : getEvaluateUnaryComponentwise(Math.atan),
    radians : getEvaluateUnaryComponentwise(CesiumMath.toRadians),
    degrees : getEvaluateUnaryComponentwise(CesiumMath.toDegrees),
    sign : getEvaluateUnaryComponentwise(CesiumMath.sign),
    floor : getEvaluateUnaryComponentwise(Math.floor),
    ceil : getEvaluateUnaryComponentwise(Math.ceil),
    round : getEvaluateUnaryComponentwise(Math.round),
    exp : getEvaluateUnaryComponentwise(Math.exp),
    exp2 : getEvaluateUnaryComponentwise(exp2),
    log : getEvaluateUnaryComponentwise(Math.log),
    log2 : getEvaluateUnaryComponentwise(log2),
    fract : getEvaluateUnaryComponentwise(fract),
    length : length,
    normalize: normalize
};
// Two-argument styling functions. The boolean flag allows a scalar second
// argument to be broadcast across the first argument's components.
var binaryFunctions = {
    atan2 : getEvaluateBinaryCommponentwise(Math.atan2, false),
    pow : getEvaluateBinaryCommponentwise(Math.pow, false),
    min : getEvaluateBinaryCommponentwise(Math.min, true),
    max : getEvaluateBinaryCommponentwise(Math.max, true),
    distance : distance,
    dot : dot,
    cross : cross
};
// Three-argument styling functions; the flag allows a scalar third argument.
var ternaryFunctions = {
    clamp : getEvaluateTernaryCommponentwise(CesiumMath.clamp, true),
    mix : getEvaluateTernaryCommponentwise(CesiumMath.lerp, true)
};
// Fractional part in [0, 1), matching GLSL fract(): fract(-1.25) === 0.75.
function fract(number) {
    var wholePart = Math.floor(number);
    return number - wholePart;
}
// Base-2 exponential, matching GLSL exp2().
function exp2(exponent) {
    var base = 2.0;
    return Math.pow(base, exponent);
}
// Base-2 logarithm, matching GLSL log2(); delegated to CesiumMath for
// engines without Math.log2.
function log2(number) {
    return CesiumMath.log2(number);
}
/**
 * Builds an evaluator that applies a one-argument scalar operation
 * componentwise to a number, Cartesian2, Cartesian3, or Cartesian4.
 * Vector results are written into scratch storage.
 * Throws a RuntimeError for any other argument type.
 */
function getEvaluateUnaryComponentwise(operation) {
    return function(call, left) {
        if (typeof left === 'number') {
            return operation(left);
        }
        if (left instanceof Cartesian2) {
            return Cartesian2.fromElements(operation(left.x), operation(left.y), scratchStorage.getCartesian2());
        }
        if (left instanceof Cartesian3) {
            return Cartesian3.fromElements(operation(left.x), operation(left.y), operation(left.z), scratchStorage.getCartesian3());
        }
        if (left instanceof Cartesian4) {
            return Cartesian4.fromElements(operation(left.x), operation(left.y), operation(left.z), operation(left.w), scratchStorage.getCartesian4());
        }
        throw new RuntimeError('Function "' + call + '" requires a vector or number argument. Argument is ' + left + '.');
    };
}
/**
 * Builds an evaluator that applies a two-argument scalar operation
 * componentwise to number / Cartesian2 / Cartesian3 / Cartesian4 arguments
 * of matching types. When allowScalar is true, a plain-number right operand
 * is broadcast across every component of the left operand.
 * Vector results are written into scratch storage.
 * Throws a RuntimeError for any other argument combination.
 * NOTE(review): 'Commponentwise' misspelling kept — renaming would touch
 * the binaryFunctions/ternaryFunctions tables.
 */
function getEvaluateBinaryCommponentwise(operation, allowScalar) {
    return function(call, left, right) {
        // Scalar broadcast path: vector op number.
        if (allowScalar && typeof right === 'number') {
            if (typeof left === 'number') {
                return operation(left, right);
            } else if (left instanceof Cartesian2) {
                return Cartesian2.fromElements(operation(left.x, right), operation(left.y, right), scratchStorage.getCartesian2());
            } else if (left instanceof Cartesian3) {
                return Cartesian3.fromElements(operation(left.x, right), operation(left.y, right), operation(left.z, right), scratchStorage.getCartesian3());
            } else if (left instanceof Cartesian4) {
                return Cartesian4.fromElements(operation(left.x, right), operation(left.y, right), operation(left.z, right), operation(left.w, right), scratchStorage.getCartesian4());
            }
        }
        // Matching-type path: both operands must be the same kind.
        if (typeof left === 'number' && typeof right === 'number') {
            return operation(left, right);
        } else if (left instanceof Cartesian2 && right instanceof Cartesian2) {
            return Cartesian2.fromElements(operation(left.x, right.x), operation(left.y, right.y), scratchStorage.getCartesian2());
        } else if (left instanceof Cartesian3 && right instanceof Cartesian3) {
            return Cartesian3.fromElements(operation(left.x, right.x), operation(left.y, right.y), operation(left.z, right.z), scratchStorage.getCartesian3());
        } else if (left instanceof Cartesian4 && right instanceof Cartesian4) {
            return Cartesian4.fromElements(operation(left.x, right.x), operation(left.y, right.y), operation(left.z, right.z), operation(left.w, right.w), scratchStorage.getCartesian4());
        }
        throw new RuntimeError('Function "' + call + '" requires vector or number arguments of matching types. Arguments are ' + left + ' and ' + right + '.');
    };
}
/**
 * Builds an evaluator that applies a three-argument scalar operation
 * componentwise (e.g. clamp, mix). When allowScalar is true, a plain-number
 * third argument is broadcast across the components of the first two.
 * Vector results are written into scratch storage.
 * Throws a RuntimeError for any other argument combination.
 */
function getEvaluateTernaryCommponentwise(operation, allowScalar) {
    return function(call, left, right, test) {
        // Scalar broadcast path: vector op vector op number.
        if (allowScalar && typeof test === 'number') {
            if (typeof left === 'number' && typeof right === 'number') {
                return operation(left, right, test);
            } else if (left instanceof Cartesian2 && right instanceof Cartesian2) {
                return Cartesian2.fromElements(operation(left.x, right.x, test), operation(left.y, right.y, test), scratchStorage.getCartesian2());
            } else if (left instanceof Cartesian3 && right instanceof Cartesian3) {
                return Cartesian3.fromElements(operation(left.x, right.x, test), operation(left.y, right.y, test), operation(left.z, right.z, test), scratchStorage.getCartesian3());
            } else if (left instanceof Cartesian4 && right instanceof Cartesian4) {
                return Cartesian4.fromElements(operation(left.x, right.x, test), operation(left.y, right.y, test), operation(left.z, right.z, test), operation(left.w, right.w, test), scratchStorage.getCartesian4());
            }
        }
        // Matching-type path: all three operands must be the same kind.
        if (typeof left === 'number' && typeof right === 'number' && typeof test === 'number') {
            return operation(left, right, test);
        } else if (left instanceof Cartesian2 && right instanceof Cartesian2 && test instanceof Cartesian2) {
            return Cartesian2.fromElements(operation(left.x, right.x, test.x), operation(left.y, right.y, test.y), scratchStorage.getCartesian2());
        } else if (left instanceof Cartesian3 && right instanceof Cartesian3 && test instanceof Cartesian3) {
            return Cartesian3.fromElements(operation(left.x, right.x, test.x), operation(left.y, right.y, test.y), operation(left.z, right.z, test.z), scratchStorage.getCartesian3());
        } else if (left instanceof Cartesian4 && right instanceof Cartesian4 && test instanceof Cartesian4) {
            return Cartesian4.fromElements(operation(left.x, right.x, test.x), operation(left.y, right.y, test.y), operation(left.z, right.z, test.z), operation(left.w, right.w, test.w), scratchStorage.getCartesian4());
        }
        throw new RuntimeError('Function "' + call + '" requires vector or number arguments of matching types. Arguments are ' + left + ', ' + right + ', and ' + test + '.');
    };
}
// Styling-language length(): absolute value for numbers, magnitude for vectors.
function length(call, left) {
    if (typeof left === 'number') {
        return Math.abs(left);
    }
    if (left instanceof Cartesian2) {
        return Cartesian2.magnitude(left);
    }
    if (left instanceof Cartesian3) {
        return Cartesian3.magnitude(left);
    }
    if (left instanceof Cartesian4) {
        return Cartesian4.magnitude(left);
    }
    throw new RuntimeError('Function "' + call + '" requires a vector or number argument. Argument is ' + left + '.');
}
// Styling-language normalize(): any number normalizes to 1.0; vectors are
// unit-length copies written into scratch storage.
function normalize(call, left) {
    if (typeof left === 'number') {
        return 1.0;
    }
    if (left instanceof Cartesian2) {
        return Cartesian2.normalize(left, scratchStorage.getCartesian2());
    }
    if (left instanceof Cartesian3) {
        return Cartesian3.normalize(left, scratchStorage.getCartesian3());
    }
    if (left instanceof Cartesian4) {
        return Cartesian4.normalize(left, scratchStorage.getCartesian4());
    }
    throw new RuntimeError('Function "' + call + '" requires a vector or number argument. Argument is ' + left + '.');
}
// Styling-language distance(): absolute difference for numbers, Euclidean
// distance for vectors of the same type.
function distance(call, left, right) {
    var bothNumbers = (typeof left === 'number') && (typeof right === 'number');
    if (bothNumbers) {
        return Math.abs(left - right);
    }
    if (left instanceof Cartesian2 && right instanceof Cartesian2) {
        return Cartesian2.distance(left, right);
    }
    if (left instanceof Cartesian3 && right instanceof Cartesian3) {
        return Cartesian3.distance(left, right);
    }
    if (left instanceof Cartesian4 && right instanceof Cartesian4) {
        return Cartesian4.distance(left, right);
    }
    throw new RuntimeError('Function "' + call + '" requires vector or number arguments of matching types. Arguments are ' + left + ' and ' + right + '.');
}
// Styling-language dot(): plain multiplication for numbers, dot product for
// vectors of the same type.
function dot(call, left, right) {
    var bothNumbers = (typeof left === 'number') && (typeof right === 'number');
    if (bothNumbers) {
        return left * right;
    }
    if (left instanceof Cartesian2 && right instanceof Cartesian2) {
        return Cartesian2.dot(left, right);
    }
    if (left instanceof Cartesian3 && right instanceof Cartesian3) {
        return Cartesian3.dot(left, right);
    }
    if (left instanceof Cartesian4 && right instanceof Cartesian4) {
        return Cartesian4.dot(left, right);
    }
    throw new RuntimeError('Function "' + call + '" requires vector or number arguments of matching types. Arguments are ' + left + ' and ' + right + '.');
}
// Styling-language cross(): defined only for two vec3 (Cartesian3) arguments;
// the result is written into scratch storage.
function cross(call, left, right) {
    var bothVec3 = (left instanceof Cartesian3) && (right instanceof Cartesian3);
    if (!bothVec3) {
        throw new RuntimeError('Function "' + call + '" requires vec3 arguments. Arguments are ' + left + ' and ' + right + '.');
    }
    return Cartesian3.cross(left, right, scratchStorage.getCartesian3());
}
/**
 * A node in the runtime expression tree.
 *
 * @param {Number} type One of the ExpressionNodeType values.
 * @param {*} value Operator string, literal value, or function name,
 *        depending on the node type.
 * @param {Node|Node[]} [left] Left child, single operand, or argument list.
 * @param {Node} [right] Right child, where applicable.
 * @param {Node} [test] Condition child for conditional/ternary nodes.
 */
function Node(type, value, left, right, test) {
    this._type = type;
    this._value = value;
    this._left = left;
    this._right = right;
    this._test = test;
    // Bound to a type-specific evaluator below; starts undefined on purpose.
    this.evaluate = undefined;
    setEvaluateFunction(this);
}
/**
 * Substitutes ${name} define placeholders in an expression string with the
 * parenthesized define value.
 *
 * @param {String} expression The style expression text.
 * @param {Object} [defines] Map of define names to replacement expressions.
 * @returns {String} The expression with all known defines expanded.
 */
function replaceDefines(expression, defines) {
    if (!defined(defines)) {
        return expression;
    }
    for (var key in defines) {
        if (defines.hasOwnProperty(key)) {
            // Guard the define's value, not the concatenated string: the old
            // defined('(' + value + ')') check was always true, so an
            // undefined value was substituted as the literal "(undefined)".
            if (!defined(defines[key])) {
                continue;
            }
            var definePlaceholder = new RegExp('\\$\\{' + key + '\\}', 'g');
            var defineReplace = '(' + defines[key] + ')';
            expression = expression.replace(definePlaceholder, defineReplace);
        }
    }
    return expression;
}
// Swaps literal backslashes for the '@#%' sentinel so jsep does not mangle
// escape sequences while parsing; undone later by replaceBackslashes.
function removeBackslashes(expression) {
    return expression.replace(backslashRegex, backslashReplacement);
}
// Restores literal backslashes that removeBackslashes replaced with the
// '@#%' sentinel.
function replaceBackslashes(expression) {
    return expression.replace(replacementRegex, '\\');
}
/**
 * Rewrites every ${name} placeholder outside of quoted strings to the
 * prefixed identifier czm_name so jsep can parse it as a plain variable.
 * Quoted sections (single or double) are copied through untouched so that
 * placeholder-looking text inside string literals is preserved for
 * evaluation-time substitution.
 *
 * @param {String} expression The expression text.
 * @returns {String} The rewritten expression.
 */
function replaceVariables(expression) {
    var exp = expression;
    var result = '';
    var i = exp.indexOf('${');
    while (i >= 0) {
        // Check if string is inside quotes
        var openSingleQuote = exp.indexOf('\'');
        var openDoubleQuote = exp.indexOf('"');
        var closeQuote;
        if (openSingleQuote >= 0 && openSingleQuote < i) {
            // Placeholder sits after an opening single quote: copy the whole
            // quoted section verbatim and continue after it.
            closeQuote = exp.indexOf('\'', openSingleQuote + 1);
            result += exp.substr(0, closeQuote + 1);
            exp = exp.substr(closeQuote + 1);
            i = exp.indexOf('${');
        } else if (openDoubleQuote >= 0 && openDoubleQuote < i) {
            // Same for a double-quoted section.
            closeQuote = exp.indexOf('"', openDoubleQuote + 1);
            result += exp.substr(0, closeQuote + 1);
            exp = exp.substr(closeQuote + 1);
            i = exp.indexOf('${');
        } else {
            // Unquoted placeholder: emit the text before it, then czm_name.
            result += exp.substr(0, i);
            var j = exp.indexOf('}');
            if (j < 0) {
                throw new RuntimeError('Unmatched {.');
            }
            result += 'czm_' + exp.substr(i + 2, j - (i + 2));
            exp = exp.substr(j + 1);
            i = exp.indexOf('${');
        }
    }
    result += exp;
    return result;
}
/**
 * Converts a jsep literal AST node into a runtime Node.
 * Strings containing ${} placeholders become VARIABLE_IN_STRING nodes and
 * are substituted at evaluation time. Returns undefined for literal types
 * that are not handled.
 */
function parseLiteral(ast) {
    var value = ast.value;
    if (value === null) {
        return new Node(ExpressionNodeType.LITERAL_NULL, null);
    }
    switch (typeof value) {
        case 'boolean':
            return new Node(ExpressionNodeType.LITERAL_BOOLEAN, value);
        case 'number':
            return new Node(ExpressionNodeType.LITERAL_NUMBER, value);
        case 'string':
            if (value.indexOf('${') >= 0) {
                return new Node(ExpressionNodeType.VARIABLE_IN_STRING, value);
            }
            return new Node(ExpressionNodeType.LITERAL_STRING, replaceBackslashes(value));
    }
}
/**
 * Parses a jsep CallExpression into a runtime Node. Handles member calls
 * (regExp().test/exec, toString) and the styling language's built-in
 * constructors (color, rgb/rgba, hsl/hsla, vec2/3/4, regExp), conversions
 * (Boolean, Number, String), class queries, and the unary/binary/ternary
 * math-function tables.
 *
 * @param {String} expression Original expression text (forwarded to child parses).
 * @param {Object} ast The jsep CallExpression node.
 * @returns {Node} The runtime node.
 * @throws {RuntimeError} For unknown functions or wrong argument counts.
 */
function parseCall(expression, ast) {
    var args = ast.arguments;
    var argsLength = args.length;
    var call;
    var val, left, right;
    // Member function calls
    if (ast.callee.type === 'MemberExpression') {
        call = ast.callee.property.name;
        var object = ast.callee.object;
        if (call === 'test' || call === 'exec') {
            // Make sure this is called on a valid type
            if (object.callee.name !== 'regExp') {
                throw new RuntimeError(call + ' is not a function.');
            }
            if (argsLength === 0) {
                // No argument: test() is constant false, exec() constant null.
                if (call === 'test') {
                    return new Node(ExpressionNodeType.LITERAL_BOOLEAN, false);
                }
                return new Node(ExpressionNodeType.LITERAL_NULL, null);
            }
            left = createRuntimeAst(expression, object);
            right = createRuntimeAst(expression, args[0]);
            return new Node(ExpressionNodeType.FUNCTION_CALL, call, left, right);
        } else if (call === 'toString') {
            val = createRuntimeAst(expression, object);
            return new Node(ExpressionNodeType.FUNCTION_CALL, call, val);
        }
        throw new RuntimeError('Unexpected function call "' + call + '".');
    }
    // Non-member function calls
    call = ast.callee.name;
    if (call === 'color') {
        // color() / color(css) / color(css, alpha)
        if (argsLength === 0) {
            return new Node(ExpressionNodeType.LITERAL_COLOR, call);
        }
        val = createRuntimeAst(expression, args[0]);
        if (defined(args[1])) {
            var alpha = createRuntimeAst(expression, args[1]);
            return new Node(ExpressionNodeType.LITERAL_COLOR, call, [val, alpha]);
        }
        return new Node(ExpressionNodeType.LITERAL_COLOR, call, [val]);
    } else if (call === 'rgb' || call === 'hsl') {
        if (argsLength < 3) {
            throw new RuntimeError(call + ' requires three arguments.');
        }
        val = [
            createRuntimeAst(expression, args[0]),
            createRuntimeAst(expression, args[1]),
            createRuntimeAst(expression, args[2])
        ];
        return new Node(ExpressionNodeType.LITERAL_COLOR, call, val);
    } else if (call === 'rgba' || call === 'hsla') {
        if (argsLength < 4) {
            throw new RuntimeError(call + ' requires four arguments.');
        }
        val = [
            createRuntimeAst(expression, args[0]),
            createRuntimeAst(expression, args[1]),
            createRuntimeAst(expression, args[2]),
            createRuntimeAst(expression, args[3])
        ];
        return new Node(ExpressionNodeType.LITERAL_COLOR, call, val);
    } else if (call === 'vec2' || call === 'vec3' || call === 'vec4') {
        // Check for invalid constructors at evaluation time
        val = new Array(argsLength);
        for (var i = 0; i < argsLength; ++i) {
            val[i] = createRuntimeAst(expression, args[i]);
        }
        return new Node(ExpressionNodeType.LITERAL_VECTOR, call, val);
    } else if (call === 'isNaN' || call === 'isFinite') {
        // Zero-argument forms fold to their constant results.
        if (argsLength === 0) {
            if (call === 'isNaN') {
                return new Node(ExpressionNodeType.LITERAL_BOOLEAN, true);
            }
            return new Node(ExpressionNodeType.LITERAL_BOOLEAN, false);
        }
        val = createRuntimeAst(expression, args[0]);
        return new Node(ExpressionNodeType.UNARY, call, val);
    } else if (call === 'isExactClass' || call === 'isClass') {
        if (argsLength < 1 || argsLength > 1) {
            throw new RuntimeError(call + ' requires exactly one argument.');
        }
        val = createRuntimeAst(expression, args[0]);
        return new Node(ExpressionNodeType.UNARY, call, val);
    } else if (call === 'getExactClassName') {
        if (argsLength > 0) {
            throw new RuntimeError(call + ' does not take any argument.');
        }
        return new Node(ExpressionNodeType.UNARY, call);
    } else if (defined(unaryFunctions[call])) {
        if (argsLength !== 1) {
            throw new RuntimeError(call + ' requires exactly one argument.');
        }
        val = createRuntimeAst(expression, args[0]);
        return new Node(ExpressionNodeType.UNARY, call, val);
    } else if (defined(binaryFunctions[call])) {
        if (argsLength !== 2) {
            throw new RuntimeError(call + ' requires exactly two arguments.');
        }
        left = createRuntimeAst(expression, args[0]);
        right = createRuntimeAst(expression, args[1]);
        return new Node(ExpressionNodeType.BINARY, call, left, right);
    } else if (defined(ternaryFunctions[call])) {
        if (argsLength !== 3) {
            throw new RuntimeError(call + ' requires exactly three arguments.');
        }
        left = createRuntimeAst(expression, args[0]);
        right = createRuntimeAst(expression, args[1]);
        var test = createRuntimeAst(expression, args[2]);
        return new Node(ExpressionNodeType.TERNARY, call, left, right, test);
    } else if (call === 'Boolean') {
        if (argsLength === 0) {
            return new Node(ExpressionNodeType.LITERAL_BOOLEAN, false);
        }
        val = createRuntimeAst(expression, args[0]);
        return new Node(ExpressionNodeType.UNARY, call, val);
    } else if (call === 'Number') {
        if (argsLength === 0) {
            return new Node(ExpressionNodeType.LITERAL_NUMBER, 0);
        }
        val = createRuntimeAst(expression, args[0]);
        return new Node(ExpressionNodeType.UNARY, call, val);
    } else if (call === 'String') {
        if (argsLength === 0) {
            return new Node(ExpressionNodeType.LITERAL_STRING, '');
        }
        val = createRuntimeAst(expression, args[0]);
        return new Node(ExpressionNodeType.UNARY, call, val);
    } else if (call === 'regExp') {
        return parseRegex(expression, ast);
    }
    throw new RuntimeError('Unexpected function call "' + call + '".');
}
/**
 * Parses a regExp(pattern[, flags]) call into a runtime Node.
 * When both pattern and flags are literals, the RegExp is compiled eagerly
 * into a LITERAL_REGEX node; otherwise a REGEX node is created and the
 * RegExp is built at evaluation time.
 *
 * @throws {RuntimeError} If the literal pattern/flags do not compile.
 */
function parseRegex(expression, ast) {
    var args = ast.arguments;
    // no arguments, return default regex
    if (args.length === 0) {
        return new Node(ExpressionNodeType.LITERAL_REGEX, new RegExp());
    }
    var pattern = createRuntimeAst(expression, args[0]);
    var exp;
    // optional flag argument supplied
    if (args.length > 1) {
        var flags = createRuntimeAst(expression, args[1]);
        if (isLiteralType(pattern) && isLiteralType(flags)) {
            try {
                // Restore backslashes the parser temporarily escaped.
                exp = new RegExp(replaceBackslashes(String(pattern._value)), flags._value);
            } catch (e) {
                throw new RuntimeError(e);
            }
            return new Node(ExpressionNodeType.LITERAL_REGEX, exp);
        }
        return new Node(ExpressionNodeType.REGEX, pattern, flags);
    }
    // only pattern argument supplied
    if (isLiteralType(pattern)) {
        try {
            exp = new RegExp(replaceBackslashes(String(pattern._value)));
        } catch (e) {
            throw new RuntimeError(e);
        }
        return new Node(ExpressionNodeType.LITERAL_REGEX, exp);
    }
    return new Node(ExpressionNodeType.REGEX, pattern);
}
/**
 * Converts a jsep Identifier into a runtime Node: czm_-prefixed names
 * (produced by replaceVariables) become variable nodes, and the keywords
 * NaN / Infinity / undefined become number/undefined literals.
 * Throws a RuntimeError for any other identifier.
 */
function parseKeywordsAndVariables(ast) {
    var identifier = ast.name;
    if (isVariable(identifier)) {
        var propertyName = getPropertyName(identifier);
        // tiles3d_-prefixed variables are built-ins supplied by the runtime.
        var nodeType = (propertyName.substr(0, 8) === 'tiles3d_') ?
            ExpressionNodeType.BUILTIN_VARIABLE :
            ExpressionNodeType.VARIABLE;
        return new Node(nodeType, propertyName);
    }
    if (identifier === 'NaN') {
        return new Node(ExpressionNodeType.LITERAL_NUMBER, NaN);
    }
    if (identifier === 'Infinity') {
        return new Node(ExpressionNodeType.LITERAL_NUMBER, Infinity);
    }
    if (identifier === 'undefined') {
        return new Node(ExpressionNodeType.LITERAL_UNDEFINED, undefined);
    }
    throw new RuntimeError(identifier + ' is not defined.');
}
// Maps Math.PI / Math.E member accesses to literal number nodes.
// Any other Math member yields undefined.
function parseMathConstant(ast) {
    var constantName = ast.property.name;
    if (constantName === 'PI') {
        return new Node(ExpressionNodeType.LITERAL_NUMBER, Math.PI);
    }
    if (constantName === 'E') {
        return new Node(ExpressionNodeType.LITERAL_NUMBER, Math.E);
    }
}
// Maps Number.POSITIVE_INFINITY member access to a literal number node.
// Any other Number member yields undefined.
function parseNumberConstant(ast) {
    if (ast.property.name === 'POSITIVE_INFINITY') {
        return new Node(ExpressionNodeType.LITERAL_NUMBER, Number.POSITIVE_INFINITY);
    }
}
/**
 * Converts a jsep MemberExpression into a runtime Node.
 * Math.* and Number.* constants are folded to literals; otherwise a MEMBER
 * node is built for bracket access (obj[expr]) or dot access (obj.name).
 */
function parseMemberExpression(expression, ast) {
    var objectName = ast.object.name;
    if (objectName === 'Math') {
        return parseMathConstant(ast);
    }
    if (objectName === 'Number') {
        return parseNumberConstant(ast);
    }
    var obj = createRuntimeAst(expression, ast.object);
    if (ast.computed) {
        // obj[expr]: the property itself is an expression.
        var propertyNode = createRuntimeAst(expression, ast.property);
        return new Node(ExpressionNodeType.MEMBER, 'brackets', obj, propertyNode);
    }
    // obj.name: the property is a fixed string.
    var nameNode = new Node(ExpressionNodeType.LITERAL_STRING, ast.property.name);
    return new Node(ExpressionNodeType.MEMBER, 'dot', obj, nameNode);
}
// True if the node holds a literal value. Relies on ExpressionNodeType
// ordering: every LITERAL_* type is >= LITERAL_NULL.
function isLiteralType(node) {
    return (node._type >= ExpressionNodeType.LITERAL_NULL);
}
// True for identifiers produced by replaceVariables, i.e. 'czm_'-prefixed.
function isVariable(name) {
    return (name.indexOf('czm_') === 0);
}
// Strips the 4-character 'czm_' prefix added by replaceVariables.
function getPropertyName(variable) {
    return variable.substring(4);
}
/**
 * Recursively converts a jsep AST into the runtime Node tree.
 *
 * @param {String} expression Original expression text (needed for regex parsing).
 * @param {Object} ast The jsep AST node.
 * @returns {Node} The runtime node.
 * @throws {RuntimeError} For unsupported node types or operators.
 */
function createRuntimeAst(expression, ast) {
    var node;
    var op;
    var left;
    var right;
    if (ast.type === 'Literal') {
        node = parseLiteral(ast);
    } else if (ast.type === 'CallExpression') {
        node = parseCall(expression, ast);
    } else if (ast.type === 'Identifier') {
        node = parseKeywordsAndVariables(ast);
    } else if (ast.type === 'UnaryExpression') {
        op = ast.operator;
        var child = createRuntimeAst(expression, ast.argument);
        if (unaryOperators.indexOf(op) > -1) {
            node = new Node(ExpressionNodeType.UNARY, op, child);
        } else {
            throw new RuntimeError('Unexpected operator "' + op + '".');
        }
    } else if (ast.type === 'BinaryExpression') {
        op = ast.operator;
        left = createRuntimeAst(expression, ast.left);
        right = createRuntimeAst(expression, ast.right);
        if (binaryOperators.indexOf(op) > -1) {
            node = new Node(ExpressionNodeType.BINARY, op, left, right);
        } else {
            throw new RuntimeError('Unexpected operator "' + op + '".');
        }
    } else if (ast.type === 'LogicalExpression') {
        op = ast.operator;
        left = createRuntimeAst(expression, ast.left);
        right = createRuntimeAst(expression, ast.right);
        if (binaryOperators.indexOf(op) > -1) {
            node = new Node(ExpressionNodeType.BINARY, op, left, right);
        } else {
            // Previously an unknown logical operator silently produced an
            // undefined node; fail fast like the BinaryExpression branch.
            throw new RuntimeError('Unexpected operator "' + op + '".');
        }
    } else if (ast.type === 'ConditionalExpression') {
        var test = createRuntimeAst(expression, ast.test);
        left = createRuntimeAst(expression, ast.consequent);
        right = createRuntimeAst(expression, ast.alternate);
        node = new Node(ExpressionNodeType.CONDITIONAL, '?', left, right, test);
    } else if (ast.type === 'MemberExpression') {
        node = parseMemberExpression(expression, ast);
    } else if (ast.type === 'ArrayExpression') {
        var val = [];
        for (var i = 0; i < ast.elements.length; i++) {
            val[i] = createRuntimeAst(expression, ast.elements[i]);
        }
        node = new Node(ExpressionNodeType.ARRAY, val);
    } else if (ast.type === 'Compound') {
        // empty expression or multiple expressions
        throw new RuntimeError('Provide exactly one expression.');
    } else {
        throw new RuntimeError('Cannot parse expression.');
    }
    return node;
}
/**
 * Binds the type-specific evaluator onto node.evaluate based on the node's
 * type and value. Called once from the Node constructor; avoids a dispatch
 * switch on every evaluation.
 */
function setEvaluateFunction(node) {
    if (node._type === ExpressionNodeType.CONDITIONAL) {
        node.evaluate = node._evaluateConditional;
    } else if (node._type === ExpressionNodeType.FUNCTION_CALL) {
        // Member calls: regExp().test/exec and toString.
        if (node._value === 'test') {
            node.evaluate = node._evaluateRegExpTest;
        } else if (node._value === 'exec') {
            node.evaluate = node._evaluateRegExpExec;
        } else if (node._value === 'toString') {
            node.evaluate = node._evaluateToString;
        }
    } else if (node._type === ExpressionNodeType.UNARY) {
        // Unary operators, conversions, class queries, and math functions.
        if (node._value === '!') {
            node.evaluate = node._evaluateNot;
        } else if (node._value === '-') {
            node.evaluate = node._evaluateNegative;
        } else if (node._value === '+') {
            node.evaluate = node._evaluatePositive;
        } else if (node._value === 'isNaN') {
            node.evaluate = node._evaluateNaN;
        } else if (node._value === 'isFinite') {
            node.evaluate = node._evaluateIsFinite;
        } else if (node._value === 'isExactClass') {
            node.evaluate = node._evaluateIsExactClass;
        } else if (node._value === 'isClass') {
            node.evaluate = node._evaluateIsClass;
        } else if (node._value === 'getExactClassName') {
            // NOTE(review): unusual camelCase ('evaluategetExactClassName')
            // kept as-is to match the method defined elsewhere in this file.
            node.evaluate = node._evaluategetExactClassName;
        } else if (node._value === 'Boolean') {
            node.evaluate = node._evaluateBooleanConversion;
        } else if (node._value === 'Number') {
            node.evaluate = node._evaluateNumberConversion;
        } else if (node._value === 'String') {
            node.evaluate = node._evaluateStringConversion;
        } else if (defined(unaryFunctions[node._value])) {
            node.evaluate = getEvaluateUnaryFunction(node._value);
        }
    } else if (node._type === ExpressionNodeType.BINARY) {
        // Binary operators, regex-match operators, and math functions.
        if (node._value === '+') {
            node.evaluate = node._evaluatePlus;
        } else if (node._value === '-') {
            node.evaluate = node._evaluateMinus;
        } else if (node._value === '*') {
            node.evaluate = node._evaluateTimes;
        } else if (node._value === '/') {
            node.evaluate = node._evaluateDivide;
        } else if (node._value === '%') {
            node.evaluate = node._evaluateMod;
        } else if (node._value === '===') {
            node.evaluate = node._evaluateEqualsStrict;
        } else if (node._value === '!==') {
            node.evaluate = node._evaluateNotEqualsStrict;
        } else if (node._value === '<') {
            node.evaluate = node._evaluateLessThan;
        } else if (node._value === '<=') {
            node.evaluate = node._evaluateLessThanOrEquals;
        } else if (node._value === '>') {
            node.evaluate = node._evaluateGreaterThan;
        } else if (node._value === '>=') {
            node.evaluate = node._evaluateGreaterThanOrEquals;
        } else if (node._value === '&&') {
            node.evaluate = node._evaluateAnd;
        } else if (node._value === '||') {
            node.evaluate = node._evaluateOr;
        } else if (node._value === '=~') {
            node.evaluate = node._evaluateRegExpMatch;
        } else if (node._value === '!~') {
            node.evaluate = node._evaluateRegExpNotMatch;
        } else if (defined(binaryFunctions[node._value])) {
            node.evaluate = getEvaluateBinaryFunction(node._value);
        }
    } else if (node._type === ExpressionNodeType.TERNARY) {
        node.evaluate = getEvaluateTernaryFunction(node._value);
    } else if (node._type === ExpressionNodeType.MEMBER) {
        if (node._value === 'brackets') {
            node.evaluate = node._evaluateMemberBrackets;
        } else {
            node.evaluate = node._evaluateMemberDot;
        }
    } else if (node._type === ExpressionNodeType.ARRAY) {
        node.evaluate = node._evaluateArray;
    } else if (node._type === ExpressionNodeType.VARIABLE) {
        node.evaluate = node._evaluateVariable;
    } else if (node._type === ExpressionNodeType.VARIABLE_IN_STRING) {
        node.evaluate = node._evaluateVariableString;
    } else if (node._type === ExpressionNodeType.LITERAL_COLOR) {
        node.evaluate = node._evaluateLiteralColor;
    } else if (node._type === ExpressionNodeType.LITERAL_VECTOR) {
        node.evaluate = node._evaluateLiteralVector;
    } else if (node._type === ExpressionNodeType.LITERAL_STRING) {
        node.evaluate = node._evaluateLiteralString;
    } else if (node._type === ExpressionNodeType.REGEX) {
        node.evaluate = node._evaluateRegExp;
    } else if (node._type === ExpressionNodeType.BUILTIN_VARIABLE) {
        if (node._value === 'tiles3d_tileset_time') {
            node.evaluate = evaluateTilesetTime;
        }
    } else {
        // Remaining literal types all evaluate to their stored value.
        node.evaluate = node._evaluateLiteral;
    }
}
// Built-in tiles3d_tileset_time variable: seconds since the feature's
// tileset was loaded. frameState is unused.
function evaluateTilesetTime(frameState, feature) {
    var tileset = feature.content.tileset;
    return tileset.timeSinceLoad;
}
// Wraps a unaryFunctions entry so it evaluates the node's operand first.
function getEvaluateUnaryFunction(call) {
    var evaluate = unaryFunctions[call];
    return function(frameState, feature) {
        var leftValue = this._left.evaluate(frameState, feature);
        return evaluate(call, leftValue);
    };
}
// Wraps a binaryFunctions entry so it evaluates both operands first.
function getEvaluateBinaryFunction(call) {
    var evaluate = binaryFunctions[call];
    return function(frameState, feature) {
        var leftValue = this._left.evaluate(frameState, feature);
        var rightValue = this._right.evaluate(frameState, feature);
        return evaluate(call, leftValue, rightValue);
    };
}
// Wraps a ternaryFunctions entry so it evaluates all three operands first.
function getEvaluateTernaryFunction(call) {
    var evaluate = ternaryFunctions[call];
    return function(frameState, feature) {
        var leftValue = this._left.evaluate(frameState, feature);
        var rightValue = this._right.evaluate(frameState, feature);
        var testValue = this._test.evaluate(frameState, feature);
        return evaluate(call, leftValue, rightValue, testValue);
    };
}
// Literal nodes evaluate to their stored value; both arguments are unused.
Node.prototype._evaluateLiteral = function(frameState, feature) {
    return this._value;
};
/**
 * Evaluates a color constructor node (color/rgb/rgba/hsl/hsla) to a
 * Cartesian4 in scratch storage. Argument sub-expressions are evaluated
 * against the feature, so colors may depend on feature properties.
 */
Node.prototype._evaluateLiteralColor = function(frameState, feature) {
    var color = scratchColor;
    var args = this._left;
    if (this._value === 'color') {
        if (!defined(args)) {
            // color() with no arguments defaults to opaque white.
            Color.fromBytes(255, 255, 255, 255, color);
        } else if (args.length > 1) {
            // color(css, alpha): the second argument overrides the alpha.
            Color.fromCssColorString(args[0].evaluate(frameState, feature), color);
            color.alpha = args[1].evaluate(frameState, feature);
        } else {
            Color.fromCssColorString(args[0].evaluate(frameState, feature), color);
        }
    } else if (this._value === 'rgb') {
        Color.fromBytes(
            args[0].evaluate(frameState, feature),
            args[1].evaluate(frameState, feature),
            args[2].evaluate(frameState, feature),
            255, color);
    } else if (this._value === 'rgba') {
        // convert between css alpha (0 to 1) and cesium alpha (0 to 255)
        var a = args[3].evaluate(frameState, feature) * 255;
        Color.fromBytes(
            args[0].evaluate(frameState, feature),
            args[1].evaluate(frameState, feature),
            args[2].evaluate(frameState, feature),
            a, color);
    } else if (this._value === 'hsl') {
        Color.fromHsl(
            args[0].evaluate(frameState, feature),
            args[1].evaluate(frameState, feature),
            args[2].evaluate(frameState, feature),
            1.0, color);
    } else if (this._value === 'hsla') {
        Color.fromHsl(
            args[0].evaluate(frameState, feature),
            args[1].evaluate(frameState, feature),
            args[2].evaluate(frameState, feature),
            args[3].evaluate(frameState, feature),
            color);
    }
    // Colors are represented as Cartesian4 inside the expression runtime.
    return Cartesian4.fromColor(color, scratchStorage.getCartesian4());
};
/**
 * Evaluates a vec2/vec3/vec4 constructor node to a Cartesian in scratch
 * storage, flattening any vector arguments into individual components.
 */
Node.prototype._evaluateLiteralVector = function(frameState, feature) {
    // Gather the components that make up the vector, which includes components from interior vectors.
    // For example vec3(1, 2, 3) or vec3(vec2(1, 2), 3) are both valid.
    //
    // If the number of components does not equal the vector's size, then a RuntimeError is thrown - with two exceptions:
    // 1. A vector may be constructed from a larger vector and drop the extra components.
    // 2. A vector may be constructed from a single component - vec3(1) will become vec3(1, 1, 1).
    //
    // Examples of invalid constructors include:
    // vec4(1, 2) // not enough components
    // vec3(vec2(1, 2)) // not enough components
    // vec3(1, 2, 3, 4) // too many components
    // vec2(vec4(1), 1) // too many components
    var components = scratchStorage.getArray();
    var call = this._value;
    var args = this._left;
    var argsLength = args.length;
    for (var i = 0; i < argsLength; ++i) {
        var value = args[i].evaluate(frameState, feature);
        if (typeof value === 'number') {
            components.push(value);
        } else if (value instanceof Cartesian2) {
            components.push(value.x, value.y);
        } else if (value instanceof Cartesian3) {
            components.push(value.x, value.y, value.z);
        } else if (value instanceof Cartesian4) {
            components.push(value.x, value.y, value.z, value.w);
        } else {
            throw new RuntimeError(call + ' argument must be a vector or number. Argument is ' + value + '.');
        }
    }
    var componentsLength = components.length;
    // The vector's size is encoded in its name: 'vec2'/'vec3'/'vec4'.
    var vectorLength = parseInt(call.charAt(3));
    if (componentsLength === 0) {
        throw new RuntimeError('Invalid ' + call + ' constructor. No valid arguments.');
    } else if ((componentsLength < vectorLength) && (componentsLength > 1)) {
        throw new RuntimeError('Invalid ' + call + ' constructor. Not enough arguments.');
    } else if ((componentsLength > vectorLength) && (argsLength > 1)) {
        throw new RuntimeError('Invalid ' + call + ' constructor. Too many arguments.');
    }
    if (componentsLength === 1) {
        // Add the same component 3 more times
        // (covers the largest case, vec4; fromArray below ignores extras).
        var component = components[0];
        components.push(component, component, component);
    }
    if (call === 'vec2') {
        return Cartesian2.fromArray(components, 0, scratchStorage.getCartesian2());
    } else if (call === 'vec3') {
        return Cartesian3.fromArray(components, 0, scratchStorage.getCartesian3());
    } else if (call === 'vec4') {
        return Cartesian4.fromArray(components, 0, scratchStorage.getCartesian4());
    }
};
// Returns the literal string value unchanged.
Node.prototype._evaluateLiteralString = function(frameState, feature) {
    return this._value;
};
// Expands ${variable} placeholders inside a string with the feature's
// property values. Properties that are not defined expand to ''.
Node.prototype._evaluateVariableString = function(frameState, feature) {
    var result = this._value;
    var match = variableRegex.exec(result);
    while (match !== null) {
        var placeholder = match[0];   // full match, e.g. '${Height}'
        var variableName = match[1];  // captured name, e.g. 'Height'
        var property = feature.getProperty(variableName);
        if (!defined(property)) {
            property = '';
        }
        // String.replace with a string pattern substitutes only the first
        // occurrence, so the loop re-runs the regex until none remain.
        result = result.replace(placeholder, property);
        match = variableRegex.exec(result);
    }
    return result;
};
// Looks up a feature property by name.
Node.prototype._evaluateVariable = function(frameState, feature) {
    // evaluates to undefined if the property name is not defined for that feature
    return feature.getProperty(this._value);
};
// True if the AST node refers to the special 'feature' identifier.
function checkFeature (ast) {
    return (ast._value === 'feature');
}
// PERFORMANCE_IDEA: Determine if parent property needs to be computed before runtime
// Evaluates a dot-member access such as expr.name. Vector operands accept
// the color accessors r/g/b/a as aliases for x/y/z/w.
Node.prototype._evaluateMemberDot = function(frameState, feature) {
    // Direct feature access: evaluate the member name and look it up on the feature.
    if (checkFeature(this._left)) {
        return feature.getProperty(this._right.evaluate(frameState, feature));
    }
    var object = this._left.evaluate(frameState, feature);
    if (!defined(object)) {
        return undefined;
    }
    var key = this._right.evaluate(frameState, feature);
    var isVector = (object instanceof Cartesian2) || (object instanceof Cartesian3) || (object instanceof Cartesian4);
    if (isVector) {
        // Map color-style accessors onto the vector components; x/y/z/w
        // fall through to the plain property lookup below.
        switch (key) {
            case 'r':
                return object.x;
            case 'g':
                return object.y;
            case 'b':
                return object.z;
            case 'a':
                return object.w;
        }
    }
    return object[key];
};
// Evaluates a bracket-member access such as expr[key]. Vector operands
// accept numeric indices 0..3 or the letters r/g/b/a (and implicitly
// x/y/z/w via the fall-through property lookup).
Node.prototype._evaluateMemberBrackets = function(frameState, feature) {
    // Direct feature access: evaluate the key and look it up on the feature.
    if (checkFeature(this._left)) {
        return feature.getProperty(this._right.evaluate(frameState, feature));
    }
    var object = this._left.evaluate(frameState, feature);
    if (!defined(object)) {
        return undefined;
    }
    var key = this._right.evaluate(frameState, feature);
    var isVector = (object instanceof Cartesian2) || (object instanceof Cartesian3) || (object instanceof Cartesian4);
    if (isVector) {
        // For Cartesian2 and Cartesian3 out-of-range components just yield undefined.
        switch (key) {
            case 0:
            case 'r':
                return object.x;
            case 1:
            case 'g':
                return object.y;
            case 2:
            case 'b':
                return object.z;
            case 3:
            case 'a':
                return object.w;
        }
    }
    return object[key];
};
// Evaluates each element expression of an array literal and returns the
// resulting values as a new JavaScript array.
Node.prototype._evaluateArray = function(frameState, feature) {
    var elements = this._value;
    var length = elements.length;
    var results = new Array(length);
    for (var i = 0; i < length; ++i) {
        results[i] = elements[i].evaluate(frameState, feature);
    }
    return results;
};
// PERFORMANCE_IDEA: Have "fast path" functions that deal only with specific types
// that we can assign if we know the types before runtime
// Logical NOT: only defined for boolean operands.
Node.prototype._evaluateNot = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    if (typeof left !== 'boolean') {
        throw new RuntimeError('Operator "!" requires a boolean argument. Argument is ' + left + '.');
    }
    return !left;
};
// Unary minus: negates a number or each component of a vector.
Node.prototype._evaluateNegative = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    if (left instanceof Cartesian2) {
        return Cartesian2.negate(left, scratchStorage.getCartesian2());
    } else if (left instanceof Cartesian3) {
        return Cartesian3.negate(left, scratchStorage.getCartesian3());
    } else if (left instanceof Cartesian4) {
        return Cartesian4.negate(left, scratchStorage.getCartesian4());
    } else if (typeof left === 'number') {
        return -left;
    }
    throw new RuntimeError('Operator "-" requires a vector or number argument. Argument is ' + left + '.');
};
// Unary plus: validates the operand type and returns it unchanged.
Node.prototype._evaluatePositive = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    if (!((left instanceof Cartesian2) || (left instanceof Cartesian3) || (left instanceof Cartesian4) || (typeof left === 'number'))) {
        throw new RuntimeError('Operator "+" requires a vector or number argument. Argument is ' + left + '.');
    }
    return left;
};
// Numeric comparison operators. Each evaluates both operands, applies the
// comparison when both are numbers, and otherwise raises a RuntimeError
// naming the operator.
Node.prototype._evaluateLessThan = function(frameState, feature) {
    var lhs = this._left.evaluate(frameState, feature);
    var rhs = this._right.evaluate(frameState, feature);
    if ((typeof lhs === 'number') && (typeof rhs === 'number')) {
        return lhs < rhs;
    }
    throw new RuntimeError('Operator "<" requires number arguments. Arguments are ' + lhs + ' and ' + rhs + '.');
};
Node.prototype._evaluateLessThanOrEquals = function(frameState, feature) {
    var lhs = this._left.evaluate(frameState, feature);
    var rhs = this._right.evaluate(frameState, feature);
    if ((typeof lhs === 'number') && (typeof rhs === 'number')) {
        return lhs <= rhs;
    }
    throw new RuntimeError('Operator "<=" requires number arguments. Arguments are ' + lhs + ' and ' + rhs + '.');
};
Node.prototype._evaluateGreaterThan = function(frameState, feature) {
    var lhs = this._left.evaluate(frameState, feature);
    var rhs = this._right.evaluate(frameState, feature);
    if ((typeof lhs === 'number') && (typeof rhs === 'number')) {
        return lhs > rhs;
    }
    throw new RuntimeError('Operator ">" requires number arguments. Arguments are ' + lhs + ' and ' + rhs + '.');
};
Node.prototype._evaluateGreaterThanOrEquals = function(frameState, feature) {
    var lhs = this._left.evaluate(frameState, feature);
    var rhs = this._right.evaluate(frameState, feature);
    if ((typeof lhs === 'number') && (typeof rhs === 'number')) {
        return lhs >= rhs;
    }
    throw new RuntimeError('Operator ">=" requires number arguments. Arguments are ' + lhs + ' and ' + rhs + '.');
};
// Logical OR with short-circuit: the right operand is only evaluated (and
// type-checked) when the left operand is false.
Node.prototype._evaluateOr = function(frameState, feature) {
    var first = this._left.evaluate(frameState, feature);
    if (typeof first !== 'boolean') {
        throw new RuntimeError('Operator "||" requires boolean arguments. First argument is ' + first + '.');
    }
    if (first) {
        // short circuit the expression
        return true;
    }
    var second = this._right.evaluate(frameState, feature);
    if (typeof second !== 'boolean') {
        throw new RuntimeError('Operator "||" requires boolean arguments. Second argument is ' + second + '.');
    }
    // first is known false here, so the result is just the second operand.
    return second;
};
// Logical AND with short-circuit: the right operand is only evaluated (and
// type-checked) when the left operand is true.
Node.prototype._evaluateAnd = function(frameState, feature) {
    var first = this._left.evaluate(frameState, feature);
    if (typeof first !== 'boolean') {
        throw new RuntimeError('Operator "&&" requires boolean arguments. First argument is ' + first + '.');
    }
    if (!first) {
        // short circuit the expression
        return false;
    }
    var second = this._right.evaluate(frameState, feature);
    if (typeof second !== 'boolean') {
        throw new RuntimeError('Operator "&&" requires boolean arguments. Second argument is ' + second + '.');
    }
    // first is known true here, so the result is just the second operand.
    return second;
};
// Binary '+': component-wise vector addition for matching vector types,
// string concatenation when either side is a string, numeric addition
// for two numbers.
Node.prototype._evaluatePlus = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    var right = this._right.evaluate(frameState, feature);
    if ((right instanceof Cartesian2) && (left instanceof Cartesian2)) {
        return Cartesian2.add(left, right, scratchStorage.getCartesian2());
    } else if ((right instanceof Cartesian3) && (left instanceof Cartesian3)) {
        return Cartesian3.add(left, right, scratchStorage.getCartesian3());
    } else if ((right instanceof Cartesian4) && (left instanceof Cartesian4)) {
        return Cartesian4.add(left, right, scratchStorage.getCartesian4());
    } else if ((typeof left === 'string') || (typeof right === 'string')) {
        // If only one argument is a string the other argument calls its toString function.
        return left + right;
    } else if ((typeof left === 'number') && (typeof right === 'number')) {
        return left + right;
    }
    throw new RuntimeError('Operator "+" requires vector or number arguments of matching types, or at least one string argument. Arguments are ' + left + ' and ' + right + '.');
};
// Binary '-': component-wise vector subtraction or numeric subtraction.
Node.prototype._evaluateMinus = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    var right = this._right.evaluate(frameState, feature);
    if ((right instanceof Cartesian2) && (left instanceof Cartesian2)) {
        return Cartesian2.subtract(left, right, scratchStorage.getCartesian2());
    } else if ((right instanceof Cartesian3) && (left instanceof Cartesian3)) {
        return Cartesian3.subtract(left, right, scratchStorage.getCartesian3());
    } else if ((right instanceof Cartesian4) && (left instanceof Cartesian4)) {
        return Cartesian4.subtract(left, right, scratchStorage.getCartesian4());
    } else if ((typeof left === 'number') && (typeof right === 'number')) {
        return left - right;
    }
    throw new RuntimeError('Operator "-" requires vector or number arguments of matching types. Arguments are ' + left + ' and ' + right + '.');
};
// Binary '*': component-wise product for matching vector types,
// scalar-vector scaling when one side is a number, numeric product
// for two numbers.
Node.prototype._evaluateTimes = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    var right = this._right.evaluate(frameState, feature);
    if ((right instanceof Cartesian2) && (left instanceof Cartesian2)) {
        return Cartesian2.multiplyComponents(left, right, scratchStorage.getCartesian2());
    } else if ((right instanceof Cartesian2) && (typeof left === 'number')) {
        return Cartesian2.multiplyByScalar(right, left, scratchStorage.getCartesian2());
    } else if ((left instanceof Cartesian2) && (typeof right === 'number')) {
        return Cartesian2.multiplyByScalar(left, right, scratchStorage.getCartesian2());
    } else if ((right instanceof Cartesian3) && (left instanceof Cartesian3)) {
        return Cartesian3.multiplyComponents(left, right, scratchStorage.getCartesian3());
    } else if ((right instanceof Cartesian3) && (typeof left === 'number')) {
        return Cartesian3.multiplyByScalar(right, left, scratchStorage.getCartesian3());
    } else if ((left instanceof Cartesian3) && (typeof right === 'number')) {
        return Cartesian3.multiplyByScalar(left, right, scratchStorage.getCartesian3());
    } else if ((right instanceof Cartesian4) && (left instanceof Cartesian4)) {
        return Cartesian4.multiplyComponents(left, right, scratchStorage.getCartesian4());
    } else if ((right instanceof Cartesian4) && (typeof left === 'number')) {
        return Cartesian4.multiplyByScalar(right, left, scratchStorage.getCartesian4());
    } else if ((left instanceof Cartesian4) && (typeof right === 'number')) {
        return Cartesian4.multiplyByScalar(left, right, scratchStorage.getCartesian4());
    } else if ((typeof left === 'number') && (typeof right === 'number')) {
        return left * right;
    }
    throw new RuntimeError('Operator "*" requires vector or number arguments. If both arguments are vectors they must be matching types. Arguments are ' + left + ' and ' + right + '.');
};
// Binary '/': component-wise quotient for matching vector types,
// vector / scalar division (scalar must be on the right), or numeric
// division for two numbers.
Node.prototype._evaluateDivide = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    var right = this._right.evaluate(frameState, feature);
    if ((right instanceof Cartesian2) && (left instanceof Cartesian2)) {
        return Cartesian2.divideComponents(left, right, scratchStorage.getCartesian2());
    } else if ((left instanceof Cartesian2) && (typeof right === 'number')) {
        return Cartesian2.divideByScalar(left, right, scratchStorage.getCartesian2());
    } else if ((right instanceof Cartesian3) && (left instanceof Cartesian3)) {
        return Cartesian3.divideComponents(left, right, scratchStorage.getCartesian3());
    } else if ((left instanceof Cartesian3) && (typeof right === 'number')) {
        return Cartesian3.divideByScalar(left, right, scratchStorage.getCartesian3());
    } else if ((right instanceof Cartesian4) && (left instanceof Cartesian4)) {
        return Cartesian4.divideComponents(left, right, scratchStorage.getCartesian4());
    } else if ((left instanceof Cartesian4) && (typeof right === 'number')) {
        return Cartesian4.divideByScalar(left, right, scratchStorage.getCartesian4());
    } else if ((typeof left === 'number') && (typeof right === 'number')) {
        return left / right;
    }
    throw new RuntimeError('Operator "/" requires vector or number arguments of matching types, or a number as the second argument. Arguments are ' + left + ' and ' + right + '.');
};
// Binary '%': component-wise remainder for matching vector types or
// numeric remainder for two numbers.
Node.prototype._evaluateMod = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    var right = this._right.evaluate(frameState, feature);
    if ((right instanceof Cartesian2) && (left instanceof Cartesian2)) {
        return Cartesian2.fromElements(left.x % right.x, left.y % right.y, scratchStorage.getCartesian2());
    } else if ((right instanceof Cartesian3) && (left instanceof Cartesian3)) {
        return Cartesian3.fromElements(left.x % right.x, left.y % right.y, left.z % right.z, scratchStorage.getCartesian3());
    } else if ((right instanceof Cartesian4) && (left instanceof Cartesian4)) {
        return Cartesian4.fromElements(left.x % right.x, left.y % right.y, left.z % right.z, left.w % right.w, scratchStorage.getCartesian4());
    } else if ((typeof left === 'number') && (typeof right === 'number')) {
        return left % right;
    }
    throw new RuntimeError('Operator "%" requires vector or number arguments of matching types. Arguments are ' + left + ' and ' + right + '.');
};
// Strict equality (===). Vectors of the same type are compared
// component-wise via .equals; everything else uses primitive/reference
// identity.
Node.prototype._evaluateEqualsStrict = function(frameState, feature) {
    var lhs = this._left.evaluate(frameState, feature);
    var rhs = this._right.evaluate(frameState, feature);
    var bothVectors =
        ((lhs instanceof Cartesian2) && (rhs instanceof Cartesian2)) ||
        ((lhs instanceof Cartesian3) && (rhs instanceof Cartesian3)) ||
        ((lhs instanceof Cartesian4) && (rhs instanceof Cartesian4));
    if (bothVectors) {
        return lhs.equals(rhs);
    }
    return lhs === rhs;
};
// Strict inequality (!==): logical negation of the strict-equality rules.
Node.prototype._evaluateNotEqualsStrict = function(frameState, feature) {
    var lhs = this._left.evaluate(frameState, feature);
    var rhs = this._right.evaluate(frameState, feature);
    var bothVectors =
        ((lhs instanceof Cartesian2) && (rhs instanceof Cartesian2)) ||
        ((lhs instanceof Cartesian3) && (rhs instanceof Cartesian3)) ||
        ((lhs instanceof Cartesian4) && (rhs instanceof Cartesian4));
    if (bothVectors) {
        return !lhs.equals(rhs);
    }
    return lhs !== rhs;
};
// Ternary conditional (test ? left : right). Only the selected branch is
// evaluated; the test must evaluate to a boolean.
Node.prototype._evaluateConditional = function(frameState, feature) {
    var test = this._test.evaluate(frameState, feature);
    if (typeof test !== 'boolean') {
        throw new RuntimeError('Conditional argument of conditional expression must be a boolean. Argument is ' + test + '.');
    }
    if (test) {
        return this._left.evaluate(frameState, feature);
    }
    return this._right.evaluate(frameState, feature);
};
// isNaN(expr): delegates to the global isNaN on the evaluated operand.
Node.prototype._evaluateNaN = function(frameState, feature) {
    return isNaN(this._left.evaluate(frameState, feature));
};
// isFinite(expr): delegates to the global isFinite on the evaluated operand.
Node.prototype._evaluateIsFinite = function(frameState, feature) {
    return isFinite(this._left.evaluate(frameState, feature));
};
// isExactClass(expr): asks the feature whether its class exactly matches.
Node.prototype._evaluateIsExactClass = function(frameState, feature) {
    return feature.isExactClass(this._left.evaluate(frameState, feature));
};
// isClass(expr): asks the feature whether it belongs to the class.
Node.prototype._evaluateIsClass = function(frameState, feature) {
    return feature.isClass(this._left.evaluate(frameState, feature));
};
// getExactClassName(): returns the feature's exact class name; no operand.
Node.prototype._evaluategetExactClassName = function(frameState, feature) {
    return feature.getExactClassName();
};
// Boolean(expr): standard JavaScript truthiness conversion.
Node.prototype._evaluateBooleanConversion = function(frameState, feature) {
    return Boolean(this._left.evaluate(frameState, feature));
};
// Number(expr): standard JavaScript numeric conversion.
Node.prototype._evaluateNumberConversion = function(frameState, feature) {
    return Number(this._left.evaluate(frameState, feature));
};
// String(expr): standard JavaScript string conversion.
Node.prototype._evaluateStringConversion = function(frameState, feature) {
    return String(this._left.evaluate(frameState, feature));
};
// Builds a RegExp from an evaluated pattern and optional flags expression.
// An invalid pattern/flags combination is re-thrown as a RuntimeError
// wrapping the native SyntaxError.
Node.prototype._evaluateRegExp = function(frameState, feature) {
    var pattern = this._value.evaluate(frameState, feature);
    var flags = '';
    if (defined(this._left)) {
        flags = this._left.evaluate(frameState, feature);
    }
    var exp;
    try {
        exp = new RegExp(pattern, flags);
    } catch (e) {
        throw new RuntimeError(e);
    }
    return exp;
};
// RegExp.test(regexp, string): boolean match test. Argument order is
// strict: the RegExp must be first.
Node.prototype._evaluateRegExpTest = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    var right = this._right.evaluate(frameState, feature);
    if (!((left instanceof RegExp) && (typeof right === 'string'))) {
        throw new RuntimeError('RegExp.test requires the first argument to be a RegExp and the second argument to be a string. Arguments are ' + left + ' and ' + right + '.');
    }
    return left.test(right);
};
// '=~' operator: boolean match test; the RegExp and string may appear on
// either side.
Node.prototype._evaluateRegExpMatch = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    var right = this._right.evaluate(frameState, feature);
    if ((left instanceof RegExp) && (typeof right === 'string')) {
        return left.test(right);
    } else if ((right instanceof RegExp) && (typeof left === 'string')) {
        return right.test(left);
    }
    throw new RuntimeError('Operator "=~" requires one RegExp argument and one string argument. Arguments are ' + left + ' and ' + right + '.');
};
// '!~' operator: negated match test; operand order is also flexible.
Node.prototype._evaluateRegExpNotMatch = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    var right = this._right.evaluate(frameState, feature);
    if ((left instanceof RegExp) && (typeof right === 'string')) {
        return !(left.test(right));
    } else if ((right instanceof RegExp) && (typeof left === 'string')) {
        return !(right.test(left));
    }
    throw new RuntimeError('Operator "!~" requires one RegExp argument and one string argument. Arguments are ' + left + ' and ' + right + '.');
};
// RegExp.exec(regexp, string): returns the FIRST capture group of the
// match (exec[1], not the whole match), or null when there is no match.
Node.prototype._evaluateRegExpExec = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    var right = this._right.evaluate(frameState, feature);
    if (!((left instanceof RegExp) && (typeof right === 'string'))) {
        throw new RuntimeError('RegExp.exec requires the first argument to be a RegExp and the second argument to be a string. Arguments are ' + left + ' and ' + right + '.');
    }
    var exec = left.exec(right);
    if (!defined(exec)) {
        return null;
    }
    return exec[1];
};
// toString(expr): only supported for RegExp and vector operands; anything
// else raises a RuntimeError.
Node.prototype._evaluateToString = function(frameState, feature) {
    var left = this._left.evaluate(frameState, feature);
    if ((left instanceof RegExp) || (left instanceof Cartesian2) || (left instanceof Cartesian3) || (left instanceof Cartesian4)) {
        return String(left);
    }
    throw new RuntimeError('Unexpected function call "' + this._value + '".');
};
// Folds a constant hsl()/hsla() call into a Color at shader-generation
// time. Returns undefined when any channel is not a literal number.
// NOTE: the result is written into the shared scratchColor, so it is only
// valid until the next call that uses scratchColor.
function convertHSLToRGB(ast) {
    // Check if the color contains any nested expressions to see if the color can be converted here.
    // E.g. "hsl(0.9, 0.6, 0.7)" is able to convert directly to rgb, "hsl(0.9, 0.6, ${Height})" is not.
    var channels = ast._left;
    var length = channels.length;
    for (var i = 0; i < length; ++i) {
        if (channels[i]._type !== ExpressionNodeType.LITERAL_NUMBER) {
            return undefined;
        }
    }
    var h = channels[0]._value;
    var s = channels[1]._value;
    var l = channels[2]._value;
    // Alpha defaults to fully opaque when only 3 channels are given.
    var a = (length === 4) ? channels[3]._value : 1.0;
    return Color.fromHsl(h, s, l, a, scratchColor);
}
// Folds a constant rgb()/rgba() call into a Color at shader-generation
// time. Returns undefined when any channel is not a literal number.
// Channels are 0-255 and are normalized to [0, 1]; the shared scratchColor
// is reused for the result.
function convertRGBToColor(ast) {
    // Check if the color contains any nested expressions to see if the color can be converted here.
    // E.g. "rgb(255, 255, 255)" is able to convert directly to Color, "rgb(255, 255, ${Height})" is not.
    var channels = ast._left;
    var length = channels.length;
    for (var i = 0; i < length; ++i) {
        if (channels[i]._type !== ExpressionNodeType.LITERAL_NUMBER) {
            return undefined;
        }
    }
    var color = scratchColor;
    color.red = channels[0]._value / 255.0;
    color.green = channels[1]._value / 255.0;
    color.blue = channels[2]._value / 255.0;
    // Alpha defaults to fully opaque when only 3 channels are given.
    color.alpha = (length === 4) ? channels[3]._value : 1.0;
    return color;
}
// Formats a number as a GLSL float literal: whole numbers gain an explicit
// ".0" suffix so the emitted token is a float, fractional numbers use the
// default string conversion.
function numberToString(number) {
    return (number % 1 === 0) ? number.toFixed(1) : number.toString();
}
// Formats a Color's rgb channels as a GLSL 'vec3(...)' literal.
function colorToVec3(color) {
    var channels = [
        numberToString(color.red),
        numberToString(color.green),
        numberToString(color.blue)
    ];
    return 'vec3(' + channels.join(', ') + ')';
}
// Formats a Color's rgba channels as a GLSL 'vec4(...)' literal.
function colorToVec4(color) {
    var channels = [
        numberToString(color.red),
        numberToString(color.green),
        numberToString(color.blue),
        numberToString(color.alpha)
    ];
    return 'vec4(' + channels.join(', ') + ')';
}
// Maps each expression node in 'array' to its shader expression string.
function getExpressionArray(array, attributePrefix, shaderState, parent) {
    var expressions = new Array(array.length);
    for (var i = 0; i < array.length; ++i) {
        expressions[i] = array[i].getShaderExpression(attributePrefix, shaderState, parent);
    }
    return expressions;
}
/**
 * Recursively converts this expression node into a GLSL expression string.
 *
 * @param {String} attributePrefix Prefix prepended to variable names so they
 *        reference vertex attributes in the generated shader.
 * @param {Object} shaderState Mutable state shared across the traversal;
 *        translucent is set to true when a non-opaque alpha is emitted.
 * @param {Node} parent The parent node, used to special-case string literals
 *        that are really member accessors (e.g. the 'r' in color.r).
 * @returns {String} The GLSL expression, or undefined for node types with no
 *          matching case (TERNARY without a known function, unknown
 *          LITERAL_COLOR call, unknown BUILTIN_VARIABLE).
 * @throws {RuntimeError} For constructs that cannot be expressed in GLSL
 *         (regular expressions, strings, null/undefined, function calls).
 */
Node.prototype.getShaderExpression = function(attributePrefix, shaderState, parent) {
    var color;
    var left;
    var right;
    var test;
    var type = this._type;
    var value = this._value;
    if (defined(this._left)) {
        if (isArray(this._left)) {
            // Left can be an array if the type is LITERAL_COLOR or LITERAL_VECTOR
            left = getExpressionArray(this._left, attributePrefix, shaderState, this);
        } else {
            left = this._left.getShaderExpression(attributePrefix, shaderState, this);
        }
    }
    if (defined(this._right)) {
        right = this._right.getShaderExpression(attributePrefix, shaderState, this);
    }
    if (defined(this._test)) {
        test = this._test.getShaderExpression(attributePrefix, shaderState, this);
    }
    if (isArray(this._value)) {
        // For ARRAY type
        value = getExpressionArray(this._value, attributePrefix, shaderState, this);
    }
    switch (type) {
        case ExpressionNodeType.VARIABLE:
            return attributePrefix + value;
        case ExpressionNodeType.UNARY:
            // Supported types: +, -, !, Boolean, Number
            if (value === 'Boolean') {
                return 'bool(' + left + ')';
            } else if (value === 'Number') {
                return 'float(' + left + ')';
            } else if (value === 'round') {
                // GLSL (ES 1.00) has no round(); emulate with floor.
                return 'floor(' + left + ' + 0.5)';
            } else if (defined(unaryFunctions[value])) {
                return value + '(' + left + ')';
            } else if ((value === 'isNaN') || (value === 'isFinite') || (value === 'String') || (value === 'isExactClass') || (value === 'isClass') || (value === 'getExactClassName')) {
                throw new RuntimeError('Error generating style shader: "' + value + '" is not supported.');
            }
            // Note: a duplicate, unreachable unaryFunctions check that followed the
            // throw above has been removed; it could never be reached because the
            // identical check earlier in this chain already returned.
            // Plain unary operator (+, -, !): prepend it to the operand.
            return value + left;
        case ExpressionNodeType.BINARY:
            // Supported types: ||, &&, ===, !==, <, >, <=, >=, +, -, *, /, %
            if (value === '%') {
                return 'mod(' + left + ', ' + right + ')';
            } else if (value === '===') {
                return '(' + left + ' == ' + right + ')';
            } else if (value === '!==') {
                return '(' + left + ' != ' + right + ')';
            } else if (value === 'atan2') {
                return 'atan(' + left + ', ' + right + ')';
            } else if (defined(binaryFunctions[value])) {
                return value + '(' + left + ', ' + right + ')';
            }
            return '(' + left + ' ' + value + ' ' + right + ')';
        case ExpressionNodeType.TERNARY:
            if (defined(ternaryFunctions[value])) {
                return value + '(' + left + ', ' + right + ', ' + test + ')';
            }
            break;
        case ExpressionNodeType.CONDITIONAL:
            return '(' + test + ' ? ' + left + ' : ' + right + ')';
        case ExpressionNodeType.MEMBER:
            // This is intended for accessing the components of vector properties. String members aren't supported.
            // Check for 0.0 rather than 0 because all numbers are previously converted to decimals.
            if (right === 'r' || right === 'x' || right === '0.0') {
                return left + '[0]';
            } else if (right === 'g' || right === 'y' || right === '1.0') {
                return left + '[1]';
            } else if (right === 'b' || right === 'z' || right === '2.0') {
                return left + '[2]';
            } else if (right === 'a' || right === 'w' || right === '3.0') {
                return left + '[3]';
            }
            return left + '[int(' + right + ')]';
        case ExpressionNodeType.FUNCTION_CALL:
            throw new RuntimeError('Error generating style shader: "' + value + '" is not supported.');
        case ExpressionNodeType.ARRAY:
            if (value.length === 4) {
                return 'vec4(' + value[0] + ', ' + value[1] + ', ' + value[2] + ', ' + value[3] + ')';
            } else if (value.length === 3) {
                return 'vec3(' + value[0] + ', ' + value[1] + ', ' + value[2] + ')';
            } else if (value.length === 2) {
                return 'vec2(' + value[0] + ', ' + value[1] + ')';
            }
            throw new RuntimeError('Error generating style shader: Invalid array length. Array length should be 2, 3, or 4.');
        case ExpressionNodeType.REGEX:
            throw new RuntimeError('Error generating style shader: Regular expressions are not supported.');
        case ExpressionNodeType.VARIABLE_IN_STRING:
            throw new RuntimeError('Error generating style shader: Converting a variable to a string is not supported.');
        case ExpressionNodeType.LITERAL_NULL:
            throw new RuntimeError('Error generating style shader: null is not supported.');
        case ExpressionNodeType.LITERAL_BOOLEAN:
            return value ? 'true' : 'false';
        case ExpressionNodeType.LITERAL_NUMBER:
            return numberToString(value);
        case ExpressionNodeType.LITERAL_STRING:
            // A string that is a component accessor inside a member expression
            // passes through untouched (e.g. the 'r' of color.r).
            if (defined(parent) && (parent._type === ExpressionNodeType.MEMBER)) {
                if (value === 'r' || value === 'g' || value === 'b' || value === 'a' ||
                    value === 'x' || value === 'y' || value === 'z' || value === 'w') {
                    return value;
                }
            }
            // Check for css color strings
            color = Color.fromCssColorString(value, scratchColor);
            if (defined(color)) {
                return colorToVec3(color);
            }
            throw new RuntimeError('Error generating style shader: String literals are not supported.');
        case ExpressionNodeType.LITERAL_COLOR:
            var args = left;
            if (value === 'color') {
                if (!defined(args)) {
                    return 'vec4(1.0)';
                } else if (args.length > 1) {
                    var rgb = args[0];
                    var alpha = args[1];
                    if (alpha !== '1.0') {
                        shaderState.translucent = true;
                    }
                    return 'vec4(' + rgb + ', ' + alpha + ')';
                }
                return 'vec4(' + args[0] + ', 1.0)';
            } else if (value === 'rgb') {
                // Constant-fold when every channel is a literal number.
                color = convertRGBToColor(this);
                if (defined(color)) {
                    return colorToVec4(color);
                }
                return 'vec4(' + args[0] + ' / 255.0, ' + args[1] + ' / 255.0, ' + args[2] + ' / 255.0, 1.0)';
            } else if (value === 'rgba') {
                if (args[3] !== '1.0') {
                    shaderState.translucent = true;
                }
                color = convertRGBToColor(this);
                if (defined(color)) {
                    return colorToVec4(color);
                }
                return 'vec4(' + args[0] + ' / 255.0, ' + args[1] + ' / 255.0, ' + args[2] + ' / 255.0, ' + args[3] + ')';
            } else if (value === 'hsl') {
                color = convertHSLToRGB(this);
                if (defined(color)) {
                    return colorToVec4(color);
                }
                return 'vec4(czm_HSLToRGB(vec3(' + args[0] + ', ' + args[1] + ', ' + args[2] + ')), 1.0)';
            } else if (value === 'hsla') {
                color = convertHSLToRGB(this);
                if (defined(color)) {
                    if (color.alpha !== 1.0) {
                        shaderState.translucent = true;
                    }
                    return colorToVec4(color);
                }
                if (args[3] !== '1.0') {
                    shaderState.translucent = true;
                }
                return 'vec4(czm_HSLToRGB(vec3(' + args[0] + ', ' + args[1] + ', ' + args[2] + ')), ' + args[3] + ')';
            }
            break;
        case ExpressionNodeType.LITERAL_VECTOR:
            //>>includeStart('debug', pragmas.debug);
            if (!defined(left)) {
                throw new DeveloperError('left should always be defined for type ExpressionNodeType.LITERAL_VECTOR');
            }
            //>>includeEnd('debug');
            var length = left.length;
            var vectorExpression = value + '(';
            for (var i = 0; i < length; ++i) {
                vectorExpression += left[i];
                if (i < (length - 1)) {
                    vectorExpression += ', ';
                }
            }
            vectorExpression += ')';
            return vectorExpression;
        case ExpressionNodeType.LITERAL_REGEX:
            throw new RuntimeError('Error generating style shader: Regular expressions are not supported.');
        case ExpressionNodeType.LITERAL_UNDEFINED:
            throw new RuntimeError('Error generating style shader: undefined is not supported.');
        case ExpressionNodeType.BUILTIN_VARIABLE:
            if (value === 'tiles3d_tileset_time') {
                return 'u_tilesetTime';
            }
    }
};
return Expression;
});
| apache-2.0 |
popcorn-rs/popcorn | src/memory.rs | 21 | pub trait Memory { }
| apache-2.0 |
DanielJoyce/websocket-serial-server | src/lib/manager.rs | 12238 | //! Manages serial port state and communication with clients,
//! and handling requests / responses
use std::collections::HashSet;
use std::sync::mpsc::{Receiver, TryRecvError};
use std::thread;
use base64;
use crate::common::*;
use crate::dynamic_sleep::DynamicSleep;
use crate::errors::*;
use crate::messages::*;
use crate::port_manager::*;
use crate::sub_manager::*;
use crate::writelock_manager::*;
/// Serial port management module supporting one
/// writer and multiple readers
///
/// Clients can lock a port for writing, but
/// subscribe to data from multiple ports for reads
///
/// The Manager takes actions in response to
/// [SerialRequest::*](../messages/index.html) messages sent on
/// on its receiver
pub struct Manager {
  /// Manage write lock status (which subscription, if any, may write to each port)
  writelock_manager: WriteLockManager,
  /// Manage ports (open/close handles and perform the actual reads/writes)
  port_manager: PortManager,
  /// Manage subscriptions (which subscribers receive data from which ports)
  sub_manager: SubscriptionManager,
  /// Receiver for serial requests, tagged with the requesting subscription id
  receiver: Receiver<(String, SerialRequest)>,
  /// Receiver for response subscription requests
  subsc_receiver: SubscReceiver,
}
impl Manager {
/// Constructor.
///
/// * `receiver` - channel of `(subscription id, request)` pairs to service.
/// * `subsc_receiver` - channel of response-subscription registrations.
pub fn new(
  receiver: Receiver<(String, SerialRequest)>,
  subsc_receiver: SubscReceiver,
) -> Manager {
  Manager {
    writelock_manager: WriteLockManager::new(),
    port_manager: PortManager::new(),
    sub_manager: SubscriptionManager::new(),
    // Field-init shorthand (fixes clippy::redundant_field_names).
    receiver,
    subsc_receiver,
  }
}
///Spawn an instance in a new thread.
///
/// Constructs a `Manager` owning both receivers and runs its main loop on a
/// dedicated OS thread. The returned handle can be joined; the thread exits
/// when the request channel disconnects (see `run`).
pub fn spawn(
  receiver: Receiver<(String, SerialRequest)>,
  subsc_receiver: SubscReceiver,
) -> thread::JoinHandle<()> {
  thread::spawn(move || {
    Manager::new(receiver, subsc_receiver).run();
  })
}
/// Main loop: each tick services one pending serial request, pumps data
/// from every open port to its subscribers, drains pending subscription
/// registrations, and drops ports that failed I/O. Exits when the request
/// channel disconnects (all senders hung up).
fn run(&mut self) {
  // Bad ports we couldn't read from
  // A set of SerialResponse::Errors built from
  // from the serial port read/write error responses
  let mut bad_ports = HashSet::<String>::new();
  // Check about 30 times a second
  let mut dynamic_sleep = DynamicSleep::new("manager");
  loop {
    // Sleep for a little bit to avoid pegging cpu
    dynamic_sleep.sleep();
    // Handle serial operation requests
    match self.receiver.try_recv() {
      Err(e) => {
        match e {
          TryRecvError::Empty => {
            // nothing to do
          }
          TryRecvError::Disconnected => {
            // Remote end hung up, time to shutdown
            info!("Shutting down SerialPortManager");
            break;
          }
        }
      }
      Ok(req) => self.handle_serial_request(&req.0, req.1),
    }
    // Check for new data on each port
    for (port_name, result) in self.port_manager.read_all_ports() {
      match result {
        Ok(data) => {
          // Forward as UTF-8 text when possible; otherwise base64-encode
          // the raw bytes (recovered from the failed conversion) so binary
          // data survives the transport.
          let response = match String::from_utf8(data) {
            // We need to send as binary
            Err(e) => SerialResponse::Read {
              port: port_name.to_string(),
              data: base64::encode(&e.into_bytes()),
              base64: Some(true),
            },
            Ok(s) => SerialResponse::Read {
              port: port_name.to_string(),
              data: s,
              base64: Some(false),
            },
          };
          self.broadcast_message_for_port(&port_name, response);
        }
        // Send data reads
        Err(e) => {
          warn!("Error reading port!");
          warn!("{}", e);
          // Remember the port for cleanup at the end of this tick.
          bad_ports.insert(port_name);
        }
      }
    }
    //Handle write requests
    // Drain at most 50 pending subscription registrations per tick so a
    // burst of registrations cannot starve the serial I/O above.
    let mut recv_count = 0;
    while recv_count < 50 {
      recv_count += 1;
      match self.subsc_receiver.try_recv() {
        Ok(sub_request) => self.sub_manager.add_subscription(sub_request),
        Err(e) => {
          match e {
            TryRecvError::Disconnected => {
              // Does this mean all senders have disconnected?
              // Or just one?
              debug!("Got disconnected when trying to get serial request");
            }
            TryRecvError::Empty => break,
          }
        }
      }
    }
    // Cleanup bad serial ports that failed read or write
    // We remove them from everything before
    self.cleanup_bad_ports(&bad_ports);
    bad_ports.clear();
  }
}
/// Handles and dispatches SerialRequest sent by
/// the channel
///
/// `sub_id` identifies the requesting subscription. Any error returned by a
/// handler is logged and converted into a `SerialResponse::Error` sent back
/// to that subscription.
fn handle_serial_request(&mut self, sub_id: &String, msg: SerialRequest) {
  let response = match msg {
    SerialRequest::Open { port } => self.handle_open_port(sub_id, port),
    SerialRequest::WriteLock { port } => self.handle_write_lock(sub_id, port),
    SerialRequest::ReleaseWriteLock { port } => self.handle_release_write_lock(sub_id, port),
    SerialRequest::Write { port, data, base64 } => {
      // base64 flag is optional in the request; absent means plain text.
      self.handle_write_port(sub_id, port, data, base64.unwrap_or(false))
    }
    SerialRequest::Close { port } => self.handle_close_port(sub_id, port),
    SerialRequest::List {} => self.handle_list_ports(sub_id),
  };
  if let Err(e) = response {
    warn!("Error '{}' occured handling serial request message", e);
    // Send error?
    self.send_message(&sub_id, to_serial_response_error(e));
  }
}
/// Handle write port requests
///
/// Requires the subscription to hold the write lock on `port_name`. When
/// `base_64` is true the payload is base64-decoded before being written;
/// otherwise the raw UTF-8 bytes of `data` are written. On success a
/// `SerialResponse::Wrote` confirmation is sent to the subscriber.
fn handle_write_port(
  &mut self,
  sub_id: &String,
  port_name: String,
  data: String,
  base_64: bool,
) -> Result<()> {
  self.check_sub_id(&sub_id)?;
  self.check_owns_writelock(&port_name, &sub_id)?;
  match base_64 {
    true => base64::decode(&data)
      .map_err(|e| ErrorKind::Base64(e).into())
      .and_then(|d| self.port_manager.write_port(&port_name, &d))
      .map(|_| self.send_message(&sub_id, SerialResponse::Wrote { port: port_name })),
    false => self
      .port_manager
      .write_port(&port_name, data.as_bytes())
      .map(|_| self.send_message(&sub_id, SerialResponse::Wrote { port: port_name })),
  }
}
/// Handles a write-lock request: after validating the subscription, asks the
/// writelock manager to lock the port for this subscriber and confirms with a
/// `WriteLocked` response on success.
fn handle_write_lock(&mut self, sub_id: &String, port_name: String) -> Result<()> {
    self.check_sub_id(sub_id)?;
    self.writelock_manager.lock_port(&port_name, sub_id)?;
    self.send_message(sub_id, SerialResponse::WriteLocked { port: port_name });
    Ok(())
}
/// Handle write-lock release requests.
///
/// `port_name == None` releases every lock held by `sub_id` and answers with a
/// single `WriteLockReleased { port: None }`; a specific port releases only
/// that lock (unlock_port returns a Result, so presumably it errors if the
/// lock is not held by `sub_id` — confirm in the writelock manager).
fn handle_release_write_lock(
    &mut self,
    sub_id: &String,
    port_name: Option<String>,
) -> Result<()> {
    self.check_sub_id(sub_id)?;
    match port_name {
        None => {
            // Release everything this subscriber holds.
            self.writelock_manager.unlock_all_ports_for_sub(&sub_id);
            // `port_name` is still `None` here (the pattern binds nothing),
            // so the response carries no port.
            Ok(self.send_message(
                &sub_id,
                SerialResponse::WriteLockReleased { port: port_name },
            ))
        }
        Some(port_name) => self
            .writelock_manager
            .unlock_port(&port_name, &sub_id)
            .map(|_| {
                self.send_message(
                    &sub_id,
                    SerialResponse::WriteLockReleased {
                        port: Some(port_name),
                    },
                )
            }),
    }
}
/// Handles an open request: validates the subscription, opens the port, then
/// registers the subscriber for the port and confirms with an `Opened`
/// response.
fn handle_open_port(&mut self, sub_id: &String, port_name: String) -> Result<()> {
    self.check_sub_id(sub_id)?;
    self.port_manager.open_port(&port_name)?;
    self.sub_manager.add_port(sub_id, &port_name)?;
    self.send_message(sub_id, SerialResponse::Opened { port: port_name });
    Ok(())
}
/// Handle list ports request
fn handle_list_ports(&mut self, sub_id: &String) -> Result<()> {
self.check_sub_id(&sub_id)?;
let port_names: Result<Vec<String>> = self
.port_manager
.list_ports()
.map(|v| v.iter().map(|v| v.port_name.clone()).collect());
let resp = port_names.map(|pns| SerialResponse::List { ports: pns });
self.send_message(
&sub_id,
resp.unwrap_or(SerialResponse::Error {
display: "".to_string(),
description: "".to_string(),
}),
);
Ok(())
}
/// Handles a close request; a missing port name means "close every port this
/// subscriber has open".
fn handle_close_port(&mut self, sub_id: &String, port_name: Option<String>) -> Result<()> {
    if let Some(name) = port_name {
        self.handle_close_port_for_sub(sub_id, name)
    } else {
        self.handle_close_all_ports_for_sub(sub_id)
    }
}
/// Closes a single port for one subscriber: drops the subscription, releases
/// the write lock if this subscriber held it, and confirms with a `Closed`
/// response.
fn handle_close_port_for_sub(&mut self, sub_id: &String, port_name: String) -> Result<()> {
    self.sub_manager.remove_port(sub_id, &port_name)?;
    self.writelock_manager
        .unlock_port_if_locked_by(&port_name, sub_id);
    // self.cleanup_ports_with_no_subs();
    let reply = SerialResponse::Closed {
        port: port_name.clone(),
    };
    self.send_message(sub_id, reply);
    Ok(())
}
/// Handle closing all ports for a subscriber.
///
/// Drops every subscription and write lock held by `sub_id`, then closes any
/// port that is left with no subscribers at all and tells this subscriber
/// about each closure.
fn handle_close_all_ports_for_sub(&mut self, sub_id: &String) -> Result<()> {
    self.sub_manager.clear_ports(Some(&sub_id));
    self.writelock_manager.unlock_all_ports_for_sub(sub_id);
    // Close ports with no subscribers left after the removal above.
    let open_ports = self.port_manager.open_ports();
    let subscribed_ports = self.sub_manager.subscribed_ports();
    let ports_with_no_subs = open_ports.difference(&subscribed_ports);
    // For each open port that isn't subscribed,
    for port_to_close in ports_with_no_subs {
        // close it, REDUNDANT?
        self.port_manager.close_port(&port_to_close);
        // remove the write lock, REDUNDANT?
        self.writelock_manager.clear_lock(&port_to_close);
        // Let the subscriber know it's closed.
        let close_resp = SerialResponse::Closed {
            port: port_to_close.clone(),
        };
        self.send_message(&sub_id, close_resp);
    }
    Ok(())
}
/// Tears down every port that failed a read or write: first notifies the
/// port's subscribers (error, then closed), then closes the port and removes
/// its write lock and subscriptions. The broadcast happens before the
/// subscription removal so the messages still reach their recipients.
fn cleanup_bad_ports(&mut self, bad_ports: &HashSet<String>) {
    for name in bad_ports.iter() {
        // Tell the port's subscribers it failed...
        let error_reply =
            to_serial_response_error(ErrorKind::PortReadError(name.to_owned()).into());
        self.broadcast_message_for_port(name, error_reply);
        // ...and that it has been closed.
        let closed_reply = SerialResponse::Closed { port: name.clone() };
        self.broadcast_message_for_port(name, closed_reply);
        // Now actually close it and scrub all bookkeeping.
        self.port_manager.close_port(name);
        self.writelock_manager.clear_lock(name);
        self.sub_manager.remove_port_from_all(name);
    }
}
/// Sends one message to a single subscriber; if the send fails, the failed
/// subscriber is cleaned up via `cleanup_bad_subs`.
fn send_message(&mut self, sub_id: &String, msg: SerialResponse) {
    if let Err(send_err) = self.sub_manager.send_message(sub_id, msg) {
        warn!("Error sending serial response to sub_id '{}'", sub_id);
        self.cleanup_bad_subs(vec![send_err]);
    }
}
/// Broadcasts a message to every subscriber, then tears down any subscriber
/// whose send failed.
fn broadcast_message(&mut self, msg: SerialResponse) {
    let failed = self.sub_manager.broadcast_message(msg);
    self.cleanup_bad_subs(failed);
}
/// Broadcasts a message to every subscriber of the given port, then tears
/// down any subscriber whose send failed.
fn broadcast_message_for_port(&mut self, port_name: &String, msg: SerialResponse) {
    let failed = self.sub_manager.broadcast_message_for_port(port_name, msg);
    self.cleanup_bad_subs(failed);
}
/// Cleanup any bad subs where a message send failed
fn cleanup_bad_subs(&mut self, bad_subs: Vec<Error>) {
for e in bad_subs {
if let Error(ErrorKind::SubscriberSendError(sub_id), _) = e {
// Remove subscriptions
self.sub_manager.end_subscription(&sub_id);
// Remove all write locks held by dead subscription
self.writelock_manager.unlock_all_ports_for_sub(&sub_id);
}
}
}
/// Check if a subscription exists for a given sub_id.
///
/// Pure delegation to the subscription manager; used as a guard at the top
/// of the request handlers above.
fn check_sub_id(&self, sub_id: &String) -> Result<()> {
    self.sub_manager.check_subscription_exists(sub_id)
}
/// Check if port_name has a write lock for sub_id.
/// Errors if the port is not writelocked by sub_id.
fn check_owns_writelock(&self, port_name: &String, sub_id: &String) -> Result<()> {
    // Pure delegation; the lock bookkeeping lives in the writelock manager.
    self
        .writelock_manager
        .check_owns_write_lock(port_name, sub_id)
}
}
| apache-2.0 |
wukonggg/mz-g | src/main/webapp/assets/js/mz.select_category.js | 3742 | /*! MessJS | mess.select_category.js | 0.1.0 | 选择商品类别JS | by wukong(wukonggg@139.com) */
/** 依赖:jquery.js - ajax、dom等 */
window.mz = window.mz || {};
/**
 * Loads product-category options into a <select> element via AJAX.
 *
 * @param contextPath  web application context path
 * @param selectId     id of the target <select> element
 * @param errorMsg     message to show when the request fails
 * @param currCateCode currently selected category code (optional)
 * @param pcode        parent category code
 */
window.mz.loadCategory = function(contextPath, selectId, errorMsg, currCateCode, pcode) {
    $.ajax({
        type: 'POST',
        url: contextPath + "/category/list.io?pcode=" + pcode,
        dataType: "json",
        success: function(data) {
            var template = "<option value='#code#' selected>#title#</option> ";
            var options = "";
            $.each(data, function(idx, item) {
                // Fill in code/title; keep the "selected" attribute only for
                // the currently chosen category.
                var option = template.replace("#code#", item.code).replace("#title#", item.title);
                if (item.code != currCateCode) {
                    option = option.replace("selected", "");
                }
                options += option;
            });
            $("#" + selectId).append(options);
        },
        error: function() {
            var msg = (errorMsg === undefined || "" === errorMsg) ? "出错啦!快去找悟空!" : errorMsg;
            alert(msg);
        }
    });
};
/**
 * Loads product-category options into a checkbox button group.
 *
 * @param params {contextPath: context path,
 *        selectId: id of the container element,
 *        errorMsg: message shown on failure,
 *        currCateCodes: comma-separated codes to pre-select (optional),
 *        pcode: parent category code}
 */
window.mz.loadCategoryIntoButtonGroup = function(params) {
    var contextPath = params.contextPath;
    var errorMsg = params.errorMsg;
    // Guard: indexOf below would throw on undefined.
    var currCateCodes = params.currCateCodes || "";
    var pcode = params.pcode;
    var selectId = params.selectId;
    $.ajax({
        type: 'POST',
        url: contextPath + "/category/list.io?pcode=" + pcode,
        dataType: "json",
        success: function(data){
            var template = '<label name="lblCateCodes" class="am-btn am-btn-default mz-tooltip" title="#title#"><input type="checkbox" value="#code#"> #simple_title#</label>';
            var buttons = "";
            $.each(data, function(i, lin){
                var button = template.replace("#code#", lin.code).replace("#title#", lin.title).replace("#simple_title#", lin.title.substring(0,1));
                buttons = buttons + button;
            });
            $("#" + selectId).append(buttons);
        },
        error: function() {
            var msg = (errorMsg === undefined || ""=== errorMsg) ? "出错啦!快去找悟空!" : errorMsg;
            alert(msg);
        }
    });
    //TODO -OPT 等待动态生成的catecodes加载完成后再去设置点击状态。暂时先用timeout
    setTimeout(function(){
        var lblCateCodes = $("label[name='lblCateCodes']");
        lblCateCodes.each(function(i) {
            // FIX: a <label> has no value property, so $(this).val() never
            // returned the category code and pre-selection was broken. Read
            // the nested checkbox's value instead, exactly as the click
            // handler below does.
            if (currCateCodes.indexOf($(this).children().val()) >= 0) {
                $(this).trigger("click");
            }
        });
        var cc = ", " + currCateCodes;
        lblCateCodes.click(function () {
            var val = $(this).children().val();
            // Toggle the clicked code in the ", "-separated accumulator.
            if (cc.indexOf(val) < 0) {
                cc = cc + ", " + val;
            } else {
                cc = cc.replace(", " + val, "");
            }
            // Sync the hidden #cateCodes field, dropping the leading comma.
            $("#cateCodes").val(cc.substring(1));
        });
    },500);
};
| apache-2.0 |
jivesoftware/upena | upena-deployable/src/main/java/com/jivesoftware/os/upena/deployable/endpoints/ui/LoadBalancersPluginEndpoints.java | 5768 | package com.jivesoftware.os.upena.deployable.endpoints.ui;
import com.google.common.collect.Lists;
import com.jivesoftware.os.upena.deployable.ShiroRequestHelper;
import com.jivesoftware.os.upena.deployable.region.LoadBalancersPluginRegion;
import com.jivesoftware.os.upena.deployable.region.LoadBalancersPluginRegion.LoadBalancersPluginRegionInput;
import com.jivesoftware.os.upena.deployable.soy.SoyService;
import java.util.Collections;
import java.util.List;
import javax.inject.Singleton;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.apache.commons.lang.StringUtils;
/**
 * JAX-RS endpoints backing the load-balancers UI page: renders the page,
 * handles create/update/delete actions, and handles per-balancer config
 * updates. All responses are Soy-rendered HTML; CSRF tokens are checked via
 * {@link ShiroRequestHelper}.
 */
@Singleton
@Path("/ui/loadbalancers")
public class LoadBalancersPluginEndpoints {

    private final ShiroRequestHelper shiroRequestHelper;
    private final SoyService soyService;
    private final LoadBalancersPluginRegion pluginRegion;

    public LoadBalancersPluginEndpoints(@Context ShiroRequestHelper shiroRequestHelper,
        @Context SoyService soyService,
        @Context LoadBalancersPluginRegion pluginRegion) {
        this.shiroRequestHelper = shiroRequestHelper;
        this.soyService = soyService;
        this.pluginRegion = pluginRegion;
    }

    /**
     * Renders the load-balancers page with an empty (default) region input.
     */
    @GET
    @Produces(MediaType.TEXT_HTML)
    public Response loadBalancers(@Context HttpServletRequest httpRequest) {
        return shiroRequestHelper.call("lb", (csrfToken) -> {
            String rendered = soyService.renderPlugin(httpRequest.getRemoteUser(),
                csrfToken,
                pluginRegion,
                new LoadBalancersPluginRegionInput("", "", "", "", 0, 0, Collections.emptyList(), "", "", "", Collections.emptyList(),
                    Collections.emptyList(), Collections.emptyMap(), "", "", "", "", "", "", ""));
            return Response.ok(rendered);
        });
    }

    /**
     * Handles form-posted actions (add/remove/update a load balancer); the
     * concrete behavior is selected by the {@code action} form field and
     * implemented by the plugin region.
     */
    @POST
    @Produces(MediaType.TEXT_HTML)
    @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
    public Response action(@Context HttpServletRequest httpRequest,
        @FormParam("csrfToken") String csrfToken,
        @FormParam("key") @DefaultValue("") String key,
        @FormParam("name") @DefaultValue("") String name,
        @FormParam("description") @DefaultValue("") String description,
        @FormParam("clusterKey") @DefaultValue("") String clusterKey,
        @FormParam("cluster") @DefaultValue("") String cluster,
        @FormParam("serviceKey") @DefaultValue("") String serviceKey,
        @FormParam("service") @DefaultValue("") String service,
        @FormParam("releaseKey") @DefaultValue("") String releaseGroupKey,
        @FormParam("release") @DefaultValue("") String releaseGroup,
        @FormParam("action") @DefaultValue("") String action) {
        return shiroRequestHelper.csrfCall(csrfToken, "lb/actions", (csrfToken1) -> {
            String rendered = soyService.renderPlugin(httpRequest.getRemoteUser(), csrfToken1, pluginRegion,
                new LoadBalancersPluginRegionInput(key, name, description, null, -1, -1, null, null, null,
                    null, null, null, Collections.emptyMap(), clusterKey, cluster, serviceKey, service, releaseGroupKey, releaseGroup,
                    action));
            return Response.ok(rendered);
        });
    }

    /**
     * Updates the listener/network configuration of the load balancer
     * identified by {@code key}; list-valued fields arrive as comma-separated
     * strings and are split by {@link #sanitizedList(String)}.
     */
    @POST
    @Path("/config")
    @Produces(MediaType.TEXT_HTML)
    @Consumes(MediaType.APPLICATION_FORM_URLENCODED)
    public Response config(@Context HttpServletRequest httpRequest,
        @FormParam("csrfToken") String csrfToken,
        @FormParam("key") @DefaultValue("") String key,
        @FormParam("scheme") @DefaultValue("") String scheme,
        @FormParam("loadBalancerPort") @DefaultValue("-1") int loadBalancerPort,
        @FormParam("instancePort") @DefaultValue("-1") int instancePort,
        @FormParam("availabilityZones") @DefaultValue("") String availabilityZones,
        @FormParam("protocol") @DefaultValue("") String protocol,
        @FormParam("certificate") @DefaultValue("") String certificate,
        @FormParam("serviceProtocol") @DefaultValue("") String serviceProtocol,
        @FormParam("securityGroups") @DefaultValue("") String securityGroups,
        @FormParam("subnets") @DefaultValue("") String subnets) {
        return shiroRequestHelper.csrfCall(csrfToken, "lb/config", (csrfToken1) -> {
            String rendered = soyService.renderPlugin(httpRequest.getRemoteUser(),
                csrfToken1,
                pluginRegion,
                new LoadBalancersPluginRegionInput(key, null, null,
                    scheme,
                    loadBalancerPort,
                    instancePort,
                    sanitizedList(availabilityZones),
                    protocol,
                    certificate,
                    serviceProtocol,
                    sanitizedList(securityGroups),
                    sanitizedList(subnets),
                    Collections.emptyMap(),
                    null, null, null, null, null, null,
                    "update"));
            return Response.ok(rendered);
        });
    }

    /**
     * Splits a comma-separated string into trimmed, non-blank entries;
     * null or empty input yields an empty list.
     */
    private List<String> sanitizedList(String string) {
        if (string == null) {
            return Collections.emptyList();
        }
        if (string.isEmpty()) {
            return Collections.emptyList();
        }
        List<String> sanitized = Lists.newArrayList();
        for (String s : string.split(",")) {
            s = s.trim();
            if (!StringUtils.isBlank(s)) {
                sanitized.add(s);
            }
        }
        return sanitized;
    }
}
| apache-2.0 |
armstrong/armstrong.apps.content | fabfile.py | 520 | from armstrong.dev.tasks import *
# armstrong.dev task configuration for the armstrong.apps.content package.
# The wildcard import above pulls in the shared dev/test tasks; the names
# below configure how they run.

# Minimal Django settings used when running this app's test suite.
settings = {
    'DEBUG': True,
    'INSTALLED_APPS': (
        'django.contrib.admin',
        'django.contrib.auth',
        'django.contrib.contenttypes',
        'django.contrib.sessions',
        'django.contrib.sites',
        'armstrong.core.arm_access',
        'armstrong.core.arm_content',
        'armstrong.core.arm_sections',
        'armstrong.apps.content',
        'south',
        'taggit',
    ),
    'SITE_ID': 1,
}

# Primary app name and the apps whose tests should be run.
main_app = "content"
tested_apps = (main_app, )
| apache-2.0 |
googleapis/google-api-java-client-services | clients/google-api-services-dialogflow/v2beta1/1.31.0/com/google/api/services/dialogflow/v2beta1/model/GoogleCloudDialogflowV3alpha1GenericKnowledgeOperationMetadata.java | 2466 | /*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.dialogflow.v2beta1.model;
/**
* Metadata in google::longrunning::Operation for Knowledge operations.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Dialogflow API. For a detailed explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GoogleCloudDialogflowV3alpha1GenericKnowledgeOperationMetadata extends com.google.api.client.json.GenericJson {

  // NOTE: generated model class (google-api-java-client-services); prefer
  // regenerating over hand-editing when the Dialogflow API surface changes.

  /**
   * Required. Output only. The current state of this operation.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String state;

  /**
   * Required. Output only. The current state of this operation.
   * @return value or {@code null} for none
   */
  public java.lang.String getState() {
    return state;
  }

  /**
   * Required. Output only. The current state of this operation.
   * @param state state or {@code null} for none
   */
  public GoogleCloudDialogflowV3alpha1GenericKnowledgeOperationMetadata setState(java.lang.String state) {
    this.state = state;
    return this;
  }

  // The overrides below narrow the return type to enable call chaining.
  @Override
  public GoogleCloudDialogflowV3alpha1GenericKnowledgeOperationMetadata set(String fieldName, Object value) {
    return (GoogleCloudDialogflowV3alpha1GenericKnowledgeOperationMetadata) super.set(fieldName, value);
  }

  @Override
  public GoogleCloudDialogflowV3alpha1GenericKnowledgeOperationMetadata clone() {
    return (GoogleCloudDialogflowV3alpha1GenericKnowledgeOperationMetadata) super.clone();
  }

}
| apache-2.0 |
gitee2008/glaf | src/main/java/com/glaf/generator/tools/JPAEntityToXmlMapping.java | 9740 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.glaf.generator.tools;
import java.util.*;
import org.dom4j.Document;
import org.dom4j.io.OutputFormat;
import org.apache.commons.lang3.StringUtils;
import org.junit.Test;
import javassist.*;
import com.glaf.core.domain.ColumnDefinition;
import com.glaf.core.domain.TableDefinition;
import com.glaf.core.util.AnnotationUtils;
import com.glaf.core.util.Dom4jUtils;
import com.glaf.core.util.FileUtils;
import com.glaf.core.util.StringTools;
import com.glaf.generator.xml.XmlWriter;
/**
 * Generates *.mapping.xml files from JPA-annotated entity classes by
 * inspecting them with javassist.
 *
 * Improvements over the previous version: the Java-type → mapping-type
 * translation and the JPA annotation handling, which were duplicated between
 * the field loop and the getter loop of {@code convert}, are extracted into
 * private helpers, and the debug printlns have been removed.
 */
public class JPAEntityToXmlMapping {

	public final static String newline = System.getProperty("line.separator");

	protected ClassPool pool = null;

	protected CtClass ctClass;

	public JPAEntityToXmlMapping() {
		pool = new ClassPool(null);
		pool.appendSystemPath();
	}

	/**
	 * Maps a Java type name (primitive or wrapper) to the simple type keyword
	 * used in the generated mapping XML; returns null for unsupported types,
	 * which causes the member to be skipped.
	 */
	private static String toMappingType(String typeName) {
		if (StringUtils.equals(typeName, "java.lang.String")) {
			return "String";
		}
		if (StringUtils.equals(typeName, "java.util.Date")) {
			return "Date";
		}
		if (StringUtils.equals(typeName, "boolean") || StringUtils.equals(typeName, "java.lang.Boolean")) {
			return "Boolean";
		}
		if (StringUtils.equals(typeName, "int") || StringUtils.equals(typeName, "java.lang.Integer")) {
			return "Integer";
		}
		if (StringUtils.equals(typeName, "long") || StringUtils.equals(typeName, "java.lang.Long")) {
			return "Long";
		}
		if (StringUtils.equals(typeName, "double") || StringUtils.equals(typeName, "java.lang.Double")) {
			return "Double";
		}
		return null;
	}

	/**
	 * Copies JPA {@code @Column} metadata onto the column definition.
	 *
	 * @return true when an {@code @Id} annotation is present on the member
	 */
	private static boolean applyJpaAnnotations(Object[] anns, ColumnDefinition fieldDefinition) {
		boolean isPK = false;
		if (anns != null && anns.length > 0) {
			for (Object object : anns) {
				if (object instanceof javax.persistence.Column) {
					javax.persistence.Column col = (javax.persistence.Column) object;
					fieldDefinition.setColumnName(col.name());
					if (col.length() > 0) {
						fieldDefinition.setLength(col.length());
					}
					fieldDefinition.setNullable(col.nullable());
					fieldDefinition.setUpdatable(col.updatable());
					fieldDefinition.setUnique(col.unique());
				}
				if (object instanceof javax.persistence.Id) {
					isPK = true;
				}
			}
		}
		return isPK;
	}

	/**
	 * Registers the column on the table definition: the {@code @Id} column
	 * becomes the id column, everything else a regular column. Members with
	 * no resolved type or no {@code @Column} name are ignored.
	 */
	private static void register(TableDefinition tableDefinition, ColumnDefinition fieldDefinition, boolean isPK) {
		if (fieldDefinition.getType() != null && fieldDefinition.getColumnName() != null) {
			if (isPK) {
				tableDefinition.setIdColumn(fieldDefinition);
			} else {
				tableDefinition.addColumn(fieldDefinition);
			}
		}
	}

	/**
	 * Builds a TableDefinition from a JPA entity class by inspecting its
	 * public fields and getter methods.
	 *
	 * @param className fully qualified entity class name
	 * @throws Exception when the class cannot be loaded or its annotations
	 *             cannot be read
	 */
	public TableDefinition convert(String className) throws Exception {
		TableDefinition tableDefinition = new TableDefinition();
		String simpleName = className;
		String packageName = "";
		if (className.indexOf(".") != -1) {
			simpleName = className.substring(className.lastIndexOf(".") + 1, className.length());
			// Drop the class name, then the last package segment
			// (e.g. com.glaf.foo.domain.Bar -> com.glaf.foo).
			packageName = className.substring(0, className.lastIndexOf("."));
			packageName = packageName.substring(0, packageName.lastIndexOf("."));
		}
		tableDefinition.setClassName(simpleName);
		tableDefinition.setTitle(simpleName);
		tableDefinition.setEnglishTitle(simpleName);
		tableDefinition.setEntityName(simpleName);
		tableDefinition.setPackageName(packageName);

		CtClass ctClass = pool.getCtClass(className);

		// Table name from the class-level @Table annotation.
		Object[] anns = ctClass.getAnnotations();
		if (anns != null && anns.length > 0) {
			for (Object object : anns) {
				if (object instanceof javax.persistence.Table) {
					javax.persistence.Table table = (javax.persistence.Table) object;
					tableDefinition.setTableName(table.name());
				}
			}
		}

		// Columns declared on public fields.
		CtField[] fields = ctClass.getFields();
		if (fields != null && fields.length > 0) {
			for (CtField field : fields) {
				ColumnDefinition fieldDefinition = new ColumnDefinition();
				fieldDefinition.setTitle(StringTools.upper(field.getName()));
				fieldDefinition.setEditable(true);
				fieldDefinition.setEnglishTitle(StringTools.upper(field.getName()));
				fieldDefinition.setName(field.getName());
				fieldDefinition.setType(toMappingType(field.getType().getName()));
				boolean isPK = applyJpaAnnotations(field.getAnnotations(), fieldDefinition);
				if (isPK) {
					// Primary-key fields are not editable in the generated UI.
					fieldDefinition.setEditable(false);
				}
				register(tableDefinition, fieldDefinition, isPK);
			}
		}

		// Columns declared on getter methods.
		CtMethod[] methods = ctClass.getMethods();
		if (methods != null && methods.length > 0) {
			for (CtMethod method : methods) {
				if (!method.getName().startsWith("get")) {
					continue;
				}
				String propertyName = method.getName().substring(3, method.getName().length());
				ColumnDefinition fieldDefinition = new ColumnDefinition();
				fieldDefinition.setTitle(StringTools.upper(propertyName));
				fieldDefinition.setEnglishTitle(StringTools.upper(propertyName));
				fieldDefinition.setName(StringTools.lower(propertyName));
				fieldDefinition.setEditable(true);
				fieldDefinition.setType(toMappingType(method.getReturnType().getName()));
				boolean isPK = applyJpaAnnotations(method.getAnnotations(), fieldDefinition);
				register(tableDefinition, fieldDefinition, isPK);
			}
		}
		return tableDefinition;
	}

	/**
	 * Generates mapping files for every JPA entity found under com.glaf.
	 */
	@Test
	public void genAll() {
		Collection<String> classes = AnnotationUtils.findJPAEntity("com.glaf");
		for (String cls : classes) {
			this.gen(cls);
		}
	}

	/**
	 * Generates mapping files for every JPA entity under the given package.
	 */
	public void genAll(String packageName) {
		Collection<String> classes = AnnotationUtils.findJPAEntity(packageName);
		for (String cls : classes) {
			this.gen(cls);
		}
	}

	/**
	 * Converts one entity class and writes its *.mapping.xml file under
	 * ./codegen/jpa/mapping/. Errors are reported to stderr rather than
	 * propagated so that a single bad entity does not abort a batch run.
	 */
	public void gen(String className) {
		try {
			TableDefinition tableDefinition = this.convert(className);
			OutputFormat format = OutputFormat.createPrettyPrint();
			format.setPadText(true);
			format.setNewlines(true);
			format.setIndentSize(4);
			format.setEncoding("UTF-8");
			format.setLineSeparator(newline);
			format.setNewLineAfterDeclaration(true);
			format.setSuppressDeclaration(true);
			XmlWriter xmlWriter = new XmlWriter();
			Document d = xmlWriter.write(tableDefinition);
			byte[] bytes = Dom4jUtils.getBytesFromDocument(d, format);
			String toFile = "./codegen/jpa/mapping/" + tableDefinition.getClassName() + ".mapping.xml";
			FileUtils.save(toFile, bytes);
			System.out.println("文件保存到:" + toFile);
		} catch (Exception e) {
			e.printStackTrace();
		}
	}

	public static void main(String[] args) {
		JPAEntityToXmlMapping gen = new JPAEntityToXmlMapping();
		// NOTE: this first pass over com.glaf duplicates the genAll() work
		// below; kept as-is for backward-compatible CLI behavior/output.
		long start = System.currentTimeMillis();
		Collection<String> entities = AnnotationUtils.findJPAEntity("com.glaf");
		long time = System.currentTimeMillis() - start;
		for (String str : entities) {
			System.out.println(str);
			gen.gen(str);
		}
		System.out.println("time:" + time);
		if (args != null && args.length > 0) {
			gen.genAll(args[0]);
		} else {
			gen.genAll();
		}
	}
} | apache-2.0 |
hayarobi/simple-config | src/main/java/com/github/hayarobi/simple_config/annotation/Config.java | 954 | package com.github.hayarobi.simple_config.annotation;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
 * Marks a class as a configuration-holder object whose properties are
 * populated from a configuration source.
 *
 * @author Hayarobi Park
 */
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface Config {
	/**
	 * Manually assigns the configuration object's name. When no name is
	 * given, the configuration is looked up by the annotated class's FQDN.
	 *
	 * @return {@link String} the name of the configuration source
	 */
	public String value() default "[unassigned]";

	/**
	 * Whether properties without a {@code Required} annotation are mandatory.
	 *
	 * @return true when un-annotated properties are treated as required,
	 *         false when they are treated as optional
	 */
	public boolean propRequired() default false;
}
| apache-2.0 |
msmm/yatelayouknow | Resources/controllers/twLibrary/infomodalview.js | 1167 | /**
* Modal Info View module - Titanium JS
* @author César Cavazos - @cesarcvz
* Based on: https://github.com/appcelerator/KitchenSink
*/
/**
 * Shows a transient informational overlay anywhere in the app: a small
 * rounded dark window with a centered label that fades out (and, on iOS,
 * shrinks away) after a short delay, then closes itself.
 *
 * @param {String} text   message to display
 * @param {Object} params reserved for future options (images/icons)
 */
exports.showInfo = function(text, params) {
    //TODO: Add params like images/icons
    params = params || {};
    var win = Titanium.UI.createWindow({
        height:80,
        width:200,
        touchEnabled:false
    });
    var backdrop = Titanium.UI.createView({
        height:80,
        width:200,
        backgroundColor:'#000',
        borderRadius:10,
        opacity:0.8,
        touchEnabled:false
    });
    win.add(backdrop);
    var label = Titanium.UI.createLabel({
        text:text,
        color:'#fff',
        textAlign:'center',
        font:{fontSize:18,fontWeight:'bold'},
        height:Ti.UI.SIZE,
        width:Ti.UI.SIZE
    });
    win.add(label);
    win.open();
    // Fade out after 1.5s; iOS additionally slides/scales the window away.
    var fadeOut = {delay: 1500, duration: 300, opacity: 0.1};
    if (Ti.Platform.osname == "iPhone OS") {
        fadeOut.transform = Ti.UI.create2DMatrix().translate(-200,200).scale(0);
    }
    win.animate(fadeOut, function(){ win.close(); });
}; | apache-2.0 |
o3project/mlo-net | mlo-srv/src/test/java/org/o3project/mlo/server/action/test/usecase/UsecaseFlow100Test.java | 1443 | /**
* ActionUsecaseTest.java
* (C) 2013,2015, Hitachi, Ltd.
*/
package org.o3project.mlo.server.action.test.usecase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * UsecaseFlow100Test
 *
 * Scenario test: creates 100 flows in a slice, modifies them, and deletes
 * the slice, asserting the MLO request/response XML pairs at each step via
 * {@code _assertMloAction}.
 */
public class UsecaseFlow100Test extends UsecaseTestBase {

	/* (non-Javadoc)
	 * @see org.o3project.mlo.server.action.test.usecase.UsecaseTestBase#setUp()
	 */
	@Before
	protected void setUp() throws Exception {
		super.setUp();
		// Registers the DI container configuration for this scenario.
		include("usecase.flow100.app.dicon");
	}

	/* (non-Javadoc)
	 * @see org.o3project.mlo.server.action.test.usecase.UsecaseTestBase#tearDown()
	 */
	@After
	protected void tearDown() throws Exception {
		super.tearDown();
	}

	@Test
	public void testUsecase_normal_scenario() throws Exception {
		// Creates 100 flows in a slice.
		_assertMloAction(
				"CREATE", "usecaseFlow100/01.create.req.xml",
				"usecaseFlow100/01.create.res.xml");
		_assertMloAction(
				"READ", "usecaseFlow100/read.00000001.req.xml",
				"usecaseFlow100/02.read.00000001.res.xml");

		// Modifies 100 flows
		_assertMloAction(
				"UPDATE", "usecaseFlow100/03.update.mod.req.xml",
				"usecaseFlow100/03.update.mod.res.xml");
		_assertMloAction(
				"READ", "usecaseFlow100/read.00000001.req.xml",
				"usecaseFlow100/04.read.00000001.res.xml");

		// Deletes the slice.
		_assertMloAction(
				"DELETE", "usecaseFlow100/05.delete.req.xml",
				"usecaseFlow100/05.delete.res.xml");
	}
}
| apache-2.0 |
b-ggs/indinero-mysql-vagrant | increase-disk-size.rb | 1664 | HOSTNAME = 'indinero-mysql-vagrant'
VMDK_FILENAME = 'ubuntu-xenial-16.04-cloudimg.vmdk'
VDI_FILENAME = 'ubuntu-xenial-16.04-cloudimg.vdi'
VBOX_STORAGE_CONTROLLER = 'SCSI'
VDI_SIZE = 81920
### Check vagrant machine status
puts 'Checking vagrant machine state...'
status = `vagrant status`.chomp
if status =~ /running/
puts 'Vagrant machine is still running. Running `vagrant halt`...'
`vagrant halt`
else
puts 'All good! Proceeding...'
end
### Get VM directory
vbox_vm_directory = `VBoxManage list systemproperties | grep 'Default machine folder' | cut -b 24-`.chomp.strip
vbox_vm_name = `VBoxManage list vms | grep #{HOSTNAME} | awk -F'"' '$0=$2'`.chomp
indinero_vm_directory = File.join vbox_vm_directory, vbox_vm_name
### Convert .vmdk to .vdi
vmdk_path = File.join indinero_vm_directory, VMDK_FILENAME
vdi_path = File.join indinero_vm_directory, VDI_FILENAME
if !File.exists? vmdk_path
puts 'VMDK does not exist. Exiting.'
exit
elsif File.exists? vdi_path
puts 'VDI already exists. Exiting.'
exit
end
puts "Converting #{VMDK_FILENAME} to #{VDI_FILENAME}..."
`VBoxManage clonehd "#{vmdk_path}" "#{vdi_path}" --format VDI`
puts "Success!"
### Increase .vdi size
puts "Resizing #{VDI_FILENAME} to #{VDI_SIZE}..."
`VBoxManage modifymedium disk "#{vdi_path}" --resize #{VDI_SIZE}`
### Attach .vdi to virtual machine
puts "Attaching #{VDI_FILENAME} to #{vbox_vm_name}..."
`VBoxManage storageattach #{vbox_vm_name} --storagectl "#{VBOX_STORAGE_CONTROLLER}" --port 0 --device 0 --type hdd --medium "#{vdi_path}"`
### Remove old .vmdk
puts "Removing #{VMDK_FILENAME}..."
`rm "#{vmdk_path}"`
puts 'Done! You can now run `vagrant up`.'
| apache-2.0 |
shantanusharma/closure-compiler | test/com/google/javascript/jscomp/LiveVariablesAnalysisTest.java | 20359 | /*
* Copyright 2017 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import com.google.javascript.jscomp.AbstractCompiler.LifeCycleStage;
import com.google.javascript.jscomp.CompilerOptions.LanguageMode;
import com.google.javascript.jscomp.DataFlowAnalysis.FlowState;
import com.google.javascript.rhino.InputId;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.Token;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests for {@link LiveVariablesAnalysis}. Test cases are snippets of a function and assertions are
* made at the instruction labeled with {@code X}.
*
* @author simranarora@google.com (Simran Arora)
*/
@RunWith(JUnit4.class)
public final class LiveVariablesAnalysisTest {
private LiveVariablesAnalysis liveness = null;
@Test
public void testStraightLine() {
// A sample of simple straight line of code with different liveness changes.
assertNotLiveBeforeX("X:var a;", "a");
assertNotLiveAfterX("X:var a;", "a");
assertNotLiveAfterX("X:var a=1;", "a");
assertLiveAfterX("X:var a=1; a()", "a");
assertNotLiveBeforeX("X:var a=1; a()", "a");
assertLiveBeforeX("var a;X:a;", "a");
assertLiveBeforeX("var a;X:a=a+1;", "a");
assertLiveBeforeX("var a;X:a+=1;", "a");
assertLiveBeforeX("var a;X:a++;", "a");
assertNotLiveAfterX("var a,b;X:b();", "a");
assertNotLiveBeforeX("var a,b;X:b();", "a");
assertLiveBeforeX("var a,b;X:b(a);", "a");
assertLiveBeforeX("var a,b;X:b(1,2,3,b(a + 1));", "a");
assertNotLiveBeforeX("var a,b;X:a=1;b(a)", "a");
assertNotLiveAfterX("var a,b;X:b(a);b()", "a");
assertLiveBeforeX("var a,b;X:b();b=1;a()", "b");
assertLiveAfterX("X:a();var a;a()", "a");
assertNotLiveAfterX("X:a();var a=1;a()", "a");
assertLiveBeforeX("var a,b;X:a,b=1", "a");
}
@Test
public void testProperties() {
// Reading property of a local variable makes that variable live.
assertLiveBeforeX("var a,b;X:a.P;", "a");
// Assigning to a property doesn't kill "a". It makes it live instead.
assertLiveBeforeX("var a,b;X:a.P=1;b()", "a");
assertLiveBeforeX("var a,b;X:a.P.Q=1;b()", "a");
// An "a" in a different context.
assertNotLiveAfterX("var a,b;X:b.P.Q.a=1;", "a");
assertLiveBeforeX("var a,b;X:b.P.Q=a;", "a");
}
@Test
public void testConditions() {
// Reading the condition makes the variable live.
assertLiveBeforeX("var a,b;X:if(a){}", "a");
assertLiveBeforeX("var a,b;X:if(a||b) {}", "a");
assertLiveBeforeX("var a,b;X:if(b||a) {}", "a");
assertLiveBeforeX("var a,b;X:if(b||b(a)) {}", "a");
assertNotLiveAfterX("var a,b;X:b();if(a) {}", "b");
// We can kill within a condition as well.
assertNotLiveAfterX("var a,b;X:a();if(a=b){}a()", "a");
assertNotLiveAfterX("var a,b;X:a();while(a=b){}a()", "a");
// The kill can be "conditional" due to short circuit.
assertNotLiveAfterX("var a,b;X:a();if((a=b)&&b){}a()", "a");
assertNotLiveAfterX("var a,b;X:a();while((a=b)&&b){}a()", "a");
assertLiveBeforeX("var a,b;a();X:if(b&&(a=b)){}a()", "a"); // Assumed live.
assertLiveBeforeX("var a,b;a();X:if(a&&(a=b)){}a()", "a");
assertLiveBeforeX("var a,b;a();X:while(b&&(a=b)){}a()", "a");
assertLiveBeforeX("var a,b;a();X:while(a&&(a=b)){}a()", "a");
}
@Test
public void testArrays() {
assertLiveBeforeX("var a;X:a[1]", "a");
assertLiveBeforeX("var a,b;X:b[a]", "a");
assertLiveBeforeX("var a,b;X:b[1,2,3,4,b(a)]", "a");
assertLiveBeforeX("var a,b;X:b=[a,'a']", "a");
assertNotLiveBeforeX("var a,b;X:a=[];b(a)", "a");
// Element assignment doesn't kill the array.
assertLiveBeforeX("var a;X:a[1]=1", "a");
}
@Test
public void testTwoPaths() {
// Both Paths.
assertLiveBeforeX("var a,b;X:if(b){b(a)}else{b(a)};", "a");
// Only one path.
assertLiveBeforeX("var a,b;X:if(b){b(b)}else{b(a)};", "a");
assertLiveBeforeX("var a,b;X:if(b){b(a)}else{b(b)};", "a");
// None of the paths.
assertNotLiveAfterX("var a,b;X:if(b){b(b)}else{b(b)};", "a");
// At the very end.
assertLiveBeforeX("var a,b;X:if(b){b(b)}else{b(b)}a();", "a");
// The loop might or might not be executed.
assertLiveBeforeX("var a;X:while(param1){a()};", "a");
assertLiveBeforeX("var a;X:while(param1){a=1};a()", "a");
// Same idea with if.
assertLiveBeforeX("var a;X:if(param1){a()};", "a");
assertLiveBeforeX("var a;X:if(param1){a=1};a()", "a");
// This is different in DO. We know for sure at least one iteration is
// executed.
assertNotLiveAfterX("X:var a;do{a=1}while(param1);a()", "a");
}
@Test
public void testThreePaths() {
assertLiveBeforeX("var a;X:if(1){}else if(2){}else{a()};", "a");
assertLiveBeforeX("var a;X:if(1){}else if(2){a()}else{};", "a");
assertLiveBeforeX("var a;X:if(1){a()}else if(2){}else{};", "a");
assertLiveBeforeX("var a;X:if(1){}else if(2){}else{};a()", "a");
}
@Test
public void testHooks() {
assertLiveBeforeX("var a;X:1?a=1:1;a()", "a");
// Unfortunately, we cannot prove the following because we assume there is
// no control flow within a hook (i.e. no joins / set unions).
// assertNotLiveAfterX("var a;X:1?a=1:a=2;a", "a");
assertLiveBeforeX("var a,b;X:b=1?a:2", "a");
}
@Test
public void testForLoops() {
// Induction variable should not be live after the loop.
assertNotLiveBeforeX("var a,b;for(a=0;a<9;a++){b(a)};X:b", "a");
assertNotLiveBeforeX("var a,b;for(a in b){a()};X:b", "a");
assertNotLiveBeforeX("var a,b;for(a in b){a()};X:a", "b");
assertLiveBeforeX("var b;for(var a in b){X:a()};", "a");
// It should be live within the loop even if it is not used.
assertLiveBeforeX("var a,b;for(a=0;a<9;a++){X:1}", "a");
assertLiveAfterX("var a,b;for(a in b){X:b};", "a");
// For-In should serve as a gen as well.
assertLiveBeforeX("var a,b; X:for(a in b){ }", "a");
// "a in b" should kill "a" before it.
// Can't prove this unless we have branched backward DFA.
// assertNotLiveAfterX("var a,b;X:b;for(a in b){a()};", "a");
// Unless it is used before.
assertLiveBeforeX("var a,b;X:a();b();for(a in b){a()};", "a");
// Initializer
assertLiveBeforeX("var a,b;X:b;for(b=a;;){};", "a");
assertNotLiveBeforeX("var a,b;X:a;for(b=a;;){b()};b();", "b");
}
@Test
public void testForOfLoopsVar() {
assertLiveBeforeX("var a; for (a of [1, 2, 3]) {X:{}}", "a");
assertLiveAfterX("for (var a of [1, 2, 3]) {X:{}}", "a");
assertLiveBeforeX("var a,b; for (var y of a = [0, 1, 2]) { X:a[y] }", "a");
}
@Test
public void testForOfLoopsDestructuring() {
assertLiveBeforeX("var key, value; X:for ([key, value] of arr) {value;} value;", "value");
assertLiveBeforeX("let x = 3; X:for (var [y = x] of arr) { y; }", "x");
assertLiveBeforeX("for (let [key, value] in arr) { X: key; value; }", "key");
}
@Test
public void testNestedLoops() {
assertLiveBeforeX("var a;X:while(1){while(1){a()}}", "a");
assertLiveBeforeX("var a;X:while(1){while(1){while(1){a()}}}", "a");
assertLiveBeforeX("var a;X:while(1){while(1){a()};a=1}", "a");
assertLiveAfterX("var a;while(1){while(1){a()};X:a=1;}", "a");
assertLiveAfterX("var a;while(1){X:a=1;while(1){a()}}", "a");
assertNotLiveBeforeX("var a;X:1;do{do{do{a=1;}while(1)}while(1)}while(1);a()", "a");
}
@Test
public void testSwitches() {
assertLiveBeforeX("var a,b;X:switch(a){}", "a");
assertLiveBeforeX("var a,b;X:switch(b){case(a):break;}", "a");
assertLiveBeforeX("var a,b;X:switch(b){case(b):case(a):break;}", "a");
assertNotLiveBeforeX("var a,b;X:switch(b){case 1:a=1;break;default:a=2;break};a()", "a");
assertLiveBeforeX("var a,b;X:switch(b){default:a();break;}", "a");
}
@Test
public void testAssignAndReadInCondition() {
// BUG #1358904
// Technically, this isn't exactly true....but we haven't model control flow
// within an instruction.
assertLiveBeforeX("var a, b; X: if ((a = this) && (b = a)) {}", "a");
assertNotLiveBeforeX("var a, b; X: a = 1, b = 1;", "a");
assertNotLiveBeforeX("var a; X: a = 1, a = 1;", "a");
}
@Test
public void testParam() {
// Unused parameter should not be live.
assertNotLiveAfterX("var a;X:a()", "param1");
assertLiveBeforeX("var a;X:a(param1)", "param1");
assertNotLiveAfterX("var a;X:a();a(param2)", "param1");
}
@Test
public void testExpressionInForIn() {
assertLiveBeforeX("var a = [0]; X:for (a[1] in foo) { }", "a");
}
@Test
public void testArgumentsArray() {
// Check that use of arguments forces the parameters into the
// escaped set.
assertEscaped("arguments[0]", "param1");
assertEscaped("arguments[0]", "param2");
assertEscaped("arguments[0]", "param3");
assertEscaped("var args = arguments", "param1");
assertEscaped("var args = arguments", "param2");
assertEscaped("var args = arguments", "param3");
assertNotEscaped("arguments = []", "param1");
assertNotEscaped("arguments = []", "param2");
assertNotEscaped("arguments = []", "param3");
assertEscaped("arguments[0] = 1", "param1");
assertEscaped("arguments[0] = 1", "param2");
assertEscaped("arguments[0] = 1", "param3");
assertEscaped("arguments[arguments[0]] = 1", "param1");
assertEscaped("arguments[arguments[0]] = 1", "param2");
assertEscaped("arguments[arguments[0]] = 1", "param3");
}
@Test
public void testTryCatchFinally() {
assertLiveAfterX("var a; try {X:a=1} finally {a}", "a");
assertLiveAfterX("var a; try {a()} catch(e) {X:a=1} finally {a}", "a");
// Because the outer catch doesn't catch any exceptions at all, the read of
// "a" within the catch block should not make "a" live.
assertNotLiveAfterX("var a = 1; try {" + "try {a()} catch(e) {X:1} } catch(E) {a}", "a");
assertLiveAfterX("var a; while(1) { try {X:a=1;break} finally {a}}", "a");
}
@Test
public void testForInAssignment() {
assertLiveBeforeX("var a,b; for (var y in a = b) { X:a[y] }", "a");
// No one refers to b after the first iteration.
assertNotLiveBeforeX("var a,b; for (var y in a = b) { X:a[y] }", "b");
assertLiveBeforeX("var a,b; for (var y in a = b) { X:a[y] }", "y");
assertLiveAfterX("var a,b; for (var y in a = b) { a[y]; X: y();}", "a");
}
@Test
public void testExceptionThrowingAssignments() {
assertLiveBeforeX("try{var a; X:a=foo();a} catch(e) {e()}", "a");
assertLiveBeforeX("try{X:var a=foo();a} catch(e) {e()}", "a");
assertLiveBeforeX("try{X:var a=foo()} catch(e) {e(a)}", "a");
}
@Test
public void testInnerFunctions() {
assertLiveBeforeX("function a() {}; X: a()", "a");
assertNotLiveBeforeX("X:; function a() {}", "a");
assertLiveBeforeX("a = function(){}; function a() {}; X: a()", "a");
// NOTE: function a() {} has no CFG node representation since it is not
// part of the control execution.
assertLiveAfterX("X: a = function(){}; function a() {}; a()", "a");
assertNotLiveBeforeX("X: a = function(){}; function a() {}; a()", "a");
}
@Test
public void testEscaped() {
assertEscaped("var a;function b(){a()}", "a");
assertEscaped("var a;function b(){param1()}", "param1");
assertEscaped("var a;function b(){function c(){a()}}", "a");
assertEscaped("var a;function b(){param1.x = function() {a()}}", "a");
assertNotEscaped("var a;function b(){var c; c()}", "c");
assertNotEscaped("var a;function f(){function b(){var c;c()}}", "c");
assertNotEscaped("var a;function b(){};a()", "a");
assertNotEscaped("var a;function f(){function b(){}}a()", "a");
assertNotEscaped("var a;function b(){var a;a()};a()", "a");
// Escaped by exporting.
assertEscaped("var _x", "_x");
}
// ES6 does not require separate handling for catch because the catch block is already recognized
// by the scope creator
@Test
public void testNotEscapedWithCatch() {
assertEscaped("try{} catch(e){}", "e");
}
@Test
public void testEscapedLiveness() {
assertNotLiveBeforeX("var a;X:a();function b(){a()}", "a");
}
@Test
public void testBug1449316() {
assertLiveBeforeX("try {var x=[]; X:var y=x[0]} finally {foo()}", "x");
}
@Test
public void testSimpleLet() {
// a is defined after X and not used
assertNotLiveBeforeX("X:let a;", "a");
assertNotLiveAfterX("X:let a;", "a");
assertNotLiveAfterX("X:let a=1;", "a");
// a is used and defined after X
assertLiveAfterX("X:let a=1; a()", "a");
assertNotLiveBeforeX("X:let a=1; a()", "a");
// no assignment to x; let is initialized with undefined
assertLiveBeforeX("let a;X:a;", "a");
assertNotLiveAfterX("let a,b;X:b();", "a");
assertLiveBeforeX("let a,b;X:b(a);", "a");
assertNotLiveBeforeX("let a,b;X:a=1;b(a)", "a");
assertNotLiveAfterX("let a,b;X:b(a);b()", "a");
assertLiveBeforeX("let a,b;X:b();b=1;a()", "b");
// let initialized afterX
assertLiveAfterX("X:a();let a;a()", "a");
assertNotLiveAfterX("X:a();let a=1;a()", "a");
}
@Test
public void testLetInnerBlock() {
assertNotLiveAfterX("let x; { X:x = 2; let y; }", "x");
}
@Test
public void testSimpleConst() {
// a is defined after X and not used
assertLiveBeforeX("const a = 4; X:a;", "a");
assertNotLiveBeforeX("X:let a = 1;", "a");
assertNotLiveBeforeX("X:const a = 1;", "a");
assertNotLiveAfterX("X:const a = 1;", "a");
}
@Test
public void testArrayDestructuring() {
assertLiveBeforeX("var [a, b] = [1, 2]; X:a;", "a");
assertNotLiveBeforeX("X: var [...a] = f();", "a");
assertNotEscaped("var [a, ...b] = [1, 2];", "b");
assertNotEscaped("var [a, ...b] = [1, 2];", "a");
assertNotEscaped("var [a, ,b] = [1, 2, 3];", "a");
assertNotEscaped("var [a, ,b] = [1, 2, 3];", "b");
assertNotLiveBeforeX("var x = 3; X: [x] = [4]; x;", "x");
assertLiveBeforeX("var x = {}; X: [x.a] = [3]; x.a;", "x");
assertLiveBeforeX("var x = []; X: const [c] = x;", "x");
}
@Test
public void testObjectDestructuring() {
assertLiveBeforeX("var {a: x, b: y} = g(); X:x", "x");
assertNotLiveBeforeX("X: var {a: x, b: y} = g();", "y");
assertNotEscaped("var {a: x, b: y} = g()", "x");
assertNotEscaped("var {a: x, b: y} = g()", "y");
assertNotEscaped("var {a: x = 3, b: y} = g();", "x");
assertNotLiveBeforeX("var x = {}; X: ({x} = {}); x;", "x");
assertLiveBeforeX("var x = {}; X: ({a: x.a} = {}); x.a;", "x");
assertLiveBeforeX("var x = {}; X: const {c} = x;", "x");
}
@Test
public void testComplexDestructuringPattern() {
assertLiveBeforeX("var x = 3; X: var [y = x] = [];", "x");
assertLiveBeforeX("var x = 3, y; X: [y = x] = [];", "x");
assertLiveBeforeX("var x = 3; X: var {y = x} = {};", "x");
assertLiveBeforeX("var x = 3; X: var {key: y = x} = {};", "x");
assertLiveBeforeX("var x = 3; X: const {[x + x]: foo} = obj; x;", "x");
assertLiveBeforeX("var x = 3; X: const {[x + x]: x} = obj; x;", "x");
}
@Test
public void testComplicatedDeclaration() {
assertNotEscaped("var a = 1, {b: b} = f(), c = g()", "a");
assertNotEscaped("var a = 1, {b: b} = f(), c = g()", "b");
assertNotEscaped("var a = 1, {b: b} = f(), c = g()", "c");
}
private void assertLiveBeforeX(String src, String var) {
FlowState<LiveVariablesAnalysis.LiveVariableLattice> state = getFlowStateAtX(src);
assertWithMessage(src + " should contain a label 'X:'").that(state).isNotNull();
assertWithMessage("Variable" + var + " should be live before X")
.that(state.getIn().isLive(liveness.getVarIndex(var)))
.isTrue();
}
private void assertLiveAfterX(String src, String var) {
FlowState<LiveVariablesAnalysis.LiveVariableLattice> state = getFlowStateAtX(src);
assertWithMessage("Label X should be in the input program.").that(state).isNotNull();
assertWithMessage("Variable" + var + " should be live after X")
.that(state.getOut().isLive(liveness.getVarIndex(var)))
.isTrue();
}
private void assertNotLiveAfterX(String src, String var) {
FlowState<LiveVariablesAnalysis.LiveVariableLattice> state = getFlowStateAtX(src);
assertWithMessage("Label X should be in the input program.").that(state).isNotNull();
assertWithMessage("Variable" + var + " should not be live after X")
.that(state.getOut().isLive(liveness.getVarIndex(var)))
.isFalse();
}
private void assertNotLiveBeforeX(String src, String var) {
FlowState<LiveVariablesAnalysis.LiveVariableLattice> state = getFlowStateAtX(src);
assertWithMessage("Label X should be in the input program.").that(state).isNotNull();
assertWithMessage("Variable" + var + " should not be live before X")
.that(state.getIn().isLive(liveness.getVarIndex(var)))
.isFalse();
}
private FlowState<LiveVariablesAnalysis.LiveVariableLattice> getFlowStateAtX(String src) {
liveness = computeLiveness(src);
return getFlowStateAtX(liveness.getCfg().getEntry().getValue(), liveness.getCfg());
}
private FlowState<LiveVariablesAnalysis.LiveVariableLattice> getFlowStateAtX(
Node node, ControlFlowGraph<Node> cfg) {
if (node.isLabel()) {
if (node.getFirstChild().getString().equals("X")) {
return cfg.getNode(node.getLastChild()).getAnnotation();
}
}
for (Node c = node.getFirstChild(); c != null; c = c.getNext()) {
FlowState<LiveVariablesAnalysis.LiveVariableLattice> state = getFlowStateAtX(c, cfg);
if (state != null) {
return state;
}
}
return null;
}
private static void assertEscaped(String src, String name) {
for (Var var : computeLiveness(src).getEscapedLocals()) {
if (var.name.equals(name)) {
return;
}
}
assertWithMessage("Variable " + name + " should be in the escaped local list.").fail();
}
private static void assertNotEscaped(String src, String name) {
for (Var var : computeLiveness(src).getEscapedLocals()) {
assertThat(var.name).isNotEqualTo(name);
}
}
private static LiveVariablesAnalysis computeLiveness(String src) {
// Set up compiler
Compiler compiler = new Compiler();
CompilerOptions options = new CompilerOptions();
options.setLanguage(LanguageMode.ECMASCRIPT_2015);
options.setCodingConvention(new GoogleCodingConvention());
compiler.initOptions(options);
compiler.setLifeCycleStage(LifeCycleStage.NORMALIZED);
// Set up test case
src = "function _FUNCTION(param1, param2 = 1, ...param3){" + src + "}";
Node n = compiler.parseTestCode(src).removeFirstChild();
checkState(n.isFunction(), n);
Node script = new Node(Token.SCRIPT, n);
script.setInputId(new InputId("test"));
assertThat(compiler.getErrors()).isEmpty();
// Create scopes
Es6SyntacticScopeCreator scopeCreator = new Es6SyntacticScopeCreator(compiler);
Scope scope = scopeCreator.createScope(n, Scope.createGlobalScope(script));
Scope childScope = scopeCreator.createScope(NodeUtil.getFunctionBody(n), scope);
// Control flow graph
ControlFlowAnalysis cfa = new ControlFlowAnalysis(compiler, false, true);
cfa.process(null, n);
ControlFlowGraph<Node> cfg = cfa.getCfg();
// Compute liveness of variables
LiveVariablesAnalysis analysis =
new LiveVariablesAnalysis(
cfg, scope, childScope, compiler, new Es6SyntacticScopeCreator(compiler));
analysis.analyze();
return analysis;
}
}
| apache-2.0 |
Ensembl/ensj-healthcheck | src/org/ensembl/healthcheck/testgroup/PostMerge.java | 1063 | /*
* Copyright [1999-2015] Wellcome Trust Sanger Institute and the EMBL-European Bioinformatics Institute
* Copyright [2016-2019] EMBL-European Bioinformatics Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ensembl.healthcheck.testgroup;
import org.ensembl.healthcheck.GroupOfTests;
/**
* These are the tests that register themselves as post_merge.
*
* @author Thibaut Hourlier
*
*/
public class PostMerge extends GroupOfTests {

  /** Registers every healthcheck belonging to the post_merge group. */
  public PostMerge() {
    addTest(
        // Currently the only test registered for the post-merge stage.
        Geneset.class
    );
  }
}
| apache-2.0 |
OMTAR2/PHPstart | index.php | 32 | <?php
// BUG FIX: the opening tag was "<php", which PHP does not recognize, so the
// whole file would have been emitted as plain text instead of being executed.
echo "strona glowna";
?> | apache-2.0 |
shun634501730/java_source_cn | src_en/com/sun/corba/se/PortableActivationIDL/LocatorHolder.java | 1125 | package com.sun.corba.se.PortableActivationIDL;
/**
* com/sun/corba/se/PortableActivationIDL/LocatorHolder.java .
* Generated by the IDL-to-Java compiler (portable), version "3.2"
* from c:/re/workspace/8-2-build-windows-amd64-cygwin/jdk8u91/7017/corba/src/share/classes/com/sun/corba/se/PortableActivationIDL/activation.idl
* Friday, May 20, 2016 5:44:10 PM PDT
*/
// NOTE: generated by the IDL-to-Java compiler (see file header); holders carry
// values for CORBA out/inout parameters. Keep edits to comments only so the
// code stays in sync with regeneration.
public final class LocatorHolder implements org.omg.CORBA.portable.Streamable
{
  // The held Locator value; null until read or explicitly assigned.
  public com.sun.corba.se.PortableActivationIDL.Locator value = null;

  public LocatorHolder ()
  {
  }

  public LocatorHolder (com.sun.corba.se.PortableActivationIDL.Locator initialValue)
  {
    value = initialValue;
  }

  // Unmarshals a Locator from the CORBA input stream into this holder.
  public void _read (org.omg.CORBA.portable.InputStream i)
  {
    value = com.sun.corba.se.PortableActivationIDL.LocatorHelper.read (i);
  }

  // Marshals the held Locator onto the CORBA output stream.
  public void _write (org.omg.CORBA.portable.OutputStream o)
  {
    com.sun.corba.se.PortableActivationIDL.LocatorHelper.write (o, value);
  }

  // Returns the TypeCode describing the Locator IDL type.
  public org.omg.CORBA.TypeCode _type ()
  {
    return com.sun.corba.se.PortableActivationIDL.LocatorHelper.type ();
  }

}
| apache-2.0 |
Vegedus/Fault-Tolerant-Project | RecoveryBlocks/TestProgram/Properties/Settings.Designer.cs | 1068 | //------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace TestProgram.Properties
{
    // Designer-generated strongly-typed settings class (see file header:
    // regenerating the settings will overwrite manual code changes).
    [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()]
    [global::System.CodeDom.Compiler.GeneratedCodeAttribute("Microsoft.VisualStudio.Editors.SettingsDesigner.SettingsSingleFileGenerator", "11.0.0.0")]
    internal sealed partial class Settings : global::System.Configuration.ApplicationSettingsBase
    {
        // Single shared instance; wrapped via ApplicationSettingsBase.Synchronized.
        private static Settings defaultInstance = ((Settings)(global::System.Configuration.ApplicationSettingsBase.Synchronized(new Settings())));

        // Accessor for the shared settings instance.
        public static Settings Default
        {
            get
            {
                return defaultInstance;
            }
        }
    }
}
| apache-2.0 |
tkrajina/GraphAnything | graphanything/src/main/java/info/puzz/graphanything/utils/StringUtils.java | 811 | package info.puzz.graphanything.utils;
/**
* Created by puzz on 11/10/16.
*/
/**
 * Static string helpers: emptiness check, ellipsis truncation, and first-line extraction.
 */
public final class StringUtils {

    private StringUtils() {
        // Utility class; must never be instantiated (AssertionError is the idiomatic guard).
        throw new AssertionError();
    }

    /**
     * @param s the string to check (may be null)
     * @return true if {@code s} is null or has zero length
     */
    public static boolean isEmpty(String s) {
        return s == null || s.length() == 0;
    }

    /**
     * Truncates {@code string} to at most {@code length} characters, appending an
     * ellipsis when truncation occurred.
     *
     * <p>BUG FIX: the original used {@code string.length() < length}, so a string of
     * exactly {@code length} characters was "truncated" to itself plus an ellipsis,
     * producing a result LONGER than the limit. Strings that already fit are now
     * returned unchanged.
     *
     * @param string the input (may be null or empty, returned as-is)
     * @param length maximum number of characters to keep before the ellipsis
     * @return the (possibly truncated) string
     */
    public static String ellipses(String string, int length) {
        if (isEmpty(string)) {
            return string;
        }
        if (string.length() <= length) {
            return string;
        }
        return string.substring(0, length) + "…";
    }

    /**
     * Returns the first line of {@code string}, with an ellipsis appended when
     * further lines were cut off.
     *
     * @param string the input (may be null or empty, returned as-is)
     * @return the first line, plus "…" if the input contained a newline
     */
    public static String firstLine(String string) {
        if (StringUtils.isEmpty(string)) {
            return string;
        }
        final int newlineIndex = string.indexOf('\n');
        if (newlineIndex < 0) {
            return string;
        }
        return string.substring(0, newlineIndex) + "…";
    }
}
| apache-2.0 |
ajiang-open/jpaquery | src/main/java/com/jpaquery/core/facade/OrderPath.java | 136 | package com.jpaquery.core.facade;
/** Builder step for a query ordering: choose the sort direction for the current path. */
public interface OrderPath {

  /** Sort direction constants: ascending or descending. */
  enum OrderPathType {
    asc, desc
  }

  /** @return the completed Order with ascending direction */
  Order asc();

  /** @return the completed Order with descending direction */
  Order desc();
}
| apache-2.0 |
kieker-monitoring/kieker | kieker-monitoring/test/kieker/monitoring/writer/filesystem/BinaryFileWriterTest.java | 14833 | /***************************************************************************
* Copyright 2021 Kieker Project (http://kieker-monitoring.net)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
***************************************************************************/
package kieker.monitoring.writer.filesystem;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.stream.Stream;

import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import kieker.common.configuration.Configuration;
import kieker.common.record.misc.EmptyRecord;
import kieker.common.util.filesystem.FSUtil;
import kieker.common.util.filesystem.FileExtensionFilter;
import kieker.monitoring.core.configuration.ConfigurationConstants;
import kieker.monitoring.writer.compression.NoneCompressionFilter;
import kieker.monitoring.writer.compression.ZipCompressionFilter;
/**
* @author Christian Wulf
*
* @since 1.13
*/
public class BinaryFileWriterTest {
  @Rule
  public final TemporaryFolder tmpFolder = new TemporaryFolder(); // NOCS recommends that this is private. JUnit test wants this public.

  // Writer configuration under test, rebuilt for each test in before().
  private Configuration configuration;
  // Root directory the FileWriter writes its monitoring logs into (the temp folder).
  private Path writerPath;
  // Default constructor; all per-test state is initialized in before().
  public BinaryFileWriterTest() {
    super();
  }
  /**
   * Shared setup for the tests: points the writer at the temporary folder and
   * sets permissive defaults (unbounded entries, files, and log size) that the
   * individual tests then tighten as needed.
   */
  @Before
  public void before() {
    this.writerPath = Paths.get(this.tmpFolder.getRoot().getAbsolutePath());

    this.configuration = new Configuration();
    this.configuration.setProperty(ConfigurationConstants.HOST_NAME, "testHostName");
    this.configuration.setProperty(ConfigurationConstants.CONTROLLER_NAME, "testControllerName");
    this.configuration.setProperty(FileWriter.CONFIG_BUFFERSIZE, "8192");
    this.configuration.setProperty(FileWriter.CONFIG_CHARSET_NAME, "UTF-8");
    this.configuration.setProperty(FileWriter.CONFIG_MAXENTRIESINFILE, "-1");
    this.configuration.setProperty(FileWriter.CONFIG_MAXLOGFILES, String.valueOf(Integer.MAX_VALUE));
    this.configuration.setProperty(FileWriter.CONFIG_MAXLOGSIZE, String.valueOf(Integer.MAX_VALUE));
    this.configuration.setProperty(FileWriter.CONFIG_PATH, this.writerPath.toString());
  }
/**
* Test whether the log directory is created correctly.
*
* @throws IOException
*/
@Test
public void shouldCreateLogFolder() throws IOException {
// test preparation
this.configuration.setProperty(FileWriter.CONFIG_MAXENTRIESINFILE, "1");
this.configuration.setProperty(FileWriter.CONFIG_COMPRESSION_FILTER, NoneCompressionFilter.class.getName());
this.configuration.setProperty(FileWriter.CONFIG_MAP_FILE_HANDLER, TextMapFileHandler.class.getCanonicalName());
this.configuration.setProperty(FileWriter.CONFIG_LOG_STREAM_HANDLER, BinaryLogStreamHandler.class.getCanonicalName());
// test execution
new FileWriter(this.configuration);
final Path kiekerPath = Files.list(this.writerPath).findFirst().get();
// test assertion
Assert.assertTrue(Files.exists(kiekerPath));
}
/**
* Test whether the mapping file is created correctly.
*
* @throws IOException
*/
@Test
public void shouldCreateMappingAndRecordFiles() throws IOException {
// test preparation
this.configuration.setProperty(FileWriter.CONFIG_MAXENTRIESINFILE, "1");
this.configuration.setProperty(FileWriter.CONFIG_COMPRESSION_FILTER, NoneCompressionFilter.class.getName());
this.configuration.setProperty(FileWriter.CONFIG_MAP_FILE_HANDLER, TextMapFileHandler.class.getCanonicalName());
this.configuration.setProperty(FileWriter.CONFIG_LOG_STREAM_HANDLER, BinaryLogStreamHandler.class.getCanonicalName());
final FileWriter writer = new FileWriter(this.configuration);
final EmptyRecord record = new EmptyRecord();
// test execution
FilesystemTestUtil.executeFileWriterTest(1, writer, record);
final File storePath = Files.list(this.writerPath).findFirst().get().toFile();
// test assertion
final File[] mapFiles = storePath.listFiles(FileExtensionFilter.MAP);
Assert.assertNotNull(mapFiles);
Assert.assertTrue(mapFiles[0].exists());
Assert.assertThat(mapFiles.length, CoreMatchers.is(1));
final File[] recordFiles = storePath.listFiles(FileExtensionFilter.BIN);
Assert.assertNotNull(recordFiles);
Assert.assertTrue(recordFiles[0].exists());
Assert.assertThat(recordFiles.length, CoreMatchers.is(1));
}
/**
* Test whether the upper limit of entries per file in honored.
*
* @throws IOException
*/
@Test
public void shouldCreateMultipleRecordFiles() throws IOException {
// test preparation
this.configuration.setProperty(FileWriter.CONFIG_MAXENTRIESINFILE, "2");
this.configuration.setProperty(FileWriter.CONFIG_COMPRESSION_FILTER, NoneCompressionFilter.class.getName());
this.configuration.setProperty(FileWriter.CONFIG_MAP_FILE_HANDLER, TextMapFileHandler.class.getCanonicalName());
this.configuration.setProperty(FileWriter.CONFIG_LOG_STREAM_HANDLER, BinaryLogStreamHandler.class.getCanonicalName());
final FileWriter writer = new FileWriter(this.configuration);
final EmptyRecord record = new EmptyRecord();
// test execution
FilesystemTestUtil.executeFileWriterTest(3, writer, record);
final File storePath = Files.list(this.writerPath).findFirst().get().toFile();
// test assertion
final File[] mapFiles = storePath.listFiles(FileExtensionFilter.MAP);
Assert.assertNotNull(mapFiles);
Assert.assertTrue(mapFiles[0].exists());
Assert.assertThat(mapFiles.length, CoreMatchers.is(1));
final File[] recordFiles = storePath.listFiles(FileExtensionFilter.BIN);
Assert.assertNotNull(recordFiles);
Assert.assertTrue(recordFiles[0].exists());
Assert.assertTrue(recordFiles[1].exists());
Assert.assertThat(recordFiles.length, CoreMatchers.is(2));
}
/**
* Test whether compression setting works.
*
* @throws IOException
*/
@Test
public void shouldCreateMultipleCompressedRecordFiles() throws IOException {
// test preparation
this.configuration.setProperty(FileWriter.CONFIG_MAXENTRIESINFILE, "2");
this.configuration.setProperty(FileWriter.CONFIG_COMPRESSION_FILTER, ZipCompressionFilter.class.getName());
this.configuration.setProperty(FileWriter.CONFIG_MAP_FILE_HANDLER, TextMapFileHandler.class.getCanonicalName());
this.configuration.setProperty(FileWriter.CONFIG_LOG_STREAM_HANDLER, BinaryLogStreamHandler.class.getCanonicalName());
final FileWriter writer = new FileWriter(this.configuration);
final EmptyRecord record = new EmptyRecord();
// test execution
FilesystemTestUtil.executeFileWriterTest(3, writer, record);
final File storePath = Files.list(this.writerPath).findFirst().get().toFile();
// test assertion
final File[] mapFiles = storePath.listFiles(FileExtensionFilter.MAP);
Assert.assertNotNull(mapFiles);
Assert.assertTrue(mapFiles[0].exists());
Assert.assertThat(mapFiles.length, CoreMatchers.is(1));
final File[] recordFiles = storePath.listFiles(FileExtensionFilter.ZIP);
Assert.assertNotNull(recordFiles);
Assert.assertTrue(recordFiles[0].exists());
Assert.assertTrue(recordFiles[1].exists());
Assert.assertThat(recordFiles.length, CoreMatchers.is(2));
}
/**
* Test behavior regarding max log files. Should rotate.
*
* @throws IOException
*/
@Test
public final void testMaxLogFiles() throws IOException {
final int[] maxLogFilesValues = { -1, 0, 1, 2 };
final int[] numRecordsToWriteValues = { 0, 1, 2, 3, 10 };
final int[][] expectedNumRecordFilesValues = { { 1, 1, 1, 2, 5, }, { 1, 1, 1, 2, 5 }, { 1, 1, 1, 1, 1 }, { 1, 1, 1, 2, 2 } };
for (int i = 0; i < maxLogFilesValues.length; i++) {
final int maxLogFiles = maxLogFilesValues[i];
for (int j = 0; j < numRecordsToWriteValues.length; j++) {
final int numRecordsToWrite = numRecordsToWriteValues[j];
final int expectedNumRecordFiles = expectedNumRecordFilesValues[i][j];
// test preparation
this.configuration.setProperty(FileWriter.CONFIG_MAXENTRIESINFILE, "2");
this.configuration.setProperty(FileWriter.CONFIG_MAXLOGSIZE, "-1");
this.configuration.setProperty(FileWriter.CONFIG_MAXLOGFILES, String.valueOf(maxLogFiles));
this.configuration.setProperty(FileWriter.CONFIG_MAP_FILE_HANDLER, TextMapFileHandler.class.getCanonicalName());
this.configuration.setProperty(FileWriter.CONFIG_LOG_STREAM_HANDLER, BinaryLogStreamHandler.class.getCanonicalName());
final FileWriter writer = new FileWriter(this.configuration);
final EmptyRecord record = new EmptyRecord();
// test execution
FilesystemTestUtil.executeFileWriterTest(numRecordsToWrite, writer, record);
final File storePath = Files.list(this.writerPath).findFirst().get().toFile();
// test assertion
final String reasonMessage = "Passed arguments: maxLogFiles=" + maxLogFiles + ", numRecordsToWrite=" + numRecordsToWrite;
final File[] recordFiles = storePath.listFiles(FileExtensionFilter.BIN);
Assert.assertNotNull(recordFiles);
Assert.assertThat(reasonMessage, recordFiles.length, CoreMatchers.is(expectedNumRecordFiles));
FilesystemTestUtil.deleteContent(this.writerPath);
}
}
}
/**
 * Tests that the writer rotates to a new record file once the configured maximum
 * log file size (in megabytes) is exceeded, producing the expected number of
 * record files for each input tuple.
 *
 * @throws Exception
 *             on IO errors
 */
@Test
public void testMaxLogSize() throws Exception {
    // serialized size of one EmptyRecord: 4 (class id) + 8 (timestamp) + payload
    final int recordSizeInBytes = 4 + 8 + EmptyRecord.SIZE; // 12
    // semantics of the tuple: (maxMegaBytesPerFile, megaBytesToWrite, expectedNumRecordFiles)
    final int[][] testInputTuples = {
        { -1, 0, 1 }, { -1, 1, 1 },
        { 0, 0, 1 }, { 0, 1, 1 },
        { 1, 0, 1 }, { 1, 1, 1 }, { 1, 2, 2 }, { 1, 3, 2 },
    };
    for (final int[] testInputTuple : testInputTuples) { // NOPMD
        final int maxMegaBytesPerFile = testInputTuple[0];
        final int megaBytesToWrite = testInputTuple[1];
        final int expectedNumRecordFiles = testInputTuple[2];
        // test preparation: disable the entries-per-file limit so only the size limit applies
        final int numRecordsToWrite = (1024 * 1024 * megaBytesToWrite) / recordSizeInBytes;
        this.configuration.setProperty(FileWriter.CONFIG_MAXENTRIESINFILE, "-1");
        this.configuration.setProperty(FileWriter.CONFIG_MAXLOGSIZE, String.valueOf(maxMegaBytesPerFile));
        this.configuration.setProperty(FileWriter.CONFIG_MAXLOGFILES, "2");
        this.configuration.setProperty(FileWriter.CONFIG_MAP_FILE_HANDLER, TextMapFileHandler.class.getCanonicalName());
        this.configuration.setProperty(FileWriter.CONFIG_LOG_STREAM_HANDLER, BinaryLogStreamHandler.class.getCanonicalName());
        final FileWriter writer = new FileWriter(this.configuration);
        final EmptyRecord record = new EmptyRecord();
        // test execution
        FilesystemTestUtil.executeFileWriterTest(numRecordsToWrite, writer, record);
        // Files.list opens a directory stream that must be closed to avoid leaking a file handle
        final File storePath;
        try (java.util.stream.Stream<Path> dirEntries = Files.list(this.writerPath)) {
            storePath = dirEntries.findFirst().get().toFile();
        }
        // test assertion
        final String reasonMessage = "Passed arguments: maxMegaBytesPerFile=" + maxMegaBytesPerFile + ", megaBytesToWrite=" + megaBytesToWrite;
        final File[] recordFiles = storePath.listFiles(FileExtensionFilter.BIN);
        Assert.assertNotNull(recordFiles);
        Assert.assertThat(reasonMessage, recordFiles.length, CoreMatchers.is(expectedNumRecordFiles));
        FilesystemTestUtil.deleteContent(this.writerPath);
    }
}
/**
 * Tests that a valid, existing directory passed via {@code CONFIG_PATH} is used
 * as the parent of the created kieker log folder.
 *
 * @throws IOException
 *             on IO errors
 */
@Test
public void testValidLogFolder() throws IOException {
    final String passedConfigPathName = this.tmpFolder.getRoot().getAbsolutePath();
    this.configuration.setProperty(FileWriter.CONFIG_PATH, passedConfigPathName);
    this.configuration.setProperty(FileWriter.CONFIG_MAP_FILE_HANDLER, TextMapFileHandler.class.getCanonicalName());
    this.configuration.setProperty(FileWriter.CONFIG_LOG_STREAM_HANDLER, BinaryLogStreamHandler.class.getCanonicalName());
    new FileWriter(this.configuration); // the constructor creates the log folder
    // Files.list opens a directory stream that must be closed to avoid leaking a file handle
    final Path kiekerPath;
    try (java.util.stream.Stream<Path> dirEntries = Files.list(Paths.get(passedConfigPathName))) {
        kiekerPath = dirEntries.findFirst().get();
    }
    Assert.assertThat(kiekerPath.toAbsolutePath().toString(), CoreMatchers.startsWith(passedConfigPathName));
}
/**
 * Tests that configuring a regular file (not a directory) as the log path is
 * rejected with an {@link IllegalArgumentException}.
 *
 * @throws IOException
 *             on IO errors
 */
@Test(expected = IllegalArgumentException.class)
public void testNonDirectoryConfigPath() throws IOException {
    final String passedConfigPathName = this.tmpFolder.newFile().getAbsolutePath();
    this.configuration.setProperty(FileWriter.CONFIG_PATH, passedConfigPathName);
    this.configuration.setProperty(FileWriter.CONFIG_MAP_FILE_HANDLER, TextMapFileHandler.class.getCanonicalName());
    this.configuration.setProperty(FileWriter.CONFIG_LOG_STREAM_HANDLER, BinaryLogStreamHandler.class.getCanonicalName());
    new FileWriter(this.configuration); // expected to throw here
    // The statements below are only reached if no exception was thrown, i.e. the test fails.
    final String defaultDir = System.getProperty("java.io.tmpdir");
    // Files.list opens a directory stream that must be closed to avoid leaking a file handle
    final Path kiekerPath;
    try (java.util.stream.Stream<Path> dirEntries = Files.list(Paths.get(passedConfigPathName))) {
        kiekerPath = dirEntries.findFirst().get();
    }
    Assert.assertThat(kiekerPath.toAbsolutePath().toString(), CoreMatchers.startsWith(defaultDir));
}
/**
 * Tests that the created kieker log folder is placed directly inside the
 * directory given by the configuration's {@code CONFIG_PATH} value.
 *
 * @throws Exception
 *             on IO errors
 */
@Test
public void testValidLogFolderDirectory() throws Exception {
    this.configuration.setProperty(FileWriter.CONFIG_MAP_FILE_HANDLER, TextMapFileHandler.class.getCanonicalName());
    this.configuration.setProperty(FileWriter.CONFIG_LOG_STREAM_HANDLER, BinaryLogStreamHandler.class.getCanonicalName());
    new FileWriter(this.configuration); // the constructor creates the log folder
    final String directoryName = this.configuration.getStringProperty(FileWriter.CONFIG_PATH);
    // Files.list opens a directory stream that must be closed to avoid leaking a file handle
    final Path kiekerPath;
    try (java.util.stream.Stream<Path> dirEntries = Files.list(this.writerPath)) {
        kiekerPath = dirEntries.findFirst().get();
    }
    Assert.assertThat(kiekerPath.getParent().toString(), CoreMatchers.is(directoryName));
}
/**
 * Tests that the created kieker log folder's name starts with the standard file
 * prefix and ends with the configured host and controller names.
 *
 * @throws Exception
 *             on IO errors
 */
@Test
public void testValidLogFolderFileName() throws Exception {
    this.configuration.setProperty(FileWriter.CONFIG_MAP_FILE_HANDLER, TextMapFileHandler.class.getCanonicalName());
    this.configuration.setProperty(FileWriter.CONFIG_LOG_STREAM_HANDLER, BinaryLogStreamHandler.class.getCanonicalName());
    new FileWriter(this.configuration); // the constructor creates the log folder
    final String hostName = this.configuration.getStringProperty(ConfigurationConstants.HOST_NAME);
    final String controllerName = this.configuration.getStringProperty(ConfigurationConstants.CONTROLLER_NAME);
    // Files.list opens a directory stream that must be closed to avoid leaking a file handle
    final Path kiekerPath;
    try (java.util.stream.Stream<Path> dirEntries = Files.list(this.writerPath)) {
        kiekerPath = dirEntries.findFirst().get();
    }
    Assert.assertThat(kiekerPath.getFileName().toString(), CoreMatchers.startsWith(FSUtil.FILE_PREFIX));
    Assert.assertThat(kiekerPath.getFileName().toString(), CoreMatchers.endsWith(hostName + "-" + controllerName));
}
}
| apache-2.0 |
martin-kuba/metacentrum-accounting | pbsmon/src/main/java/cz/cesnet/meta/pbs/JobsInfo.java | 2679 | package cz.cesnet.meta.pbs;
import java.util.Map;
import java.util.HashMap;
import java.util.List;
/**
* Created by IntelliJ IDEA.
*
* @author Martin Kuba makub@ics.muni.cz
* @version $Id: JobsInfo.java,v 1.2 2014/03/05 14:50:15 makub Exp $
*/
/**
 * Aggregates statistics over a list of PBS jobs: per-state job and CPU counts,
 * plus overall totals. Instances are immutable once constructed.
 */
public class JobsInfo {

    // number of jobs per job-state name ("stavy" = states)
    private final Map<String, Integer> stavy;
    // number of used CPUs per job-state name ("poctyCpu" = CPU counts)
    private final Map<String, Integer> poctyCpu;
    // total number of jobs ("celkem" = total)
    private final int celkemJobs;
    // total number of used CPUs
    private final int celkemCpu;
    private int jobsInStateQ;
    private int jobsInStateR;
    private int jobsInStateC;
    private int jobsInStateOther;
    private int cpusInStateQ;
    private int cpusInStateR;
    private int cpusInStateC;
    private int cpusInStateOther;

    /**
     * Builds the statistics from the given jobs. Each job contributes to the
     * per-state maps and to exactly one of the Q/R/C(F)/other counters.
     *
     * @param jobs jobs to aggregate; must not be null
     */
    public JobsInfo(List<Job> jobs) {
        stavy = new HashMap<>();
        poctyCpu = new HashMap<>();
        int totalJobs = 0;
        int totalCpu = 0;
        for (Job job : jobs) {
            JobState state = JobState.valueOf(job.getState());
            int cpu = job.getNoOfUsedCPU();
            PbsUtils.updateCount(stavy, state.name(), 1);
            PbsUtils.updateCount(poctyCpu, state.name(), cpu);
            // single switch updates both the job and the CPU counter for the state
            switch (state) {
                case Q:
                    jobsInStateQ++;
                    cpusInStateQ += cpu;
                    break;
                case R:
                    jobsInStateR++;
                    cpusInStateR += cpu;
                    break;
                case C:
                case F: // C in Torque, F in PBSPro - both mean finished/completed
                    jobsInStateC++;
                    cpusInStateC += cpu;
                    break;
                default:
                    jobsInStateOther++;
                    cpusInStateOther += cpu;
            }
            totalJobs++;
            totalCpu += cpu;
        }
        celkemJobs = totalJobs;
        celkemCpu = totalCpu;
    }

    /** @return number of jobs per job-state name */
    public Map<String, Integer> getStavy() {
        return stavy;
    }

    /** @return number of used CPUs per job-state name */
    public Map<String, Integer> getPoctyCpu() {
        return poctyCpu;
    }

    /** @return total number of jobs */
    public int getCelkemJobs() {
        return celkemJobs;
    }

    /** @return total number of used CPUs */
    public int getCelkemCpu() {
        return celkemCpu;
    }

    public int getJobsInStateQ() {
        return jobsInStateQ;
    }

    public int getJobsInStateR() {
        return jobsInStateR;
    }

    /**
     * C in Torque or F in PBSPro.
     * @return number of finished/completed jobs
     */
    public int getJobsInStateC() {
        return jobsInStateC;
    }

    public int getJobsInStateOther() {
        return jobsInStateOther;
    }

    public int getCpusInStateQ() {
        return cpusInStateQ;
    }

    public int getCpusInStateR() {
        return cpusInStateR;
    }

    public int getCpusInStateC() {
        return cpusInStateC;
    }

    public int getCpusInStateOther() {
        return cpusInStateOther;
    }
}
| apache-2.0 |
Allesad/HabraClient | app/src/main/java/com/allesad/habraclient/components/widgets/SlidingTabStrip.java | 6426 | package com.allesad.habraclient.components.widgets;
/*
* Copyright 2014 Google Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import android.R;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.View;
import android.widget.LinearLayout;
/**
 * The strip child of a {@code SlidingTabLayout}: draws a thin border along its
 * bottom edge and a thick indicator under the currently selected tab. While the
 * attached ViewPager scrolls, the indicator's position (and, if neighboring tabs
 * have different colors, its color) is interpolated between the two tabs.
 */
class SlidingTabStrip extends LinearLayout {

    private static final int DEFAULT_BOTTOM_BORDER_THICKNESS_DIPS = 0;
    private static final byte DEFAULT_BOTTOM_BORDER_COLOR_ALPHA = 0x26;
    private static final int SELECTED_INDICATOR_THICKNESS_DIPS = 3;
    private static final int DEFAULT_SELECTED_INDICATOR_COLOR = 0xFF33B5E5;

    // thin bottom border: thickness in px and its paint
    private final int mBottomBorderThickness;
    private final Paint mBottomBorderPaint;

    // thick selection indicator: thickness in px and its paint
    private final int mSelectedIndicatorThickness;
    private final Paint mSelectedIndicatorPaint;

    private final int mDefaultBottomBorderColor;

    // current selection and scroll offset (0..1) reported by the ViewPager
    private int mSelectedPosition;
    private float mSelectionOffset;

    // optional user-supplied colorizer; falls back to mDefaultTabColorizer when null
    private SlidingTabLayout.TabColorizer mCustomTabColorizer;
    private final SimpleTabColorizer mDefaultTabColorizer;

    SlidingTabStrip(Context context) {
        this(context, null);
    }

    SlidingTabStrip(Context context, AttributeSet attrs) {
        super(context, attrs);
        // a LinearLayout skips onDraw by default; enable it so we can paint the indicator
        setWillNotDraw(false);

        final float density = getResources().getDisplayMetrics().density;

        // derive the bottom border color from the theme's foreground color, with reduced alpha
        TypedValue outValue = new TypedValue();
        context.getTheme().resolveAttribute(R.attr.colorForeground, outValue, true);
        final int themeForegroundColor = outValue.data;

        mDefaultBottomBorderColor = setColorAlpha(themeForegroundColor,
                DEFAULT_BOTTOM_BORDER_COLOR_ALPHA);

        mDefaultTabColorizer = new SimpleTabColorizer();
        mDefaultTabColorizer.setIndicatorColors(DEFAULT_SELECTED_INDICATOR_COLOR);

        mBottomBorderThickness = (int) (DEFAULT_BOTTOM_BORDER_THICKNESS_DIPS * density);
        mBottomBorderPaint = new Paint();
        mBottomBorderPaint.setColor(mDefaultBottomBorderColor);

        mSelectedIndicatorThickness = (int) (SELECTED_INDICATOR_THICKNESS_DIPS * density);
        mSelectedIndicatorPaint = new Paint();
    }

    /**
     * Installs a custom colorizer for the indicator; pass null to revert to the
     * default colors. Triggers a redraw.
     */
    void setCustomTabColorizer(SlidingTabLayout.TabColorizer customTabColorizer) {
        mCustomTabColorizer = customTabColorizer;
        invalidate();
    }

    /**
     * Sets the default indicator color cycle and clears any custom colorizer.
     * Triggers a redraw.
     */
    void setSelectedIndicatorColors(int... colors) {
        // Make sure that the custom colorizer is removed
        mCustomTabColorizer = null;
        mDefaultTabColorizer.setIndicatorColors(colors);
        invalidate();
    }

    /**
     * Called as the ViewPager scrolls; records the selected tab index and the
     * fractional offset towards the next tab, then redraws the indicator.
     */
    void onViewPagerPageChanged(int position, float positionOffset) {
        mSelectedPosition = position;
        mSelectionOffset = positionOffset;
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        final int height = getHeight();
        final int childCount = getChildCount();
        final SlidingTabLayout.TabColorizer tabColorizer = mCustomTabColorizer != null
                ? mCustomTabColorizer
                : mDefaultTabColorizer;

        // Thick colored underline below the current selection
        if (childCount > 0) {
            View selectedTitle = getChildAt(mSelectedPosition);
            int left = selectedTitle.getLeft();
            int right = selectedTitle.getRight();
            int color = tabColorizer.getIndicatorColor(mSelectedPosition);

            if (mSelectionOffset > 0f && mSelectedPosition < (getChildCount() - 1)) {
                int nextColor = tabColorizer.getIndicatorColor(mSelectedPosition + 1);
                if (color != nextColor) {
                    // blend towards the next tab's color in proportion to the scroll offset
                    color = blendColors(nextColor, color, mSelectionOffset);
                }

                // Draw the selection partway between the tabs
                View nextTitle = getChildAt(mSelectedPosition + 1);
                left = (int) (mSelectionOffset * nextTitle.getLeft() +
                        (1.0f - mSelectionOffset) * left);
                right = (int) (mSelectionOffset * nextTitle.getRight() +
                        (1.0f - mSelectionOffset) * right);
            }

            mSelectedIndicatorPaint.setColor(color);
            canvas.drawRect(left, height - mSelectedIndicatorThickness, right,
                    height, mSelectedIndicatorPaint);
        }

        // Thin underline along the entire bottom edge
        canvas.drawRect(0, height - mBottomBorderThickness, getWidth(), height, mBottomBorderPaint);
    }

    /**
     * Set the alpha value of the {@code color} to be the given {@code alpha} value.
     */
    private static int setColorAlpha(int color, byte alpha) {
        return Color.argb(alpha, Color.red(color), Color.green(color), Color.blue(color));
    }

    /**
     * Blend {@code color1} and {@code color2} using the given ratio.
     *
     * @param ratio of which to blend. 1.0 will return {@code color1}, 0.5 will give an even blend,
     *              0.0 will return {@code color2}.
     */
    private static int blendColors(int color1, int color2, float ratio) {
        final float inverseRation = 1f - ratio;
        float r = (Color.red(color1) * ratio) + (Color.red(color2) * inverseRation);
        float g = (Color.green(color1) * ratio) + (Color.green(color2) * inverseRation);
        float b = (Color.blue(color1) * ratio) + (Color.blue(color2) * inverseRation);
        return Color.rgb((int) r, (int) g, (int) b);
    }

    /**
     * Default colorizer: cycles through a fixed array of indicator colors by
     * tab position.
     */
    private static class SimpleTabColorizer implements SlidingTabLayout.TabColorizer {
        private int[] mIndicatorColors;

        @Override
        public final int getIndicatorColor(int position) {
            return mIndicatorColors[position % mIndicatorColors.length];
        }

        void setIndicatorColors(int... colors) {
            mIndicatorColors = colors;
        }
    }
} | apache-2.0 |
tzaavi/Lean | Common/Securities/Security.cs | 13827 | /*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using QuantConnect.Data;
using QuantConnect.Data.Market;
using QuantConnect.Securities.Equity;
using QuantConnect.Securities.Forex;
using QuantConnect.Securities.Interfaces;
namespace QuantConnect.Securities
{
/// <summary>
/// A base vehicle properties class for providing a common interface to all assets in QuantConnect.
/// </summary>
/// <remarks>
/// Security object is intended to hold properties of the specific security asset. These properties can include trade start-stop dates,
/// price, market hours, resolution of the security, the holdings information for this security and the specific fill model.
/// </remarks>
public class Security
{
    private LocalTimeKeeper _localTimeKeeper;

    private readonly SubscriptionDataConfig _config;

    /// <summary>
    /// <see cref="Symbol"/> for the asset.
    /// </summary>
    public Symbol Symbol
    {
        get
        {
            return _config.Symbol;
        }
    }

    /// <summary>
    /// Type of the security.
    /// </summary>
    /// <remarks>
    /// QuantConnect currently only supports Equities and Forex
    /// </remarks>
    public SecurityType Type
    {
        get
        {
            return _config.SecurityType;
        }
    }

    /// <summary>
    /// Resolution of data requested for this security.
    /// </summary>
    /// <remarks>Tick, second or minute resolution for QuantConnect assets.</remarks>
    public Resolution Resolution
    {
        get
        {
            return _config.Resolution;
        }
    }

    /// <summary>
    /// Indicates the data will use previous bars when there was no trading in this time period. This was a configurable datastream setting set in initialization.
    /// </summary>
    public bool IsFillDataForward
    {
        get
        {
            return _config.FillDataForward;
        }
    }

    /// <summary>
    /// Indicates the security will continue feeding data after the primary market hours have closed. This was a configurable setting set in initialization.
    /// </summary>
    public bool IsExtendedMarketHours
    {
        get
        {
            return _config.ExtendedMarketHours;
        }
    }

    /// <summary>
    /// Gets the subscription configuration for this security
    /// </summary>
    public SubscriptionDataConfig SubscriptionDataConfig
    {
        get { return _config; }
    }

    /// <summary>
    /// There has been at least one datapoint since our algorithm started running for us to determine price.
    /// </summary>
    public bool HasData
    {
        get
        {
            return GetLastData() != null;
        }
    }

    /// <summary>
    /// Data cache for the security to store previous price information.
    /// </summary>
    /// <seealso cref="EquityCache"/>
    /// <seealso cref="ForexCache"/>
    public virtual SecurityCache Cache { get; set; }

    /// <summary>
    /// Holdings class contains the portfolio, cash and processes order fills.
    /// </summary>
    /// <seealso cref="EquityHolding"/>
    /// <seealso cref="ForexHolding"/>
    public virtual SecurityHolding Holdings
    {
        get;
        set;
    }

    /// <summary>
    /// Exchange class contains the market opening hours, along with pre-post market hours.
    /// </summary>
    /// <seealso cref="EquityExchange"/>
    /// <seealso cref="ForexExchange"/>
    public virtual SecurityExchange Exchange
    {
        get;
        set;
    }

    /// <summary>
    /// Transaction model class implements the fill models for the security. If the user does not define a model the default
    /// model is used for this asset class.
    /// </summary>
    /// <remarks>This is ignored in live trading and the real fill prices are used instead</remarks>
    /// <seealso cref="EquityTransactionModel"/>
    /// <seealso cref="ForexTransactionModel"/>
    [Obsolete("Security.Model has been made obsolete, use Security.TransactionModel instead.")]
    public virtual ISecurityTransactionModel Model
    {
        get { return TransactionModel; }
        set { TransactionModel = value; }
    }

    /// <summary>
    /// Transaction model class implements the fill models for the security. If the user does not define a model the default
    /// model is used for this asset class.
    /// </summary>
    /// <remarks>This is ignored in live trading and the real fill prices are used instead</remarks>
    /// <seealso cref="EquityTransactionModel"/>
    /// <seealso cref="ForexTransactionModel"/>
    public ISecurityTransactionModel TransactionModel
    {
        get;
        set;
    }

    /// <summary>
    /// Gets the portfolio model used by this security
    /// </summary>
    public ISecurityPortfolioModel PortfolioModel
    {
        get;
        set;
    }

    /// <summary>
    /// Gets the margin model used for this security
    /// </summary>
    public ISecurityMarginModel MarginModel
    {
        get;
        set;
    }

    /// <summary>
    /// Gets the settlement model used for this security
    /// </summary>
    public ISettlementModel SettlementModel
    {
        get;
        set;
    }

    /// <summary>
    /// Customizable data filter to filter outlier ticks before they are passed into user event handlers.
    /// By default all ticks are passed into the user algorithms.
    /// </summary>
    /// <remarks>TradeBars (seconds and minute bars) are prefiltered to ensure the ticks which build the bars are realistically tradeable</remarks>
    /// <seealso cref="EquityDataFilter"/>
    /// <seealso cref="ForexDataFilter"/>
    public ISecurityDataFilter DataFilter
    {
        get;
        set;
    }

    /// <summary>
    /// Construct a new security vehicle based on the user options.
    /// </summary>
    public Security(SecurityExchangeHours exchangeHours, SubscriptionDataConfig config, decimal leverage)
    {
        _config = config;
        Cache = new SecurityCache();
        Exchange = new SecurityExchange(exchangeHours);
        DataFilter = new SecurityDataFilter();
        PortfolioModel = new SecurityPortfolioModel();
        TransactionModel = new SecurityTransactionModel();
        MarginModel = new SecurityMarginModel(leverage);
        SettlementModel = new ImmediateSettlementModel();
        Holdings = new SecurityHolding(this);
    }

    /// <summary>
    /// Read only property that checks if we currently own stock in the company.
    /// </summary>
    public virtual bool HoldStock
    {
        get
        {
            //Get a boolean, true if we own this stock.
            return Holdings.AbsoluteQuantity > 0;
        }
    }

    /// <summary>
    /// Alias for HoldStock - Do we have any of this security
    /// </summary>
    public virtual bool Invested
    {
        get
        {
            return HoldStock;
        }
    }

    /// <summary>
    /// Local time for this market
    /// </summary>
    public virtual DateTime LocalTime
    {
        get
        {
            if (_localTimeKeeper == null)
            {
                throw new Exception("Security.SetLocalTimeKeeper(LocalTimeKeeper) must be called in order to use the LocalTime property.");
            }
            return _localTimeKeeper.LocalTime;
        }
    }

    /// <summary>
    /// Get the current value of the security.
    /// </summary>
    public virtual decimal Price
    {
        get
        {
            //Get the current security value from the cache
            var data = GetLastData();
            if (data != null)
            {
                return data.Value;
            }
            return 0;
        }
    }

    /// <summary>
    /// Leverage for this Security.
    /// </summary>
    public virtual decimal Leverage
    {
        get
        {
            return Holdings.Leverage;
        }
    }

    /// <summary>
    /// If this uses tradebar data, return the most recent high; 0 when no data has been received yet.
    /// </summary>
    public virtual decimal High {
        get
        {
            var data = GetLastData();
            var bar = data as TradeBar;
            if (bar != null)
            {
                return bar.High;
            }
            // Fix: consistent with Close/Price - return 0 instead of throwing a
            // NullReferenceException when no data point has arrived yet.
            return data == null ? 0 : data.Value;
        }
    }

    /// <summary>
    /// If this uses tradebar data, return the most recent low; 0 when no data has been received yet.
    /// </summary>
    public virtual decimal Low {
        get
        {
            var data = GetLastData();
            var bar = data as TradeBar;
            if (bar != null)
            {
                return bar.Low;
            }
            // Fix: consistent with Close/Price - return 0 instead of throwing a
            // NullReferenceException when no data point has arrived yet.
            return data == null ? 0 : data.Value;
        }
    }

    /// <summary>
    /// If this uses tradebar data, return the most recent close; 0 when no data has been received yet.
    /// </summary>
    public virtual decimal Close
    {
        get
        {
            var data = GetLastData();
            if (data == null) return 0;
            return data.Value;
        }
    }

    /// <summary>
    /// If this uses tradebar data, return the most recent open; 0 when no data has been received yet.
    /// </summary>
    public virtual decimal Open {
        get {
            var data = GetLastData();
            var bar = data as TradeBar;
            if (bar != null)
            {
                return bar.Open;
            }
            // Fix: consistent with Close/Price - return 0 instead of throwing a
            // NullReferenceException when no data point has arrived yet.
            return data == null ? 0 : data.Value;
        }
    }

    /// <summary>
    /// Access to the volume of the equity today
    /// </summary>
    public virtual long Volume
    {
        get
        {
            var data = GetLastData();
            var bar = data as TradeBar;
            if (bar != null)
            {
                return bar.Volume;
            }
            return 0;
        }
    }

    /// <summary>
    /// Get the last price update set to the security.
    /// </summary>
    /// <returns>BaseData object for this security</returns>
    public BaseData GetLastData()
    {
        return Cache.GetData();
    }

    /// <summary>
    /// Sets the <see cref="LocalTimeKeeper"/> to be used for this <see cref="Security"/>.
    /// This is the source of this instance's time.
    /// </summary>
    /// <param name="localTimeKeeper">The source of this <see cref="Security"/>'s time.</param>
    public void SetLocalTimeKeeper(LocalTimeKeeper localTimeKeeper)
    {
        _localTimeKeeper = localTimeKeeper;
        _localTimeKeeper.TimeUpdated += (sender, args) =>
        {
            //Update the Exchange/Timer:
            Exchange.SetLocalDateTimeFrontier(args.Time);
        };
    }

    /// <summary>
    /// Update any security properties based on the lastest market data and time
    /// </summary>
    /// <param name="data">New data packet from LEAN</param>
    public void SetMarketPrice(BaseData data)
    {
        //Add new point to cache:
        if (data == null) return;
        Cache.AddData(data);
        Holdings.UpdateMarketPrice(data.Value);
    }

    /// <summary>
    /// Set the leverage parameter for this security
    /// </summary>
    /// <param name="leverage">Leverage for this asset</param>
    public void SetLeverage(decimal leverage)
    {
        MarginModel.SetLeverage(this, leverage);
    }

    /// <summary>
    /// Sets the data normalization mode to be used by this security
    /// </summary>
    public void SetDataNormalizationMode(DataNormalizationMode mode)
    {
        _config.DataNormalizationMode = mode;
    }

    /// <summary>
    /// Returns a string that represents the current object.
    /// </summary>
    /// <returns>
    /// A string that represents the current object.
    /// </returns>
    /// <filterpriority>2</filterpriority>
    public override string ToString()
    {
        return Symbol.Permtick;
    }
}
} | apache-2.0 |
tallycheck/data-support | meta-descriptor-base/src/main/java/com/taoswork/tallycheck/descriptor/metadata/processor/ProcessResult.java | 596 | package com.taoswork.tallycheck.descriptor.metadata.processor;
/**
 * Outcome of a metadata processor run. Each result carries a stable numeric
 * code, exposed via {@link #getCode()}.
 */
public enum ProcessResult {
    /**
     * INAPPLICABLE: the processor doesn't handle the data specified.
     */
    INAPPLICABLE(1),
    /**
     * The processor doesn't handle the data, but may do some other stuffs, such as setting value.
     */
    PASSING_THROUGH(2),
    /**
     * The processor handles the data successfully.
     */
    HANDLED(3),
    /**
     * The processor failed to handle the data.
     */
    FAILED(4);

    // numeric code identifying this result; previously write-only
    private final int code;

    ProcessResult(int code) {
        this.code = code;
    }

    /**
     * Returns the numeric code associated with this result.
     *
     * @return the result code
     */
    public int getCode() {
        return code;
    }
}
| apache-2.0 |
yahoo/validatar | src/main/java/com/yahoo/validatar/common/Pluggable.java | 3092 | /*
* Copyright 2015 Yahoo Inc.
* Licensed under the terms of the Apache 2 license. Please see LICENSE file in the project root for terms.
*/
package com.yahoo.validatar.common;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
/**
* A class that can be extended to load or plugin additional classes to a type. For example, extending this
* class in an package that loads engines could let it allow loading additional engines at runtime from arguments.
* It only works with classes that can be instantiated with the default constructor.
*
* @param <T> The super type of the pluggable classes.
*/
@Slf4j
public class Pluggable<T> {
@Getter
private OptionParser pluginOptionsParser;
private List<Class<? extends T>> defaults;
private String optionsKey;
/**
* The constructor.
*
* @param defaults The List of default classes to use as plugins.
* @param key The key to use to load the plugin class from command line arguments.
* @param description A helpful description to provide for what these plugins are.
*/
public Pluggable(List<Class<? extends T>> defaults, String key, String description) {
Objects.requireNonNull(defaults);
pluginOptionsParser = new OptionParser() {
{
accepts(key, description)
.withRequiredArg()
.describedAs("Additional custom fully qualified classes to plug in");
allowsUnrecognizedOptions();
}
};
this.defaults = defaults;
this.optionsKey = key;
}
/**
* Returns a set view of the instantiated plugins that could be created.
* @param arguments The commandline arguments containing the optional plugin arguments and class names.
* @return A Set of all the instantiated plugin classes.
*/
public Set<T> getPlugins(String[] arguments) {
OptionSet options = pluginOptionsParser.parse(arguments);
Set<Class<? extends T>> pluginClasses = new HashSet<>(defaults);
for (String pluggable : (List<String>) options.valuesOf(optionsKey)) {
try {
Class<? extends T> plugin = (Class<? extends T>) Class.forName(pluggable);
pluginClasses.add(plugin);
} catch (ClassNotFoundException e) {
log.error("Requested plugin class not found: {}", pluggable, e);
}
}
Set<T> plugins = new HashSet<>();
for (Class<? extends T> pluginClass : pluginClasses) {
try {
plugins.add(pluginClass.newInstance());
} catch (InstantiationException ie) {
log.error("Error instantiating {} plugin.\n{}", pluginClass, ie);
} catch (IllegalAccessException iae) {
log.error("Illegal access while loading {} plugin.\n{}", pluginClass, iae);
}
}
return plugins;
}
}
| apache-2.0 |
Unicon/cas | core/cas-server-core-web/src/test/java/org/apereo/cas/web/SimpleUrlValidatorFactoryBeanTests.java | 890 | package org.apereo.cas.web;
import static org.junit.Assert.*;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
 * Unit tests for the URL validator produced by {@code SimpleUrlValidatorFactoryBean},
 * covering both the strict mode and the mode that additionally accepts local URLs.
 *
 * @author swoeste
 * @since 5.1.0
 */
@RunWith(JUnit4.class)
public class SimpleUrlValidatorFactoryBeanTests {

    @Test
    public void verifyValidation() throws Exception {
        // factory created with allowLocalUrls=false: local host names must be rejected
        final UrlValidator urlValidator = new SimpleUrlValidatorFactoryBean(false).getObject();
        assertTrue(urlValidator.isValid("http://www.demo.com/logout"));
        assertFalse(urlValidator.isValid("http://localhost/logout"));
    }

    @Test
    public void verifyValidationWithLocalUrlAllowed() throws Exception {
        // factory created with allowLocalUrls=true: both external and local URLs pass
        final UrlValidator urlValidator = new SimpleUrlValidatorFactoryBean(true).getObject();
        assertTrue(urlValidator.isValid("http://www.demo.com/logout"));
        assertTrue(urlValidator.isValid("http://localhost/logout"));
    }
}
| apache-2.0 |
wenhuiyao/RevealAnimation | app/src/androidTest/java/demo/reveal/wenhui/com/revealanimation/ApplicationTest.java | 370 | package demo.reveal.wenhui.com.revealanimation;
import android.app.Application;
import android.test.ApplicationTestCase;
/**
 * Instrumentation test case for the default {@link Application} class.
 *
 * <a href="http://d.android.com/tools/testing/testing_android.html">Testing Fundamentals</a>
 */
public class ApplicationTest extends ApplicationTestCase<Application> {

    // Runs the harness against the stock Application; no custom Application subclass exists.
    public ApplicationTest() {
        super(Application.class);
    }
} | apache-2.0 |
liquidm/druid | indexing-service/src/test/java/org/apache/druid/indexing/common/task/CompactionTaskTest.java | 50844 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.indexing.common.task;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.introspect.AnnotationIntrospectorPair;
import com.fasterxml.jackson.databind.jsontype.NamedType;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.druid.data.input.FirehoseFactory;
import org.apache.druid.data.input.impl.DimensionSchema;
import org.apache.druid.data.input.impl.DimensionsSpec;
import org.apache.druid.data.input.impl.DoubleDimensionSchema;
import org.apache.druid.data.input.impl.FloatDimensionSchema;
import org.apache.druid.data.input.impl.InputRowParser;
import org.apache.druid.data.input.impl.LongDimensionSchema;
import org.apache.druid.data.input.impl.NoopInputRowParser;
import org.apache.druid.data.input.impl.StringDimensionSchema;
import org.apache.druid.data.input.impl.TimeAndDimsParseSpec;
import org.apache.druid.guice.GuiceAnnotationIntrospector;
import org.apache.druid.guice.GuiceInjectableValues;
import org.apache.druid.guice.GuiceInjectors;
import org.apache.druid.indexing.common.TaskToolbox;
import org.apache.druid.indexing.common.TestUtils;
import org.apache.druid.indexing.common.actions.SegmentListUsedAction;
import org.apache.druid.indexing.common.actions.TaskAction;
import org.apache.druid.indexing.common.actions.TaskActionClient;
import org.apache.druid.indexing.common.stats.RowIngestionMetersFactory;
import org.apache.druid.indexing.common.task.CompactionTask.Builder;
import org.apache.druid.indexing.common.task.CompactionTask.PartitionConfigurationManager;
import org.apache.druid.indexing.common.task.CompactionTask.SegmentProvider;
import org.apache.druid.indexing.common.task.IndexTask.IndexIOConfig;
import org.apache.druid.indexing.common.task.IndexTask.IndexIngestionSpec;
import org.apache.druid.indexing.common.task.IndexTask.IndexTuningConfig;
import org.apache.druid.indexing.firehose.IngestSegmentFirehoseFactory;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.granularity.Granularities;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.granularity.PeriodGranularity;
import org.apache.druid.java.util.common.guava.Comparators;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.apache.druid.query.aggregation.CountAggregatorFactory;
import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory;
import org.apache.druid.query.aggregation.FloatMinAggregatorFactory;
import org.apache.druid.query.aggregation.LongMaxAggregatorFactory;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
import org.apache.druid.query.aggregation.first.FloatFirstAggregatorFactory;
import org.apache.druid.query.aggregation.last.DoubleLastAggregatorFactory;
import org.apache.druid.segment.IndexIO;
import org.apache.druid.segment.IndexMergerV9;
import org.apache.druid.segment.IndexSpec;
import org.apache.druid.segment.Metadata;
import org.apache.druid.segment.QueryableIndex;
import org.apache.druid.segment.SimpleQueryableIndex;
import org.apache.druid.segment.column.BaseColumn;
import org.apache.druid.segment.column.BitmapIndex;
import org.apache.druid.segment.column.ColumnCapabilities;
import org.apache.druid.segment.column.ColumnCapabilitiesImpl;
import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.SpatialIndex;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.data.CompressionFactory.LongEncodingStrategy;
import org.apache.druid.segment.data.CompressionStrategy;
import org.apache.druid.segment.data.ListIndexed;
import org.apache.druid.segment.data.RoaringBitmapSerdeFactory;
import org.apache.druid.segment.incremental.IncrementalIndex;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
import org.apache.druid.segment.loading.SegmentLoadingException;
import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider;
import org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider;
import org.apache.druid.segment.selector.settable.SettableColumnValueSelector;
import org.apache.druid.segment.transform.TransformingInputRowParser;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMediumFactory;
import org.apache.druid.server.security.AuthTestUtils;
import org.apache.druid.server.security.AuthorizerMapper;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.NumberedShardSpec;
import org.hamcrest.CoreMatchers;
import org.joda.time.Interval;
import org.joda.time.Period;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
@RunWith(Parameterized.class)
public class CompactionTaskTest
{
// Size in bytes reported for every synthetic test segment.
private static final long SEGMENT_SIZE_BYTES = 100;
// Row count constant (declared for use by tests; no rows are materialized here).
private static final int NUM_ROWS_PER_SEGMENT = 10;
private static final String DATA_SOURCE = "dataSource";
private static final String TIMESTAMP_COLUMN = "timestamp";
// Column whose declared type differs across segments: a string dimension in the
// first four monthly segments and a double dimension in the last two (see
// setupClass). Used to verify how compaction reconciles conflicting types.
private static final String MIXED_TYPE_COLUMN = "string_to_double";
// Full six-month interval to compact; SEGMENT_INTERVALS below partitions it
// into the six consecutive monthly segments.
private static final Interval COMPACTION_INTERVAL = Intervals.of("2017-01-01/2017-07-01");
private static final List<Interval> SEGMENT_INTERVALS = ImmutableList.of(
    Intervals.of("2017-01-01/2017-02-01"),
    Intervals.of("2017-02-01/2017-03-01"),
    Intervals.of("2017-03-01/2017-04-01"),
    Intervals.of("2017-04-01/2017-05-01"),
    Intervals.of("2017-05-01/2017-06-01"),
    Intervals.of("2017-06-01/2017-07-01")
);
// Per-interval schema of the mixed-type column; populated in setupClass.
private static final Map<Interval, DimensionSchema> MIXED_TYPE_COLUMN_MAP = new HashMap<>();
private static final IndexTuningConfig TUNING_CONFIG = createTuningConfig();
// Mutable statics populated once in setupClass (cannot be final because @BeforeClass
// runs after static initialization).
private static Map<String, DimensionSchema> DIMENSIONS;
private static List<AggregatorFactory> AGGREGATORS;
private static List<DataSegment> SEGMENTS;
private static RowIngestionMetersFactory rowIngestionMetersFactory = new TestUtils().getRowIngestionMetersFactory();
private static ObjectMapper objectMapper = setupInjectablesInObjectMapper(new DefaultObjectMapper());
// Maps each synthetic segment to a fake local file; consumed by TestTaskToolbox.
private static Map<DataSegment, File> segmentMap;
// Parameterized flag under test (see parameters()).
private final boolean keepSegmentGranularity;
private TaskToolbox toolbox;
@BeforeClass
public static void setupClass()
{
  // The mixed-type column is declared as a string dimension in the first four
  // monthly segments and as a double dimension in the last two.
  for (int month = 0; month < SEGMENT_INTERVALS.size(); month++) {
    final DimensionSchema mixedTypeSchema = month < 4
                                            ? new StringDimensionSchema(MIXED_TYPE_COLUMN)
                                            : new DoubleDimensionSchema(MIXED_TYPE_COLUMN);
    MIXED_TYPE_COLUMN_MAP.put(SEGMENT_INTERVALS.get(month), mixedTypeSchema);
  }
  DIMENSIONS = new HashMap<>();
  AGGREGATORS = new ArrayList<>();
  DIMENSIONS.put(ColumnHolder.TIME_COLUMN_NAME, new LongDimensionSchema(ColumnHolder.TIME_COLUMN_NAME));
  DIMENSIONS.put(TIMESTAMP_COLUMN, new LongDimensionSchema(TIMESTAMP_COLUMN));
  // One dimension of each value type per segment interval index.
  for (int i = 0; i < SEGMENT_INTERVALS.size(); i++) {
    final List<DimensionSchema> perIndexSchemas = Arrays.asList(
        new StringDimensionSchema(
            "string_dim_" + i,
            null,
            null
        ),
        new LongDimensionSchema("long_dim_" + i),
        new FloatDimensionSchema("float_dim_" + i),
        new DoubleDimensionSchema("double_dim_" + i)
    );
    perIndexSchemas.forEach(schema -> DIMENSIONS.put(schema.getName(), schema));
  }
  AGGREGATORS.add(new CountAggregatorFactory("agg_0"));
  AGGREGATORS.add(new LongSumAggregatorFactory("agg_1", "long_dim_1"));
  AGGREGATORS.add(new LongMaxAggregatorFactory("agg_2", "long_dim_2"));
  AGGREGATORS.add(new FloatFirstAggregatorFactory("agg_3", "float_dim_3"));
  AGGREGATORS.add(new DoubleLastAggregatorFactory("agg_4", "double_dim_4"));
  // Build one synthetic segment per month, each backed by a fake file.
  segmentMap = new HashMap<>(SEGMENT_INTERVALS.size());
  for (int i = 0; i < SEGMENT_INTERVALS.size(); i++) {
    final Interval segmentInterval = Intervals.of(StringUtils.format("2017-0%d-01/2017-0%d-01", (i + 1), (i + 2)));
    final DataSegment segment = new DataSegment(
        DATA_SOURCE,
        segmentInterval,
        "version",
        ImmutableMap.of(),
        findDimensions(i, segmentInterval),
        AGGREGATORS.stream().map(AggregatorFactory::getName).collect(Collectors.toList()),
        new NumberedShardSpec(0, 1),
        0,
        SEGMENT_SIZE_BYTES
    );
    segmentMap.put(segment, new File("file_" + i));
  }
  SEGMENTS = new ArrayList<>(segmentMap.keySet());
}
/**
 * Configures the given mapper for (de)serializing CompactionTask in tests:
 * installs Guice-aware annotation introspection, binds the injectable values
 * the task requires (authorizer, chat handler, row-ingestion meters), and
 * registers the shard-spec subtype used by the synthetic segments.
 *
 * @return the same mapper instance, mutated in place
 */
private static ObjectMapper setupInjectablesInObjectMapper(ObjectMapper objectMapper)
{
  final GuiceAnnotationIntrospector guiceIntrospector = new GuiceAnnotationIntrospector();
  // Chain the Guice introspector in front of the mapper's existing ones for
  // both serialization and deserialization.
  objectMapper.setAnnotationIntrospectors(
      new AnnotationIntrospectorPair(
          guiceIntrospector,
          objectMapper.getSerializationConfig().getAnnotationIntrospector()
      ),
      new AnnotationIntrospectorPair(
          guiceIntrospector,
          objectMapper.getDeserializationConfig().getAnnotationIntrospector()
      )
  );
  // Values injected into @JacksonInject fields during deserialization.
  GuiceInjectableValues injectableValues = new GuiceInjectableValues(
      GuiceInjectors.makeStartupInjectorWithModules(
          ImmutableList.of(
              binder -> {
                binder.bind(AuthorizerMapper.class).toInstance(AuthTestUtils.TEST_AUTHORIZER_MAPPER);
                binder.bind(ChatHandlerProvider.class).toInstance(new NoopChatHandlerProvider());
                binder.bind(RowIngestionMetersFactory.class).toInstance(rowIngestionMetersFactory);
              }
          )
      )
  );
  objectMapper.setInjectableValues(injectableValues);
  objectMapper.registerModule(
      new SimpleModule().registerSubtypes(new NamedType(NumberedShardSpec.class, "NumberedShardSpec"))
  );
  return objectMapper;
}
/**
 * Builds the dimension-name list for the segment at {@code startIndex}: the
 * timestamp column, the six per-type dimension groups rotated so that the
 * segment's own group comes first, and finally the mixed-type column under
 * whatever schema it has for {@code segmentInterval}.
 */
private static List<String> findDimensions(int startIndex, Interval segmentInterval)
{
  final List<String> dimensions = new ArrayList<>();
  dimensions.add(TIMESTAMP_COLUMN);
  for (int i = 0; i < 6; i++) {
    // Wrap around after the sixth group; equivalent to the original
    // subtract-once form because startIndex + i < 12.
    final int suffix = (startIndex + i) % 6;
    dimensions.add("string_dim_" + suffix);
    dimensions.add("long_dim_" + suffix);
    dimensions.add("float_dim_" + suffix);
    dimensions.add("double_dim_" + suffix);
  }
  dimensions.add(MIXED_TYPE_COLUMN_MAP.get(segmentInterval).getName());
  return dimensions;
}
/**
 * Builds the default tuning config shared by most tests.
 * NOTE(review): argument meanings below are inferred from sibling tests where
 * grounded; positions without a comment are passed as defaults (null).
 */
private static IndexTuningConfig createTuningConfig()
{
  return new IndexTuningConfig(
      null,
      null, // null to compute maxRowsPerSegment automatically
      500000,
      1000000L,
      null, // maxTotalRows (set in testCreateIngestionSchemaWithMaxTotalRows)
      null,
      null, // numShards (set in testCreateIngestionSchemaWithNumShards)
      null,
      new IndexSpec(
          new RoaringBitmapSerdeFactory(true),
          CompressionStrategy.LZ4,
          CompressionStrategy.LZF,
          LongEncodingStrategy.LONGS
      ),
      5000,
      true,
      false,
      true,
      false,
      null,
      100L,
      null,
      null,
      null,
      null
  );
}
// Rule-based exception expectations used by the negative tests below.
@Rule
public ExpectedException expectedException = ExpectedException.none();
// Fresh toolbox per test; wires the synthetic segments into the task action
// client and a stubbed IndexIO.
@Before
public void setup()
{
  toolbox = new TestTaskToolbox(
      new TestTaskActionClient(new ArrayList<>(segmentMap.keySet())),
      new TestIndexIO(objectMapper, segmentMap),
      segmentMap
  );
}
// Runs every test twice: keepSegmentGranularity = false and true.
@Parameters(name = "keepSegmentGranularity={0}")
public static Collection<Object[]> parameters()
{
  return ImmutableList.of(
      new Object[] {false},
      new Object[] {true}
  );
}
public CompactionTaskTest(boolean keepSegmentGranularity)
{
  this.keepSegmentGranularity = keepSegmentGranularity;
}
@Test
public void testSerdeWithInterval() throws IOException
{
  // Round-trip an interval-based task through JSON and verify field equality.
  final CompactionTask task = new Builder(
      DATA_SOURCE,
      objectMapper,
      AuthTestUtils.TEST_AUTHORIZER_MAPPER,
      null,
      rowIngestionMetersFactory
  )
      .interval(COMPACTION_INTERVAL)
      .tuningConfig(createTuningConfig())
      .context(ImmutableMap.of("testKey", "testContext"))
      .build();
  final byte[] serialized = objectMapper.writeValueAsBytes(task);
  final CompactionTask deserialized = objectMapper.readValue(serialized, CompactionTask.class);
  assertEquals(task, deserialized);
}
@Test
public void testSerdeWithSegments() throws IOException
{
  // Round-trip a segments-based task through JSON and verify field equality.
  final CompactionTask task = new Builder(
      DATA_SOURCE,
      objectMapper,
      AuthTestUtils.TEST_AUTHORIZER_MAPPER,
      null,
      rowIngestionMetersFactory
  )
      .segments(SEGMENTS)
      .tuningConfig(createTuningConfig())
      .context(ImmutableMap.of("testKey", "testContext"))
      .build();
  final byte[] serialized = objectMapper.writeValueAsBytes(task);
  final CompactionTask deserialized = objectMapper.readValue(serialized, CompactionTask.class);
  assertEquals(task, deserialized);
}
@Test
public void testSerdeWithDimensions() throws IOException
{
  // Round-trip a task carrying an explicit dimensions spec through JSON.
  final DimensionsSpec dimensionsSpec = new DimensionsSpec(
      ImmutableList.of(
          new StringDimensionSchema("dim1"),
          new StringDimensionSchema("dim2"),
          new StringDimensionSchema("dim3")
      )
  );
  final CompactionTask task = new Builder(
      DATA_SOURCE,
      objectMapper,
      AuthTestUtils.TEST_AUTHORIZER_MAPPER,
      null,
      rowIngestionMetersFactory
  )
      .segments(SEGMENTS)
      .dimensionsSpec(dimensionsSpec)
      .tuningConfig(createTuningConfig())
      .context(ImmutableMap.of("testKey", "testVal"))
      .build();
  final byte[] serialized = objectMapper.writeValueAsBytes(task);
  final CompactionTask deserialized = objectMapper.readValue(serialized, CompactionTask.class);
  assertEquals(task, deserialized);
}
/**
 * Asserts that the two tasks agree on every property this test suite cares
 * about, comparing each getter explicitly.
 */
private static void assertEquals(CompactionTask expected, CompactionTask actual)
{
  Assert.assertEquals(expected.getType(), actual.getType());
  Assert.assertEquals(expected.getDataSource(), actual.getDataSource());
  Assert.assertEquals(expected.getInterval(), actual.getInterval());
  Assert.assertEquals(expected.getSegments(), actual.getSegments());
  Assert.assertEquals(expected.getDimensionsSpec(), actual.getDimensionsSpec());
  // assertArrayEquals reports the first differing element on failure, unlike
  // assertTrue(Arrays.equals(...)) which only reports "expected true".
  Assert.assertArrayEquals(expected.getMetricsSpec(), actual.getMetricsSpec());
  Assert.assertEquals(expected.isKeepSegmentGranularity(), actual.isKeepSegmentGranularity());
  Assert.assertEquals(expected.getTargetCompactionSizeBytes(), actual.getTargetCompactionSizeBytes());
  Assert.assertEquals(expected.getTuningConfig(), actual.getTuningConfig());
  Assert.assertEquals(expected.getContext(), actual.getContext());
}
// With the default tuning config and auto-generated dimensions:
// keepSegmentGranularity=true yields one monthly ingestion spec per segment,
// false collapses everything into a single ALL-granularity spec.
@Test
public void testCreateIngestionSchema() throws IOException, SegmentLoadingException
{
  final List<IndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(DATA_SOURCE, COMPACTION_INTERVAL),
      new PartitionConfigurationManager(null, TUNING_CONFIG),
      null,
      null,
      keepSegmentGranularity,
      null,
      objectMapper
  );
  final List<DimensionsSpec> expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration(
      keepSegmentGranularity
  );
  if (keepSegmentGranularity) {
    // Order the specs by interval so they line up with SEGMENT_INTERVALS.
    ingestionSpecs.sort(
        (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
            s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
            s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
        )
    );
    Assert.assertEquals(6, ingestionSpecs.size());
    assertIngestionSchema(ingestionSpecs, expectedDimensionsSpec, AGGREGATORS, SEGMENT_INTERVALS, Granularities.MONTH);
  } else {
    Assert.assertEquals(1, ingestionSpecs.size());
    assertIngestionSchema(
        ingestionSpecs,
        expectedDimensionsSpec,
        AGGREGATORS,
        Collections.singletonList(COMPACTION_INTERVAL),
        Granularities.ALL
    );
  }
}
// Same as testCreateIngestionSchema but with an explicit target partition size
// (second tuning-config argument = 6) carried through to the generated specs.
@Test
public void testCreateIngestionSchemaWithTargetPartitionSize() throws IOException, SegmentLoadingException
{
  final IndexTuningConfig tuningConfig = new IndexTuningConfig(
      null,
      6,
      500000,
      1000000L,
      null,
      null,
      null,
      null,
      new IndexSpec(
          new RoaringBitmapSerdeFactory(true),
          CompressionStrategy.LZ4,
          CompressionStrategy.LZF,
          LongEncodingStrategy.LONGS
      ),
      5000,
      true,
      false,
      true,
      false,
      null,
      100L,
      null,
      null,
      null,
      null
  );
  final List<IndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(DATA_SOURCE, COMPACTION_INTERVAL),
      new PartitionConfigurationManager(null, tuningConfig),
      null,
      null,
      keepSegmentGranularity,
      null,
      objectMapper
  );
  final List<DimensionsSpec> expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration(
      keepSegmentGranularity
  );
  if (keepSegmentGranularity) {
    // Order the specs by interval so they line up with SEGMENT_INTERVALS.
    ingestionSpecs.sort(
        (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
            s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
            s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
        )
    );
    Assert.assertEquals(6, ingestionSpecs.size());
    assertIngestionSchema(
        ingestionSpecs,
        expectedDimensionsSpec,
        AGGREGATORS,
        SEGMENT_INTERVALS,
        tuningConfig,
        Granularities.MONTH
    );
  } else {
    Assert.assertEquals(1, ingestionSpecs.size());
    assertIngestionSchema(
        ingestionSpecs,
        expectedDimensionsSpec,
        AGGREGATORS,
        Collections.singletonList(COMPACTION_INTERVAL),
        tuningConfig,
        Granularities.ALL
    );
  }
}
// Same as testCreateIngestionSchema but with maxTotalRows = 6 in the tuning
// config, expected to be passed through to the generated specs unchanged.
@Test
public void testCreateIngestionSchemaWithMaxTotalRows() throws IOException, SegmentLoadingException
{
  final IndexTuningConfig tuningConfig = new IndexTuningConfig(
      null,
      null,
      500000,
      1000000L,
      6L,
      null,
      null,
      null,
      new IndexSpec(
          new RoaringBitmapSerdeFactory(true),
          CompressionStrategy.LZ4,
          CompressionStrategy.LZF,
          LongEncodingStrategy.LONGS
      ),
      5000,
      true,
      false,
      true,
      false,
      null,
      100L,
      null,
      null,
      null,
      null
  );
  final List<IndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(DATA_SOURCE, COMPACTION_INTERVAL),
      new PartitionConfigurationManager(null, tuningConfig),
      null,
      null,
      keepSegmentGranularity,
      null,
      objectMapper
  );
  final List<DimensionsSpec> expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration(
      keepSegmentGranularity
  );
  if (keepSegmentGranularity) {
    // Order the specs by interval so they line up with SEGMENT_INTERVALS.
    ingestionSpecs.sort(
        (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
            s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
            s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
        )
    );
    Assert.assertEquals(6, ingestionSpecs.size());
    assertIngestionSchema(
        ingestionSpecs,
        expectedDimensionsSpec,
        AGGREGATORS,
        SEGMENT_INTERVALS,
        tuningConfig,
        Granularities.MONTH
    );
  } else {
    Assert.assertEquals(1, ingestionSpecs.size());
    assertIngestionSchema(
        ingestionSpecs,
        expectedDimensionsSpec,
        AGGREGATORS,
        Collections.singletonList(COMPACTION_INTERVAL),
        tuningConfig,
        Granularities.ALL
    );
  }
}
// Same as testCreateIngestionSchema but with numShards = 3 in the tuning
// config, expected to be passed through to the generated specs unchanged.
@Test
public void testCreateIngestionSchemaWithNumShards() throws IOException, SegmentLoadingException
{
  final IndexTuningConfig tuningConfig = new IndexTuningConfig(
      null,
      null,
      500000,
      1000000L,
      null,
      null,
      3,
      null,
      new IndexSpec(
          new RoaringBitmapSerdeFactory(true),
          CompressionStrategy.LZ4,
          CompressionStrategy.LZF,
          LongEncodingStrategy.LONGS
      ),
      5000,
      true,
      false,
      true,
      false,
      null,
      100L,
      null,
      null,
      null,
      null
  );
  final List<IndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(DATA_SOURCE, COMPACTION_INTERVAL),
      new PartitionConfigurationManager(null, tuningConfig),
      null,
      null,
      keepSegmentGranularity,
      null,
      objectMapper
  );
  final List<DimensionsSpec> expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration(
      keepSegmentGranularity
  );
  if (keepSegmentGranularity) {
    // Order the specs by interval so they line up with SEGMENT_INTERVALS.
    ingestionSpecs.sort(
        (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
            s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
            s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
        )
    );
    Assert.assertEquals(6, ingestionSpecs.size());
    assertIngestionSchema(
        ingestionSpecs,
        expectedDimensionsSpec,
        AGGREGATORS,
        SEGMENT_INTERVALS,
        tuningConfig,
        Granularities.MONTH
    );
  } else {
    Assert.assertEquals(1, ingestionSpecs.size());
    assertIngestionSchema(
        ingestionSpecs,
        expectedDimensionsSpec,
        AGGREGATORS,
        Collections.singletonList(COMPACTION_INTERVAL),
        tuningConfig,
        Granularities.ALL
    );
  }
}
// When the caller supplies an explicit DimensionsSpec, it must be used verbatim
// in every generated spec instead of the auto-generated one.
@Test
public void testCreateIngestionSchemaWithCustomDimensionsSpec() throws IOException, SegmentLoadingException
{
  final DimensionsSpec customSpec = new DimensionsSpec(
      Lists.newArrayList(
          new LongDimensionSchema("timestamp"),
          new StringDimensionSchema("string_dim_0"),
          new StringDimensionSchema("string_dim_1"),
          new StringDimensionSchema("string_dim_2"),
          new StringDimensionSchema("string_dim_3"),
          new StringDimensionSchema("string_dim_4"),
          new LongDimensionSchema("long_dim_0"),
          new LongDimensionSchema("long_dim_1"),
          new LongDimensionSchema("long_dim_2"),
          new LongDimensionSchema("long_dim_3"),
          new LongDimensionSchema("long_dim_4"),
          new FloatDimensionSchema("float_dim_0"),
          new FloatDimensionSchema("float_dim_1"),
          new FloatDimensionSchema("float_dim_2"),
          new FloatDimensionSchema("float_dim_3"),
          new FloatDimensionSchema("float_dim_4"),
          new DoubleDimensionSchema("double_dim_0"),
          new DoubleDimensionSchema("double_dim_1"),
          new DoubleDimensionSchema("double_dim_2"),
          new DoubleDimensionSchema("double_dim_3"),
          new DoubleDimensionSchema("double_dim_4"),
          new StringDimensionSchema(MIXED_TYPE_COLUMN)
      )
  );
  final List<IndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(DATA_SOURCE, COMPACTION_INTERVAL),
      new PartitionConfigurationManager(null, TUNING_CONFIG),
      customSpec,
      null,
      keepSegmentGranularity,
      null,
      objectMapper
  );
  if (keepSegmentGranularity) {
    // Order the specs by interval so they line up with SEGMENT_INTERVALS.
    ingestionSpecs.sort(
        (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
            s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
            s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
        )
    );
    Assert.assertEquals(6, ingestionSpecs.size());
    // The same custom spec is expected for all six monthly specs.
    final List<DimensionsSpec> dimensionsSpecs = new ArrayList<>(6);
    IntStream.range(0, 6).forEach(i -> dimensionsSpecs.add(customSpec));
    assertIngestionSchema(
        ingestionSpecs,
        dimensionsSpecs,
        AGGREGATORS,
        SEGMENT_INTERVALS,
        Granularities.MONTH
    );
  } else {
    Assert.assertEquals(1, ingestionSpecs.size());
    assertIngestionSchema(
        ingestionSpecs,
        Collections.singletonList(customSpec),
        AGGREGATORS,
        Collections.singletonList(COMPACTION_INTERVAL),
        Granularities.ALL
    );
  }
}
// When the caller supplies an explicit metrics spec, it must replace the
// aggregators derived from the existing segments.
@Test
public void testCreateIngestionSchemaWithCustomMetricsSpec() throws IOException, SegmentLoadingException
{
  final AggregatorFactory[] customMetricsSpec = new AggregatorFactory[]{
      new CountAggregatorFactory("custom_count"),
      new LongSumAggregatorFactory("custom_long_sum", "agg_1"),
      new FloatMinAggregatorFactory("custom_float_min", "agg_3"),
      new DoubleMaxAggregatorFactory("custom_double_max", "agg_4")
  };
  final List<IndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(DATA_SOURCE, COMPACTION_INTERVAL),
      new PartitionConfigurationManager(null, TUNING_CONFIG),
      null,
      customMetricsSpec,
      keepSegmentGranularity,
      null,
      objectMapper
  );
  final List<DimensionsSpec> expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration(
      keepSegmentGranularity
  );
  if (keepSegmentGranularity) {
    // Order the specs by interval so they line up with SEGMENT_INTERVALS.
    ingestionSpecs.sort(
        (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
            s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
            s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
        )
    );
    Assert.assertEquals(6, ingestionSpecs.size());
    assertIngestionSchema(
        ingestionSpecs,
        expectedDimensionsSpec,
        Arrays.asList(customMetricsSpec),
        SEGMENT_INTERVALS,
        Granularities.MONTH
    );
  } else {
    Assert.assertEquals(1, ingestionSpecs.size());
    assertIngestionSchema(
        ingestionSpecs,
        expectedDimensionsSpec,
        Arrays.asList(customMetricsSpec),
        Collections.singletonList(COMPACTION_INTERVAL),
        Granularities.ALL
    );
  }
}
// Providing the segment list explicitly (instead of dataSource + interval)
// must produce the same specs as the interval-based provider.
@Test
public void testCreateIngestionSchemaWithCustomSegments() throws IOException, SegmentLoadingException
{
  final List<IndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(SEGMENTS),
      new PartitionConfigurationManager(null, TUNING_CONFIG),
      null,
      null,
      keepSegmentGranularity,
      null,
      objectMapper
  );
  final List<DimensionsSpec> expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration(
      keepSegmentGranularity
  );
  if (keepSegmentGranularity) {
    // Order the specs by interval so they line up with SEGMENT_INTERVALS.
    ingestionSpecs.sort(
        (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
            s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
            s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
        )
    );
    Assert.assertEquals(6, ingestionSpecs.size());
    assertIngestionSchema(ingestionSpecs, expectedDimensionsSpec, AGGREGATORS, SEGMENT_INTERVALS, Granularities.MONTH);
  } else {
    Assert.assertEquals(1, ingestionSpecs.size());
    assertIngestionSchema(
        ingestionSpecs,
        expectedDimensionsSpec,
        AGGREGATORS,
        Collections.singletonList(COMPACTION_INTERVAL),
        Granularities.ALL
    );
  }
}
@Test
public void testCreateIngestionSchemaWithDifferentSegmentSet() throws IOException, SegmentLoadingException
{
  expectedException.expect(CoreMatchers.instanceOf(IllegalStateException.class));
  expectedException.expectMessage(CoreMatchers.containsString("are different from the current used segments"));
  // Drop the middle segment so the provided set no longer matches the segments
  // actually published for the interval; schema creation must fail.
  final List<DataSegment> segmentSubset = new ArrayList<>(SEGMENTS);
  Collections.sort(segmentSubset);
  segmentSubset.remove(segmentSubset.size() / 2);
  CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(segmentSubset),
      new PartitionConfigurationManager(null, TUNING_CONFIG),
      null,
      null,
      keepSegmentGranularity,
      null,
      objectMapper
  );
}
@Test
public void testMissingMetadata() throws IOException, SegmentLoadingException
{
  expectedException.expect(RuntimeException.class);
  expectedException.expectMessage(CoreMatchers.startsWith("Index metadata doesn't exist for segment"));
  // Strip the metadata from one queryable index; schema creation must fail
  // because every segment's metadata is required to derive the schema.
  final TestIndexIO indexIO = (TestIndexIO) toolbox.getIndexIO();
  final File firstIndexFile = Iterables.getFirst(indexIO.getQueryableIndexMap().keySet(), null);
  indexIO.removeMetadata(firstIndexFile);
  CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(new ArrayList<>(SEGMENTS)),
      new PartitionConfigurationManager(null, TUNING_CONFIG),
      null,
      null,
      keepSegmentGranularity,
      null,
      objectMapper
  );
}
// A zero-length interval must be rejected when the task is built.
@Test
public void testEmptyInterval()
{
  expectedException.expect(IllegalArgumentException.class);
  expectedException.expectMessage(CoreMatchers.containsString("must specify a nonempty interval"));
  final Builder builder = new Builder(
      DATA_SOURCE,
      objectMapper,
      AuthTestUtils.TEST_AUTHORIZER_MAPPER,
      null,
      rowIngestionMetersFactory
  );
  // build() is expected to throw; the result was previously assigned to an
  // unused local, which is now dropped.
  builder
      .interval(Intervals.of("2000-01-01/2000-01-01"))
      .build();
}
// Specifying both targetCompactionSizeBytes and an explicit partition setting
// (here: a target partition size in the tuning config) must be rejected.
@Test
public void testTargetPartitionSizeWithPartitionConfig() throws IOException, SegmentLoadingException
{
  final IndexTuningConfig tuningConfig = new IndexTuningConfig(
      null,
      6,
      500000,
      1000000L,
      null,
      null,
      null,
      null,
      new IndexSpec(
          new RoaringBitmapSerdeFactory(true),
          CompressionStrategy.LZ4,
          CompressionStrategy.LZF,
          LongEncodingStrategy.LONGS
      ),
      5000,
      true,
      false,
      true,
      false,
      null,
      100L,
      null,
      null,
      null,
      null
  );
  expectedException.expect(IllegalArgumentException.class);
  expectedException.expectMessage("targetCompactionSizeBytes[6] cannot be used with");
  // createIngestionSchema is expected to throw; the return value was
  // previously bound to an unused local, which is now dropped.
  CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(DATA_SOURCE, COMPACTION_INTERVAL),
      new PartitionConfigurationManager(6L, tuningConfig),
      null,
      null,
      keepSegmentGranularity,
      null,
      objectMapper
  );
}
// With keepSegmentGranularity unset (null) and an explicit quarterly segment
// granularity, a single spec covering the whole interval must be produced.
@Test
public void testSegmentGranularity() throws IOException, SegmentLoadingException
{
  final List<IndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(DATA_SOURCE, COMPACTION_INTERVAL),
      new PartitionConfigurationManager(null, TUNING_CONFIG),
      null,
      null,
      null,
      new PeriodGranularity(Period.months(3), null, null),
      objectMapper
  );
  // One merged spec: the mixed-type column resolves to double.
  final List<DimensionsSpec> expectedDimensionsSpec = ImmutableList.of(
      new DimensionsSpec(getDimensionSchema(new DoubleDimensionSchema("string_to_double")))
  );
  ingestionSpecs.sort(
      (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
          s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
          s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
      )
  );
  Assert.assertEquals(1, ingestionSpecs.size());
  assertIngestionSchema(
      ingestionSpecs,
      expectedDimensionsSpec,
      AGGREGATORS,
      Collections.singletonList(COMPACTION_INTERVAL),
      new PeriodGranularity(Period.months(3), null, null)
  );
}
// keepSegmentGranularity=false combined with an explicit segment granularity
// must behave the same as passing null for keepSegmentGranularity.
@Test
public void testSegmentGranularityWithFalseKeepSegmentGranularity() throws IOException, SegmentLoadingException
{
  final List<IndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(DATA_SOURCE, COMPACTION_INTERVAL),
      new PartitionConfigurationManager(null, TUNING_CONFIG),
      null,
      null,
      false,
      new PeriodGranularity(Period.months(3), null, null),
      objectMapper
  );
  // One merged spec: the mixed-type column resolves to double.
  final List<DimensionsSpec> expectedDimensionsSpec = ImmutableList.of(
      new DimensionsSpec(getDimensionSchema(new DoubleDimensionSchema("string_to_double")))
  );
  ingestionSpecs.sort(
      (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
          s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
          s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
      )
  );
  Assert.assertEquals(1, ingestionSpecs.size());
  assertIngestionSchema(
      ingestionSpecs,
      expectedDimensionsSpec,
      AGGREGATORS,
      Collections.singletonList(COMPACTION_INTERVAL),
      new PeriodGranularity(Period.months(3), null, null)
  );
}
// When both keepSegmentGranularity and segmentGranularity are null, the
// default is to keep the existing (monthly) segment granularity.
@Test
public void testNullSegmentGranularityAndNullKeepSegmentGranularity() throws IOException, SegmentLoadingException
{
  final List<IndexIngestionSpec> ingestionSpecs = CompactionTask.createIngestionSchema(
      toolbox,
      new SegmentProvider(DATA_SOURCE, COMPACTION_INTERVAL),
      new PartitionConfigurationManager(null, TUNING_CONFIG),
      null,
      null,
      null,
      null,
      objectMapper
  );
  final List<DimensionsSpec> expectedDimensionsSpec = getExpectedDimensionsSpecForAutoGeneration(
      true
  );
  // Order the specs by interval so they line up with SEGMENT_INTERVALS.
  ingestionSpecs.sort(
      (s1, s2) -> Comparators.intervalsByStartThenEnd().compare(
          s1.getDataSchema().getGranularitySpec().inputIntervals().get(0),
          s2.getDataSchema().getGranularitySpec().inputIntervals().get(0)
      )
  );
  Assert.assertEquals(6, ingestionSpecs.size());
  assertIngestionSchema(
      ingestionSpecs,
      expectedDimensionsSpec,
      AGGREGATORS,
      SEGMENT_INTERVALS,
      Granularities.MONTH
  );
}
// Setting keepSegmentGranularity=true together with an explicit segment
// granularity is contradictory and must be rejected at build time.
@Test
public void testUseKeepSegmentGranularityAndSegmentGranularityTogether()
{
  expectedException.expect(IAE.class);
  expectedException.expectMessage("keepSegmentGranularity and segmentGranularity can't be used together");
  final Builder builder = new Builder(
      DATA_SOURCE,
      objectMapper,
      AuthTestUtils.TEST_AUTHORIZER_MAPPER,
      null,
      rowIngestionMetersFactory
  );
  // build() is expected to throw; the result was previously assigned to an
  // unused local, which is now dropped.
  builder
      .interval(COMPACTION_INTERVAL)
      .keepSegmentGranularity(true)
      .segmentGranularity(Granularities.YEAR)
      .tuningConfig(createTuningConfig())
      .context(ImmutableMap.of("testKey", "testContext"))
      .build();
}
/**
 * Expected auto-generated dimensions specs. With keepSegmentGranularity there
 * is one spec per monthly segment: the mixed-type column is a string in the
 * first four months and a double in the last two. Without it, everything is
 * merged into a single spec where the mixed-type column resolves to double.
 */
private static List<DimensionsSpec> getExpectedDimensionsSpecForAutoGeneration(boolean keepSegmentGranularity)
{
  if (keepSegmentGranularity) {
    final List<DimensionsSpec> specs = new ArrayList<>(6);
    for (int month = 0; month < 6; month++) {
      final DimensionSchema mixed = month < 4
                                    ? new StringDimensionSchema("string_to_double")
                                    : new DoubleDimensionSchema("string_to_double");
      specs.add(new DimensionsSpec(getDimensionSchema(mixed)));
    }
    // copyOf preserves the immutability of the original return value.
    return ImmutableList.copyOf(specs);
  }
  return Collections.singletonList(
      new DimensionsSpec(getDimensionSchema(new DoubleDimensionSchema("string_to_double")))
  );
}
/**
 * The full auto-generated dimension schema list with the given schema used for
 * the mixed-type column. NOTE(review): the literal ordering here presumably
 * mirrors the order the production code emits; the assertions compare these as
 * sets (see assertIngestionSchema), so order is not load-bearing in this test.
 */
private static List<DimensionSchema> getDimensionSchema(DimensionSchema mixedTypeColumn)
{
  return Lists.newArrayList(
      new LongDimensionSchema("timestamp"),
      new StringDimensionSchema("string_dim_4"),
      new LongDimensionSchema("long_dim_4"),
      new FloatDimensionSchema("float_dim_4"),
      new DoubleDimensionSchema("double_dim_4"),
      new StringDimensionSchema("string_dim_0"),
      new LongDimensionSchema("long_dim_0"),
      new FloatDimensionSchema("float_dim_0"),
      new DoubleDimensionSchema("double_dim_0"),
      new StringDimensionSchema("string_dim_1"),
      new LongDimensionSchema("long_dim_1"),
      new FloatDimensionSchema("float_dim_1"),
      new DoubleDimensionSchema("double_dim_1"),
      new StringDimensionSchema("string_dim_2"),
      new LongDimensionSchema("long_dim_2"),
      new FloatDimensionSchema("float_dim_2"),
      new DoubleDimensionSchema("double_dim_2"),
      new StringDimensionSchema("string_dim_3"),
      new LongDimensionSchema("long_dim_3"),
      new FloatDimensionSchema("float_dim_3"),
      new DoubleDimensionSchema("double_dim_3"),
      new StringDimensionSchema("string_dim_5"),
      new LongDimensionSchema("long_dim_5"),
      new FloatDimensionSchema("float_dim_5"),
      new DoubleDimensionSchema("double_dim_5"),
      mixedTypeColumn
  );
}
/**
 * Convenience overload of {@link #assertIngestionSchema} that expects the
 * default tuning config with the automatically computed target partition size.
 */
private static void assertIngestionSchema(
    List<IndexIngestionSpec> ingestionSchemas,
    List<DimensionsSpec> expectedDimensionsSpecs,
    List<AggregatorFactory> expectedMetricsSpec,
    List<Interval> expectedSegmentIntervals,
    Granularity expectedSegmentGranularity
)
{
  assertIngestionSchema(
      ingestionSchemas,
      expectedDimensionsSpecs,
      expectedMetricsSpec,
      expectedSegmentIntervals,
      // Identical to TUNING_CONFIG except for the computed target partition size.
      new IndexTuningConfig(
          null,
          41943040, // automatically computed targetPartitionSize
          500000,
          1000000L,
          null,
          null,
          null,
          null,
          new IndexSpec(
              new RoaringBitmapSerdeFactory(true),
              CompressionStrategy.LZ4,
              CompressionStrategy.LZF,
              LongEncodingStrategy.LONGS
          ),
          5000,
          true,
          false,
          true,
          false,
          null,
          100L,
          null,
          null,
          null,
          null
      ),
      expectedSegmentGranularity
  );
}
/**
 * Verifies each generated ingestion spec against the expectations:
 * data schema (parser, dimensions, combined aggregators, granularity spec),
 * ioConfig (ingest-segment firehose over the expected interval), and
 * tuningConfig. The i-th spec is checked against the i-th expected
 * dimensions spec and interval, so callers must sort the specs first.
 */
private static void assertIngestionSchema(
    List<IndexIngestionSpec> ingestionSchemas,
    List<DimensionsSpec> expectedDimensionsSpecs,
    List<AggregatorFactory> expectedMetricsSpec,
    List<Interval> expectedSegmentIntervals,
    IndexTuningConfig expectedTuningConfig,
    Granularity expectedSegmentGranularity
)
{
  Preconditions.checkArgument(
      ingestionSchemas.size() == expectedDimensionsSpecs.size(),
      "ingesionSchemas.size()[%s] should be same with expectedDimensionsSpecs.size()[%s]",
      ingestionSchemas.size(),
      expectedDimensionsSpecs.size()
  );
  for (int i = 0; i < ingestionSchemas.size(); i++) {
    final IndexIngestionSpec ingestionSchema = ingestionSchemas.get(i);
    final DimensionsSpec expectedDimensionsSpec = expectedDimensionsSpecs.get(i);
    // assert dataSchema
    final DataSchema dataSchema = ingestionSchema.getDataSchema();
    Assert.assertEquals(DATA_SOURCE, dataSchema.getDataSource());
    final InputRowParser parser = objectMapper.convertValue(dataSchema.getParser(), InputRowParser.class);
    Assert.assertTrue(parser instanceof TransformingInputRowParser);
    Assert.assertTrue(((TransformingInputRowParser) parser).getParser() instanceof NoopInputRowParser);
    Assert.assertTrue(parser.getParseSpec() instanceof TimeAndDimsParseSpec);
    // Dimension order is not asserted; only set membership.
    Assert.assertEquals(
        new HashSet<>(expectedDimensionsSpec.getDimensions()),
        new HashSet<>(parser.getParseSpec().getDimensionsSpec().getDimensions())
    );
    // metrics: compaction re-aggregates, so combining factories are expected.
    final List<AggregatorFactory> expectedAggregators = expectedMetricsSpec
        .stream()
        .map(AggregatorFactory::getCombiningFactory)
        .collect(Collectors.toList());
    Assert.assertEquals(expectedAggregators, Arrays.asList(dataSchema.getAggregators()));
    Assert.assertEquals(
        new UniformGranularitySpec(
            expectedSegmentGranularity,
            Granularities.NONE,
            false,
            Collections.singletonList(expectedSegmentIntervals.get(i))
        ),
        dataSchema.getGranularitySpec()
    );
    // assert ioConfig
    final IndexIOConfig ioConfig = ingestionSchema.getIOConfig();
    Assert.assertFalse(ioConfig.isAppendToExisting());
    final FirehoseFactory firehoseFactory = ioConfig.getFirehoseFactory();
    Assert.assertTrue(firehoseFactory instanceof IngestSegmentFirehoseFactory);
    final IngestSegmentFirehoseFactory ingestSegmentFirehoseFactory = (IngestSegmentFirehoseFactory) firehoseFactory;
    Assert.assertEquals(DATA_SOURCE, ingestSegmentFirehoseFactory.getDataSource());
    Assert.assertEquals(expectedSegmentIntervals.get(i), ingestSegmentFirehoseFactory.getInterval());
    Assert.assertNull(ingestSegmentFirehoseFactory.getDimensionsFilter());
    Assert.assertEquals(
        new HashSet<>(expectedDimensionsSpec.getDimensionNames()),
        new HashSet<>(ingestSegmentFirehoseFactory.getDimensions())
    );
    // assert tuningConfig
    Assert.assertEquals(expectedTuningConfig, ingestionSchema.getTuningConfig());
  }
}
/**
 * {@link TaskToolbox} stub for these tests: serves segment files from a
 * pre-registered in-memory map instead of fetching them from deep storage.
 */
private static class TestTaskToolbox extends TaskToolbox
{
  // Mapping from segment descriptor to the local file that backs it.
  private final Map<DataSegment, File> segmentFileMap;

  TestTaskToolbox(
      TaskActionClient taskActionClient,
      IndexIO indexIO,
      Map<DataSegment, File> segmentFileMap
  )
  {
    // Only the collaborators actually exercised by the tests are supplied;
    // every other toolbox dependency is deliberately left null.
    super(
        null,
        taskActionClient,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        null,
        indexIO,
        null,
        null,
        null,
        new IndexMergerV9(objectMapper, indexIO, OffHeapMemorySegmentWriteOutMediumFactory.instance()),
        null,
        null,
        null,
        null,
        new NoopTestTaskFileWriter()
    );
    this.segmentFileMap = segmentFileMap;
  }

  /**
   * Resolves each requested segment to its pre-registered local file.
   * Throws a NullPointerException (via Preconditions) if any requested
   * segment was not registered in the map.
   */
  @Override
  public Map<DataSegment, File> fetchSegments(List<DataSegment> segments)
  {
    final Map<DataSegment, File> submap = new HashMap<>(segments.size());
    for (DataSegment segment : segments) {
      final File file = Preconditions.checkNotNull(segmentFileMap.get(segment));
      submap.put(segment, file);
    }
    return submap;
  }
}
/**
 * {@link TaskActionClient} stub. The only supported action is
 * {@link SegmentListUsedAction}, which is always answered with the fixed
 * segment list supplied at construction time; any other action fails fast.
 */
private static class TestTaskActionClient implements TaskActionClient
{
  // Canned answer for every segment-list lookup.
  private final List<DataSegment> cannedSegments;

  TestTaskActionClient(List<DataSegment> segments)
  {
    this.cannedSegments = segments;
  }

  @Override
  public <RetType> RetType submit(TaskAction<RetType> taskAction)
  {
    if (taskAction instanceof SegmentListUsedAction) {
      return (RetType) cannedSegments;
    }
    throw new ISE("action[%s] is not supported", taskAction);
  }
}
/**
 * {@link IndexIO} stub that builds one in-memory {@link SimpleQueryableIndex}
 * per registered segment file up front, so {@link #loadIndex(File)} never
 * touches disk.
 */
private static class TestIndexIO extends IndexIO
{
  // File -> pre-built queryable index for that segment file.
  private final Map<File, QueryableIndex> queryableIndexMap;

  TestIndexIO(
      ObjectMapper mapper,
      Map<DataSegment, File> segmentFileMap
  )
  {
    super(mapper, () -> 0);
    queryableIndexMap = new HashMap<>(segmentFileMap.size());
    for (Entry<DataSegment, File> entry : segmentFileMap.entrySet()) {
      final DataSegment segment = entry.getKey();
      // Column order: __time first, then dimensions, then metrics.
      final List<String> columnNames = new ArrayList<>(segment.getDimensions().size() + segment.getMetrics().size());
      columnNames.add(ColumnHolder.TIME_COLUMN_NAME);
      columnNames.addAll(segment.getDimensions());
      columnNames.addAll(segment.getMetrics());
      final Map<String, ColumnHolder> columnMap = new HashMap<>(columnNames.size());
      final List<AggregatorFactory> aggregatorFactories = new ArrayList<>(segment.getMetrics().size());
      for (String columnName : columnNames) {
        if (MIXED_TYPE_COLUMN.equals(columnName)) {
          // The mixed-type column's schema depends on the segment's interval.
          columnMap.put(columnName, createColumn(MIXED_TYPE_COLUMN_MAP.get(segment.getInterval())));
        } else if (DIMENSIONS.containsKey(columnName)) {
          columnMap.put(columnName, createColumn(DIMENSIONS.get(columnName)));
        } else {
          // Otherwise treat the column as a metric if a matching aggregator exists;
          // unknown names (e.g. __time) simply get no column entry.
          final Optional<AggregatorFactory> maybeMetric = AGGREGATORS.stream()
              .filter(agg -> agg.getName().equals(columnName))
              .findAny();
          if (maybeMetric.isPresent()) {
            columnMap.put(columnName, createColumn(maybeMetric.get()));
            aggregatorFactories.add(maybeMetric.get());
          }
        }
      }
      // Metadata only carries the aggregators; the other slots are unused here.
      final Metadata metadata = new Metadata(
          null,
          aggregatorFactories.toArray(new AggregatorFactory[0]),
          null,
          null,
          null
      );
      queryableIndexMap.put(
          entry.getValue(),
          new SimpleQueryableIndex(
              segment.getInterval(),
              new ListIndexed<>(segment.getDimensions()),
              null,
              columnMap,
              null,
              metadata
          )
      );
    }
  }

  /** Returns the pre-built index for the file, or null if none was registered. */
  @Override
  public QueryableIndex loadIndex(File file)
  {
    return queryableIndexMap.get(file);
  }

  /**
   * Replaces the index registered for {@code file} with an equivalent copy
   * whose metadata is null; no-op when the file has no registered index.
   */
  void removeMetadata(File file)
  {
    final SimpleQueryableIndex index = (SimpleQueryableIndex) queryableIndexMap.get(file);
    if (index != null) {
      queryableIndexMap.put(
          file,
          new SimpleQueryableIndex(
              index.getDataInterval(),
              index.getColumnNames(),
              index.getAvailableDimensions(),
              index.getBitmapFactoryForDimensions(),
              index.getColumns(),
              index.getFileMapper(),
              null,
              index.getDimensionHandlers()
          )
      );
    }
  }

  /** Exposes the backing map so tests can inspect or mutate registered indexes. */
  Map<File, QueryableIndex> getQueryableIndexMap()
  {
    return queryableIndexMap;
  }
}
/**
 * Builds a fake column whose value type is derived from the dimension
 * schema via the incremental-index type mapping.
 */
private static ColumnHolder createColumn(DimensionSchema dimensionSchema)
{
  final ValueType valueType = IncrementalIndex.TYPE_MAP.get(dimensionSchema.getValueType());
  return new TestColumn(valueType);
}
/**
 * Builds a fake column whose value type is parsed from the aggregator's
 * declared type name.
 */
private static ColumnHolder createColumn(AggregatorFactory aggregatorFactory)
{
  final ValueType valueType = ValueType.fromString(aggregatorFactory.getTypeName());
  return new TestColumn(valueType);
}
/**
 * Minimal {@link ColumnHolder} fake: only capabilities and length are
 * meaningful; all index/selector accessors return null.
 */
private static class TestColumn implements ColumnHolder
{
  private final ColumnCapabilities columnCapabilities;

  TestColumn(ValueType type)
  {
    columnCapabilities = new ColumnCapabilitiesImpl()
        .setType(type)
        .setDictionaryEncoded(type == ValueType.STRING) // set a fake value to make string columns
        .setHasBitmapIndexes(type == ValueType.STRING)
        .setHasSpatialIndexes(false)
        .setHasMultipleValues(false);
  }

  @Override
  public ColumnCapabilities getCapabilities()
  {
    return columnCapabilities;
  }

  /** All segments in these tests have the same fixed row count. */
  @Override
  public int getLength()
  {
    return NUM_ROWS_PER_SEGMENT;
  }

  @Override
  public BaseColumn getColumn()
  {
    return null;
  }

  @Override
  public SettableColumnValueSelector makeNewSettableColumnValueSelector()
  {
    return null;
  }

  @Override
  public BitmapIndex getBitmapIndex()
  {
    return null;
  }

  @Override
  public SpatialIndex getSpatialIndex()
  {
    return null;
  }
}
}
| apache-2.0 |
paulstapleton/flowable-engine | modules/flowable-engine/src/main/java/org/flowable/engine/impl/test/TestHelper.java | 13375 | /* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl.test;
import java.io.InputStream;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.flowable.common.engine.api.FlowableObjectNotFoundException;
import org.flowable.common.engine.impl.db.SchemaManager;
import org.flowable.common.engine.impl.interceptor.Command;
import org.flowable.common.engine.impl.interceptor.CommandContext;
import org.flowable.common.engine.impl.test.EnsureCleanDbUtils;
import org.flowable.common.engine.impl.util.ReflectUtil;
import org.flowable.engine.ProcessEngine;
import org.flowable.engine.ProcessEngineConfiguration;
import org.flowable.engine.impl.bpmn.deployer.ResourceNameUtil;
import org.flowable.engine.impl.bpmn.parser.factory.ActivityBehaviorFactory;
import org.flowable.engine.impl.util.CommandContextUtil;
import org.flowable.engine.repository.DeploymentBuilder;
import org.flowable.engine.runtime.ProcessInstance;
import org.flowable.engine.test.Deployment;
import org.flowable.engine.test.TestActivityBehaviorFactory;
import org.flowable.engine.test.mock.FlowableMockSupport;
import org.flowable.engine.test.mock.MockServiceTask;
import org.flowable.engine.test.mock.MockServiceTasks;
import org.flowable.engine.test.mock.NoOpServiceTasks;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Static helpers shared by the Flowable process-engine test infrastructure:
 * process-ended assertions, handling of the {@code @Deployment} and service-task
 * mock annotations, per-configuration-resource caching of process engines, and
 * clean-database verification.
 *
 * @author Tom Baeyens
 * @author Joram Barrez
 */
public abstract class TestHelper {

    private static final Logger LOGGER = LoggerFactory.getLogger(TestHelper.class);

    public static final String EMPTY_LINE = "\n";

    // Tables that may legitimately contain rows after a test run.
    public static final List<String> TABLENAMES_EXCLUDED_FROM_DB_CLEAN_CHECK = Collections.singletonList("ACT_GE_PROPERTY");

    // Cache of engines keyed by configuration resource name; see getProcessEngine().
    static Map<String, ProcessEngine> processEngines = new HashMap<>();

    // Assertion methods ///////////////////////////////////////////////////

    /**
     * Asserts that the process instance with the given id no longer exists in
     * the runtime database.
     *
     * @throws AssertionError if the instance is still present
     */
    public static void assertProcessEnded(ProcessEngine processEngine, String processInstanceId) {
        ProcessInstance processInstance = processEngine.getRuntimeService().createProcessInstanceQuery().processInstanceId(processInstanceId).singleResult();
        if (processInstance != null) {
            throw new AssertionError("expected finished process instance '" + processInstanceId + "' but it was still in the db");
        }
    }

    // Test annotation support /////////////////////////////////////////////

    /**
     * Resolves the test method by name and deploys the resources declared by
     * its {@code @Deployment} annotation, if any.
     *
     * @return the created deployment id, or null when the method cannot be
     *         resolved or has no {@code @Deployment} annotation
     */
    public static String annotationDeploymentSetUp(ProcessEngine processEngine, Class<?> testClass, String methodName) {
        Method method = null;
        try {
            method = testClass.getMethod(methodName, (Class<?>[]) null);
        } catch (Exception e) {
            LOGGER.warn("Could not get method by reflection. This could happen if you are using @Parameters in combination with annotations.", e);
            return null;
        }
        return annotationDeploymentSetUp(processEngine, testClass, method);
    }

    /** Convenience overload that reads the {@code @Deployment} annotation from the method. */
    public static String annotationDeploymentSetUp(ProcessEngine processEngine, Class<?> testClass, Method method) {
        Deployment deploymentAnnotation = method.getAnnotation(Deployment.class);
        return annotationDeploymentSetUp(processEngine, testClass, method, deploymentAnnotation);
    }

    /** Convenience overload that derives the test class from the method's declaring class. */
    public static String annotationDeploymentSetUp(ProcessEngine processEngine, Method method, Deployment deploymentAnnotation) {
        return annotationDeploymentSetUp(processEngine, method.getDeclaringClass(), method, deploymentAnnotation);
    }

    /**
     * Creates a deployment for the given {@code @Deployment} annotation. When the
     * annotation lists no resources, a conventional resource name is derived from
     * the test class and method name (see {@link #getBpmnProcessDefinitionResource}).
     *
     * @return the created deployment id, or null when {@code deploymentAnnotation} is null
     */
    public static String annotationDeploymentSetUp(ProcessEngine processEngine, Class<?> testClass, Method method, Deployment deploymentAnnotation) {
        String deploymentId = null;
        String methodName = method.getName();
        if (deploymentAnnotation != null) {
            LOGGER.debug("annotation @Deployment creates deployment for {}.{}", testClass.getSimpleName(), methodName);
            String[] resources = deploymentAnnotation.resources();
            if (resources.length == 0) {
                // No explicit resources: fall back to the naming convention.
                String name = method.getName();
                String resource = getBpmnProcessDefinitionResource(testClass, name);
                resources = new String[] { resource };
            }
            DeploymentBuilder deploymentBuilder = processEngine.getRepositoryService().createDeployment().name(testClass.getSimpleName() + "." + methodName);
            for (String resource : resources) {
                deploymentBuilder.addClasspathResource(resource);
            }
            if (deploymentAnnotation.tenantId() != null
                && deploymentAnnotation.tenantId().length() > 0) {
                deploymentBuilder.tenantId(deploymentAnnotation.tenantId());
            }
            deploymentId = deploymentBuilder.deploy().getId();
        }
        return deploymentId;
    }

    /**
     * Cascade-deletes the deployment created by {@code @Deployment} for the given
     * test. A deployment already removed by the test itself is silently ignored.
     */
    public static void annotationDeploymentTearDown(ProcessEngine processEngine, String deploymentId, Class<?> testClass, String methodName) {
        LOGGER.debug("annotation @Deployment deletes deployment for {}.{}", testClass.getSimpleName(), methodName);
        if (deploymentId != null) {
            try {
                processEngine.getRepositoryService().deleteDeployment(deploymentId, true);
            } catch (FlowableObjectNotFoundException e) {
                // Deployment was already deleted by the test case. Ignore.
            }
        }
    }

    /**
     * Applies the {@code @MockServiceTask}, {@code @MockServiceTasks} and
     * {@code @NoOpServiceTasks} annotations found on the named test method to
     * the given mock support.
     */
    public static void annotationMockSupportSetup(Class<?> testClass, String methodName, FlowableMockSupport mockSupport) {
        // Get method
        Method method = null;
        try {
            method = testClass.getMethod(methodName, (Class<?>[]) null);
        } catch (Exception e) {
            LOGGER.warn("Could not get method by reflection. This could happen if you are using @Parameters in combination with annotations.", e);
            return;
        }
        handleMockServiceTaskAnnotation(mockSupport, method);
        handleMockServiceTasksAnnotation(mockSupport, method);
        handleNoOpServiceTasksAnnotation(mockSupport, method);
    }

    /** Processes a single {@code @MockServiceTask} annotation on the method, if present. */
    protected static void handleMockServiceTaskAnnotation(FlowableMockSupport mockSupport, Method method) {
        MockServiceTask mockedServiceTask = method.getAnnotation(MockServiceTask.class);
        if (mockedServiceTask != null) {
            handleMockServiceTaskAnnotation(mockSupport, mockedServiceTask);
        }
    }

    /**
     * Registers the mocks described by one {@code @MockServiceTask} annotation:
     * by original class name, by mocked class (when not the Void marker), and
     * optionally by service-task id.
     */
    public static void handleMockServiceTaskAnnotation(FlowableMockSupport mockSupport, MockServiceTask mockedServiceTask) {
        String originalClassName = mockedServiceTask.originalClassName();
        mockSupport.mockServiceTaskWithClassDelegate(originalClassName, mockedServiceTask.mockedClassName());

        Class<?> mockedClass = mockedServiceTask.mockedClass();
        // Void.class is the annotation's "not set" marker for mockedClass.
        if (!Void.class.equals(mockedClass)) {
            mockSupport.mockServiceTaskWithClassDelegate(originalClassName, mockedClass);
        }

        String id = mockedServiceTask.id();
        if (!id.isEmpty()) {
            mockSupport.mockServiceTaskByIdWithClassDelegate(id, mockedServiceTask.mockedClassName());
            if (!Void.class.equals(mockedClass)) {
                mockSupport.mockServiceTaskByIdWithClassDelegate(id, mockedClass);
            }
        }
    }

    /** Processes the repeatable {@code @MockServiceTasks} container annotation, if present. */
    protected static void handleMockServiceTasksAnnotation(FlowableMockSupport mockSupport, Method method) {
        MockServiceTasks mockedServiceTasks = method.getAnnotation(MockServiceTasks.class);
        if (mockedServiceTasks != null) {
            for (MockServiceTask mockedServiceTask : mockedServiceTasks.value()) {
                handleMockServiceTaskAnnotation(mockSupport, mockedServiceTask);
            }
        }
    }

    /** Processes the {@code @NoOpServiceTasks} annotation on the method, if present. */
    protected static void handleNoOpServiceTasksAnnotation(FlowableMockSupport mockSupport, Method method) {
        NoOpServiceTasks noOpServiceTasks = method.getAnnotation(NoOpServiceTasks.class);
        if (noOpServiceTasks != null) {
            handleNoOpServiceTasksAnnotation(mockSupport, noOpServiceTasks);
        }
    }

    /**
     * Turns service tasks into no-ops. When the annotation specifies no ids,
     * classes or class names, ALL service tasks become no-ops; otherwise only
     * the listed ones do.
     */
    public static void handleNoOpServiceTasksAnnotation(FlowableMockSupport mockSupport, NoOpServiceTasks noOpServiceTasks) {
        if (noOpServiceTasks != null) {
            String[] ids = noOpServiceTasks.ids();
            Class<?>[] classes = noOpServiceTasks.classes();
            String[] classNames = noOpServiceTasks.classNames();

            if ((ids == null || ids.length == 0) && (classes == null || classes.length == 0) && (classNames == null || classNames.length == 0)) {
                mockSupport.setAllServiceTasksNoOp();
            } else {
                if (ids != null && ids.length > 0) {
                    for (String id : ids) {
                        mockSupport.addNoOpServiceTaskById(id);
                    }
                }
                if (classes != null && classes.length > 0) {
                    for (Class<?> clazz : classes) {
                        mockSupport.addNoOpServiceTaskByClassName(clazz.getName());
                    }
                }
                if (classNames != null && classNames.length > 0) {
                    for (String className : classNames) {
                        mockSupport.addNoOpServiceTaskByClassName(className);
                    }
                }
            }
        }
    }

    /** Resets all registered mocks after a test. */
    public static void annotationMockSupportTeardown(FlowableMockSupport mockSupport) {
        mockSupport.reset();
    }

    /**
     * get a resource location by convention based on a class (type) and a relative resource name. The return value will be the full classpath location of the type, plus a suffix built from the name
     * parameter: <code>BpmnDeployer.BPMN_RESOURCE_SUFFIXES</code>. The first resource matching a suffix will be returned.
     */
    public static String getBpmnProcessDefinitionResource(Class<?> type, String name) {
        for (String suffix : ResourceNameUtil.BPMN_RESOURCE_SUFFIXES) {
            String resource = type.getName().replace('.', '/') + "." + name + "." + suffix;
            InputStream inputStream = ReflectUtil.getResourceAsStream(resource);
            if (inputStream == null) {
                continue;
            } else {
                return resource;
            }
        }
        // Nothing found: return the name built from the first (default) suffix.
        return type.getName().replace('.', '/') + "." + name + "." + ResourceNameUtil.BPMN_RESOURCE_SUFFIXES[0];
    }

    // Engine startup and shutdown helpers
    // ///////////////////////////////////////////////////

    /**
     * Returns the (cached) process engine for the given configuration resource,
     * building and caching it on first use.
     */
    public static ProcessEngine getProcessEngine(String configurationResource) {
        ProcessEngine processEngine = processEngines.get(configurationResource);
        if (processEngine == null) {
            LOGGER.debug("==== BUILDING PROCESS ENGINE ========================================================================");
            processEngine = ProcessEngineConfiguration.createProcessEngineConfigurationFromResource(configurationResource).buildProcessEngine();
            LOGGER.debug("==== PROCESS ENGINE CREATED =========================================================================");
            processEngines.put(configurationResource, processEngine);
        }
        return processEngine;
    }

    /** Closes every cached engine and empties the cache. */
    public static void closeProcessEngines() {
        for (ProcessEngine processEngine : processEngines.values()) {
            processEngine.close();
        }
        processEngines.clear();
    }

    /**
     * Each test is assumed to clean up all DB content it entered. After a test method executed, this method scans all tables to see if the DB is completely clean. It throws AssertionFailed in case
     * the DB is not clean. If the DB is not clean, it is cleaned by performing a create a drop.
     */
    public static void assertAndEnsureCleanDb(ProcessEngine processEngine) {
        EnsureCleanDbUtils.assertAndEnsureCleanDb(
                "",
                LOGGER,
                processEngine.getProcessEngineConfiguration(),
                TABLENAMES_EXCLUDED_FROM_DB_CLEAN_CHECK,
                true,
                new Command<Void>() {

                    @Override
                    public Void execute(CommandContext commandContext) {
                        // Reset the schema by dropping and recreating it.
                        SchemaManager schemaManager = CommandContextUtil.getProcessEngineConfiguration(commandContext).getSchemaManager();
                        schemaManager.schemaDrop();
                        schemaManager.schemaCreate();
                        return null;
                    }
                }
        );
    }

    // Mockup support ////////////////////////////////////////////////////////

    /** Wraps an existing behavior factory so tests can substitute activity behaviors. */
    public static TestActivityBehaviorFactory initializeTestActivityBehaviorFactory(ActivityBehaviorFactory existingActivityBehaviorFactory) {
        return new TestActivityBehaviorFactory(existingActivityBehaviorFactory);
    }

}
| apache-2.0 |
manzke/devsurf-crashlytics | core/src/main/java/de/devsurf/crashlytics/service/api/App.java | 1663 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.devsurf.crashlytics.service.api;
import org.codehaus.jackson.annotate.JsonIgnoreProperties;
import org.codehaus.jackson.annotate.JsonProperty;
@JsonIgnoreProperties(ignoreUnknown=true)
public class App {
@JsonProperty
private String name;
@JsonProperty("bundle_identifier")
private String bundleIdentifier;
protected App() {
}
public final String getName() {
return name;
}
public final String getBundleIdentifier() {
return bundleIdentifier;
}
protected final void setName(String name) {
this.name = name;
}
protected final void setBundleIdentifier(String bundleIdentifier) {
this.bundleIdentifier = bundleIdentifier;
}
@Override
public String toString() {
return "App ["
+ (name != null ? "name=" + name + ", " : "")
+ (bundleIdentifier != null ? "bundleIdentifier="
+ bundleIdentifier : "") + "]";
}
} | apache-2.0 |
dfki-asr-fitman/c3dwv | compass-webapp/src/main/webapp/resources/js/editor/settings.js | 665 | /*
* This file is part of COMPASS. It is subject to the license terms in
* the LICENSE file found in the top-level directory of this distribution.
* (Also available at http://www.apache.org/licenses/LICENSE-2.0.txt)
* You may not use this file except in compliance with the License.
*/
// Global editor settings for COMPASS, attached to the XML3D.tools namespace.
XML3D.tools.namespace("COMPASS");
XML3D.tools.namespace("COMPASS.Settings");

// When false, picking is not performed on mouse-move events.
COMPASS.Settings.mouseMovePickingEnabled = false;

//Highlighting
// Highlight color as [r, g, b] in [0, 1] (pure blue) with low transparency.
COMPASS.Settings.highlightColor = [0.0, 0.0, 1.0];
COMPASS.Settings.highlightTransparency = 0.1;

//SSAO
// Screen-space ambient occlusion: off by default; intensity is presumably
// only consulted when SSAO is enabled — confirm against the renderer code.
COMPASS.Settings.SSAOEnabled = false;
COMPASS.Settings.SSAOIntensity = 5;

//Culling
// Face-culling mode; "none" disables culling.
COMPASS.Settings.FaceCulling = "none";
| apache-2.0 |
GwtMaterialDesign/gwt-material-demo | src/main/java/gwt/material/design/demo/client/application/components/search/SearchPresenter.java | 2005 | package gwt.material.design.demo.client.application.components.search;
/*
* #%L
* GwtMaterial
* %%
* Copyright (C) 2015 - 2016 GwtMaterialDesign
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.google.inject.Inject;
import com.google.web.bindery.event.shared.EventBus;
import com.gwtplatform.mvp.client.Presenter;
import com.gwtplatform.mvp.client.View;
import com.gwtplatform.mvp.client.annotations.NameToken;
import com.gwtplatform.mvp.client.annotations.ProxyCodeSplit;
import com.gwtplatform.mvp.client.proxy.ProxyPlace;
import gwt.material.design.demo.client.application.ApplicationPresenter;
import gwt.material.design.demo.client.event.SetPageTitleEvent;
import gwt.material.design.demo.client.place.NameTokens;
/**
 * GWTP presenter for the "Search" demo page. It reveals itself in the
 * application's main content slot and updates the demo page header on reveal.
 */
public class SearchPresenter extends Presenter<SearchPresenter.MyView, SearchPresenter.MyProxy> {

    // View contract: no methods beyond the base GWTP View interface.
    interface MyView extends View {
    }

    // Place proxy bound to the "search" name token; code-split into its own fragment.
    @NameToken(NameTokens.search)
    @ProxyCodeSplit
    interface MyProxy extends ProxyPlace<SearchPresenter> {
    }

    @Inject
    SearchPresenter(
            EventBus eventBus,
            MyView view,
            MyProxy proxy) {
        super(eventBus, view, proxy, ApplicationPresenter.SLOT_MAIN);
    }

    @Override
    protected void onReveal() {
        super.onReveal();
        // Publishes the page title, description, source path and reference URL to the shell.
        SetPageTitleEvent.fire("Search", "Attached to NavBar component or can be used on external panels.", "components/search/SearchView", "https://material.io/guidelines/patterns/search.html#", this);
    }
}
| apache-2.0 |
orichyyy/XFS.Net | TestForm/Form1.Designer.cs | 5639 | namespace TestForm
{
partial class Form1
{
/// <summary>
/// 必需的设计器变量。
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// 清理所有正在使用的资源。
/// </summary>
/// <param name="disposing">如果应释放托管资源,为 true;否则为 false。</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows 窗体设计器生成的代码
/// <summary>
/// 设计器支持所需的方法 - 不要修改
/// 使用代码编辑器修改此方法的内容。
/// </summary>
private void InitializeComponent()
{
this.button1 = new System.Windows.Forms.Button();
this.button2 = new System.Windows.Forms.Button();
this.button3 = new System.Windows.Forms.Button();
this.button4 = new System.Windows.Forms.Button();
this.button5 = new System.Windows.Forms.Button();
this.button6 = new System.Windows.Forms.Button();
this.button7 = new System.Windows.Forms.Button();
this.SuspendLayout();
//
// button1
//
this.button1.Location = new System.Drawing.Point(417, 25);
this.button1.Name = "button1";
this.button1.Size = new System.Drawing.Size(75, 23);
this.button1.TabIndex = 0;
this.button1.Text = "button1";
this.button1.UseVisualStyleBackColor = true;
this.button1.Click += new System.EventHandler(this.button1_Click);
//
// button2
//
this.button2.Location = new System.Drawing.Point(417, 71);
this.button2.Name = "button2";
this.button2.Size = new System.Drawing.Size(75, 23);
this.button2.TabIndex = 1;
this.button2.Text = "button2";
this.button2.UseVisualStyleBackColor = true;
this.button2.Click += new System.EventHandler(this.button2_Click);
//
// button3
//
this.button3.Location = new System.Drawing.Point(417, 122);
this.button3.Name = "button3";
this.button3.Size = new System.Drawing.Size(75, 23);
this.button3.TabIndex = 2;
this.button3.Text = "button3";
this.button3.UseVisualStyleBackColor = true;
this.button3.Click += new System.EventHandler(this.button3_Click);
//
// button4
//
this.button4.Location = new System.Drawing.Point(417, 168);
this.button4.Name = "button4";
this.button4.Size = new System.Drawing.Size(75, 23);
this.button4.TabIndex = 3;
this.button4.Text = "button4";
this.button4.UseVisualStyleBackColor = true;
this.button4.Click += new System.EventHandler(this.button4_Click);
//
// button5
//
this.button5.Location = new System.Drawing.Point(336, 25);
this.button5.Name = "button5";
this.button5.Size = new System.Drawing.Size(75, 23);
this.button5.TabIndex = 4;
this.button5.Text = "button5";
this.button5.UseVisualStyleBackColor = true;
this.button5.Click += new System.EventHandler(this.button5_Click);
//
// button6
//
this.button6.Location = new System.Drawing.Point(12, 25);
this.button6.Name = "button6";
this.button6.Size = new System.Drawing.Size(75, 23);
this.button6.TabIndex = 5;
this.button6.Text = "button6";
this.button6.UseVisualStyleBackColor = true;
this.button6.Click += new System.EventHandler(this.button6_Click);
//
// button7
//
this.button7.Location = new System.Drawing.Point(12, 71);
this.button7.Name = "button7";
this.button7.Size = new System.Drawing.Size(75, 23);
this.button7.TabIndex = 6;
this.button7.Text = "button7";
this.button7.UseVisualStyleBackColor = true;
this.button7.Click += new System.EventHandler(this.button7_Click);
//
// Form1
//
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 12F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(504, 415);
this.Controls.Add(this.button7);
this.Controls.Add(this.button6);
this.Controls.Add(this.button5);
this.Controls.Add(this.button4);
this.Controls.Add(this.button3);
this.Controls.Add(this.button2);
this.Controls.Add(this.button1);
this.Name = "Form1";
this.Text = "Form1";
this.ResumeLayout(false);
}
#endregion
private System.Windows.Forms.Button button1;
private System.Windows.Forms.Button button2;
private System.Windows.Forms.Button button3;
private System.Windows.Forms.Button button4;
private System.Windows.Forms.Button button5;
private System.Windows.Forms.Button button6;
private System.Windows.Forms.Button button7;
}
}
| apache-2.0 |
introproventures/graphql-jpa-query | graphql-jpa-query-autoconfigure/src/main/java/com/introproventures/graphql/jpa/query/autoconfigure/GraphQLShemaRegistrationImpl.java | 562 | package com.introproventures.graphql.jpa.query.autoconfigure;
import java.util.LinkedHashSet;
import java.util.Set;
import graphql.schema.GraphQLSchema;
/**
 * Default {@link GraphQLShemaRegistration} implementation backed by an
 * insertion-ordered set: schemas are returned in registration order and
 * duplicate registrations of the same schema instance are ignored.
 */
public class GraphQLShemaRegistrationImpl implements GraphQLShemaRegistration {

    // private final: the backing set must only be mutated through register().
    private final Set<GraphQLSchema> managedGraphQLSchemas = new LinkedHashSet<GraphQLSchema>();

    public void register(GraphQLSchema graphQLSchema) {
        managedGraphQLSchemas.add(graphQLSchema);
    }

    /**
     * Returns a snapshot array of all registered schemas, in registration order.
     */
    public GraphQLSchema[] getManagedGraphQLSchemas() {
        // Zero-length seed array: toArray allocates the correctly-sized result.
        return managedGraphQLSchemas.toArray(new GraphQLSchema[0]);
    }

}
| apache-2.0 |
oehme/analysing-gradle-performance | my-app/src/main/java/org/gradle/test/performance/mediummonolithicjavaproject/p488/Production9768.java | 2048 | package org.gradle.test.performance.mediummonolithicjavaproject.p488;
import org.gradle.test.performance.mediummonolithicjavaproject.p487.Production9759;
/**
 * Synthetic class generated for Gradle build-performance testing; the bean
 * properties below are intentionally mechanical and carry no domain meaning.
 */
public class Production9768 {

    private Production9759 property0;

    public Production9759 getProperty0() {
        return property0;
    }

    public void setProperty0(Production9759 value) {
        property0 = value;
    }

    private Production9763 property1;

    public Production9763 getProperty1() {
        return property1;
    }

    public void setProperty1(Production9763 value) {
        property1 = value;
    }

    private Production9767 property2;

    public Production9767 getProperty2() {
        return property2;
    }

    public void setProperty2(Production9767 value) {
        property2 = value;
    }

    private String property3;

    public String getProperty3() {
        return property3;
    }

    public void setProperty3(String value) {
        property3 = value;
    }

    private String property4;

    public String getProperty4() {
        return property4;
    }

    public void setProperty4(String value) {
        property4 = value;
    }

    private String property5;

    public String getProperty5() {
        return property5;
    }

    public void setProperty5(String value) {
        property5 = value;
    }

    private String property6;

    public String getProperty6() {
        return property6;
    }

    public void setProperty6(String value) {
        property6 = value;
    }

    private String property7;

    public String getProperty7() {
        return property7;
    }

    public void setProperty7(String value) {
        property7 = value;
    }

    private String property8;

    public String getProperty8() {
        return property8;
    }

    public void setProperty8(String value) {
        property8 = value;
    }

    private String property9;

    public String getProperty9() {
        return property9;
    }

    public void setProperty9(String value) {
        property9 = value;
    }
}
carnegiespeech/translations | ar/grading.php | 1650 | <?php
// This file is part of Moodle - http://moodle.org/
//
// Moodle is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Moodle is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with Moodle. If not, see <http://www.gnu.org/licenses/>.
/**
* Strings for component 'grading', language 'ar', branch 'MOODLE_22_STABLE'
*
* @package grading
* @copyright 1999 onwards Martin Dougiamas {@link http://moodle.com}
* @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later
*/
defined('MOODLE_INTERNAL') || die();

// Arabic translations for the Moodle 'grading' component (advanced grading UI).
// These values are user-facing runtime strings and must not be altered except
// by a translator.
$string['gradingmanagement'] = 'تقييم متقدم';
$string['gradingmethod'] = 'طريقة التقييم';
$string['gradingmethod_help'] = 'اختر طريقة التقييم المتقدمة التي ستستخدم لاحتساب العلامات في هذا السياق.
لإلغاء التقييم المتقدم والعودة لطريقة التقييم الافتراضية، اختر "تقييم بسيط ومباشر".';
$string['gradingmethodnone'] = 'تقييم بسيط ومباشر';
$string['gradingmethods'] = 'طرق التقييم';
$string['searchtemplate'] = 'البحث في نماذج التقييم';
$string['statusdraft'] = 'مسودة';
| apache-2.0 |
Arollalz/TWU_Biblioteca-ZhenLiu | src/com/twu/biblioteca/library/MainMenu.java | 1251 | package com.twu.biblioteca.library;
import java.util.LinkedList;
/**
 * Holds the library main-menu state (the current book list) and the fixed
 * user-facing messages returned for menu actions.
 *
 * Created by zhenliu on 9/14/15.
 */
public class MainMenu {

    // Books currently listed by the library.
    private static LinkedList<Book> content;

    private static final String validOptionSelectedMessage = "Select a valid option!";
    private static final String checkOutSuccessfullyMessage = "Thank you! Enjoy the book.";
    private static final String checkOutFailedMessage = "That book is not available.";
    // NOTE(review): the two return-book messages are empty strings — presumably
    // placeholders; confirm the intended text before shipping.
    private static final String returnBookMessage = "";
    private static final String returnFailedMessage = "";

    public static void setContent(LinkedList<Book> content) {
        MainMenu.content = content;
    }

    public static LinkedList<Book> getContent() {
        return content;
    }

    public static String getValidOptionSelectedMessage() {
        return validOptionSelectedMessage;
    }

    public static String getCheckOutSuccessfullyMessage() {
        return checkOutSuccessfullyMessage;
    }

    public static String getCheckOutFailedMessage() {
        return checkOutFailedMessage;
    }

    public static String getReturnBookMessage() {
        return returnBookMessage;
    }

    public static String getReturnFailedMessage() {
        return returnFailedMessage;
    }
}
| apache-2.0 |
cast-framework/receiver-test-app-android | src/com/castframework/android/framework/CastApplication.java | 133 | package com.castframework.android.framework;
import android.app.Application;
/**
 * Application subclass for the Cast receiver test app. No app-wide
 * initialization is performed yet; the class exists as the manifest-declared
 * Application entry point.
 */
public class CastApplication extends Application {
}
| apache-2.0 |
jayaskren/FastOpencsv | src/test/java/com/fastopencsv/TestUtilitiesTest.java | 652 | package com.fastopencsv;
import static junit.framework.Assert.assertEquals;
import org.junit.Test;
/**
* Created by IntelliJ IDEA.
* User: scott
* Date: 12/20/10
* Time: 2:57 PM
* To change this template use File | Settings | File Templates.
*/
/**
 * Unit test for {@code TestUtilities.displayStringArray}: verifies the
 * rendered header line, element count and per-element lines.
 */
public class TestUtilitiesTest
{
    @Test
    public void displayStringArray()
    {
        // Three-element fixture rendered under a "Header" title.
        final String[] values = {"a", "b", "c"};

        assertEquals(
            "Header\nNumber of elements:\t3\nelement 0:\ta\nelement 1:\tb\nelement 2:\tc\n",
            TestUtilities.displayStringArray("Header", values));
    }
}
| apache-2.0 |
GibraltarSoftware/Gibraltar.Agent.EntityFramework | src/Agent.EntityFramework.Test/Entities/Invoice.cs | 1855 | //------------------------------------------------------------------------------
// <auto-generated>
// This code was generated from a template.
//
// Manual changes to this file may cause unexpected behavior in your application.
// Manual changes to this file will be overwritten if the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
namespace Agent.EntityFramework.Test.Entities
{
    using System;
    using System.Collections.Generic;

    // Flattened invoice row: one record per order line, combining shipping,
    // customer, order and product data. Generated from the database model;
    // do not hand-edit the generated members.
    public partial class Invoice
    {
        public string ShipName { get; set; }
        public string ShipAddress { get; set; }
        public string ShipCity { get; set; }
        public string ShipRegion { get; set; }
        public string ShipPostalCode { get; set; }
        public string ShipCountry { get; set; }
        public string CustomerID { get; set; }
        public string CustomerName { get; set; }
        public string Address { get; set; }
        public string City { get; set; }
        public string Region { get; set; }
        public string PostalCode { get; set; }
        public string Country { get; set; }
        public string Salesperson { get; set; }
        public int OrderID { get; set; }
        public Nullable<System.DateTime> OrderDate { get; set; }
        public Nullable<System.DateTime> RequiredDate { get; set; }
        public Nullable<System.DateTime> ShippedDate { get; set; }
        public string ShipperName { get; set; }
        public int ProductID { get; set; }
        public string ProductName { get; set; }
        public decimal UnitPrice { get; set; }
        public short Quantity { get; set; }
        public float Discount { get; set; }
        public Nullable<decimal> ExtendedPrice { get; set; }
        public Nullable<decimal> Freight { get; set; }
    }
}
| apache-2.0 |
pravega/pravega | test/testcommon/src/main/java/io/pravega/test/common/SerializedClassRunner.java | 1459 | /**
* Copyright Pravega Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pravega.test.common;
import org.junit.runner.notification.RunNotifier;
import org.junit.runners.BlockJUnit4ClassRunner;
import org.junit.runners.model.InitializationError;
/**
* This class provides an implementation for the RunWith annotation.
* It works like the normal BlockJUnit4ClassRunner except that it prohibits
* parallelism. So that no two classes with this annotation may run together.
* This is most useful for tests that rely on metrics.
*/
public class SerializedClassRunner extends BlockJUnit4ClassRunner {
    // Class-level monitor shared by every test class using this runner, so
    // at most one such class executes at any time across the JVM.
    private static final Object LOCK = new Object();
    public SerializedClassRunner(Class<?> klass) throws InitializationError {
        super(klass);
    }
    @Override
    public void run(RunNotifier notifier) {
        // Serialize the whole class run; tests within the class still run
        // with the normal BlockJUnit4ClassRunner semantics.
        synchronized (LOCK) {
            super.run(notifier);
        }
    }
}
| apache-2.0 |
CarmenPopoviciu/angular | modules/angular2/src/test_lib/benchmark_util.js | 879 | import {DOM, document, location} from 'angular2/src/facade/dom';
import {NumberWrapper, BaseException, isBlank} from 'angular2/src/facade/lang';
export function getIntParameter(name:string) {
return NumberWrapper.parseInt(getStringParameter(name), 10);
}
export function getStringParameter(name:string) {
var els = DOM.querySelectorAll(document, `input[name="${name}"]`)
var value;
var el;
for (var i=0; i<els.length; i++) {
el = els[i];
if ((el.type !== 'radio' && el.type !== 'checkbox') || el.checked) {
value = el.value;
break;
}
}
if (isBlank(value)) {
throw new BaseException(`Could not find and input field with name ${name}`);
}
return value;
}
// Invokes `callback` (ignoring the event object) whenever the single element
// matched by `selector` is clicked.
export function bindAction(selector:string, callback:Function) {
  var el = DOM.querySelector(document, selector);
  DOM.on(el, 'click', function(_) {
    callback();
  });
}
jingle1267/CommonProjectLibrary | SampleProject/src/com/ihongqiqu/common/library/sample/eventbus/EventBusDemoActivity.java | 1802 | package com.ihongqiqu.common.library.sample.eventbus;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.View;
import android.widget.Toast;
import com.ihongqiqu.common.library.sample.R;
import de.greenrobot.event.EventBus;
/**
* EventBus demo
* <p/>
* Created by zhenguo on 5/19/15.
*/
/**
 * Demonstrates EventBus publish/subscribe: layout buttons post events, and
 * the registered onEvent handlers display a toast for each event type.
 * <p/>
 * Created by zhenguo on 5/19/15.
 */
public class EventBusDemoActivity extends Activity {

    /** Opens this demo screen from the given activity. */
    public static void launch(Activity activity) {
        activity.startActivity(new Intent(activity, EventBusDemoActivity.class));
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.layout_eventbus);
    }

    /** Click handler referenced from the layout; posts the matching event. */
    public void onClick(View view) {
        int id = view.getId();
        if (id == R.id.btn_message_event) {
            EventBus.getDefault().post(new MessageEvent("Hello everyone!"));
        } else if (id == R.id.btn_other_event) {
            EventBus.getDefault().post(new SomeOtherEvent());
        }
    }

    @Override
    public void onStart() {
        super.onStart();
        // Subscribe with priority 1 while the activity is visible.
        EventBus.getDefault().register(this, 1);
    }

    @Override
    public void onStop() {
        // Unsubscribe before stopping so no events reach a dead activity.
        EventBus.getDefault().unregister(this);
        super.onStop();
    }

    // Invoked by EventBus when a MessageEvent is posted.
    public void onEvent(MessageEvent event) {
        Toast.makeText(this, event.message, Toast.LENGTH_SHORT).show();
    }

    // Invoked by EventBus when a SomeOtherEvent is posted.
    public void onEvent(SomeOtherEvent event) {
        // doSomethingWith(event);
        Toast.makeText(this, "SomeOtherEvent", Toast.LENGTH_SHORT).show();
    }
}
| apache-2.0 |
kwakutwumasi/Quakearts-JSF-Webtools | qa-boot/src/main/java/com/quakearts/webapp/facelets/bootstrap/components/BootSelectOneListbox.java | 1377 | /*******************************************************************************
* Copyright (C) 2016 Kwaku Twumasi-Afriyie <kwaku.twumasi@quakearts.com>.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Kwaku Twumasi-Afriyie <kwaku.twumasi@quakearts.com> - initial API and implementation
******************************************************************************/
package com.quakearts.webapp.facelets.bootstrap.components;
import java.util.ArrayList;
import javax.faces.component.html.HtmlSelectOneMenu;
public class BootSelectOneListbox extends HtmlSelectOneMenu {
	// Identifiers the JSF framework uses to locate this component and its
	// renderer; registered elsewhere (faces-config / taglib).
	public static final String COMPONENT_FAMILY="com.quakearts.bootstrap.selectOneListbox";
	public static final String RENDERER_TYPE="com.quakearts.bootstrap.select.renderer";
	public BootSelectOneListbox() {
		// Pre-seed the attribute map so the "attributes that are set" list
		// exists before any tag handler touches it.
		getAttributes().put("javax.faces.component.UIComponentBase.attributesThatAreSet", new ArrayList<String>());
	}
	@Override
	public String getFamily() {
		return COMPONENT_FAMILY;
	}
	@Override
	public String getRendererType() {
		return RENDERER_TYPE;
	}
	@Override
	public void setRendererType(String rendererType) {
		// Intentionally a no-op: the renderer type is fixed to RENDERER_TYPE.
	}
}
| apache-2.0 |
enriko-iskandar/chef | cookbooks/shinken/recipes/_hostgroups.rb | 2058 | #
# Cookbook Name:: shinken
# Recipe:: _hostgroups
#
# Copyright (C) 2014 EverTrue, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Prune config files for hostgroups that are no longer declared in node
# attributes, then render one config file per declared hostgroup.
if Dir.exist?("#{node['shinken']['conf_dir']}/hostgroups")
  # 'linux' is always treated as active in addition to the configured groups.
  active_hostgroups_list = node['shinken']['hostgroups'].keys + ['linux']
  # Group names currently on disk: directory entries minus '.'/'..', with the
  # '.cfg' suffix stripped.
  current_hostgroups_list = (Dir.entries("#{node['shinken']['conf_dir']}/hostgroups") -
                             %w(. ..)).map do |e|
    e.sub(/\.cfg/, '')
  end
  # Anything on disk but no longer declared gets its config file removed.
  deleted_hostgroups_list = current_hostgroups_list - active_hostgroups_list
  deleted_hostgroups_list.each do |hg|
    file "#{node['shinken']['conf_dir']}/hostgroups/#{hg}.cfg" do
      action :delete
      notifies :restart, 'service[shinken-arbiter]'
    end
  end
end

node['shinken']['hostgroups'].each do |hg_name, hg_conf|
  conf = { 'hostgroup_name' => hg_name }
  # Membership is either computed via a node search or listed explicitly.
  if hg_conf['search_str']
    conf['members'] = search(
      :node,
      node['shinken']['host_search_query'] + " AND " + hg_conf['search_str']
    ).map(&:name).join(',')
  elsif hg_conf['members']
    conf['members'] = hg_conf['members'].join(',')
  else
    fail "Hostgroup #{hg_name} must contain either `search_str` or `members`."
  end
  template "#{node['shinken']['conf_dir']}/hostgroups/#{hg_name}.cfg" do
    source 'generic-definition.cfg.erb'
    owner node['shinken']['user']
    group node['shinken']['group']
    mode 0644
    variables(
      type: 'hostgroup',
      conf: hg_conf['conf'].merge(conf)
    )
    notifies :restart, 'service[shinken-arbiter]'
  end
end
| apache-2.0 |
plxaye/chromium | src/chrome/browser/chromeos/extensions/default_app_order.cc | 4436 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "chrome/browser/chromeos/extensions/default_app_order.h"
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/file_util.h"
#include "base/files/file_path.h"
#include "base/json/json_file_value_serializer.h"
#include "base/path_service.h"
#include "base/time.h"
#include "chrome/common/extensions/extension_constants.h"
#include "chromeos/chromeos_paths.h"
#include "content/public/browser/browser_thread.h"
namespace chromeos {
namespace default_app_order {
namespace {
// The single ExternalLoader instance.
ExternalLoader* loader_instance = NULL;
// Reads external ordinal json file and returned the parsed value. Returns NULL
// if the file does not exist or could not be parsed properly. Caller takes
// ownership of the returned value.
// Reads external ordinal json file and returns the parsed value. Returns NULL
// if the file does not exist or could not be parsed properly. Caller takes
// ownership of the returned value.
base::ListValue* ReadExternalOrdinalFile(const base::FilePath& path) {
  if (!file_util::PathExists(path))
    return NULL;

  JSONFileValueSerializer serializer(path);
  std::string error_msg;
  base::Value* value = serializer.Deserialize(NULL, &error_msg);
  if (!value) {
    LOG(WARNING) << "Unable to deserialize default app ordinals json data:"
                 << error_msg << ", file=" << path.value();
    return NULL;
  }

  base::ListValue* ordinal_list_value = NULL;
  if (value->GetAsList(&ordinal_list_value))
    return ordinal_list_value;

  LOG(WARNING) << "Expect a JSON list in file " << path.value();
  // |value| parsed but is not a list; delete it before bailing out.
  // (Previously it was leaked on this path.)
  delete value;
  return NULL;
}
// Gets built-in default app order.
void GetDefault(std::vector<std::string>* app_ids) {
DCHECK(app_ids && app_ids->empty());
const char* kDefaultAppOrder[] = {
extension_misc::kChromeAppId,
extension_misc::kWebStoreAppId,
"coobgpohoikkiipiblmjeljniedjpjpf", // Search
"blpcfgokakmgnkcojhhkbfbldkacnbeo", // Youtube
"pjkljhegncpnkpknbcohdijeoejaedia", // Gmail
"ejjicmeblgpmajnghnpcppodonldlgfn", // Calendar
"kjebfhglflhjjjiceimfkgicifkhjlnm", // Scratchpad
"lneaknkopdijkpnocmklfnjbeapigfbh", // Google Maps
"apdfllckaahabafndbhieahigkjlhalf", // Drive
"aohghmighlieiainnegkcijnfilokake", // Docs
"felcaaldnbdncclmgdcncolpebgiejap", // Sheets
"aapocclcgogkmnckokdopfmhonfmgoek", // Slides
"dlppkpafhbajpcmmoheippocdidnckmm", // Google+
"kbpgddbgniojgndnhlkjbkpknjhppkbk", // Google+ Hangouts
"hhaomjibdihmijegdhdafkllkbggdgoj", // Files
"hkhhlkdconhgemhegnplaldnmnmkaemd", // Tips & Tricks
"icppfcnhkcmnfdhfhphakoifcfokfdhg", // Play Music
"mmimngoggfoobjdlefbcabngfnmieonb", // Play Books
"fppdphmgcddhjeddoeghpjefkdlccljb", // Play Movies
"fobcpibfeplaikcclojfdhfdmbbeofai", // Games
"joodangkbfjnajiiifokapkpmhfnpleo", // Calculator
"hfhhnacclhffhdffklopdkcgdhifgngh", // Camera
"gbchcmhmhahfdphkhkmpfmihenigjmpp", // Chrome Remote Desktop
};
for (size_t i = 0; i < arraysize(kDefaultAppOrder); ++i)
app_ids->push_back(std::string(kDefaultAppOrder[i]));
}
} // namespace
// Registers itself as the singleton and kicks off loading, either
// synchronously or on the blocking pool. GetAppIds() waits on |loaded_|.
ExternalLoader::ExternalLoader(bool async)
    : loaded_(true /* manual_rest */, false /* initially_signaled */) {
  DCHECK(!loader_instance);
  loader_instance = this;
  if (async) {
    // base::Unretained is safe only if this object outlives the task;
    // the destructor DCHECKs that loading has finished.
    content::BrowserThread::PostBlockingPoolTask(FROM_HERE,
        base::Bind(&ExternalLoader::Load, base::Unretained(this)));
  } else {
    Load();
  }
}
ExternalLoader::~ExternalLoader() {
  // Loading must have completed before destruction (see async ctor path).
  DCHECK(loaded_.IsSignaled());
  DCHECK_EQ(loader_instance, this);
  loader_instance = NULL;
}
// Returns the loaded app id list. CHECKs (rather than blocks) that Load()
// has already signaled completion.
const std::vector<std::string>& ExternalLoader::GetAppIds() {
  CHECK(loaded_.IsSignaled());
  return app_ids_;
}
// Populates |app_ids_| from the external ordinals file when present and
// valid, otherwise from the built-in defaults, then signals |loaded_|.
void ExternalLoader::Load() {
  base::FilePath ordinals_file;
  CHECK(PathService::Get(chromeos::FILE_DEFAULT_APP_ORDER, &ordinals_file));
  scoped_ptr<base::ListValue> ordinals_value(
      ReadExternalOrdinalFile(ordinals_file));
  if (ordinals_value) {
    for (size_t i = 0; i < ordinals_value->GetSize(); ++i) {
      std::string app_id;
      CHECK(ordinals_value->GetString(i, &app_id));
      app_ids_.push_back(app_id);
    }
  } else {
    GetDefault(&app_ids_);
  }
  loaded_.Signal();
}
// Public accessor: copies the app order into |app_ids|, falling back to the
// built-in defaults when no loader singleton exists.
void Get(std::vector<std::string>* app_ids) {
  // |loader_instance| could be NULL for test.
  if (!loader_instance) {
    GetDefault(app_ids);
    return;
  }
  *app_ids = loader_instance->GetAppIds();
}
}
} // namespace default_app_order
} // namespace chromeos
| apache-2.0 |
PurelyApplied/geode | geode-core/src/main/java/org/apache/geode/internal/logging/SessionContext.java | 1617 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.logging;
public interface SessionContext {

  /** Current lifecycle state of this session. */
  State getState();

  /** Supplier of the log configuration backing this session. */
  LogConfigSupplier getLogConfigSupplier();

  /**
   * Session lifecycle states. {@link #changeTo} enforces the legal
   * transitions STOPPED -&gt; CREATED -&gt; STARTED -&gt; STOPPED and throws
   * {@link IllegalStateException} for anything else.
   */
  enum State {
    CREATED,
    STARTED,
    STOPPED;

    State changeTo(final State newState) {
      switch (newState) {
        case CREATED:
          // A session may only be (re)created once fully stopped.
          if (this != STOPPED) {
            throw new IllegalStateException("Session must not exist before creating");
          }
          return CREATED;
        case STARTED:
          if (this != CREATED) {
            throw new IllegalStateException("Session must be created before starting");
          }
          return STARTED;
        case STOPPED:
          if (this != STARTED) {
            throw new IllegalStateException("Session must be started before stopping");
          }
          // deliberate fall-through to the shared return below
      }
      return STOPPED;
    }
  }
}
| apache-2.0 |
x-meta/xworker | xworker_lang/src/main/java/xworker/db/jdbc/C3p0Actions.java | 3487 | /*******************************************************************************
* Copyright 2007-2013 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package xworker.db.jdbc;
import java.beans.PropertyVetoException;
import java.util.Properties;
import org.xmeta.ActionContext;
import org.xmeta.Thing;
import com.mchange.v2.c3p0.ComboPooledDataSource;
import xworker.lang.executor.Executor;
import xworker.util.UtilData;
public class C3p0Actions {
//private static Logger logger = LoggerFactory.getLogger(C3p0Actions.class);
private static final String TAG = C3p0Actions.class.getName();
	/**
	 * Builds and configures a c3p0 {@link ComboPooledDataSource} from the
	 * "self" thing in the action context, optionally overlaying settings
	 * from an external properties file.
	 *
	 * @param actionContext context carrying the configuring "self" thing
	 * @return the configured data source
	 * @throws PropertyVetoException if the JDBC driver class is rejected
	 */
	public static Object init(ActionContext actionContext) throws PropertyVetoException{
		Thing self = (Thing) actionContext.get("self");

		// Configuration file: overlay "configPropertiesFile" onto self;
		// on failure, log and keep using self directly.
		Thing thing = self;
		try{
			thing = UtilData.createPropertiesThing(self, self.getString("configPropertiesFile"));
		}catch(Exception e){
			Executor.error(TAG, "Reader datasource properties file error", e);
		}

		// Create the c3p0 data source.
		ComboPooledDataSource dataSource = new ComboPooledDataSource();

		// Apply raw properties first; explicit setters below take precedence.
		Properties p = (Properties) self.doAction("getProperties", actionContext);
		if(p != null){
			dataSource.setProperties(p);
		}
		dataSource.setDriverClass((String) thing.doAction("getDriverClass", actionContext));
		dataSource.setJdbcUrl((String) thing.doAction("getUrl", actionContext));
		dataSource.setUser((String) thing.doAction("getUserName", actionContext));
		dataSource.setPassword((String) thing.doAction("getPassword", actionContext));
		dataSource.setCheckoutTimeout(thing.getInt("checkoutTimeout", 0));
		dataSource.setInitialPoolSize(thing.getInt("initPoolSize", 3));
		dataSource.setMaxPoolSize(thing.getInt("maxPoolSize", 15));
		dataSource.setMinPoolSize(thing.getInt("minPoolSize", 3));
		dataSource.setMaxIdleTime(thing.getInt("maxIdleTime", 0));
		dataSource.setIdleConnectionTestPeriod(thing.getInt("idleConnectionTestPeriod", 0));
		dataSource.setTestConnectionOnCheckout(thing.getBoolean("testOnCheckOut"));
		dataSource.setTestConnectionOnCheckin(thing.getBoolean("testOnCheckIn"));
		dataSource.setAcquireIncrement(thing.getInt("acquireIncrement", 3));
		dataSource.setAcquireRetryAttempts(thing.getInt("acquireRetryAttempts", 30));
		dataSource.setAcquireRetryDelay(thing.getInt("acquireRetryDelay", 1000));
		dataSource.setBreakAfterAcquireFailure(thing.getBoolean("breakAfterAcquireFailure", false));
		dataSource.setDebugUnreturnedConnectionStackTraces(thing.getBoolean("debugUnreturnedConnectionStackTraces", false));
		// Only set a validation query when one is actually configured.
		String testSql = thing.getString("testSql");
		if(testSql != null && !"".equals(testSql)){
			dataSource.setPreferredTestQuery(testSql);
		}
		return dataSource;
	}
	/** Closes the pooled data source stored in the context, if any. */
	public static void close(ActionContext actionContext){
		if(actionContext.get("dataSource") != null){
			((ComboPooledDataSource) actionContext.get("dataSource")).close();
		}
	}
} | apache-2.0 |
daidong/MetaGraphBenchMarkSuits | src/main/java/workloads/ConcurrentWorkLoad.java | 848 | package workloads;
import java.util.ArrayList;
import java.util.Random;
/**
* This class generates workloads (n writes) on one vertex
*
* @author daidong
*
*/
public class ConcurrentWorkLoad implements GraphWorkLoad {

    // Number of edge writes this workload issues.
    public int writes;
    // The single vertex every write targets (always 0).
    public int vertexId;
    // Pre-generated edges, one per write.
    public ArrayList<MetaEdge> edges;

    /**
     * Builds a workload of {@code currentWrites} edge writes all targeting
     * vertex 0, with random non-negative edge ids.
     */
    public ConcurrentWorkLoad(int currentWrites){
        this.writes = currentWrites;
        this.edges = new ArrayList<MetaEdge>();
        Random r = new Random(System.currentTimeMillis());
        this.vertexId = 0;
        for (int i = 0; i < this.writes; i++){
            // nextInt(bound) is always non-negative. The previous
            // Math.abs(r.nextInt()) could return a NEGATIVE value, because
            // Math.abs(Integer.MIN_VALUE) == Integer.MIN_VALUE.
            MetaEdge edge = new MetaEdge(r.nextInt(Integer.MAX_VALUE));
            edges.add(edge);
        }
    }

    /** Returns the target vertex id for index 0, or -1 for any other index. */
    public int getVertex(int i){
        if (i == 0)
            return this.vertexId;
        else
            return -1;
    }

    /** Returns the pre-generated edge at position {@code i}. */
    public MetaEdge getEdge(int i){
        return edges.get(i);
    }

    /** Number of writes (the "pressure") this workload applies. */
    public int getPressure() {
        return this.writes;
    }
}
| apache-2.0 |
aboyett/blockdiag | src/blockdiag/utils/__init__.py | 4299 | # -*- coding: utf-8 -*-
# Copyright 2011 Takeshi KOMIYA
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division
import re
import math
from collections import namedtuple
Size = namedtuple('Size', 'width height')
class XY(tuple):
    """Immutable 2D point: a ``(x, y)`` tuple with attribute access."""

    # Attribute name -> tuple index.
    mapper = {'x': 0, 'y': 1}

    def __new__(cls, x, y):
        return super(XY, cls).__new__(cls, (x, y))

    def __getattr__(self, name):
        # Only 'x' and 'y' are resolvable; anything else is an error.
        index = self.mapper.get(name)
        if index is None:
            raise AttributeError(name)
        return self[index]

    def __setattr__(self, name, value):
        # Points are immutable, like the tuple they subclass.
        raise TypeError("'XY' object does not support item assignment")

    def shift(self, x=0, y=0):
        """Return a new point translated by (x, y)."""
        return self.__class__(x + self[0], y + self[1])
class Box(list):
    """Axis-aligned rectangle stored as a mutable list [x1, y1, x2, y2].

    Attribute access (``box.x1`` etc.) is routed through ``mapper``; note
    that ``x``/``y`` alias ``x1``/``y1``.
    """

    # Attribute name -> list index; 'x'/'y' are aliases for 'x1'/'y1'.
    mapper = dict(x1=0, y1=1, x2=2, y2=3, x=0, y=1)

    def __init__(self, x1, y1, x2, y2):
        super(Box, self).__init__((x1, y1, x2, y2))

    def __getattr__(self, name):
        # Fall back to mapper lookup for the coordinate attributes.
        try:
            return self[self.mapper[name]]
        except KeyError:
            raise AttributeError(name)

    def __repr__(self):
        _format = "<%s (%s, %s) %dx%d at 0x%08x>"
        params = (self.__class__.__name__, self.x1, self.y1,
                  self.width, self.height, id(self))
        return _format % params

    def shift(self, x=0, y=0):
        """Return a new Box translated by (x, y)."""
        return self.__class__(self.x1 + x, self.y1 + y,
                              self.x2 + x, self.y2 + y)

    def get_padding_for(self, size, **kwargs):
        """Compute the (dx, dy) offset that places ``size`` inside this box.

        Keyword args: ``halign``/``valign`` ('left'/'center'/'right' and
        'top'/'center'/'bottom', default 'center') and ``padding`` (pixels,
        applied on the aligned edge only).
        """
        valign = kwargs.get('valign', 'center')
        halign = kwargs.get('halign', 'center')
        padding = kwargs.get('padding', 0)

        if halign == 'left':
            dx = padding
        elif halign == 'right':
            dx = self.size.width - size.width - padding
        else:
            # Center horizontally; ceil keeps the result an int on Python 2
            # true division (see the __future__ import at the top of file).
            dx = int(math.ceil((self.size.width - size.width) / 2.0))

        if valign == 'top':
            dy = padding
        elif valign == 'bottom':
            dy = self.size.height - size.height - padding
        else:
            dy = int(math.ceil((self.size.height - size.height) / 2.0))

        return dx, dy

    @property
    def size(self):
        return Size(self.width, self.height)

    @property
    def width(self):
        return self.x2 - self.x1

    @property
    def height(self):
        return self.y2 - self.y1

    @property
    def topleft(self):
        return XY(self.x1, self.y1)

    @property
    def top(self):
        return XY(self.x1 + self.width // 2, self.y1)

    @property
    def topright(self):
        return XY(self.x2, self.y1)

    @property
    def bottomleft(self):
        return XY(self.x1, self.y2)

    @property
    def bottom(self):
        return XY(self.x1 + self.width // 2, self.y2)

    @property
    def bottomright(self):
        return XY(self.x2, self.y2)

    @property
    def left(self):
        return XY(self.x1, self.y1 + self.height // 2)

    @property
    def right(self):
        return XY(self.x2, self.y1 + self.height // 2)

    @property
    def center(self):
        return XY(self.x1 + self.width // 2, self.y1 + self.height // 2)

    def to_integer_point(self):
        # Truncate all four coordinates to ints.
        return Box(*[int(i) for i in self])
def unquote(string):
    """Strip one matching pair of surrounding quotes from ``string``.

    Both single and double quotes are recognized; escaped quotes inside
    are unescaped. Strings with no (or unbalanced) surrounding quotes,
    empty strings and ``None`` are returned unchanged.
    """
    if not string:
        return string
    match = re.match('\A(?P<quote>"|\')((.|\s)*)(?P=quote)\Z', string, re.M)
    if not match:
        return string
    quote = match.group(1)
    # Turn each escaped quote (\" or \') back into a bare quote.
    return re.sub("\\\\" + quote, quote, match.group(2))
def is_Pillow_available():
    """Return True when Pillow's freetype module can be imported."""
    try:
        from PIL import _imagingft  # noqa -- the import itself is the check
    except ImportError:
        return False
    return True
| apache-2.0 |
flavoi/diventi | diventi/accounts/migrations/0274_auto_20200315_1318.py | 453 | # Generated by Django 2.2.10 on 2020-03-15 12:18
import diventi.accounts.models
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.10 makemigrations (2020-03-15).

    dependencies = [
        ('accounts', '0273_auto_20200315_1313'),
    ]

    operations = [
        # Re-point the default manager of 'diventiuser' at the project's
        # custom DiventiUserManager.
        migrations.AlterModelManagers(
            name='diventiuser',
            managers=[
                ('objects', diventi.accounts.models.DiventiUserManager()),
            ],
        ),
    ]
| apache-2.0 |
cboehme/metafacture-core | metafacture-formeta/src/main/java/org/metafacture/formeta/parser/StructureParserContext.java | 2742 | /*
* Copyright 2013, 2014 Deutsche Nationalbibliothek
*
* Licensed under the Apache License, Version 2.0 the "License";
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.metafacture.formeta.parser;
/**
* Context of the record parser. It manages the text parser and the generation of
* the stream events emitted by the decoder.
*/
class StructureParserContext {

    // Receives the stream events generated while parsing.
    private Emitter emitter;

    // Embedded text parser state used to split input into delimited tokens.
    private final TextParserContext textParserContext = new TextParserContext();
    private TextParserState textParser = TextParserState.LEADING_WHITESPACE;

    // Most recent token produced by the text parser.
    private String parsedText = "";
    // Name of the literal currently being parsed; null outside a literal.
    private String literalName;
    // Current group nesting depth; 0 means top level.
    private int nestingLevel;

    public void setEmitter(final Emitter emitter) {
        this.emitter = emitter;
    }

    public Emitter getEmitter() {
        return emitter;
    }

    /**
     * Passes {@code ch} to the embedded text parser for processing. If
     * the text parser reaches {@code DELIMITER_REACHED} it is
     * automatically reset.
     *
     * @param ch the character to process
     * @return true if the text parser reached the
     *     {@code DELIMITER_REACHED} state.
     */
    public boolean processCharWithTextParser(final char ch) {
        textParser = textParser.processChar(ch, textParserContext);
        if (textParser == TextParserState.DELIMITER_REACHED) {
            // Capture the completed token, then reset for the next one.
            parsedText = textParserContext.getText();
            textParser = TextParserState.LEADING_WHITESPACE;
            textParserContext.reset();
            return true;
        }
        return false;
    }

    /** Flushes the text parser at end of input and captures the final token. */
    public void processEOIWithTextParser() {
        textParser.endOfInput(textParserContext);
        parsedText = textParserContext.getText();
        textParser = TextParserState.LEADING_WHITESPACE;
        textParserContext.reset();
    }

    /** Emits a start-group event named after the last parsed token. */
    public void startGroup() {
        emitter.startGroup(parsedText, nestingLevel);
        nestingLevel += 1;
    }

    public void endGroup() {
        nestingLevel -= 1;
        emitter.endGroup(nestingLevel);
    }

    /** Remembers the last parsed token as the pending literal's name. */
    public void startLiteral() {
        literalName = parsedText;
    }

    /** Emits the pending literal, using the last parsed token as its value. */
    public void endLiteral() {
        emitter.literal(literalName, parsedText, nestingLevel);
        literalName = null;
    }

    public boolean isTextEmpty() {
        return parsedText.isEmpty();
    }

    public boolean isNested() {
        return nestingLevel > 0;
    }

    /** Resets all parsing state so a new record can be processed. */
    public void reset() {
        textParser = TextParserState.LEADING_WHITESPACE;
        textParserContext.reset();
        parsedText = "";
        literalName = null;
        nestingLevel = 0;
    }
}
| apache-2.0 |
cwant/tessagon | tests/core/core_tests_base.py | 930 | import os
import sys
this_dir = os.path.dirname(os.path.realpath(__file__))
sys.path.append(this_dir + '/../..')
from tessagon.core.tile import Tile # noqa: E402
class CoreTestsBase:
    # Marker base class for core tests; currently adds no shared behavior.
    pass
class FakeTessagon:
    """Test stand-in for a tessagon: fake adaptor plus a sample surface."""

    def __init__(self):
        # Tests inspect the verts/faces recorded on this adaptor.
        self.mesh_adaptor = FakeAdaptor()

    def f(self, u, v):
        # Simple parametric surface sample: (u, u*v, v).
        return [u, u * v, v]
class FakeAdaptor:
    """Records created vertices/faces instead of building a real mesh."""

    def __init__(self):
        self.verts, self.faces = [], []

    def create_vert(self, coords):
        # Store the coordinates and hand them back as the "vert" handle.
        self.verts.append(coords)
        return coords
class FakeTileSubClass(Tile):
    """Minimal Tile: 2x2 (top/bottom x left/right) grids of empty slots."""

    def _empty_grid(self):
        # verts and faces share the same top/bottom x left/right layout.
        return {side: {'left': None, 'right': None}
                for side in ('top', 'bottom')}

    def init_verts(self):
        return self._empty_grid()

    def init_faces(self):
        return self._empty_grid()
| apache-2.0 |
NetApp/manila | manila/tests/share/drivers/test_generic.py | 92077 | # Copyright (c) 2014 NetApp, Inc.
# Copyright (c) 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for the Generic driver module."""
import os
import time
import ddt
import mock
from oslo_concurrency import processutils
from oslo_config import cfg
from six import moves
from manila.common import constants as const
from manila import compute
from manila import context
from manila import exception
import manila.share.configuration
from manila.share.drivers import generic
from manila.share import share_types
from manila import test
from manila.tests import fake_compute
from manila.tests import fake_service_instance
from manila.tests import fake_share
from manila.tests import fake_volume
from manila import utils
from manila import volume
CONF = cfg.CONF
def get_fake_manage_share():
    """Return a minimal NFS share dict used by the manage-share tests."""
    export_paths = ('10.0.0.1:/foo/fake/path', '11.0.0.1:/bar/fake/path')
    return {
        'id': 'fake',
        'share_proto': 'NFS',
        'share_type_id': 'fake',
        'export_locations': [{'path': path} for path in export_paths],
    }
def get_fake_snap_dict():
    """Return a canned cgsnapshot dict with one NFS member for CG tests."""
    snap_dict = {
        'status': 'available',
        'project_id': '13c0be6290934bd98596cfa004650049',
        'user_id': 'a0314a441ca842019b0952224aa39192',
        'description': None,
        'deleted': '0',
        'created_at': '2015-08-10 00:05:58',
        'updated_at': '2015-08-10 00:05:58',
        'consistency_group_id': '4b04fdc3-00b9-4909-ba1a-06e9b3f88b67',
        # Single member snapshot belonging to this cgsnapshot.
        'cgsnapshot_members': [
            {
                'status': 'available',
                'share_type_id': '1a9ed31e-ee70-483d-93ba-89690e028d7f',
                'share_id': 'e14b5174-e534-4f35-bc4f-fe81c1575d6f',
                'user_id': 'a0314a441ca842019b0952224aa39192',
                'deleted': 'False',
                'created_at': '2015-08-10 00:05:58',
                'share': {
                    'id': '03e2f06e-14f2-45a5-9631-0949d1937bd8',
                    'deleted': False,
                },
                'updated_at': '2015-08-10 00:05:58',
                'share_proto': 'NFS',
                'project_id': '13c0be6290934bd98596cfa004650049',
                'cgsnapshot_id': 'f6aa3b59-57eb-421e-965c-4e182538e36a',
                'deleted_at': None,
                'id': '03e2f06e-14f2-45a5-9631-0949d1937bd8',
                'size': 1,
            },
        ],
        'deleted_at': None,
        'id': 'f6aa3b59-57eb-421e-965c-4e182538e36a',
        'name': None,
    }
    return snap_dict
def get_fake_cg_dict():
    """Return a canned consistency-group dict sourced from a cgsnapshot.

    The single share's 'source_cgsnapshot_member_id' matches the member id
    in get_fake_snap_dict(), so the two fixtures can be collated together.
    """
    cg_dict = {
        'status': 'creating',
        'project_id': '13c0be6290934bd98596cfa004650049',
        'user_id': 'a0314a441ca842019b0952224aa39192',
        'description': None,
        'deleted': 'False',
        'created_at': '2015-08-10 00:07:58',
        'updated_at': None,
        'source_cgsnapshot_id': 'f6aa3b59-57eb-421e-965c-4e182538e36a',
        'host': 'openstack2@cmodeSSVMNFS',
        'deleted_at': None,
        'shares': [
            {
                'id': '02a32f06e-14f2-45a5-9631-7483f1937bd8',
                'deleted': False,
                'source_cgsnapshot_member_id':
                    '03e2f06e-14f2-45a5-9631-0949d1937bd8',
            },
        ],
        'share_types': [
            {
                'id': 'f6aa3b56-45a5-9631-02a32f06e1937b',
                'deleted': False,
                'consistency_group_id': '4b04fdc3-00b9-4909-ba1a-06e9b3f88b67',
                'share_type_id': '1a9ed31e-ee70-483d-93ba-89690e028d7f',
            },
        ],
        'id': 'eda52174-0442-476d-9694-a58327466c14',
        'name': None
    }
    return cg_dict
def get_fake_collated_cg_snap_info():
    """Return share/snapshot pairs matching the cg and cgsnapshot fixtures."""
    fake_collated_cg_snap_info = [
        {
            # Share entry from get_fake_cg_dict()...
            'share': {
                'id': '02a32f06e-14f2-45a5-9631-7483f1937bd8',
                'deleted': False,
                'source_cgsnapshot_member_id':
                    '03e2f06e-14f2-45a5-9631-0949d1937bd8',
            },
            # ...paired with its source member from get_fake_snap_dict().
            'snapshot': {
                'id': '03e2f06e-14f2-45a5-9631-0949d1937bd8',
            },
        },
    ]
    return fake_collated_cg_snap_info
def get_fake_access_rule(access_to, access_level, access_type='ip'):
    """Build an access-rule dict for tests (defaults to an IP-type rule)."""
    rule = dict.fromkeys(('access_type', 'access_to', 'access_level'))
    rule['access_type'] = access_type
    rule['access_to'] = access_to
    rule['access_level'] = access_level
    return rule
@ddt.ddt
class GenericShareDriverTestCase(test.TestCase):
"""Tests GenericShareDriver."""
    def setUp(self):
        """Build a GenericShareDriver wired entirely to fakes and mocks."""
        super(GenericShareDriverTestCase, self).setUp()
        self._context = context.get_admin_context()
        # Command executor stub: every call "succeeds" with empty output.
        self._execute = mock.Mock(return_value=('', ''))
        self._helper_cifs = mock.Mock()
        self._helper_nfs = mock.Mock()
        CONF.set_default('driver_handles_share_servers', True)
        self.fake_conf = manila.share.configuration.Configuration(None)
        self.fake_private_storage = mock.Mock()
        self.mock_object(self.fake_private_storage, 'get',
                         mock.Mock(return_value=None))
        # Patch the service-instance manager class for the duration of
        # driver construction only.
        with mock.patch.object(
                generic.service_instance,
                'ServiceInstanceManager',
                fake_service_instance.FakeServiceInstanceManager):
            self._driver = generic.GenericShareDriver(
                private_storage=self.fake_private_storage,
                execute=self._execute, configuration=self.fake_conf)
        # Replace the driver's external APIs with fakes.
        self._driver.service_tenant_id = 'service tenant id'
        self._driver.service_network_id = 'service network id'
        self._driver.compute_api = fake_compute.API()
        self._driver.volume_api = fake_volume.API()
        self._driver.share_networks_locks = {}
        self._driver.get_service_instance = mock.Mock()
        self._driver.share_networks_servers = {}
        self._driver.admin_context = self._context
        self.fake_sn = {"id": "fake_sn_id"}
        self.fake_net_info = {
            "id": "fake_srv_id",
            "share_network_id": "fake_sn_id"
        }
        # Instance manager mock that always yields the same fake manager.
        fsim = fake_service_instance.FakeServiceInstanceManager()
        sim = mock.Mock(return_value=fsim)
        self._driver.instance_manager = sim
        self._driver.service_instance_manager = sim
        self.fake_server = sim._create_service_instance(
            context="fake", instance_name="fake",
            share_network_id=self.fake_sn["id"], old_server_ip="fake")
        # Neutralize locking and filesystem checks for the tests.
        self.mock_object(utils, 'synchronized',
                         mock.Mock(return_value=lambda f: f))
        self.mock_object(generic.os.path, 'exists',
                         mock.Mock(return_value=True))
        self._driver._helpers = {
            'CIFS': self._helper_cifs,
            'NFS': self._helper_nfs,
        }
        # Common fixtures shared across test methods.
        self.share = fake_share.fake_share(share_proto='NFS')
        self.server = {
            'instance_id': 'fake_instance_id',
            'ip': 'fake_ip',
            'username': 'fake_username',
            'password': 'fake_password',
            'pk_path': 'fake_pk_path',
            'backend_details': {
                'ip': '1.2.3.4',
                'instance_id': 'fake',
                'service_ip': 'fake_ip',
            },
            'availability_zone': 'fake_az',
        }
        self.access = fake_share.fake_access()
        self.snapshot = fake_share.fake_snapshot()
        # Avoid real sleeps and capture all driver logging.
        self.mock_object(time, 'sleep')
        self.mock_debug_log = self.mock_object(generic.LOG, 'debug')
        self.mock_warning_log = self.mock_object(generic.LOG, 'warning')
        self.mock_error_log = self.mock_object(generic.LOG, 'error')
        self.mock_exception_log = self.mock_object(generic.LOG, 'exception')
    @ddt.data(True, False)
    def test_do_setup_with_dhss(self, dhss):
        """do_setup consults the common service server only when DHSS=False."""
        CONF.set_default('driver_handles_share_servers', dhss)
        fake_server = {'id': 'fake_server_id'}
        self.mock_object(volume, 'API')
        self.mock_object(compute, 'API')
        self.mock_object(self._driver, '_setup_helpers')
        self.mock_object(
            self._driver,
            '_is_share_server_active', mock.Mock(return_value=True))
        self.mock_object(
            self._driver.service_instance_manager,
            'get_common_server', mock.Mock(return_value=fake_server))
        self._driver.do_setup(self._context)
        volume.API.assert_called_once_with()
        compute.API.assert_called_once_with()
        self._driver._setup_helpers.assert_called_once_with()
        if not dhss:
            # DHSS=False: the driver must look up the preexisting common
            # server and verify that it is active.
            self._driver.service_instance_manager.get_common_server.\
                assert_called_once_with()
            self._driver._is_share_server_active.assert_called_once_with(
                self._context, fake_server)
        else:
            # DHSS=True: share servers are created on demand, so the common
            # server must not be queried during setup.
            self.assertFalse(
                self._driver.service_instance_manager.get_common_server.called)
            self.assertFalse(self._driver._is_share_server_active.called)
    @mock.patch('time.sleep')
    def test_do_setup_dhss_false_server_avail_after_retry(self, mock_sleep):
        """do_setup retries until the common server is found and active."""
        # This tests the scenario in which the common share server cannot be
        # retrieved during the first attempt, is not active during the second,
        # becoming active during the third attempt.
        CONF.set_default('driver_handles_share_servers', False)
        fake_server = {'id': 'fake_server_id'}
        self.mock_object(volume, 'API')
        self.mock_object(compute, 'API')
        self.mock_object(self._driver, '_setup_helpers')
        self.mock_object(
            self._driver,
            '_is_share_server_active', mock.Mock(side_effect=[False, True]))
        self.mock_object(
            self._driver.service_instance_manager,
            'get_common_server',
            mock.Mock(side_effect=[exception.ManilaException,
                                   fake_server,
                                   fake_server]))
        self._driver.do_setup(self._context)
        volume.API.assert_called_once_with()
        compute.API.assert_called_once_with()
        self._driver._setup_helpers.assert_called_once_with()
        # Three lookups: one failed, one inactive, one active.
        self._driver.service_instance_manager.get_common_server.\
            assert_has_calls([mock.call()] * 3)
        self._driver._is_share_server_active.assert_has_calls(
            [mock.call(self._context, fake_server)] * 2)
        # Two retries imply two back-off sleeps of 5 seconds each.
        mock_sleep.assert_has_calls([mock.call(5)] * 2)
    def test_setup_helpers(self):
        """_setup_helpers imports and instantiates each configured helper."""
        self._driver._helpers = {}
        CONF.set_default('share_helpers', ['NFS=fakenfs'])
        self.mock_object(generic.importutils, 'import_class',
                         mock.Mock(return_value=self._helper_nfs))
        self._driver._setup_helpers()
        generic.importutils.import_class.assert_has_calls([
            mock.call('fakenfs')
        ])
        # The helper class is constructed with execute, ssh_exec and config.
        self._helper_nfs.assert_called_once_with(
            self._execute,
            self._driver._ssh_exec,
            self.fake_conf
        )
        self.assertEqual(1, len(self._driver._helpers))
    def test_setup_helpers_no_helpers(self):
        """An empty share_helpers list makes _setup_helpers raise."""
        self._driver._helpers = {}
        CONF.set_default('share_helpers', [])
        self.assertRaises(exception.ManilaException,
                          self._driver._setup_helpers)
    def test_create_share(self):
        """create_share allocates, attaches, formats and mounts a volume."""
        volume = 'fake_volume'
        volume2 = 'fake_volume2'
        self._helper_nfs.create_export.return_value = 'fakelocation'
        self.mock_object(self._driver, '_allocate_container',
                         mock.Mock(return_value=volume))
        self.mock_object(self._driver, '_attach_volume',
                         mock.Mock(return_value=volume2))
        self.mock_object(self._driver, '_format_device')
        self.mock_object(self._driver, '_mount_device')
        expected_el = [{
            'is_admin_only': False,
            'path': 'fakelocation',
            'metadata': {'export_location_metadata_example': 'example'},
        }]
        result = self._driver.create_share(
            self._context, self.share, share_server=self.server)
        self.assertEqual(expected_el, result)
        # Allocation happens in the admin context, not the request context.
        self._driver._allocate_container.assert_called_once_with(
            self._driver.admin_context, self.share)
        # _attach_volume receives the original volume; the attached volume
        # (volume2) is then formatted and mounted.
        self._driver._attach_volume.assert_called_once_with(
            self._driver.admin_context, self.share,
            self.server['backend_details']['instance_id'],
            volume)
        self._driver._format_device.assert_called_once_with(
            self.server['backend_details'], volume2)
        self._driver._mount_device.assert_called_once_with(
            self.share, self.server['backend_details'], volume2)
    def test_create_share_exception(self):
        """create_share fails when no share server is supplied."""
        share = fake_share.fake_share(share_network_id=None)
        self.assertRaises(exception.ManilaException, self._driver.create_share,
                          self._context, share)
    def test_create_share_invalid_helper(self):
        """create_share rejects a share whose protocol has no helper."""
        # Only CIFS is registered while the share uses NFS.
        self._driver._helpers = {'CIFS': self._helper_cifs}
        self.assertRaises(exception.InvalidShare, self._driver.create_share,
                          self._context, self.share, share_server=self.server)
def test_is_device_file_available(self):
volume = {'mountpoint': 'fake_mount_point'}
self.mock_object(self._driver, '_ssh_exec',
mock.Mock(return_value=None))
self._driver._is_device_file_available(self.server, volume)
self._driver._ssh_exec.assert_called_once_with(
self.server, ['sudo', 'test', '-b', volume['mountpoint']])
    def test_format_device(self):
        """_format_device waits for the device file, then runs mkfs on it."""
        volume = {'mountpoint': 'fake_mount_point'}
        self.mock_object(self._driver, '_ssh_exec',
                         mock.Mock(return_value=('', '')))
        self.mock_object(self._driver, '_is_device_file_available')
        self._driver._format_device(self.server, volume)
        self._driver._is_device_file_available.assert_called_once_with(
            self.server, volume)
        # The filesystem type comes from the driver configuration.
        self._driver._ssh_exec.assert_called_once_with(
            self.server,
            ['sudo', 'mkfs.%s' % self.fake_conf.share_volume_fstype,
             volume['mountpoint']])
    def test_mount_device_not_present(self):
        """When the device is not mounted yet, it is mounted via SSH."""
        server = {'instance_id': 'fake_server_id'}
        mount_path = self._driver._get_mount_path(self.share)
        volume = {'mountpoint': 'fake_mount_point'}
        self.mock_object(self._driver, '_is_device_mounted',
                         mock.Mock(return_value=False))
        self.mock_object(self._driver, '_sync_mount_temp_and_perm_files')
        self.mock_object(self._driver, '_ssh_exec',
                         mock.Mock(return_value=('', '')))
        self._driver._mount_device(self.share, server, volume)
        self._driver._is_device_mounted.assert_called_once_with(
            mount_path, server, volume)
        self._driver._sync_mount_temp_and_perm_files.assert_called_once_with(
            server)
        # mkdir, mount and chmod are chained in a single remote command.
        self._driver._ssh_exec.assert_called_once_with(
            server,
            ['sudo', 'mkdir', '-p', mount_path,
             '&&', 'sudo', 'mount', volume['mountpoint'], mount_path,
             '&&', 'sudo', 'chmod', '777', mount_path],
        )
    def test_mount_device_present(self):
        """An already-mounted device only triggers a warning, no remount."""
        mount_path = '/fake/mount/path'
        volume = {'mountpoint': 'fake_mount_point'}
        self.mock_object(self._driver, '_is_device_mounted',
                         mock.Mock(return_value=True))
        self.mock_object(self._driver, '_get_mount_path',
                         mock.Mock(return_value=mount_path))
        self.mock_object(generic.LOG, 'warning')
        self._driver._mount_device(self.share, self.server, volume)
        self._driver._get_mount_path.assert_called_once_with(self.share)
        self._driver._is_device_mounted.assert_called_once_with(
            mount_path, self.server, volume)
        generic.LOG.warning.assert_called_once_with(mock.ANY, mock.ANY)
    def test_mount_device_exception_raised(self):
        """A ProcessExecutionError during mount becomes ShareBackendException."""
        volume = {'mountpoint': 'fake_mount_point'}
        self.mock_object(
            self._driver, '_is_device_mounted',
            mock.Mock(side_effect=exception.ProcessExecutionError))
        self.assertRaises(
            exception.ShareBackendException,
            self._driver._mount_device,
            self.share,
            self.server,
            volume,
        )
        self._driver._is_device_mounted.assert_called_once_with(
            self._driver._get_mount_path(self.share), self.server, volume)
    def test_unmount_device_present(self):
        """A mounted device is unmounted and its mount dir removed."""
        mount_path = '/fake/mount/path'
        self.mock_object(self._driver, '_is_device_mounted',
                         mock.Mock(return_value=True))
        self.mock_object(self._driver, '_sync_mount_temp_and_perm_files')
        self.mock_object(self._driver, '_get_mount_path',
                         mock.Mock(return_value=mount_path))
        self.mock_object(self._driver, '_ssh_exec',
                         mock.Mock(return_value=('', '')))
        self._driver._unmount_device(self.share, self.server)
        self._driver._get_mount_path.assert_called_once_with(self.share)
        self._driver._is_device_mounted.assert_called_once_with(
            mount_path, self.server)
        self._driver._sync_mount_temp_and_perm_files.assert_called_once_with(
            self.server)
        # umount and rmdir are chained in one remote command.
        self._driver._ssh_exec.assert_called_once_with(
            self.server,
            ['sudo', 'umount', mount_path, '&&', 'sudo', 'rmdir', mount_path],
        )
    def test_unmount_device_retry_once(self):
        """A single unmount failure is retried after a sleep and succeeds."""
        self.counter = 0

        # Fail the first SSH call only; succeed on the retry.
        def _side_effect(*args):
            self.counter += 1
            if self.counter < 2:
                raise exception.ProcessExecutionError
        mount_path = '/fake/mount/path'
        self.mock_object(self._driver, '_is_device_mounted',
                         mock.Mock(return_value=True))
        self.mock_object(self._driver, '_sync_mount_temp_and_perm_files')
        self.mock_object(self._driver, '_get_mount_path',
                         mock.Mock(return_value=mount_path))
        self.mock_object(self._driver, '_ssh_exec',
                         mock.Mock(side_effect=_side_effect))
        self._driver._unmount_device(self.share, self.server)
        # One retry implies exactly one back-off sleep (time.sleep is
        # mocked in setUp).
        self.assertEqual(1, time.sleep.call_count)
        self.assertEqual([mock.call(self.share) for i in moves.range(2)],
                         self._driver._get_mount_path.mock_calls)
        self.assertEqual([mock.call(mount_path,
                                    self.server) for i in moves.range(2)],
                         self._driver._is_device_mounted.mock_calls)
        self._driver._sync_mount_temp_and_perm_files.assert_called_once_with(
            self.server)
        self.assertEqual(
            [mock.call(self.server, ['sudo', 'umount', mount_path,
                                     '&&', 'sudo', 'rmdir', mount_path])
             for i in moves.range(2)],
            self._driver._ssh_exec.mock_calls,
        )
    def test_unmount_device_not_present(self):
        """Unmounting a device that is not mounted only logs a warning."""
        mount_path = '/fake/mount/path'
        self.mock_object(self._driver, '_is_device_mounted',
                         mock.Mock(return_value=False))
        self.mock_object(self._driver, '_get_mount_path',
                         mock.Mock(return_value=mount_path))
        self.mock_object(generic.LOG, 'warning')
        self._driver._unmount_device(self.share, self.server)
        self._driver._get_mount_path.assert_called_once_with(self.share)
        self._driver._is_device_mounted.assert_called_once_with(
            mount_path, self.server)
        generic.LOG.warning.assert_called_once_with(mock.ANY, mock.ANY)
def test_is_device_mounted_true(self):
volume = {'mountpoint': 'fake_mount_point', 'id': 'fake_id'}
mount_path = '/fake/mount/path'
mounts = "%(dev)s on %(path)s" % {'dev': volume['mountpoint'],
'path': mount_path}
self.mock_object(self._driver, '_ssh_exec',
mock.Mock(return_value=(mounts, '')))
result = self._driver._is_device_mounted(
mount_path, self.server, volume)
self._driver._ssh_exec.assert_called_once_with(
self.server, ['sudo', 'mount'])
self.assertTrue(result)
    def test_is_device_mounted_true_no_volume_provided(self):
        """Without a volume, matching on the mount path alone is enough."""
        mount_path = '/fake/mount/path'
        mounts = "/fake/dev/path on %(path)s type fake" % {'path': mount_path}
        self.mock_object(self._driver, '_ssh_exec',
                         mock.Mock(return_value=(mounts, '')))
        result = self._driver._is_device_mounted(mount_path, self.server)
        self._driver._ssh_exec.assert_called_once_with(
            self.server, ['sudo', 'mount'])
        self.assertTrue(result)
def test_is_device_mounted_false(self):
mount_path = '/fake/mount/path'
volume = {'mountpoint': 'fake_mount_point', 'id': 'fake_id'}
mounts = "%(dev)s on %(path)s" % {'dev': '/fake',
'path': mount_path}
self.mock_object(self._driver, '_ssh_exec',
mock.Mock(return_value=(mounts, '')))
result = self._driver._is_device_mounted(
mount_path, self.server, volume)
self._driver._ssh_exec.assert_called_once_with(
self.server, ['sudo', 'mount'])
self.assertFalse(result)
    def test_is_device_mounted_false_no_volume_provided(self):
        """Without a volume, a non-matching mount listing reports unmounted."""
        mount_path = '/fake/mount/path'
        mounts = "%(path)s" % {'path': 'fake'}
        self.mock_object(self._driver, '_ssh_exec',
                         mock.Mock(return_value=(mounts, '')))
        self.mock_object(self._driver, '_get_mount_path',
                         mock.Mock(return_value=mount_path))
        result = self._driver._is_device_mounted(mount_path, self.server)
        self._driver._ssh_exec.assert_called_once_with(
            self.server, ['sudo', 'mount'])
        self.assertFalse(result)
def test_sync_mount_temp_and_perm_files(self):
self.mock_object(self._driver, '_ssh_exec')
self._driver._sync_mount_temp_and_perm_files(self.server)
self._driver._ssh_exec.has_calls(
mock.call(
self.server,
['sudo', 'cp', const.MOUNT_FILE_TEMP, const.MOUNT_FILE]),
mock.call(self.server, ['sudo', 'mount', '-a']))
    def test_sync_mount_temp_and_perm_files_raise_error_on_copy(self):
        """A failing copy of the mount file raises ShareBackendException."""
        self.mock_object(
            self._driver, '_ssh_exec',
            mock.Mock(side_effect=exception.ProcessExecutionError))
        self.assertRaises(
            exception.ShareBackendException,
            self._driver._sync_mount_temp_and_perm_files,
            self.server
        )
        # Only the copy command ran; the failure aborts before 'mount -a'.
        self._driver._ssh_exec.assert_called_once_with(
            self.server,
            ['sudo', 'cp', const.MOUNT_FILE_TEMP, const.MOUNT_FILE])
def test_sync_mount_temp_and_perm_files_raise_error_on_mount(self):
def raise_error_on_mount(*args, **kwargs):
if args[1][1] == 'cp':
raise exception.ProcessExecutionError()
self.mock_object(self._driver, '_ssh_exec',
mock.Mock(side_effect=raise_error_on_mount))
self.assertRaises(
exception.ShareBackendException,
self._driver._sync_mount_temp_and_perm_files,
self.server
)
self._driver._ssh_exec.has_calls(
mock.call(
self.server,
['sudo', 'cp', const.MOUNT_FILE_TEMP, const.MOUNT_FILE]),
mock.call(self.server, ['sudo', 'mount', '-a']))
def test_get_mount_path(self):
result = self._driver._get_mount_path(self.share)
self.assertEqual(os.path.join(CONF.share_mount_path,
self.share['name']), result)
    def test_attach_volume_not_attached(self):
        """An available volume gets attached and its fresh state returned."""
        available_volume = fake_volume.FakeVolume()
        attached_volume = fake_volume.FakeVolume(status='in-use')
        self.mock_object(self._driver.compute_api, 'instance_volume_attach')
        self.mock_object(self._driver.volume_api, 'get',
                         mock.Mock(return_value=attached_volume))
        result = self._driver._attach_volume(self._context, self.share,
                                             'fake_inst_id', available_volume)
        self._driver.compute_api.instance_volume_attach.\
            assert_called_once_with(self._context, 'fake_inst_id',
                                    available_volume['id'])
        # The volume is re-read after the attach to get its updated status.
        self._driver.volume_api.get.assert_called_once_with(
            self._context, attached_volume['id'])
        self.assertEqual(attached_volume, result)
    def test_attach_volume_attached_correct(self):
        """If the volume is already attached to the server, it is reused."""
        fake_server = fake_compute.FakeServer()
        attached_volume = fake_volume.FakeVolume(status='in-use')
        self.mock_object(self._driver.compute_api, 'instance_volumes_list',
                         mock.Mock(return_value=[attached_volume]))
        result = self._driver._attach_volume(self._context, self.share,
                                             fake_server, attached_volume)
        self.assertEqual(attached_volume, result)
def test_attach_volume_attached_incorrect(self):
fake_server = fake_compute.FakeServer()
attached_volume = fake_volume.FakeVolume(status='in-use')
anoter_volume = fake_volume.FakeVolume(id='fake_id2', status='in-use')
self.mock_object(self._driver.compute_api, 'instance_volumes_list',
mock.Mock(return_value=[anoter_volume]))
self.assertRaises(exception.ManilaException,
self._driver._attach_volume, self._context,
self.share, fake_server, attached_volume)
    @ddt.data(exception.ManilaException, exception.Invalid)
    def test_attach_volume_failed_attach(self, side_effect):
        """Persistent attach failures give up after three attempts."""
        fake_server = fake_compute.FakeServer()
        available_volume = fake_volume.FakeVolume()
        self.mock_object(self._driver.compute_api, 'instance_volume_attach',
                         mock.Mock(side_effect=side_effect))
        self.assertRaises(exception.ManilaException,
                          self._driver._attach_volume,
                          self._context, self.share, fake_server,
                          available_volume)
        # The driver retries the attach up to three times before raising.
        self.assertEqual(
            3, self._driver.compute_api.instance_volume_attach.call_count)
    def test_attach_volume_attached_retry_correct(self):
        """A transient attach failure is retried and then succeeds."""
        fake_server = fake_compute.FakeServer()
        attached_volume = fake_volume.FakeVolume(status='available')
        in_use_volume = fake_volume.FakeVolume(status='in-use')
        # First attach attempt fails with Invalid, the second succeeds.
        side_effect = [exception.Invalid("Fake"), attached_volume]
        attach_mock = mock.Mock(side_effect=side_effect)
        self.mock_object(self._driver.compute_api, 'instance_volume_attach',
                         attach_mock)
        self.mock_object(self._driver.compute_api, 'instance_volumes_list',
                         mock.Mock(return_value=[attached_volume]))
        self.mock_object(self._driver.volume_api, 'get',
                         mock.Mock(return_value=in_use_volume))
        result = self._driver._attach_volume(self._context, self.share,
                                             fake_server, attached_volume)
        self.assertEqual(in_use_volume, result)
        self.assertEqual(
            2, self._driver.compute_api.instance_volume_attach.call_count)
    def test_attach_volume_error(self):
        """A volume that lands in 'error' status fails the attach."""
        fake_server = fake_compute.FakeServer()
        available_volume = fake_volume.FakeVolume()
        error_volume = fake_volume.FakeVolume(status='error')
        self.mock_object(self._driver.compute_api, 'instance_volume_attach')
        self.mock_object(self._driver.volume_api, 'get',
                         mock.Mock(return_value=error_volume))
        self.assertRaises(exception.ManilaException,
                          self._driver._attach_volume,
                          self._context, self.share,
                          fake_server, available_volume)
def test_get_volume(self):
volume = fake_volume.FakeVolume(
name=CONF.volume_name_template % self.share['id'])
self.mock_object(self._driver.volume_api, 'get_all',
mock.Mock(return_value=[volume]))
result = self._driver._get_volume(self._context, self.share['id'])
self.assertEqual(volume, result)
self._driver.volume_api.get_all.assert_called_once_with(
self._context, {'all_tenants': True, 'name': volume['name']})
    def test_get_volume_with_private_data(self):
        """A volume id stored in private storage short-circuits the search."""
        volume = fake_volume.FakeVolume()
        self.mock_object(self._driver.volume_api, 'get',
                         mock.Mock(return_value=volume))
        self.mock_object(self.fake_private_storage, 'get',
                         mock.Mock(return_value=volume['id']))
        result = self._driver._get_volume(self._context, self.share['id'])
        self.assertEqual(volume, result)
        # With a stored id the driver fetches directly instead of listing.
        self._driver.volume_api.get.assert_called_once_with(
            self._context, volume['id'])
        self.fake_private_storage.get.assert_called_once_with(
            self.share['id'], 'volume_id'
        )
    def test_get_volume_none(self):
        """No matching volume yields None rather than an error."""
        vol_name = (
            self._driver.configuration.volume_name_template % self.share['id'])
        self.mock_object(self._driver.volume_api, 'get_all',
                         mock.Mock(return_value=[]))
        result = self._driver._get_volume(self._context, self.share['id'])
        self.assertIsNone(result)
        self._driver.volume_api.get_all.assert_called_once_with(
            self._context, {'all_tenants': True, 'name': vol_name})
    def test_get_volume_error(self):
        """Multiple volumes sharing the templated name is an error."""
        volume = fake_volume.FakeVolume(
            name=CONF.volume_name_template % self.share['id'])
        self.mock_object(self._driver.volume_api, 'get_all',
                         mock.Mock(return_value=[volume, volume]))
        self.assertRaises(exception.ManilaException,
                          self._driver._get_volume,
                          self._context, self.share['id'])
        self._driver.volume_api.get_all.assert_called_once_with(
            self._context, {'all_tenants': True, 'name': volume['name']})
    def test_get_volume_snapshot(self):
        """_get_volume_snapshot locates the snapshot by its templated name."""
        volume_snapshot = fake_volume.FakeVolumeSnapshot(
            name=self._driver.configuration.volume_snapshot_name_template %
            self.snapshot['id'])
        self.mock_object(self._driver.volume_api, 'get_all_snapshots',
                         mock.Mock(return_value=[volume_snapshot]))
        result = self._driver._get_volume_snapshot(self._context,
                                                   self.snapshot['id'])
        self.assertEqual(volume_snapshot, result)
        self._driver.volume_api.get_all_snapshots.assert_called_once_with(
            self._context, {'name': volume_snapshot['name']})
    def test_get_volume_snapshot_with_private_data(self):
        """A stored snapshot id is fetched directly, bypassing the listing."""
        volume_snapshot = fake_volume.FakeVolumeSnapshot()
        self.mock_object(self._driver.volume_api, 'get_snapshot',
                         mock.Mock(return_value=volume_snapshot))
        self.mock_object(self.fake_private_storage, 'get',
                         mock.Mock(return_value=volume_snapshot['id']))
        result = self._driver._get_volume_snapshot(self._context,
                                                   self.snapshot['id'])
        self.assertEqual(volume_snapshot, result)
        self._driver.volume_api.get_snapshot.assert_called_once_with(
            self._context, volume_snapshot['id'])
        self.fake_private_storage.get.assert_called_once_with(
            self.snapshot['id'], 'volume_snapshot_id'
        )
    def test_get_volume_snapshot_none(self):
        """No matching snapshot yields None rather than an error."""
        # NOTE(review): this test passes the share id, not a snapshot id,
        # into the snapshot name template — presumably any id works for the
        # "not found" path, but confirm this was intentional.
        snap_name = (
            self._driver.configuration.volume_snapshot_name_template %
            self.share['id'])
        self.mock_object(self._driver.volume_api, 'get_all_snapshots',
                         mock.Mock(return_value=[]))
        result = self._driver._get_volume_snapshot(self._context,
                                                   self.share['id'])
        self.assertIsNone(result)
        self._driver.volume_api.get_all_snapshots.assert_called_once_with(
            self._context, {'name': snap_name})
    def test_get_volume_snapshot_error(self):
        """Multiple snapshots with the same templated name is an error."""
        volume_snapshot = fake_volume.FakeVolumeSnapshot(
            name=self._driver.configuration.volume_snapshot_name_template %
            self.snapshot['id'])
        self.mock_object(
            self._driver.volume_api, 'get_all_snapshots',
            mock.Mock(return_value=[volume_snapshot, volume_snapshot]))
        self.assertRaises(
            exception.ManilaException, self._driver._get_volume_snapshot,
            self._context, self.snapshot['id'])
        self._driver.volume_api.get_all_snapshots.assert_called_once_with(
            self._context, {'name': volume_snapshot['name']})
    def test_detach_volume(self):
        """An attached volume is detached and polled until available."""
        available_volume = fake_volume.FakeVolume()
        attached_volume = fake_volume.FakeVolume(status='in-use')
        self.mock_object(self._driver, '_get_volume',
                         mock.Mock(return_value=attached_volume))
        self.mock_object(self._driver.compute_api, 'instance_volumes_list',
                         mock.Mock(return_value=[attached_volume]))
        self.mock_object(self._driver.compute_api, 'instance_volume_detach')
        self.mock_object(self._driver.volume_api, 'get',
                         mock.Mock(return_value=available_volume))
        self._driver._detach_volume(self._context, self.share,
                                    self.server['backend_details'])
        self._driver.compute_api.instance_volume_detach.\
            assert_called_once_with(
                self._context,
                self.server['backend_details']['instance_id'],
                available_volume['id'])
        # The volume is re-read after the detach to confirm its new status.
        self._driver.volume_api.get.assert_called_once_with(
            self._context, available_volume['id'])
    def test_detach_volume_detached(self):
        """A volume not attached to the instance triggers no detach call."""
        available_volume = fake_volume.FakeVolume()
        attached_volume = fake_volume.FakeVolume(status='in-use')
        self.mock_object(self._driver, '_get_volume',
                         mock.Mock(return_value=attached_volume))
        self.mock_object(self._driver.compute_api, 'instance_volumes_list',
                         mock.Mock(return_value=[]))
        self.mock_object(self._driver.volume_api, 'get',
                         mock.Mock(return_value=available_volume))
        self.mock_object(self._driver.compute_api, 'instance_volume_detach')
        self._driver._detach_volume(self._context, self.share,
                                    self.server['backend_details'])
        self.assertFalse(self._driver.volume_api.get.called)
        self.assertFalse(
            self._driver.compute_api.instance_volume_detach.called)
    def test_allocate_container(self):
        """_allocate_container creates a Cinder volume for the share."""
        fake_vol = fake_volume.FakeVolume()
        self.fake_conf.cinder_volume_type = 'fake_volume_type'
        self.mock_object(self._driver.volume_api, 'create',
                         mock.Mock(return_value=fake_vol))
        result = self._driver._allocate_container(self._context, self.share)
        self.assertEqual(fake_vol, result)
        # Name follows the template; volume type and AZ come from config
        # and share respectively; no snapshot source for a fresh share.
        self._driver.volume_api.create.assert_called_once_with(
            self._context,
            self.share['size'],
            CONF.volume_name_template % self.share['id'],
            '',
            snapshot=None,
            volume_type='fake_volume_type',
            availability_zone=self.share['availability_zone'])
    # NOTE(review): method name misspells "snapshot" as "snaphot"; left
    # as-is to avoid churning test-selection references — rename repo-wide.
    def test_allocate_container_with_snaphot(self):
        """Allocating from a snapshot passes the snapshot to volume create."""
        fake_vol = fake_volume.FakeVolume()
        fake_vol_snap = fake_volume.FakeVolumeSnapshot()
        self.mock_object(self._driver, '_get_volume_snapshot',
                         mock.Mock(return_value=fake_vol_snap))
        self.mock_object(self._driver.volume_api, 'create',
                         mock.Mock(return_value=fake_vol))
        result = self._driver._allocate_container(self._context,
                                                  self.share,
                                                  self.snapshot)
        self.assertEqual(fake_vol, result)
        self._driver.volume_api.create.assert_called_once_with(
            self._context,
            self.share['size'],
            CONF.volume_name_template % self.share['id'],
            '',
            snapshot=fake_vol_snap,
            volume_type=None,
            availability_zone=self.share['availability_zone'])
    def test_allocate_container_error(self):
        """A volume created in 'error' status fails the allocation."""
        fake_vol = fake_volume.FakeVolume(status='error')
        self.mock_object(self._driver.volume_api, 'create',
                         mock.Mock(return_value=fake_vol))
        self.assertRaises(exception.ManilaException,
                          self._driver._allocate_container,
                          self._context,
                          self.share)
    def test_wait_for_available_volume(self):
        """Polling stops as soon as the volume reports 'available'."""
        fake_volume = {'status': 'creating', 'id': 'fake'}
        fake_available_volume = {'status': 'available', 'id': 'fake'}
        self.mock_object(self._driver.volume_api, 'get',
                         mock.Mock(return_value=fake_available_volume))
        actual_result = self._driver._wait_for_available_volume(
            fake_volume, 5, "error", "timeout")
        self.assertEqual(fake_available_volume, actual_result)
        self._driver.volume_api.get.assert_called_once_with(
            mock.ANY, fake_volume['id'])
    @mock.patch('time.sleep')
    def test_wait_for_available_volume_error_extending(self, mock_sleep):
        """'error_extending' status aborts immediately without polling."""
        fake_volume = {'status': 'error_extending', 'id': 'fake'}
        self.assertRaises(exception.ManilaException,
                          self._driver._wait_for_available_volume,
                          fake_volume, 5, 'error', 'timeout')
        self.assertFalse(mock_sleep.called)
    @mock.patch('time.sleep')
    def test_wait_for_extending_volume(self, mock_sleep):
        """With expected_size set, polling continues until the size grows."""
        initial_size = 1
        expected_size = 2
        mock_volume = fake_volume.FakeVolume(status='available',
                                             size=initial_size)
        mock_extending_vol = fake_volume.FakeVolume(status='extending',
                                                    size=initial_size)
        mock_extended_vol = fake_volume.FakeVolume(status='available',
                                                   size=expected_size)
        self.mock_object(self._driver.volume_api, 'get',
                         mock.Mock(side_effect=[mock_extending_vol,
                                                mock_extended_vol]))
        result = self._driver._wait_for_available_volume(
            mock_volume, 5, "error", "timeout",
            expected_size=expected_size)
        # Two polls: first sees 'extending', second sees the resized volume.
        expected_get_count = 2
        self.assertEqual(mock_extended_vol, result)
        self._driver.volume_api.get.assert_has_calls(
            [mock.call(self._driver.admin_context, mock_volume['id'])] *
            expected_get_count)
        mock_sleep.assert_has_calls([mock.call(1)] * expected_get_count)
    @ddt.data(mock.Mock(return_value={'status': 'creating', 'id': 'fake'}),
              mock.Mock(return_value={'status': 'error', 'id': 'fake'}))
    def test_wait_for_available_volume_invalid(self, volume_get_mock):
        """Stuck-in-'creating' (timeout) and 'error' states both raise."""
        fake_volume = {'status': 'creating', 'id': 'fake'}
        self.mock_object(self._driver.volume_api, 'get', volume_get_mock)
        # Fake the clock so the 1-second timeout expires deterministically.
        self.mock_object(time, 'time',
                         mock.Mock(side_effect=[1.0, 1.33, 1.67, 2.0]))
        self.assertRaises(
            exception.ManilaException,
            self._driver._wait_for_available_volume,
            fake_volume, 1, "error", "timeout"
        )
    def test_deallocate_container(self):
        """_deallocate_container deletes the share's volume and waits for it."""
        fake_vol = fake_volume.FakeVolume()
        self.mock_object(self._driver, '_get_volume',
                         mock.Mock(return_value=fake_vol))
        self.mock_object(self._driver.volume_api, 'delete')
        # VolumeNotFound from the follow-up get means deletion completed.
        self.mock_object(self._driver.volume_api, 'get', mock.Mock(
            side_effect=exception.VolumeNotFound(volume_id=fake_vol['id'])))
        self._driver._deallocate_container(self._context, self.share)
        self._driver._get_volume.assert_called_once_with(
            self._context, self.share['id'])
        self._driver.volume_api.delete.assert_called_once_with(
            self._context, fake_vol['id'])
        self._driver.volume_api.get.assert_called_once_with(
            self._context, fake_vol['id'])
    def test_deallocate_container_with_volume_not_found(self):
        """A missing volume is tolerated — nothing to delete."""
        fake_vol = fake_volume.FakeVolume()
        self.mock_object(self._driver, '_get_volume',
                         mock.Mock(side_effect=exception.VolumeNotFound(
                             volume_id=fake_vol['id'])))
        self.mock_object(self._driver.volume_api, 'delete')
        self._driver._deallocate_container(self._context, self.share)
        self._driver._get_volume.assert_called_once_with(
            self._context, self.share['id'])
        self.assertFalse(self._driver.volume_api.delete.called)
    def test_create_share_from_snapshot(self):
        """Creating from a snapshot allocates from it, attaches and mounts."""
        vol1 = 'fake_vol1'
        vol2 = 'fake_vol2'
        self._helper_nfs.create_export.return_value = 'fakelocation'
        expected_el = [{
            'is_admin_only': False,
            'path': 'fakelocation',
            'metadata': {'export_location_metadata_example': 'example'},
        }]
        self.mock_object(self._driver, '_allocate_container',
                         mock.Mock(return_value=vol1))
        self.mock_object(self._driver, '_attach_volume',
                         mock.Mock(return_value=vol2))
        self.mock_object(self._driver, '_mount_device')
        result = self._driver.create_share_from_snapshot(
            self._context,
            self.share,
            self.snapshot,
            share_server=self.server)
        self.assertEqual(expected_el, result)
        # Allocation receives the snapshot so the new volume is a clone;
        # no _format_device here — the cloned volume already has a fs.
        self._driver._allocate_container.assert_called_once_with(
            self._driver.admin_context, self.share, self.snapshot)
        self._driver._attach_volume.assert_called_once_with(
            self._driver.admin_context, self.share,
            self.server['backend_details']['instance_id'], vol1)
        self._driver._mount_device.assert_called_once_with(
            self.share, self.server['backend_details'], vol2)
        self._helper_nfs.create_export.assert_called_once_with(
            self.server['backend_details'], self.share['name'])
    def test_create_share_from_snapshot_invalid_helper(self):
        """A protocol with no registered helper rejects snapshot restores."""
        self._driver._helpers = {'CIFS': self._helper_cifs}
        self.assertRaises(exception.InvalidShare,
                          self._driver.create_share_from_snapshot,
                          self._context, self.share, self.snapshot,
                          share_server=self.server)
    def test_delete_share_no_share_servers_handling(self):
        """With DHSS=False, delete_share uses the common service server."""
        self.mock_object(self._driver, '_deallocate_container')
        self.mock_object(
            self._driver.service_instance_manager,
            'get_common_server', mock.Mock(return_value=self.server))
        # The service instance is reported unavailable; deallocation must
        # still proceed.
        self.mock_object(
            self._driver.service_instance_manager,
            'ensure_service_instance', mock.Mock(return_value=False))
        CONF.set_default('driver_handles_share_servers', False)
        self._driver.delete_share(self._context, self.share)
        self._driver.service_instance_manager.get_common_server.\
            assert_called_once_with()
        self._driver._deallocate_container.assert_called_once_with(
            self._driver.admin_context, self.share)
        self._driver.service_instance_manager.ensure_service_instance.\
            assert_called_once_with(
                self._context, self.server['backend_details'])
    def test_delete_share(self):
        """delete_share unexports, unmounts, detaches and deallocates."""
        self.mock_object(self._driver, '_unmount_device')
        self.mock_object(self._driver, '_detach_volume')
        self.mock_object(self._driver, '_deallocate_container')
        self._driver.delete_share(
            self._context, self.share, share_server=self.server)
        self._helper_nfs.remove_export.assert_called_once_with(
            self.server['backend_details'], self.share['name'])
        self._driver._unmount_device.assert_called_once_with(
            self.share, self.server['backend_details'])
        self._driver._detach_volume.assert_called_once_with(
            self._driver.admin_context, self.share,
            self.server['backend_details'])
        self._driver._deallocate_container.assert_called_once_with(
            self._driver.admin_context, self.share)
        self._driver.service_instance_manager.ensure_service_instance.\
            assert_called_once_with(
                self._context, self.server['backend_details'])
    def test_detach_volume_with_volume_not_found(self):
        """A missing volume during detach is logged as a warning, not fatal."""
        fake_vol = fake_volume.FakeVolume()
        fake_server_details = mock.MagicMock()
        self.mock_object(self._driver.compute_api, 'instance_volumes_list',
                         mock.Mock(return_value=[]))
        self.mock_object(self._driver, '_get_volume',
                         mock.Mock(side_effect=exception.VolumeNotFound(
                             volume_id=fake_vol['id'])))
        self._driver._detach_volume(self._context,
                                    self.share,
                                    fake_server_details)
        (self._driver.compute_api.instance_volumes_list.
            assert_called_once_with(self._driver.admin_context,
                                    fake_server_details['instance_id']))
        (self._driver._get_volume.
            assert_called_once_with(self._driver.admin_context,
                                    self.share['id']))
        # The warning-log mock is installed in setUp.
        self.assertEqual(1, self.mock_warning_log.call_count)
    def test_delete_share_without_share_server(self):
        """With no share server, only the container deallocation happens."""
        self.mock_object(self._driver, '_unmount_device')
        self.mock_object(self._driver, '_detach_volume')
        self.mock_object(self._driver, '_deallocate_container')
        self._driver.delete_share(
            self._context, self.share, share_server=None)
        self.assertFalse(self._helper_nfs.remove_export.called)
        self.assertFalse(self._driver._unmount_device.called)
        self.assertFalse(self._driver._detach_volume.called)
        self._driver._deallocate_container.assert_called_once_with(
            self._driver.admin_context, self.share)
    def test_delete_share_without_server_backend_details(self):
        """Empty backend_details skips unexport/unmount/detach steps."""
        self.mock_object(self._driver, '_unmount_device')
        self.mock_object(self._driver, '_detach_volume')
        self.mock_object(self._driver, '_deallocate_container')
        fake_share_server = {
            'instance_id': 'fake_instance_id',
            'ip': 'fake_ip',
            'username': 'fake_username',
            'password': 'fake_password',
            'pk_path': 'fake_pk_path',
            'backend_details': {}
        }
        self._driver.delete_share(
            self._context, self.share, share_server=fake_share_server)
        self.assertFalse(self._helper_nfs.remove_export.called)
        self.assertFalse(self._driver._unmount_device.called)
        self.assertFalse(self._driver._detach_volume.called)
        self._driver._deallocate_container.assert_called_once_with(
            self._driver.admin_context, self.share)
    def test_delete_share_without_server_availability(self):
        """An unavailable service instance still allows deallocation."""
        self.mock_object(self._driver, '_unmount_device')
        self.mock_object(self._driver, '_detach_volume')
        self.mock_object(self._driver, '_deallocate_container')
        self.mock_object(
            self._driver.service_instance_manager,
            'ensure_service_instance', mock.Mock(return_value=False))
        self._driver.delete_share(
            self._context, self.share, share_server=self.server)
        # Server-side cleanup is skipped; volume cleanup still runs.
        self.assertFalse(self._helper_nfs.remove_export.called)
        self.assertFalse(self._driver._unmount_device.called)
        self.assertFalse(self._driver._detach_volume.called)
        self._driver._deallocate_container.assert_called_once_with(
            self._driver.admin_context, self.share)
        self._driver.service_instance_manager.ensure_service_instance.\
            assert_called_once_with(
                self._context, self.server['backend_details'])
    def test_delete_share_invalid_helper(self):
        """Deleting a share whose protocol has no helper raises."""
        self._driver._helpers = {'CIFS': self._helper_cifs}
        self.assertRaises(exception.InvalidShare,
                          self._driver.delete_share,
                          self._context, self.share, share_server=self.server)
    def test_create_snapshot(self):
        """create_snapshot force-snapshots the share's backing Cinder volume."""
        fake_vol = fake_volume.FakeVolume()
        fake_vol_snap = fake_volume.FakeVolumeSnapshot(share_id=fake_vol['id'])
        self.mock_object(self._driver, '_get_volume',
                         mock.Mock(return_value=fake_vol))
        self.mock_object(self._driver.volume_api, 'create_snapshot_force',
                         mock.Mock(return_value=fake_vol_snap))
        self._driver.create_snapshot(self._context, fake_vol_snap,
                                     share_server=self.server)
        # Volume lookup happens under the admin context.
        self._driver._get_volume.assert_called_once_with(
            self._driver.admin_context, fake_vol_snap['share_id'])
        # Snapshot name comes from the configured template; description empty.
        self._driver.volume_api.create_snapshot_force.assert_called_once_with(
            self._context,
            fake_vol['id'],
            CONF.volume_snapshot_name_template % fake_vol_snap['id'],
            ''
        )
    def test_delete_snapshot(self):
        """delete_snapshot removes the Cinder snapshot and waits for it to go.

        get_snapshot raising VolumeSnapshotNotFound signals successful removal.
        """
        fake_vol_snap = fake_volume.FakeVolumeSnapshot()
        fake_vol_snap2 = {'id': 'fake_vol_snap2'}
        self.mock_object(self._driver, '_get_volume_snapshot',
                         mock.Mock(return_value=fake_vol_snap2))
        self.mock_object(self._driver.volume_api, 'delete_snapshot')
        self.mock_object(
            self._driver.volume_api, 'get_snapshot',
            mock.Mock(side_effect=exception.VolumeSnapshotNotFound(
                snapshot_id=fake_vol_snap['id'])))
        self._driver.delete_snapshot(self._context, fake_vol_snap,
                                     share_server=self.server)
        # All Cinder interaction happens under the admin context.
        self._driver._get_volume_snapshot.assert_called_once_with(
            self._driver.admin_context, fake_vol_snap['id'])
        self._driver.volume_api.delete_snapshot.assert_called_once_with(
            self._driver.admin_context, fake_vol_snap2['id'])
        self._driver.volume_api.get_snapshot.assert_called_once_with(
            self._driver.admin_context, fake_vol_snap2['id'])
    def test_ensure_share(self):
        """ensure_share re-attaches, re-mounts and re-exports (recreate=True)."""
        vol1 = 'fake_vol1'
        vol2 = 'fake_vol2'
        self._helper_nfs.create_export.return_value = 'fakelocation'
        self.mock_object(self._driver, '_get_volume',
                         mock.Mock(return_value=vol1))
        self.mock_object(self._driver, '_attach_volume',
                         mock.Mock(return_value=vol2))
        self.mock_object(self._driver, '_mount_device')
        self._driver.ensure_share(
            self._context, self.share, share_server=self.server)
        self._driver._get_volume.assert_called_once_with(
            self._context, self.share['id'])
        self._driver._attach_volume.assert_called_once_with(
            self._context, self.share,
            self.server['backend_details']['instance_id'], vol1)
        # The attached (possibly updated) volume is the one that gets mounted.
        self._driver._mount_device.assert_called_once_with(
            self.share, self.server['backend_details'], vol2)
        self._helper_nfs.create_export.assert_called_once_with(
            self.server['backend_details'], self.share['name'], recreate=True)
    def test_ensure_share_volume_is_absent(self):
        """ensure_share is a no-op past lookup when no backing volume exists."""
        self.mock_object(
            self._driver, '_get_volume', mock.Mock(return_value=None))
        self.mock_object(self._driver, '_attach_volume')
        self._driver.ensure_share(
            self._context, self.share, share_server=self.server)
        self._driver._get_volume.assert_called_once_with(
            self._context, self.share['id'])
        # With no volume there is nothing to attach.
        self.assertFalse(self._driver._attach_volume.called)
def test_ensure_share_invalid_helper(self):
self._driver._helpers = {'CIFS': self._helper_cifs}
self.assertRaises(exception.InvalidShare, self._driver.ensure_share,
self._context, self.share, share_server=self.server)
    @ddt.data(const.ACCESS_LEVEL_RW, const.ACCESS_LEVEL_RO)
    def test_update_access(self, access_level):
        """update_access delegates rule changes to the protocol helper.

        Exercised for both RW and RO access levels via ddt.
        """
        # fakes
        access_rules = [get_fake_access_rule('1.1.1.1', access_level),
                        get_fake_access_rule('2.2.2.2', access_level)]
        add_rules = [get_fake_access_rule('2.2.2.2', access_level), ]
        delete_rules = [get_fake_access_rule('3.3.3.3', access_level), ]
        # run
        self._driver.update_access(self._context, self.share, access_rules,
                                   add_rules=add_rules,
                                   delete_rules=delete_rules,
                                   share_server=self.server)
        # asserts
        self._driver._helpers[self.share['share_proto']].\
            update_access.assert_called_once_with(
                self.server['backend_details'], self.share['name'],
                access_rules, add_rules=add_rules, delete_rules=delete_rules)
@ddt.data(fake_share.fake_share(),
fake_share.fake_share(share_proto='NFSBOGUS'),
fake_share.fake_share(share_proto='CIFSBOGUS'))
def test__get_helper_with_wrong_proto(self, share):
self.assertRaises(exception.InvalidShare,
self._driver._get_helper, share)
    def test__setup_server(self):
        """setup_server forwards the network info to the instance manager."""
        sim = self._driver.instance_manager
        net_info = {
            'server_id': 'fake',
            'neutron_net_id': 'fake-net-id',
            'neutron_subnet_id': 'fake-subnet-id',
        }
        self._driver.setup_server(net_info)
        sim.set_up_service_instance.assert_called_once_with(
            self._context, net_info)
    def test__setup_server_revert(self):
        """setup_server propagates ServiceInstanceException from setup."""
        def raise_exception(*args, **kwargs):
            raise exception.ServiceInstanceException
        net_info = {'server_id': 'fake',
                    'neutron_net_id': 'fake-net-id',
                    'neutron_subnet_id': 'fake-subnet-id'}
        self.mock_object(self._driver.service_instance_manager,
                         'set_up_service_instance',
                         mock.Mock(side_effect=raise_exception))
        self.assertRaises(exception.ServiceInstanceException,
                          self._driver.setup_server,
                          net_info)
    def test__teardown_server(self):
        """teardown_server deletes the service instance under admin context."""
        server_details = {
            'instance_id': 'fake_instance_id',
            'subnet_id': 'fake_subnet_id',
            'router_id': 'fake_router_id',
        }
        self._driver.teardown_server(server_details)
        self._driver.service_instance_manager.delete_service_instance.\
            assert_called_once_with(
                self._driver.admin_context, server_details)
    def test_ssh_exec_connection_not_exist(self):
        """_ssh_exec creates and caches a new SSH connection when none exists.

        Verifies the SSHPool is built from the server's credentials
        (port 22, max_size=1) and that the (pool, ssh) pair is cached
        under the server's instance_id.
        """
        ssh_conn_timeout = 30
        CONF.set_default('ssh_conn_timeout', ssh_conn_timeout)
        ssh_output = 'fake_ssh_output'
        cmd = ['fake', 'command']
        ssh = mock.Mock()
        ssh.get_transport = mock.Mock()
        ssh.get_transport().is_active = mock.Mock(return_value=True)
        ssh_pool = mock.Mock()
        ssh_pool.create = mock.Mock(return_value=ssh)
        self.mock_object(utils, 'SSHPool', mock.Mock(return_value=ssh_pool))
        self.mock_object(processutils, 'ssh_execute',
                         mock.Mock(return_value=ssh_output))
        # Empty cache forces creation of a fresh connection.
        self._driver.ssh_connections = {}
        result = self._driver._ssh_exec(self.server, cmd)
        utils.SSHPool.assert_called_once_with(
            self.server['ip'], 22, ssh_conn_timeout, self.server['username'],
            self.server['password'], self.server['pk_path'], max_size=1)
        ssh_pool.create.assert_called_once_with()
        # The command list is joined into a single string before execution.
        processutils.ssh_execute.assert_called_once_with(
            ssh, 'fake command', check_exit_code=True)
        ssh.get_transport().is_active.assert_called_once_with()
        self.assertEqual(
            self._driver.ssh_connections,
            {self.server['instance_id']: (ssh_pool, ssh)}
        )
        self.assertEqual(ssh_output, result)
    def test_ssh_exec_connection_exist(self):
        """_ssh_exec reuses a cached, still-active SSH connection."""
        ssh_output = 'fake_ssh_output'
        cmd = ['fake', 'command']
        ssh = mock.Mock()
        ssh.get_transport = mock.Mock()
        # Transport reports active, so the cached connection is kept.
        ssh.get_transport().is_active = mock.Mock(side_effect=lambda: True)
        ssh_pool = mock.Mock()
        self.mock_object(processutils, 'ssh_execute',
                         mock.Mock(return_value=ssh_output))
        self._driver.ssh_connections = {
            self.server['instance_id']: (ssh_pool, ssh)
        }
        result = self._driver._ssh_exec(self.server, cmd)
        processutils.ssh_execute.assert_called_once_with(
            ssh, 'fake command', check_exit_code=True)
        ssh.get_transport().is_active.assert_called_once_with()
        # Cache content is unchanged.
        self.assertEqual(
            self._driver.ssh_connections,
            {self.server['instance_id']: (ssh_pool, ssh)}
        )
        self.assertEqual(ssh_output, result)
    def test_ssh_exec_connection_recreation(self):
        """_ssh_exec discards a dead cached connection and creates a new one."""
        ssh_output = 'fake_ssh_output'
        cmd = ['fake', 'command']
        ssh = mock.Mock()
        ssh.get_transport = mock.Mock()
        # Transport reports inactive -> connection must be recreated.
        ssh.get_transport().is_active = mock.Mock(side_effect=lambda: False)
        ssh_pool = mock.Mock()
        ssh_pool.create = mock.Mock(side_effect=lambda: ssh)
        ssh_pool.remove = mock.Mock()
        self.mock_object(processutils, 'ssh_execute',
                         mock.Mock(return_value=ssh_output))
        self._driver.ssh_connections = {
            self.server['instance_id']: (ssh_pool, ssh)
        }
        result = self._driver._ssh_exec(self.server, cmd)
        processutils.ssh_execute.assert_called_once_with(
            ssh, 'fake command', check_exit_code=True)
        ssh.get_transport().is_active.assert_called_once_with()
        # Dead connection removed from the pool, fresh one created and cached.
        ssh_pool.create.assert_called_once_with()
        ssh_pool.remove.assert_called_once_with(ssh)
        self.assertEqual(
            self._driver.ssh_connections,
            {self.server['instance_id']: (ssh_pool, ssh)}
        )
        self.assertEqual(ssh_output, result)
    def test__ssh_exec_check_list_comprehensions_still_work(self):
        """_ssh_exec quotes command elements containing spaces."""
        ssh_output = 'fake_ssh_output'
        # Second element contains a space and must end up shell-quoted.
        cmd = ['fake', 'command spaced']
        ssh = mock.Mock()
        ssh_pool = mock.Mock()
        ssh_pool.create = mock.Mock(side_effect=lambda: ssh)
        ssh_pool.remove = mock.Mock()
        self.mock_object(processutils, 'ssh_execute',
                         mock.Mock(return_value=ssh_output))
        self._driver.ssh_connections = {
            self.server['instance_id']: (ssh_pool, ssh)
        }
        self._driver._ssh_exec(self.server, cmd)
        processutils.ssh_execute.assert_called_once_with(
            ssh, 'fake "command spaced"', check_exit_code=True)
def test_get_share_stats_refresh_false(self):
self._driver._stats = {'fake_key': 'fake_value'}
result = self._driver.get_share_stats(False)
self.assertEqual(self._driver._stats, result)
    def test_get_share_stats_refresh_true(self):
        """get_share_stats(True) rebuilds stats with all standard keys."""
        fake_stats = {'fake_key': 'fake_value'}
        self._driver._stats = fake_stats
        expected_keys = [
            'qos', 'driver_version', 'share_backend_name',
            'free_capacity_gb', 'total_capacity_gb',
            'driver_handles_share_servers',
            'reserved_percentage', 'vendor_name', 'storage_protocol',
        ]
        result = self._driver.get_share_stats(True)
        # Refresh replaces the stale cached dict entirely.
        self.assertNotEqual(fake_stats, result)
        for key in expected_keys:
            self.assertIn(key, result)
        self.assertTrue(result['driver_handles_share_servers'])
        self.assertEqual('Open Source', result['vendor_name'])
    def _setup_manage_mocks(self,
                            get_share_type_extra_specs='False',
                            is_device_mounted=True,
                            server_details=None):
        """Set up the common mocks for the manage_existing tests.

        Forces DHSS=False, stubs share-type extra specs, device-mounted
        detection, and makes get_common_server return a server with the
        given backend details.
        """
        CONF.set_default('driver_handles_share_servers', False)
        self.mock_object(share_types, 'get_share_type_extra_specs',
                         mock.Mock(return_value=get_share_type_extra_specs))
        self.mock_object(self._driver, '_is_device_mounted',
                         mock.Mock(return_value=is_device_mounted))
        self.mock_object(self._driver, 'service_instance_manager')
        server = {'backend_details': server_details}
        self.mock_object(self._driver.service_instance_manager,
                         'get_common_server',
                         mock.Mock(return_value=server))
def test_manage_invalid_protocol(self):
share = {'share_proto': 'fake_proto'}
self._setup_manage_mocks()
self.assertRaises(exception.InvalidShare,
self._driver.manage_existing, share, {})
    def test_manage_not_mounted_share(self):
        """manage_existing rejects a share whose device is not mounted."""
        share = get_fake_manage_share()
        fake_path = '/foo/bar'
        self._setup_manage_mocks(is_device_mounted=False)
        self.mock_object(
            self._driver._helpers[share['share_proto']],
            'get_share_path_by_export_location',
            mock.Mock(return_value=fake_path))
        self.assertRaises(exception.ManageInvalidShare,
                          self._driver.manage_existing, share, {})
        self.assertEqual(
            1,
            self._driver.service_instance_manager.get_common_server.call_count)
        # server_details is None because _setup_manage_mocks defaulted it.
        self._driver._is_device_mounted.assert_called_once_with(
            fake_path, None)
        self._driver._helpers[share['share_proto']].\
            get_share_path_by_export_location.assert_called_once_with(
                None, share['export_locations'][0]['path'])
    def test_manage_share_not_attached_to_cinder_volume_invalid_size(self):
        """manage_existing propagates a size-detection failure.

        With no backing Cinder volume, the size comes from
        _get_mounted_share_size; if that raises ManageInvalidShare,
        manage_existing must re-raise it.
        """
        share = get_fake_manage_share()
        server_details = {}
        fake_path = '/foo/bar'
        self._setup_manage_mocks(server_details=server_details)
        self.mock_object(self._driver, '_get_volume',
                         mock.Mock(return_value=None))
        error = exception.ManageInvalidShare(reason="fake")
        self.mock_object(
            self._driver, '_get_mounted_share_size',
            mock.Mock(side_effect=error))
        self.mock_object(
            self._driver._helpers[share['share_proto']],
            'get_share_path_by_export_location',
            mock.Mock(return_value=fake_path))
        self.assertRaises(exception.ManageInvalidShare,
                          self._driver.manage_existing, share, {})
        self._driver._get_mounted_share_size.assert_called_once_with(
            fake_path, server_details)
        self._driver._helpers[share['share_proto']].\
            get_share_path_by_export_location.assert_called_once_with(
                server_details, share['export_locations'][0]['path'])
    def test_manage_share_not_attached_to_cinder_volume(self):
        """manage_existing succeeds for a share with no backing Cinder volume.

        The size is read from the mounted filesystem and the export
        locations come from the protocol helper.
        """
        share = get_fake_manage_share()
        share_size = "fake"
        fake_path = '/foo/bar'
        fake_exports = ['foo', 'bar']
        server_details = {}
        self._setup_manage_mocks(server_details=server_details)
        self.mock_object(self._driver, '_get_volume')
        self.mock_object(self._driver, '_get_mounted_share_size',
                         mock.Mock(return_value=share_size))
        self.mock_object(
            self._driver._helpers[share['share_proto']],
            'get_share_path_by_export_location',
            mock.Mock(return_value=fake_path))
        self.mock_object(
            self._driver._helpers[share['share_proto']],
            'get_exports_for_share',
            mock.Mock(return_value=fake_exports))
        result = self._driver.manage_existing(share, {})
        self.assertEqual(
            {'size': share_size, 'export_locations': fake_exports}, result)
        self._driver._helpers[share['share_proto']].get_exports_for_share.\
            assert_called_once_with(
                server_details, share['export_locations'][0]['path'])
        self._driver._helpers[share['share_proto']].\
            get_share_path_by_export_location.assert_called_once_with(
                server_details, share['export_locations'][0]['path'])
        self._driver._get_mounted_share_size.assert_called_once_with(
            fake_path, server_details)
        # Without a volume_id driver option, Cinder is never consulted.
        self.assertFalse(self._driver._get_volume.called)
    def test_manage_share_attached_to_cinder_volume_not_found(self):
        """manage_existing fails when the given volume_id does not exist."""
        share = get_fake_manage_share()
        server_details = {}
        driver_options = {'volume_id': 'fake'}
        self._setup_manage_mocks(server_details=server_details)
        self.mock_object(
            self._driver.volume_api, 'get',
            mock.Mock(side_effect=exception.VolumeNotFound(volume_id="fake"))
        )
        # VolumeNotFound is translated into ManageInvalidShare.
        self.assertRaises(exception.ManageInvalidShare,
                          self._driver.manage_existing, share, driver_options)
        self._driver.volume_api.get.assert_called_once_with(
            mock.ANY, driver_options['volume_id'])
    def test_manage_share_attached_to_cinder_volume_not_mounted_to_srv(self):
        """manage_existing fails if the volume is not attached to the server.

        instance_volumes_list returns an empty list, so the claimed volume
        cannot belong to the common service instance.
        """
        share = get_fake_manage_share()
        server_details = {'instance_id': 'fake'}
        driver_options = {'volume_id': 'fake'}
        volume = {'id': 'fake'}
        self._setup_manage_mocks(server_details=server_details)
        self.mock_object(self._driver.volume_api, 'get',
                         mock.Mock(return_value=volume))
        self.mock_object(self._driver.compute_api, 'instance_volumes_list',
                         mock.Mock(return_value=[]))
        self.assertRaises(exception.ManageInvalidShare,
                          self._driver.manage_existing, share, driver_options)
        self._driver.volume_api.get.assert_called_once_with(
            mock.ANY, driver_options['volume_id'])
        self._driver.compute_api.instance_volumes_list.assert_called_once_with(
            mock.ANY, server_details['instance_id'])
def test_manage_share_attached_to_cinder_volume(self):
share = get_fake_manage_share()
fake_size = 'foobar'
fake_exports = ['foo', 'bar']
server_details = {'instance_id': 'fake'}
driver_options = {'volume_id': 'fake'}
volume = {'id': 'fake', 'name': 'fake_volume_1', 'size': fake_size}
self._setup_manage_mocks(server_details=server_details)
self.mock_object(self._driver.volume_api, 'get',
mock.Mock(return_value=volume))
self._driver.volume_api.update = mock.Mock()
fake_volume = mock.Mock()
fake_volume.id = 'fake'
self.mock_object(self._driver.compute_api, 'instance_volumes_list',
mock.Mock(return_value=[fake_volume]))
self.mock_object(
self._driver._helpers[share['share_proto']],
'get_exports_for_share',
mock.Mock(return_value=fake_exports))
result = self._driver.manage_existing(share, driver_options)
self.assertEqual(
{'size': fake_size, 'export_locations': fake_exports}, result)
self._driver._helpers[share['share_proto']].get_exports_for_share.\
assert_called_once_with(
server_details, share['export_locations'][0]['path'])
expected_volume_update = {
'name': self._driver._get_volume_name(share['id'])
}
self._driver.volume_api.update.assert_called_once_with(
mock.ANY, volume['id'], expected_volume_update)
self.fake_private_storage.update.assert_called_once_with(
share['id'], {'volume_id': volume['id']}
)
    def test_get_mounted_share_size(self):
        """_get_mounted_share_size parses the size out of `df`-style output."""
        output = ("Filesystem blocks Used Available Capacity Mounted on\n"
                  "/dev/fake 1G 1G 1G 4% /shares/share-fake")
        self.mock_object(self._driver, '_ssh_exec',
                         mock.Mock(return_value=(output, '')))
        actual_result = self._driver._get_mounted_share_size('/fake/path', {})
        # "1G" is parsed to the integer 1 (GiB).
        self.assertEqual(1, actual_result)
    @ddt.data("fake\nfake\n", "fake", "fake\n")
    def test_get_mounted_share_size_invalid_output(self, output):
        """_get_mounted_share_size rejects output it cannot parse."""
        self.mock_object(self._driver, '_ssh_exec',
                         mock.Mock(return_value=(output, '')))
        self.assertRaises(exception.ManageInvalidShare,
                          self._driver._get_mounted_share_size,
                          '/fake/path', {})
    def test_get_consumed_space(self):
        """_get_consumed_space reads column 2 of the mount stats in MB.

        The MB value (1024) is converted to GB (1) by the driver.
        """
        mount_path = "fake_path"
        server_details = {}
        index = 2
        valid_result = 1
        self.mock_object(self._driver, '_get_mount_stats_by_index',
                         mock.Mock(return_value=valid_result * 1024))
        actual_result = self._driver._get_consumed_space(
            mount_path, server_details)
        self.assertEqual(valid_result, actual_result)
        self._driver._get_mount_stats_by_index.assert_called_once_with(
            mount_path, server_details, index, block_size='M'
        )
    def test_get_consumed_space_invalid(self):
        """_get_consumed_space wraps stats failures as InvalidShare."""
        self.mock_object(
            self._driver,
            '_get_mount_stats_by_index',
            mock.Mock(side_effect=exception.ManilaException("fake"))
        )
        self.assertRaises(
            exception.InvalidShare,
            self._driver._get_consumed_space,
            "fake", "fake"
        )
def test_extend_share(self):
fake_volume = "fake"
fake_share = {
'id': 'fake',
'share_proto': 'NFS',
'name': 'test_share',
}
new_size = 123
srv_details = self.server['backend_details']
self.mock_object(
self._driver.service_instance_manager,
'get_common_server',
mock.Mock(return_value=self.server)
)
self.mock_object(self._driver, '_unmount_device')
self.mock_object(self._driver, '_detach_volume')
self.mock_object(self._driver, '_extend_volume')
self.mock_object(self._driver, '_attach_volume')
self.mock_object(self._driver, '_mount_device')
self.mock_object(self._driver, '_resize_filesystem')
self.mock_object(
self._driver, '_get_volume',
mock.Mock(return_value=fake_volume)
)
CONF.set_default('driver_handles_share_servers', False)
self._driver.extend_share(fake_share, new_size)
self.assertTrue(
self._driver.service_instance_manager.get_common_server.called)
self._driver._unmount_device.assert_called_once_with(
fake_share, srv_details)
self._driver._detach_volume.assert_called_once_with(
mock.ANY, fake_share, srv_details)
self._driver._get_volume.assert_called_once_with(
mock.ANY, fake_share['id'])
self._driver._extend_volume.assert_called_once_with(
mock.ANY, fake_volume, new_size)
self._driver._attach_volume.assert_called_once_with(
mock.ANY, fake_share, srv_details['instance_id'], mock.ANY)
self._helper_nfs.disable_access_for_maintenance.\
assert_called_once_with(srv_details, 'test_share')
self._helper_nfs.restore_access_after_maintenance.\
assert_called_once_with(srv_details, 'test_share')
self.assertTrue(self._driver._resize_filesystem.called)
def test_extend_volume(self):
fake_volume = {'id': 'fake'}
new_size = 123
self.mock_object(self._driver.volume_api, 'extend')
self.mock_object(self._driver, '_wait_for_available_volume')
self._driver._extend_volume(self._context, fake_volume, new_size)
self._driver.volume_api.extend.assert_called_once_with(
self._context, fake_volume['id'], new_size
)
self._driver._wait_for_available_volume.assert_called_once_with(
fake_volume, mock.ANY, msg_timeout=mock.ANY, msg_error=mock.ANY,
expected_size=new_size
)
    def test_resize_filesystem(self):
        """_resize_filesystem runs fsck then resize2fs over SSH, and only
        those two commands."""
        fake_server_details = {'fake': 'fake'}
        fake_volume = {'mountpoint': '/dev/fake'}
        self.mock_object(self._driver, '_ssh_exec')
        self._driver._resize_filesystem(
            fake_server_details, fake_volume, new_size=123)
        self._driver._ssh_exec.assert_any_call(
            fake_server_details, ['sudo', 'fsck', '-pf', '/dev/fake'])
        self._driver._ssh_exec.assert_any_call(
            fake_server_details,
            ['sudo', 'resize2fs', '/dev/fake', "%sG" % 123]
        )
        self.assertEqual(2, self._driver._ssh_exec.call_count)
    @ddt.data(
        {
            'source': processutils.ProcessExecutionError(
                stderr="resize2fs: New size smaller than minimum (123456)"),
            'target': exception.Invalid
        },
        {
            'source': processutils.ProcessExecutionError(stderr="fake_error"),
            'target': exception.ManilaException
        }
    )
    @ddt.unpack
    def test_resize_filesystem_invalid_new_size(self, source, target):
        """_resize_filesystem maps resize2fs failures to manila exceptions.

        "New size smaller than minimum" becomes Invalid; any other
        execution error becomes a generic ManilaException.
        """
        fake_server_details = {'fake': 'fake'}
        fake_volume = {'mountpoint': '/dev/fake'}
        # First SSH call (fsck) succeeds; second (resize2fs) raises.
        ssh_mock = mock.Mock(side_effect=["fake", source])
        self.mock_object(self._driver, '_ssh_exec', ssh_mock)
        self.assertRaises(
            target,
            self._driver._resize_filesystem,
            fake_server_details, fake_volume, new_size=123
        )
    def test_shrink_share_invalid_size(self):
        """shrink_share refuses when consumed space exceeds the new size."""
        fake_share = {'id': 'fake', 'export_locations': [{'path': 'test'}]}
        new_size = 123
        self.mock_object(
            self._driver.service_instance_manager,
            'get_common_server',
            mock.Mock(return_value=self.server)
        )
        self.mock_object(self._driver, '_get_helper')
        # Consumed space (200) > requested size (123) -> data-loss guard.
        self.mock_object(self._driver, '_get_consumed_space',
                         mock.Mock(return_value=200))
        CONF.set_default('driver_handles_share_servers', False)
        self.assertRaises(
            exception.ShareShrinkingPossibleDataLoss,
            self._driver.shrink_share,
            fake_share,
            new_size
        )
        self._driver._get_helper.assert_called_once_with(fake_share)
        self._driver._get_consumed_space.assert_called_once_with(
            mock.ANY, self.server['backend_details'])
    def _setup_shrink_mocks(self):
        """Set up the common mocks for the shrink_share tests.

        Returns (share, volume, new_size, server_details, helper) with
        consumed space mocked to 100 (below the 123 target size).
        """
        share = {'id': 'fake', 'export_locations': [{'path': 'test'}],
                 'name': 'fake'}
        volume = {'id': 'fake'}
        new_size = 123
        server_details = self.server['backend_details']
        self.mock_object(
            self._driver.service_instance_manager,
            'get_common_server',
            mock.Mock(return_value=self.server)
        )
        helper = mock.Mock()
        self.mock_object(self._driver, '_get_helper',
                         mock.Mock(return_value=helper))
        self.mock_object(self._driver, '_get_consumed_space',
                         mock.Mock(return_value=100))
        self.mock_object(self._driver, '_get_volume',
                         mock.Mock(return_value=volume))
        self.mock_object(self._driver, '_unmount_device')
        self.mock_object(self._driver, '_mount_device')
        CONF.set_default('driver_handles_share_servers', False)
        return share, volume, new_size, server_details, helper
    @ddt.data({'source': exception.Invalid("fake"),
               'target': exception.ShareShrinkingPossibleDataLoss},
              {'source': exception.ManilaException("fake"),
               'target': exception.Invalid})
    @ddt.unpack
    def test_shrink_share_error_on_resize_fs(self, source, target):
        """shrink_share translates _resize_filesystem failures.

        Invalid (size below minimum) becomes
        ShareShrinkingPossibleDataLoss; any other ManilaException becomes
        Invalid.
        """
        share, vol, size, server_details, _ = self._setup_shrink_mocks()
        resize_mock = mock.Mock(side_effect=source)
        self.mock_object(self._driver, '_resize_filesystem', resize_mock)
        self.assertRaises(target, self._driver.shrink_share, share, size)
        resize_mock.assert_called_once_with(server_details, vol,
                                            new_size=size)
def test_shrink_share(self):
share, vol, size, server_details, helper = self._setup_shrink_mocks()
self.mock_object(self._driver, '_resize_filesystem')
self._driver.shrink_share(share, size)
self._driver._get_helper.assert_called_once_with(share)
self._driver._get_consumed_space.assert_called_once_with(
mock.ANY, server_details)
self._driver._get_volume.assert_called_once_with(mock.ANY, share['id'])
self._driver._unmount_device.assert_called_once_with(share,
server_details)
self._driver._resize_filesystem(
server_details, vol, new_size=size)
self._driver._mount_device(share, server_details, vol)
self.assertTrue(helper.disable_access_for_maintenance.called)
self.assertTrue(helper.restore_access_after_maintenance.called)
    @ddt.data({'share_servers': [], 'result': None},
              {'share_servers': None, 'result': None},
              {'share_servers': ['fake'], 'result': 'fake'},
              {'share_servers': ['fake', 'test'], 'result': 'fake'})
    @ddt.unpack
    def tests_choose_share_server_compatible_with_share(self, share_servers,
                                                        result):
        """Without a CG, the first share server (or None) is chosen."""
        fake_share = "fake"
        actual_result = self._driver.choose_share_server_compatible_with_share(
            self._context, share_servers, fake_share
        )
        self.assertEqual(result, actual_result)
    @ddt.data({'consistency_group': {'share_server_id': 'fake'},
               'result': {'id': 'fake'}},
              {'consistency_group': None, 'result': {'id': 'fake'}},
              {'consistency_group': {'share_server_id': 'test'},
               'result': {'id': 'test'}})
    @ddt.unpack
    def tests_choose_share_server_compatible_with_share_and_cg(
            self, consistency_group, result):
        """With a CG, the server matching the CG's share_server_id wins;
        without one, the first server is chosen."""
        share_servers = [{'id': 'fake'}, {'id': 'test'}]
        fake_share = "fake"
        actual_result = self._driver.choose_share_server_compatible_with_share(
            self._context, share_servers, fake_share,
            consistency_group=consistency_group
        )
        self.assertEqual(result, actual_result)
    def test_create_consistency_group(self):
        """create_consistency_group is a logged no-op returning None."""
        FAKE_SNAP_DICT = get_fake_snap_dict()
        result = self._driver.create_consistency_group(
            self._context, FAKE_SNAP_DICT, share_server=self.server)
        self.assertEqual(1, self.mock_debug_log.call_count)
        self.assertEqual(1, self.mock_warning_log.call_count)
        self.assertIsNone(result)
    def test_delete_consistency_group(self):
        """delete_consistency_group is a logged no-op returning None."""
        FAKE_SNAP_DICT = get_fake_snap_dict()
        result = self._driver.delete_consistency_group(
            self._context, FAKE_SNAP_DICT, share_server=self.server)
        self.assertEqual(1, self.mock_debug_log.call_count)
        self.assertIsNone(result)
    def test_create_cgsnapshot_no_cg_members(self):
        """create_cgsnapshot with no members snapshots nothing."""
        FAKE_SNAP_DICT = dict(get_fake_snap_dict(), cgsnapshot_members=[])
        mock_snapshot_creation = self.mock_object(generic.GenericShareDriver,
                                                  'create_snapshot')
        result = self._driver.create_cgsnapshot(
            self._context, FAKE_SNAP_DICT, share_server=self.server)
        self.assertEqual(1, self.mock_debug_log.call_count)
        self.assertEqual(2, self.mock_warning_log.call_count)
        self.assertFalse(mock_snapshot_creation.called)
        self.assertEqual((None, None), result)
    @ddt.data(
        {
            'delete_snap_side_effect': None,
            'expected_error_log_call_count': 0,
        },
        {
            'delete_snap_side_effect': exception.ManilaException,
            'expected_error_log_call_count': 1,
        }
    )
    @ddt.unpack
    def test_create_cgsnapshot_manila_exception_on_create_and_delete(
            self, delete_snap_side_effect, expected_error_log_call_count):
        """create_cgsnapshot rolls back prior member snapshots on failure.

        The second member's create_snapshot raises; the first member's
        snapshot is deleted during rollback. A failure of that rollback
        delete is logged as an error but does not mask the original
        exception.
        """
        FAKE_SNAP_DICT = get_fake_snap_dict()
        # Append another fake share
        FAKE_SHARE = dict(FAKE_SNAP_DICT['cgsnapshot_members'][0])
        FAKE_SNAP_DICT['cgsnapshot_members'].append(FAKE_SHARE)
        # First member snapshots fine, second raises.
        self.mock_object(generic.GenericShareDriver,
                         'create_snapshot',
                         mock.Mock(side_effect=[
                             None,
                             exception.ManilaException,
                         ]))
        self.mock_object(generic.GenericShareDriver,
                         'delete_snapshot',
                         mock.Mock(side_effect=delete_snap_side_effect))
        self.assertRaises(exception.ManilaException,
                          self._driver.create_cgsnapshot,
                          self._context, FAKE_SNAP_DICT,
                          share_server=self.server)
        self.assertEqual(2, self.mock_debug_log.call_count)
        self.assertEqual(1, self.mock_warning_log.call_count)
        self.assertEqual(1, self.mock_exception_log.call_count)
        self.assertEqual(expected_error_log_call_count,
                         self.mock_error_log.call_count)
    def test_create_cgsnapshot(self):
        """create_cgsnapshot snapshots each member via create_snapshot."""
        FAKE_SNAP_DICT = get_fake_snap_dict()
        # Expected reduced snapshot dict built from the single CG member.
        FAKE_SHARE_SNAPSHOT = {
            'share_id': 'e14b5174-e534-4f35-bc4f-fe81c1575d6f',
            'id': '03e2f06e-14f2-45a5-9631-0949d1937bd8',
        }
        mock_snapshot_creation = self.mock_object(generic.GenericShareDriver,
                                                  'create_snapshot')
        result = self._driver.create_cgsnapshot(
            self._context, FAKE_SNAP_DICT, share_server=self.server)
        mock_snapshot_creation.assert_called_once_with(self._context,
                                                       FAKE_SHARE_SNAPSHOT,
                                                       self.server)
        self.assertEqual(2, self.mock_debug_log.call_count)
        self.assertEqual(1, self.mock_warning_log.call_count)
        self.assertFalse(self.mock_error_log.called)
        self.assertEqual((None, None), result)
    def test_delete_cgsnapshot_manila_exception(self):
        """delete_cgsnapshot re-raises a member deletion failure and logs it."""
        FAKE_SNAP_DICT = get_fake_snap_dict()
        self.mock_object(generic.GenericShareDriver,
                         'delete_snapshot',
                         mock.Mock(side_effect=exception.ManilaException))
        self.assertRaises(exception.ManilaException,
                          self._driver.delete_cgsnapshot,
                          self._context, FAKE_SNAP_DICT,
                          share_server=self.server)
        self.assertEqual(1, self.mock_error_log.call_count)
    def test_delete_cgsnapshot(self):
        """delete_cgsnapshot deletes each member's snapshot."""
        FAKE_SNAP_DICT = get_fake_snap_dict()
        FAKE_SHARE_SNAPSHOT = {
            'share_id': 'e14b5174-e534-4f35-bc4f-fe81c1575d6f',
            'id': '03e2f06e-14f2-45a5-9631-0949d1937bd8',
        }
        # NOTE(review): variable name says "creation" but this mocks
        # delete_snapshot — consider renaming.
        mock_snapshot_creation = self.mock_object(generic.GenericShareDriver,
                                                  'delete_snapshot')
        result = self._driver.delete_cgsnapshot(
            self._context, FAKE_SNAP_DICT, share_server=self.server)
        mock_snapshot_creation.assert_called_once_with(self._context,
                                                       FAKE_SHARE_SNAPSHOT,
                                                       self.server)
        self.assertEqual(2, self.mock_debug_log.call_count)
        self.assertEqual((None, None), result)
    def test_create_consistency_group_from_cgsnapshot_no_members(self):
        """CG-from-cgsnapshot with no members creates no shares."""
        FAKE_CG_DICT = get_fake_cg_dict()
        FAKE_CGSNAP_DICT = dict(get_fake_snap_dict(), cgsnapshot_members=[])
        mock_share_creation = self.mock_object(generic.GenericShareDriver,
                                               'create_share_from_snapshot')
        result = self._driver.create_consistency_group_from_cgsnapshot(
            self._context, FAKE_CG_DICT, FAKE_CGSNAP_DICT,
            share_server=self.server)
        self.assertFalse(self.mock_debug_log.called)
        self.assertFalse(mock_share_creation.called)
        self.assertEqual((None, None), result)
    def test_create_consistency_group_from_cgsnapshot(self):
        """CG-from-cgsnapshot creates one share per collated member and
        returns their export locations in the share-update list."""
        FAKE_CG_DICT = get_fake_cg_dict()
        FAKE_CGSNAP_DICT = get_fake_snap_dict()
        FAKE_COLLATED_INFO = get_fake_collated_cg_snap_info()
        FAKE_SHARE_UPDATE_LIST = [
            {
                'id': '02a32f06e-14f2-45a5-9631-7483f1937bd8',
                'export_locations': 'xyzzy',
            }
        ]
        self.mock_object(generic.GenericShareDriver,
                         '_collate_cg_snapshot_info',
                         mock.Mock(return_value=FAKE_COLLATED_INFO))
        mock_share_creation = self.mock_object(generic.GenericShareDriver,
                                               'create_share_from_snapshot',
                                               mock.Mock(return_value='xyzzy'))
        result = self._driver.create_consistency_group_from_cgsnapshot(
            self._context, FAKE_CG_DICT, FAKE_CGSNAP_DICT,
            share_server=self.server)
        self.assertEqual((None, FAKE_SHARE_UPDATE_LIST), result)
        self.assertEqual(1, self.mock_debug_log.call_count)
        mock_share_creation.assert_called_once_with(
            self._context,
            FAKE_COLLATED_INFO[0]['share'],
            FAKE_COLLATED_INFO[0]['snapshot'],
            share_server=self.server
        )
def test_collate_cg_snapshot_info_invalid_cg(self):
FAKE_CG_DICT = get_fake_cg_dict()
FAKE_CGSNAP_DICT = dict(get_fake_snap_dict(), cgsnapshot_members=[])
self.assertRaises(exception.InvalidConsistencyGroup,
self._driver._collate_cg_snapshot_info,
FAKE_CG_DICT,
FAKE_CGSNAP_DICT)
def test_collate_cg_snapshot(self):
FAKE_CG_DICT = get_fake_cg_dict()
FAKE_CGSNAP_DICT = get_fake_snap_dict()
FAKE_COLLATED_INFO = get_fake_collated_cg_snap_info()
result = self._driver._collate_cg_snapshot_info(
FAKE_CG_DICT, FAKE_CGSNAP_DICT)
self.assertEqual(FAKE_COLLATED_INFO, result)
    def test_manage_snapshot_not_found(self):
        """manage_existing_snapshot fails if the Cinder snapshot is missing.

        VolumeSnapshotNotFound is translated into
        ManageInvalidShareSnapshot.
        """
        snapshot_instance = {'id': 'snap_instance_id',
                             'provider_location': 'vol_snap_id'}
        driver_options = {}
        self.mock_object(
            self._driver.volume_api, 'get_snapshot',
            mock.Mock(side_effect=exception.VolumeSnapshotNotFound(
                snapshot_id='vol_snap_id')))
        self.assertRaises(exception.ManageInvalidShareSnapshot,
                          self._driver.manage_existing_snapshot,
                          snapshot_instance,
                          driver_options)
        self._driver.volume_api.get_snapshot.assert_called_once_with(
            self._context, 'vol_snap_id')
    def test_manage_snapshot_valid(self):
        """manage_existing_snapshot returns provider_location and size."""
        snapshot_instance = {'id': 'snap_instance_id',
                             'provider_location': 'vol_snap_id'}
        volume_snapshot = {'id': 'vol_snap_id', 'size': 1}
        self.mock_object(self._driver.volume_api, 'get_snapshot',
                         mock.Mock(return_value=volume_snapshot))
        ret_manage = self._driver.manage_existing_snapshot(
            snapshot_instance, {})
        self.assertEqual({'provider_location': 'vol_snap_id',
                          'size': 1}, ret_manage)
        self._driver.volume_api.get_snapshot.assert_called_once_with(
            self._context, 'vol_snap_id')
    def test_unmanage_snapshot(self):
        """unmanage_snapshot drops the instance's private-storage record."""
        snapshot_instance = {'id': 'snap_instance_id',
                             'provider_location': 'vol_snap_id'}
        self.mock_object(self._driver.private_storage, 'delete')
        self._driver.unmanage_snapshot(snapshot_instance)
        self._driver.private_storage.delete.assert_called_once_with(
            'snap_instance_id')
@generic.ensure_server
def fake(driver_instance, context, share_server=None):
    # Probe for the ensure_server decorator: simply echoes back whatever
    # share_server the decorator resolved/validated, so the tests below
    # can inspect it.
    return share_server
@ddt.ddt
class GenericDriverEnsureServerTestCase(test.TestCase):
    """Tests for the generic driver's @ensure_server decorator.

    Uses the module-level ``fake`` function (decorated with
    generic.ensure_server) and two throwaway driver stand-ins: one with
    driver_handles_share_servers=False and one with it True.
    """

    def setUp(self):
        super(GenericDriverEnsureServerTestCase, self).setUp()
        self._context = context.get_admin_context()
        self.server = {'id': 'fake_id', 'backend_details': {'foo': 'bar'}}
        # Minimal fake driver classes differing only in the DHSS flag.
        self.dhss_false = type(
            'Fake', (object,), {'driver_handles_share_servers': False})
        self.dhss_true = type(
            'Fake', (object,), {'driver_handles_share_servers': True})

    def test_share_servers_are_not_handled_server_not_provided(self):
        """DHSS=False and no server: the common server is looked up."""
        self.dhss_false.service_instance_manager = mock.Mock()
        self.dhss_false.service_instance_manager.get_common_server = (
            mock.Mock(return_value=self.server))
        self.dhss_false.service_instance_manager.ensure_service_instance = (
            mock.Mock(return_value=True))
        actual = fake(self.dhss_false, self._context)
        self.assertEqual(self.server, actual)
        self.dhss_false.service_instance_manager.\
            get_common_server.assert_called_once_with()
        self.dhss_false.service_instance_manager.ensure_service_instance.\
            assert_called_once_with(
                self._context, self.server['backend_details'])

    @ddt.data({'id': 'without_details'},
              {'id': 'with_details', 'backend_details': {'foo': 'bar'}})
    def test_share_servers_are_not_handled_server_provided(self, server):
        """DHSS=False rejects any explicitly provided share server."""
        self.assertRaises(
            exception.ManilaException,
            fake, self.dhss_false, self._context, share_server=server)

    def test_share_servers_are_handled_server_provided(self):
        """DHSS=True with a valid server: availability checked, server kept."""
        self.dhss_true.service_instance_manager = mock.Mock()
        self.dhss_true.service_instance_manager.ensure_service_instance = (
            mock.Mock(return_value=True))
        actual = fake(self.dhss_true, self._context, share_server=self.server)
        self.assertEqual(self.server, actual)
        self.dhss_true.service_instance_manager.ensure_service_instance.\
            assert_called_once_with(
                self._context, self.server['backend_details'])

    def test_share_servers_are_handled_invalid_server_provided(self):
        """DHSS=True rejects a server lacking backend_details."""
        server = {'id': 'without_details'}
        self.assertRaises(
            exception.ManilaException,
            fake, self.dhss_true, self._context, share_server=server)

    def test_share_servers_are_handled_server_not_provided(self):
        """DHSS=True requires an explicit share server."""
        self.assertRaises(
            exception.ManilaException, fake, self.dhss_true, self._context)
| apache-2.0 |
sparkfy/sparkfy | sparkfy-network/src/main/java/com/github/sparkfy/network/server/MessageHandler.java | 1626 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.sparkfy.network.server;
import com.github.sparkfy.network.protocol.Message;
/**
* Handles either request or response messages coming off of Netty. A MessageHandler instance
* is associated with a single Netty Channel (though it may have multiple clients on the same
* Channel.)
*/
public abstract class MessageHandler<T extends Message> {
/** Handles the receipt of a single message. */
public abstract void handle(T message) throws Exception;
/** Invoked when the channel this MessageHandler is on is active. */
public abstract void channelActive();
/** Invoked when an exception was caught on the Channel. */
public abstract void exceptionCaught(Throwable cause);
/** Invoked when the channel this MessageHandler is on is inactive. */
public abstract void channelInactive();
}
| apache-2.0 |
gchq/stroom | stroom-ui/src/components/IndexVolumes/indexVolumeGroupApi/useIndexVolumeGroup.ts | 632 | import * as React from "react";
import useApi from "./useApi";
import { IndexVolumeGroup } from "./types";
interface UseIndexVolumeGroup {
indexVolumeGroup: IndexVolumeGroup | undefined;
}
const useIndexVolumeGroup = (groupName: string): UseIndexVolumeGroup => {
const [indexVolumeGroup, setIndexVolumeGroup] =
React.useState<IndexVolumeGroup | undefined>(undefined);
const { getIndexVolumeGroup } = useApi();
React.useEffect(() => {
getIndexVolumeGroup(groupName).then(setIndexVolumeGroup);
}, [getIndexVolumeGroup, groupName]);
return {
indexVolumeGroup,
};
};
export default useIndexVolumeGroup;
| apache-2.0 |
SimLQ/coolweather | app/src/main/java/com/coolweather/android/db/County.java | 777 | package com.coolweather.android.db;
/**
* Created by C515 on 2017/5/11.
*/
public class County extends DataSupport {
private int id;
private String countyName;
private String weatherId;
private int cityId;
public int getId(){
return id;
}
public void setId(int id){
this.id=id;
}
public String getCountyName(){
return countyName;
}
public void setCountyName(String countyName){
this.countyName=countyName;
}
public String getWeatherId(){
return weatherId;
}
public void setWeatherId(String weatherId){
this.weatherId=weatherId;
}
public int getCityId(){
return cityId;
}
public void setCityId(int cityId){
this.cityId=cityId;
}
}
| apache-2.0 |
Qi4j/qi4j-sdk | tools/generator-polygene/app/index.js | 20225 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
var generators = require('yeoman-generator');
var fs = require('fs');
var polygeneVersion = require(__dirname + '/../package.json').polygene_version;
var polygene = {};
module.exports = generators.Base.extend(
{
constructor: function () {
console.log("WARNING!!! This is BETA quality and likely to change drastically over time. "); // + JSON.stringify(arguments));
generators.Base.apply(this, arguments);
this.option('import-model', {
name: "import",
desc: 'Reads a model file and creates the domain model for it.',
type: String,
default: "./model.json",
hide: false
});
this.option('export-model', {
name: "export",
desc: 'Writes the model of the application into a json file.',
type: String,
default: "exported-model",
hide: false
});
this.option('noPrompt', {
name: "noPrompt",
desc: 'If specified, the interactive prompts will be disabled.',
type: Boolean,
default: false,
hide: false
});
if (this.options.import) {
polygene = importModel(this.options.import);
polygene.name = polygene.name ? polygene.name : firstUpper(this.appname);
polygene.packageName = polygene.packageName ? polygene.packageName : ("com.acme." + this.appname);
polygene.applicationtype = "Rest API";
polygene.features = polygene.features ? polygene.features : [];
polygene.modules = polygene.modules ? polygene.modules : {};
polygene.indexing = polygene.indexing ? polygene.indexing : null;
polygene.entitystore = polygene.entitystore ? polygene.entitystore : null;
polygene.caching = polygene.caching ? polygene.caching : null;
polygene.dbpool = polygene.dbpool === undefined ? "DBCP" : answers.dbpool;
}
},
prompting: function () {
if (this.options.noPrompt) {
return this.prompt([]);
}
else {
return this.prompt(
[
{
type: 'input',
name: 'name',
message: 'Your project name',
default: polygene.name ? polygene.name : firstUpper(this.appname)
},
{
type: 'input',
name: 'packageName',
message: 'Java package name',
default: polygene.packageName ? polygene.packageName : "com.acme"
},
{
type: 'list',
name: 'applicationtype',
choices: [
'Command Line',
// 'Web Application',
'Rest API'
],
message: 'what type of application do you want to create?',
default: polygene.applicationtype ? polygene.applicationtype : "Rest API"
},
{
type: 'list',
name: 'entitystore',
choices: [
'BerkeleyDB',
'Cassandra',
'File',
'DerbySQL',
'Geode',
'H2SQL',
'Hazelcast',
'JClouds',
'Jdbm',
'LevelDB',
'Memory',
'MongoDB',
'MySQL',
'Preferences',
'Redis',
'Riak',
'PostgreSQL',
'SQLite'
],
message: 'Which entity store do you want to use?',
default: polygene.entitystore ? polygene.entitystore : "Memory"
},
{
type: 'list',
name: 'dbpool',
choices: [
'BoneCP',
'DBCP'
],
message: 'Which connection pool do you want to use?',
default: polygene.dbpool ? polygene.dbpool : "DBCP",
when: function (answers) {
return answers.entitystore.indexOf('SQL') > -1;
}
},
{
type: 'list',
name: 'indexing',
choices: [
'Rdf',
'ElasticSearch',
'Solr',
'SQL'
],
message: 'Which indexing system do you want to use?',
default: polygene.indexing ? polygene.indexing : "Rdf"
},
{
type: 'list',
name: 'caching',
choices: [
'None',
'Memcache',
'EhCache'
],
message: 'Which caching system do you want to use?',
default: polygene.caching ? polygene.caching : "None"
},
{
type: 'list',
name: 'metrics',
choices: [
'None',
'Codahale'
],
message: 'Which metrics capturing system do you want to use?',
default: polygene.metrics ? polygene.metrics : "None"
},
{
type: 'checkbox',
name: 'features',
choices: [
// 'alarms'
// 'circuit breakers'
'envisage',
// 'file transactions'
// 'logging'
'jmx',
// 'spring integration'
// 'scheduling'
'mixin scripting',
'security'
// ,'version migration'
],
message: 'Other features?',
default: polygene.features ? polygene.features : []
}
]
).then(function (answers) {
this.log('app name', answers.name);
this.log('Entity Stores:', answers.entitystore);
this.log('Indexing:', answers.indexing);
this.log('Caching:', answers.caching);
this.log('Metrics:', answers.metrics);
this.log('Features:', answers.features);
polygene.name = answers.name;
polygene.packageName = answers.packageName;
polygene.applicationtype = answers.applicationtype;
polygene.features = answers.features;
polygene.indexing = answers.indexing;
polygene.entitystore = answers.entitystore;
polygene.dbpool = answers.dbpool === undefined ? "DBCP" : answers.dbpool;
polygene.metrics = answers.metrics;
polygene.caching = answers.caching;
}.bind(this)
);
}
},
writing: function () {
try {
polygene.version = polygeneVersion;
polygene.entitystoremodule = polygene.entitystore.toLowerCase();
if (polygene.entitystore === "DerbySQL") {
polygene.entitystoremodule = "sql";
}
if (polygene.entitystore === "H2SQL") {
polygene.entitystoremodule = "sql";
}
if (polygene.entitystore === "MySQL") {
polygene.entitystoremodule = "sql";
}
if (polygene.entitystore === "PostgreSQL") {
polygene.entitystoremodule = "sql";
}
if (polygene.entitystore === "SQLite") {
polygene.entitystoremodule = "sql";
}
assignFunctions(polygene);
polygene.javaPackageDir = polygene.packageName.replace(/[.]/g, '/');
polygene.ctx = this;
var app = require(__dirname + '/templates/' + polygene.applicationtype.replace(/ /g, '') + 'Application/app.js');
app.write(polygene);
var buildToolChain = require(__dirname + '/templates/buildtool/build.js');
buildToolChain.write(polygene);
if (this.options.export) {
exportModel(this.options.export);
}
} catch (exception) {
console.log(exception);
throw exception;
}
}
}
);
function hasFeature(feature) {
return polygene.features.indexOf(feature) >= 0;
}
function firstUpper(text) {
return text.charAt(0).toUpperCase() + text.substring(1);
}
function importModel(filename) {
if (typeof filename !== 'string') {
filename = "./model.json";
}
return JSON.parse(fs.readFileSync(filename, 'utf8'));
}
function exportModel(filename) {
if (typeof filename !== 'string') {
filename = "exported-model.json";
}
delete polygene.current;
return fs.writeFileSync(filename, JSON.stringify(polygene, null, 4) + "\n", 'utf8');
}
function assignFunctions(polygene) {
polygene.hasFeature = function (feature) {
return polygene.features.indexOf(feature) >= 0;
};
polygene.copyToConfig = function (ctx, from, toName) {
polygene.copyTemplate(ctx,
from,
'app/src/dist/config/development/' + toName);
polygene.copyTemplate(ctx,
from,
'app/src/dist/config/qa/' + toName);
polygene.copyTemplate(ctx,
from,
'app/src/dist/config/staging/' + toName);
polygene.copyTemplate(ctx,
from,
'app/src/dist/config/production/' + toName);
polygene.copyTemplate(ctx,
from,
'app/src/test/resources/' + toName);
};
polygene.copyTemplate = function (ctx, from, to) {
try {
ctx.fs.copyTpl(
ctx.templatePath(from),
ctx.destinationPath(to),
{
hasFeature: hasFeature,
firstUpper: firstUpper,
polygene: polygene
}
);
} catch (exception) {
console.log("Unable to copy template: " + from + "\n", exception);
}
};
polygene.copyBinary = function (ctx, from, to) {
try {
ctx.fs.copy(
ctx.templatePath(from),
ctx.destinationPath(to));
} catch (exception) {
console.log("Unable to copy binary: " + from + " to " + to + "\n", exception);
}
};
polygene.copyPolygeneBootstrap = function (ctx, layer, moduleName, condition) {
if (condition) {
copyTemplate(ctx,
moduleName + '/bootstrap.tmpl',
'bootstrap/src/main/java/' + polygene.javaPackageDir + '/bootstrap/' + layer + '/' + moduleName + '.java');
}
};
polygene.copyEntityStore = function (ctx, entityStoreName) {
copyTemplate(ctx,
'StorageModule/bootstrap.tmpl',
'bootstrap/src/main/java/' + polygene.javaPackageDir + '/bootstrap/infrastructure/' + entityStoreName + 'StorageModule.java');
};
polygene.copyModules = function (dirname) {
fs.readdir(dirname, function (err, files) {
if (files !== undefined) {
files.forEach(function (directory) {
if (directory.endsWith("Module")) {
var module = require(dirname + "/" + directory + '/module.js');
module.write(polygene);
}
});
}
});
};
polygene.firstUpper = function (text) {
return text.charAt(0).toUpperCase() + text.substring(1);
};
polygene.typeNameOnly = function (text) {
var lastPos = text.lastIndexOf(".");
if (lastPos < 0) {
return text;
}
return text.substring(lastPos + 1);
};
polygene.configurationClassName = function (clazzName) {
if (clazzName.endsWith("Service")) {
clazzName = clazzName.substring(0, clazzName.length - 7);
}
return clazzName + "Configuration";
};
polygene.prepareClazz = function (current) {
var state = [];
var imported = {};
var props = current.clazz.properties;
var idx;
var assoc;
if (props) {
imported["org.apache.polygene.api.property.Property"] = true;
for (idx in props) {
if (props.hasOwnProperty(idx)) {
var prop = props[idx];
state.push('Property' + '<' + polygene.typeNameOnly(prop.type) + "> " + prop.name + "();");
imported[prop.type] = true;
}
}
} else {
imported["org.apache.polygene.api.property.Property"] = true;
state.push('Property<String> name(); // TODO: remove sample property')
}
var assocs = current.clazz.associations;
if (assocs) {
imported["org.apache.polygene.api.association.Association"] = true;
for (idx in assocs) {
if (assocs.hasOwnProperty(idx)) {
assoc = assocs[idx];
state.push("Association" + '<' + polygene.typeNameOnly(assoc.type) + "> " + assoc.name + "();");
imported[assoc.type] = true;
}
}
}
assocs = current.clazz.manyassociations;
if (assocs) {
imported["org.apache.polygene.api.association.ManyAssociation"] = true;
for (idx in assocs) {
if (assocs.hasOwnProperty(idx)) {
assoc = assocs[idx];
state.push("ManyAssociation<" + polygene.typeNameOnly(assoc.type) + "> " + assoc.name + "();");
imported[assoc.type] = true;
}
}
}
assocs = current.clazz.namedassociations;
if (assocs) {
imported["org.apache.polygene.api.association.NamedAssociation"] = true;
for (idx in assocs) {
if (assocs.hasOwnProperty(idx)) {
assoc = assocs[idx];
state.push("NamedAssociation<" + polygene.typeNameOnly(assoc.type) + "> " + assoc.name + "();");
imported[assoc.type] = true;
}
}
}
current.state = state;
current.imported = imported;
};
polygene.prepareConfigClazz = function (currentModule, composite) {
var state = [];
var propertyFile = [];
var imported = {};
var props = composite.configuration;
if (props) {
imported["org.apache.polygene.api.property.Property"] = true;
for (var idx in props) {
if (props.hasOwnProperty(idx)) {
var prop = props[idx];
imported[prop.type] = true;
var propertyDefault;
if (prop.default !== undefined) {
propertyDefault = prop.default;
} else {
if (prop.type === "java.lang.String") {
propertyDefault = '';
}
else if (prop.type === "java.lang.Boolean") {
propertyDefault = 'false';
}
else if (prop.type === "java.lang.Long") {
propertyDefault = '0';
}
else if (prop.type === "java.lang.Integer") {
propertyDefault = '0';
}
else if (prop.type === "java.lang.Double") {
propertyDefault = '0.0';
}
else if (prop.type === "java.lang.Float") {
propertyDefault = '0.0';
}
else {
propertyDefault = '\n # TODO: complex configuration type. ';
}
}
state.push("/**");
for (var idxDesc in prop.description) {
if (prop.description.hasOwnProperty(idxDesc)) {
var desc = prop.description[idxDesc];
propertyFile.push("# " + desc);
state.push(" * " + desc)
}
}
state.push(" */");
propertyFile.push(prop.name + "=" + propertyDefault + "\n");
state.push('Property' + '<' + polygene.typeNameOnly(prop.type) + "> " + prop.name + "();\n");
}
}
} else {
imported["org.apache.polygene.api.property.Property"] = true;
state.push('/** TODO: remove sample property');
state.push(' */');
state.push('Property<String> name();');
propertyFile.push("# This is just the sample configuration value. ");
propertyFile.push("# TODO: Remove this config value ");
propertyFile.push('name=sample config value');
}
currentModule.state = state;
currentModule.propertyLines = propertyFile;
currentModule.imported = imported;
};
}
| apache-2.0 |
mrinsss/Full-Repo | tripezi/system/application/views/admin/press/add-edit.tpl.php | 4670 | <?php
/*********
* Author: Koushik
* Email:koushik.r@acumensoft.info
* Date : 3 July 2012
* Modified By:
* Modified Date:
* Purpose:
* View For press add edit tpl
* @package Content Management
* @subpackage press
* @Controller Press.php
* @function used add_information(),edit_informaion()
* @model Press_model.php
*/
?>
<?php
/////////Javascript For List View//////////
?>
<script type="text/javascript" src="js/tinymce/jscripts/tiny_mce/tiny_mce.js"></script>
<script language="javascript" type="text/javascript" src="js/tinymce/tinymce_load.js"></script>
<script language="javascript">
jQuery.noConflict();///$ can be used by other prototype which is not jquery
jQuery(function($) {
$(document).ready(function(){
var g_controller="<?php echo $pathtoclass;?>";//controller Path
$('input[id^="btn_cancel"]').each(function(i){
$(this).click(function(){
$.blockUI({ message: 'Just a moment please...' });
window.location.href=g_controller+"show_list";
});
});
$('input[id^="btn_save"]').each(function(i){
$(this).click(function(){
$.blockUI({ message: 'Just a moment please...' });
$("#frm_add_edit").submit();
});
});
///////////Submitting the form/////////
$("#frm_add_edit").submit(function(){
var b_valid=true;
var s_err="";
$("#div_err").hide("slow");
if($.trim($("#txt_title").val())=="")
{
s_err +='Please provide press release title.<br />';
b_valid=false;
}
if((text = tinyMCE.get('ta_description').getContent())=='')
{
s_err +='Please provide description .<br />';
b_valid=false;
}
/////////validating//////
if(!b_valid)
{
$.unblockUI();
$("#div_err").html('<div id="err_msg" class="error_massage">'+s_err+'</div>').show("slow");
}
return b_valid;
});
///////////end Submitting the form/////////
})});
</script>
<?php
/////////end Javascript For List View//////////
/****
<div class="success_massage"><span>SUCCESS!</span>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</div>
<div class="error_massage"><span>ERROR!</span>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</div>
<div class="warning_massage"><span>Warning!</span>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</div>
<div class="info_massage"><span>Information!</span>Lorem ipsum dolor sit amet, consectetur adipiscing elit.</div>
*/
?>
<div id="right_panel">
<form id="frm_add_edit" name="frm_add_edit" method="post" action="">
<!--<input type="hidden" id="h_mode" name="h_mode" value="<?php echo $posted["h_mode"];?>">-->
<input type="hidden" id="h_id" name="h_id" value="<?php echo $posted["h_id"];?>">
<h2><?php echo $heading;?></h2>
<p> </p>
<div id="div_err">
<?php
show_msg("error");
echo validation_errors();
/* pr($posted);*/
?>
</div>
<div class="left"><!--<input id="btn_save" name="btn_save" type="button" value="Save" title="Click here to save information." /> <input id="btn_cancel" name="btn_cancel" type="button" value="Cancel" title="Click here to cancel saving information and return to previous page."/>--></div>
<div class="add_edit">
<? /*****Modify Section Starts*******/?>
<div>
<table width="100%" border="0" cellspacing="0" cellpadding="0">
<tr>
<th width="30%" align="left"><h4><?php echo $heading;?></h4></th>
<th width="60%" align="left"> </th>
<th width="10%"> </th>
</tr>
<tr>
<td>Title *:</td>
<td><input id="txt_title" name="txt_title" value="<?php echo $posted["txt_title"];?>" type="text" size="50" /></td>
<td> </td>
</tr>
<tr>
<td valign="top">Description *:</td>
<td>
<textarea name="ta_description" id="ta_description" cols="100" rows="20"><?php echo $posted["ta_description"]; ?></textarea>
</td>
<td> </td>
</tr>
</table>
</div>
<? /***** end Modify Section *******/?>
</div>
<div class="left">
<input id="btn_save" name="btn_save" type="button" value="Save" title="Click here to save information." />
<input id="btn_cancel" name="btn_cancel" type="button" value="Cancel" title="Click here to cancel saving information and return to previous page."/>
</div>
</form>
</div> | apache-2.0 |